From 0588352b4d21369dac8739fd4cbf8c50f675ed22 Mon Sep 17 00:00:00 2001 From: Shicheng Zhou Date: Wed, 4 Mar 2026 22:13:03 +0000 Subject: [PATCH] preview supervisor agent --- .codegen/_openapi_sha | 2 +- .gitattributes | 4 + .github/workflows/tagging.yml | 0 databricks/sdk/__init__.py | 50 +- databricks/sdk/service/apps.py | 155 +- databricks/sdk/service/billing.py | 253 +- databricks/sdk/service/catalog.py | 488 +++- databricks/sdk/service/cleanrooms.py | 27 + databricks/sdk/service/compute.py | 537 +++++ databricks/sdk/service/dashboards.py | 1337 ++++++++++- databricks/sdk/service/database.py | 145 ++ databricks/sdk/service/dataclassification.py | 257 ++ databricks/sdk/service/dataquality.py | 203 +- databricks/sdk/service/environments.py | 994 ++++++++ databricks/sdk/service/iam.py | 58 +- databricks/sdk/service/iamv2.py | 2135 +++++++++++++++-- databricks/sdk/service/jobs.py | 267 ++- databricks/sdk/service/knowledgeassistants.py | 732 ++++++ databricks/sdk/service/marketplace.py | 1 + databricks/sdk/service/ml.py | 48 + databricks/sdk/service/pipelines.py | 955 +++++++- databricks/sdk/service/postgres.py | 2126 ++++++++++++++-- databricks/sdk/service/provisioning.py | 6 + databricks/sdk/service/qualitymonitorv2.py | 209 ++ databricks/sdk/service/serving.py | 15 + databricks/sdk/service/settings.py | 204 +- databricks/sdk/service/sharing.py | 163 +- databricks/sdk/service/sql.py | 26 + databricks/sdk/service/supervisoragents.py | 462 ++++ databricks/sdk/service/tags.py | 118 +- databricks/sdk/service/vectorsearch.py | 380 ++- databricks/sdk/service/workspace.py | 17 +- docs/account/billing/budget_policy.rst | 4 +- docs/account/billing/index.rst | 3 +- docs/account/billing/usage_policy.rst | 69 + docs/account/iam/workspace_assignment.rst | 10 +- docs/account/iamv2/iam_v2.rst | 366 +++ docs/account/provisioning/storage.rst | 6 +- docs/account/provisioning/workspaces.rst | 5 +- .../account/settings/network_connectivity.rst | 3 +- docs/dbdataclasses/apps.rst | 29 + 
docs/dbdataclasses/billing.rst | 8 + docs/dbdataclasses/catalog.rst | 83 + docs/dbdataclasses/compute.rst | 75 + docs/dbdataclasses/dashboards.rst | 223 ++ docs/dbdataclasses/dataclassification.rst | 40 + docs/dbdataclasses/dataquality.rst | 26 + docs/dbdataclasses/environments.rst | 325 +++ docs/dbdataclasses/iamv2.rst | 69 + docs/dbdataclasses/index.rst | 4 + docs/dbdataclasses/jobs.rst | 32 + docs/dbdataclasses/knowledgeassistants.rst | 68 + docs/dbdataclasses/marketplace.rst | 3 + docs/dbdataclasses/ml.rst | 4 + docs/dbdataclasses/pipelines.rst | 277 +++ docs/dbdataclasses/postgres.rst | 193 ++ docs/dbdataclasses/qualitymonitorv2.rst | 39 + docs/dbdataclasses/settings.rst | 15 + docs/dbdataclasses/sharing.rst | 4 + docs/dbdataclasses/sql.rst | 30 + docs/dbdataclasses/supervisoragents.rst | 34 + docs/dbdataclasses/tags.rst | 12 + docs/dbdataclasses/vectorsearch.rst | 51 + docs/dbdataclasses/workspace.rst | 8 + docs/gen-client-docs.py | 20 + docs/workspace/catalog/catalogs.rst | 19 +- docs/workspace/catalog/connections.rst | 8 +- .../catalog/entity_tag_assignments.rst | 8 +- docs/workspace/catalog/grants.rst | 33 +- .../workspace/catalog/storage_credentials.rst | 14 +- docs/workspace/compute/instance_pools.rst | 14 +- docs/workspace/compute/libraries.rst | 78 + docs/workspace/dashboards/genie.rst | 118 +- docs/workspace/dashboards/index.rst | 3 +- .../dashboards/lakeview_embedded.rst | 10 + docs/workspace/dashboards/query_execution.rst | 3 +- docs/workspace/database/database.rst | 24 + .../data_classification.rst | 71 + docs/workspace/dataclassification/index.rst | 10 + docs/workspace/environments/environments.rst | 134 ++ docs/workspace/environments/index.rst | 10 + docs/workspace/iam/permissions.rst | 24 +- docs/workspace/iamv2/workspace_iam_v2.rst | 248 ++ docs/workspace/index.rst | 4 + docs/workspace/jobs/jobs.rst | 21 +- docs/workspace/knowledgeassistants/index.rst | 10 + .../knowledge_assistants.rst | 143 ++ docs/workspace/ml/experiments.rst | 10 + 
docs/workspace/ml/model_registry.rst | 11 +- docs/workspace/pipelines/pipelines.rst | 28 +- docs/workspace/postgres/postgres.rst | 195 +- docs/workspace/settings/token_management.rst | 3 +- docs/workspace/settings/tokens.rst | 30 +- .../sharing/recipient_federation_policies.rst | 20 + docs/workspace/sharing/shares.rst | 12 +- docs/workspace/supervisoragents/index.rst | 10 + .../supervisoragents/supervisor_agents.rst | 78 + .../tags/workspace_entity_tag_assignments.rst | 12 +- .../vectorsearch/vector_search_endpoints.rst | 32 +- .../vectorsearch/vector_search_indexes.rst | 12 + docs/workspace/workspace/workspace.rst | 23 +- 101 files changed, 15570 insertions(+), 425 deletions(-) mode change 100755 => 100644 .github/workflows/tagging.yml create mode 100755 databricks/sdk/service/dataclassification.py create mode 100755 databricks/sdk/service/environments.py create mode 100755 databricks/sdk/service/knowledgeassistants.py create mode 100755 databricks/sdk/service/supervisoragents.py create mode 100644 docs/account/billing/usage_policy.rst create mode 100644 docs/dbdataclasses/dataclassification.rst create mode 100644 docs/dbdataclasses/environments.rst create mode 100644 docs/dbdataclasses/knowledgeassistants.rst create mode 100644 docs/dbdataclasses/supervisoragents.rst create mode 100644 docs/workspace/dataclassification/data_classification.rst create mode 100644 docs/workspace/dataclassification/index.rst create mode 100644 docs/workspace/environments/environments.rst create mode 100644 docs/workspace/environments/index.rst create mode 100644 docs/workspace/knowledgeassistants/index.rst create mode 100644 docs/workspace/knowledgeassistants/knowledge_assistants.rst create mode 100644 docs/workspace/supervisoragents/index.rst create mode 100644 docs/workspace/supervisoragents/supervisor_agents.rst diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 12c62ec0b..c34c747b8 100755 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ 
-86481d2fa23e3fb65128ea34b045fe585f7643f1 \ No newline at end of file +file:../universe/bazel-bin/openapi/all-internal.json \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 3d4b7b5ef..72cf9b56f 100644 --- a/.gitattributes +++ b/.gitattributes @@ -9,7 +9,9 @@ databricks/sdk/service/cleanrooms.py linguist-generated=true databricks/sdk/service/compute.py linguist-generated=true databricks/sdk/service/dashboards.py linguist-generated=true databricks/sdk/service/database.py linguist-generated=true +databricks/sdk/service/dataclassification.py linguist-generated=true databricks/sdk/service/dataquality.py linguist-generated=true +databricks/sdk/service/environments.py linguist-generated=true databricks/sdk/service/files.py linguist-generated=true databricks/sdk/service/httpcallv2.py linguist-generated=true databricks/sdk/service/iam.py linguist-generated=true @@ -17,6 +19,7 @@ databricks/sdk/service/iamv2.py linguist-generated=true databricks/sdk/service/idempotencytesting.py linguist-generated=true databricks/sdk/service/jobs.py linguist-generated=true databricks/sdk/service/jsonmarshallv2.py linguist-generated=true +databricks/sdk/service/knowledgeassistants.py linguist-generated=true databricks/sdk/service/lrotesting.py linguist-generated=true databricks/sdk/service/marketplace.py linguist-generated=true databricks/sdk/service/ml.py linguist-generated=true @@ -31,6 +34,7 @@ databricks/sdk/service/settings.py linguist-generated=true databricks/sdk/service/settingsv2.py linguist-generated=true databricks/sdk/service/sharing.py linguist-generated=true databricks/sdk/service/sql.py linguist-generated=true +databricks/sdk/service/supervisoragents.py linguist-generated=true databricks/sdk/service/tags.py linguist-generated=true databricks/sdk/service/vectorsearch.py linguist-generated=true databricks/sdk/service/workspace.py linguist-generated=true diff --git a/.github/workflows/tagging.yml b/.github/workflows/tagging.yml old mode 100755 new 
mode 100644 diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index 3f43f518e..b6e811a25 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -23,11 +23,15 @@ from databricks.sdk.service import compute as pkg_compute from databricks.sdk.service import dashboards as pkg_dashboards from databricks.sdk.service import database as pkg_database +from databricks.sdk.service import dataclassification as pkg_dataclassification from databricks.sdk.service import dataquality as pkg_dataquality +from databricks.sdk.service import environments as pkg_environments from databricks.sdk.service import files as pkg_files from databricks.sdk.service import iam as pkg_iam from databricks.sdk.service import iamv2 as pkg_iamv2 from databricks.sdk.service import jobs as pkg_jobs +from databricks.sdk.service import \ + knowledgeassistants as pkg_knowledgeassistants from databricks.sdk.service import marketplace as pkg_marketplace from databricks.sdk.service import ml as pkg_ml from databricks.sdk.service import networking as pkg_networking @@ -41,6 +45,7 @@ from databricks.sdk.service import settingsv2 as pkg_settingsv2 from databricks.sdk.service import sharing as pkg_sharing from databricks.sdk.service import sql as pkg_sql +from databricks.sdk.service import supervisoragents as pkg_supervisoragents from databricks.sdk.service import tags as pkg_tags from databricks.sdk.service import vectorsearch as pkg_vectorsearch from databricks.sdk.service import workspace as pkg_workspace @@ -48,7 +53,7 @@ from databricks.sdk.service.apps import AppsAPI, AppsSettingsAPI from databricks.sdk.service.billing import (BillableUsageAPI, BudgetPolicyAPI, BudgetsAPI, LogDeliveryAPI, - UsageDashboardsAPI) + UsageDashboardsAPI, UsagePolicyAPI) from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI, AccountMetastoresAPI, AccountStorageCredentialsAPI, @@ -82,9 +87,12 @@ PolicyComplianceForClustersAPI, PolicyFamiliesAPI) from 
databricks.sdk.service.dashboards import (GenieAPI, LakeviewAPI, - LakeviewEmbeddedAPI) + LakeviewEmbeddedAPI, + QueryExecutionAPI) from databricks.sdk.service.database import DatabaseAPI +from databricks.sdk.service.dataclassification import DataClassificationAPI from databricks.sdk.service.dataquality import DataQualityAPI +from databricks.sdk.service.environments import EnvironmentsAPI from databricks.sdk.service.files import DbfsAPI, FilesAPI from databricks.sdk.service.iam import (AccessControlAPI, AccountAccessControlAPI, @@ -100,6 +108,7 @@ UsersV2API, WorkspaceAssignmentAPI) from databricks.sdk.service.iamv2 import AccountIamV2API, WorkspaceIamV2API from databricks.sdk.service.jobs import JobsAPI, PolicyComplianceForJobsAPI +from databricks.sdk.service.knowledgeassistants import KnowledgeAssistantsAPI from databricks.sdk.service.marketplace import ( ConsumerFulfillmentsAPI, ConsumerInstallationsAPI, ConsumerListingsAPI, ConsumerPersonalizationRequestsAPI, ConsumerProvidersAPI, @@ -160,6 +169,7 @@ QueryVisualizationsLegacyAPI, RedashConfigAPI, StatementExecutionAPI, WarehousesAPI) +from databricks.sdk.service.supervisoragents import SupervisorAgentsAPI from databricks.sdk.service.tags import (TagPoliciesAPI, WorkspaceEntityTagAssignmentsAPI) from databricks.sdk.service.vectorsearch import (VectorSearchEndpointsAPI, @@ -297,12 +307,14 @@ def __init__( self._current_user = pkg_iam.CurrentUserAPI(self._api_client) self._dashboard_widgets = pkg_sql.DashboardWidgetsAPI(self._api_client) self._dashboards = pkg_sql.DashboardsAPI(self._api_client) + self._data_classification = pkg_dataclassification.DataClassificationAPI(self._api_client) self._data_quality = pkg_dataquality.DataQualityAPI(self._api_client) self._data_sources = pkg_sql.DataSourcesAPI(self._api_client) self._database = pkg_database.DatabaseAPI(self._api_client) self._dbfs = DbfsExt(self._api_client) self._dbsql_permissions = pkg_sql.DbsqlPermissionsAPI(self._api_client) self._entity_tag_assignments = 
pkg_catalog.EntityTagAssignmentsAPI(self._api_client) + self._environments = pkg_environments.EnvironmentsAPI(self._api_client) self._experiments = pkg_ml.ExperimentsAPI(self._api_client) self._external_lineage = pkg_catalog.ExternalLineageAPI(self._api_client) self._external_locations = pkg_catalog.ExternalLocationsAPI(self._api_client) @@ -321,6 +333,7 @@ def __init__( self._instance_profiles = pkg_compute.InstanceProfilesAPI(self._api_client) self._ip_access_lists = pkg_settings.IpAccessListsAPI(self._api_client) self._jobs = JobsExt(self._api_client) + self._knowledge_assistants = pkg_knowledgeassistants.KnowledgeAssistantsAPI(self._api_client) self._lakeview = pkg_dashboards.LakeviewAPI(self._api_client) self._lakeview_embedded = pkg_dashboards.LakeviewEmbeddedAPI(self._api_client) self._libraries = pkg_compute.LibrariesAPI(self._api_client) @@ -352,6 +365,7 @@ def __init__( self._quality_monitors = pkg_catalog.QualityMonitorsAPI(self._api_client) self._queries = pkg_sql.QueriesAPI(self._api_client) self._queries_legacy = pkg_sql.QueriesLegacyAPI(self._api_client) + self._query_execution = pkg_dashboards.QueryExecutionAPI(self._api_client) self._query_history = pkg_sql.QueryHistoryAPI(self._api_client) self._query_visualizations = pkg_sql.QueryVisualizationsAPI(self._api_client) self._query_visualizations_legacy = pkg_sql.QueryVisualizationsLegacyAPI(self._api_client) @@ -378,6 +392,7 @@ def __init__( self._shares = pkg_sharing.SharesAPI(self._api_client) self._statement_execution = pkg_sql.StatementExecutionAPI(self._api_client) self._storage_credentials = pkg_catalog.StorageCredentialsAPI(self._api_client) + self._supervisor_agents = pkg_supervisoragents.SupervisorAgentsAPI(self._api_client) self._system_schemas = pkg_catalog.SystemSchemasAPI(self._api_client) self._table_constraints = pkg_catalog.TableConstraintsAPI(self._api_client) self._tables = pkg_catalog.TablesAPI(self._api_client) @@ -558,6 +573,11 @@ def dashboards(self) -> pkg_sql.DashboardsAPI: 
"""In general, there is little need to modify dashboards using the API.""" return self._dashboards + @property + def data_classification(self) -> pkg_dataclassification.DataClassificationAPI: + """Manage data classification for Unity Catalog catalogs.""" + return self._data_classification + @property def data_quality(self) -> pkg_dataquality.DataQualityAPI: """Manage the data quality of Unity Catalog objects (currently support `schema` and `table`).""" @@ -588,6 +608,11 @@ def entity_tag_assignments(self) -> pkg_catalog.EntityTagAssignmentsAPI: """Tags are attributes that include keys and optional values that you can use to organize and categorize entities in Unity Catalog.""" return self._entity_tag_assignments + @property + def environments(self) -> pkg_environments.EnvironmentsAPI: + """APIs to manage environment resources.""" + return self._environments + @property def experiments(self) -> pkg_ml.ExperimentsAPI: """Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment.""" @@ -673,6 +698,11 @@ def jobs(self) -> JobsExt: """The Jobs API allows you to create, edit, and delete jobs.""" return self._jobs + @property + def knowledge_assistants(self) -> pkg_knowledgeassistants.KnowledgeAssistantsAPI: + """Manage Knowledge Assistants and related resources.""" + return self._knowledge_assistants + @property def lakeview(self) -> pkg_dashboards.LakeviewAPI: """These APIs provide specific management operations for Lakeview dashboards.""" @@ -818,6 +848,11 @@ def queries_legacy(self) -> pkg_sql.QueriesLegacyAPI: """These endpoints are used for CRUD operations on query definitions.""" return self._queries_legacy + @property + def query_execution(self) -> pkg_dashboards.QueryExecutionAPI: + """Query execution APIs for AI / BI Dashboards.""" + return self._query_execution + @property def query_history(self) -> pkg_sql.QueryHistoryAPI: """A service responsible for storing and retrieving the list of queries run against SQL endpoints 
and serverless compute.""" @@ -923,6 +958,11 @@ def storage_credentials(self) -> pkg_catalog.StorageCredentialsAPI: """A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant.""" return self._storage_credentials + @property + def supervisor_agents(self) -> pkg_supervisoragents.SupervisorAgentsAPI: + """Manage Supervisor Agents and related resources.""" + return self._supervisor_agents + @property def system_schemas(self) -> pkg_catalog.SystemSchemasAPI: """A system schema is a schema that lives within the system catalog.""" @@ -1143,6 +1183,7 @@ def __init__( self._storage = pkg_provisioning.StorageAPI(self._api_client) self._storage_credentials = pkg_catalog.AccountStorageCredentialsAPI(self._api_client) self._usage_dashboards = pkg_billing.UsageDashboardsAPI(self._api_client) + self._usage_policy = pkg_billing.UsagePolicyAPI(self._api_client) self._users_v2 = pkg_iam.AccountUsersV2API(self._api_client) self._vpc_endpoints = pkg_provisioning.VpcEndpointsAPI(self._api_client) self._workspace_assignment = pkg_iam.WorkspaceAssignmentAPI(self._api_client) @@ -1305,6 +1346,11 @@ def usage_dashboards(self) -> pkg_billing.UsageDashboardsAPI: """These APIs manage usage dashboards for this account.""" return self._usage_dashboards + @property + def usage_policy(self) -> pkg_billing.UsagePolicyAPI: + """A service serves REST API about Usage policies.""" + return self._usage_policy + @property def users_v2(self) -> pkg_iam.AccountUsersV2API: """User identities recognized by Databricks and represented by email addresses.""" diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py index b418c0a9c..df12b1146 100755 --- a/databricks/sdk/service/apps.py +++ b/databricks/sdk/service/apps.py @@ -51,6 +51,8 @@ class App: creator: Optional[str] = None """The email of the user that created the app.""" + default_git_source: Optional[GitSource] = None + default_source_code_path: Optional[str] = None 
"""The default workspace file system path of the source code from which app deployment are created. This field tracks the workspace source code path of the last active deployment.""" @@ -72,6 +74,9 @@ class App: id: Optional[str] = None """The unique identifier of the app.""" + last_deployment_id: Optional[str] = None + """The ID of the last deployment created for this app.""" + oauth2_app_client_id: Optional[str] = None oauth2_app_integration_id: Optional[str] = None @@ -92,6 +97,9 @@ class App: space: Optional[str] = None """Name of the space this app belongs to.""" + thumbnail_url: Optional[str] = None + """The URL of the thumbnail image for the app.""" + update_time: Optional[str] = None """The update time of the app. Formatted timestamp in ISO 6801.""" @@ -122,6 +130,8 @@ def as_dict(self) -> dict: body["create_time"] = self.create_time if self.creator is not None: body["creator"] = self.creator + if self.default_git_source: + body["default_git_source"] = self.default_git_source.as_dict() if self.default_source_code_path is not None: body["default_source_code_path"] = self.default_source_code_path if self.description is not None: @@ -136,6 +146,8 @@ def as_dict(self) -> dict: body["git_repository"] = self.git_repository.as_dict() if self.id is not None: body["id"] = self.id + if self.last_deployment_id is not None: + body["last_deployment_id"] = self.last_deployment_id if self.name is not None: body["name"] = self.name if self.oauth2_app_client_id is not None: @@ -154,6 +166,8 @@ def as_dict(self) -> dict: body["service_principal_name"] = self.service_principal_name if self.space is not None: body["space"] = self.space + if self.thumbnail_url is not None: + body["thumbnail_url"] = self.thumbnail_url if self.update_time is not None: body["update_time"] = self.update_time if self.updater is not None: @@ -183,6 +197,8 @@ def as_shallow_dict(self) -> dict: body["create_time"] = self.create_time if self.creator is not None: body["creator"] = self.creator + if 
self.default_git_source: + body["default_git_source"] = self.default_git_source if self.default_source_code_path is not None: body["default_source_code_path"] = self.default_source_code_path if self.description is not None: @@ -197,6 +213,8 @@ def as_shallow_dict(self) -> dict: body["git_repository"] = self.git_repository if self.id is not None: body["id"] = self.id + if self.last_deployment_id is not None: + body["last_deployment_id"] = self.last_deployment_id if self.name is not None: body["name"] = self.name if self.oauth2_app_client_id is not None: @@ -215,6 +233,8 @@ def as_shallow_dict(self) -> dict: body["service_principal_name"] = self.service_principal_name if self.space is not None: body["space"] = self.space + if self.thumbnail_url is not None: + body["thumbnail_url"] = self.thumbnail_url if self.update_time is not None: body["update_time"] = self.update_time if self.updater is not None: @@ -238,6 +258,7 @@ def from_dict(cls, d: Dict[str, Any]) -> App: compute_status=_from_dict(d, "compute_status", ComputeStatus), create_time=d.get("create_time", None), creator=d.get("creator", None), + default_git_source=_from_dict(d, "default_git_source", GitSource), default_source_code_path=d.get("default_source_code_path", None), description=d.get("description", None), effective_budget_policy_id=d.get("effective_budget_policy_id", None), @@ -245,6 +266,7 @@ def from_dict(cls, d: Dict[str, Any]) -> App: effective_user_api_scopes=d.get("effective_user_api_scopes", None), git_repository=_from_dict(d, "git_repository", GitRepository), id=d.get("id", None), + last_deployment_id=d.get("last_deployment_id", None), name=d.get("name", None), oauth2_app_client_id=d.get("oauth2_app_client_id", None), oauth2_app_integration_id=d.get("oauth2_app_integration_id", None), @@ -254,6 +276,7 @@ def from_dict(cls, d: Dict[str, Any]) -> App: service_principal_id=d.get("service_principal_id", None), service_principal_name=d.get("service_principal_name", None), space=d.get("space", None), 
+ thumbnail_url=d.get("thumbnail_url", None), update_time=d.get("update_time", None), updater=d.get("updater", None), url=d.get("url", None), @@ -607,6 +630,45 @@ def from_dict(cls, d: Dict[str, Any]) -> AppManifest: ) +@dataclass +class AppManifestAppResourceAppSpec: + name: Optional[str] = None + """Name of the target app to grant access to.""" + + permission: Optional[AppManifestAppResourceAppSpecAppPermission] = None + """Permission to grant on the app. Supported permission: "CAN_USE".""" + + def as_dict(self) -> dict: + """Serializes the AppManifestAppResourceAppSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.name is not None: + body["name"] = self.name + if self.permission is not None: + body["permission"] = self.permission.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AppManifestAppResourceAppSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: + body["name"] = self.name + if self.permission is not None: + body["permission"] = self.permission + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AppManifestAppResourceAppSpec: + """Deserializes the AppManifestAppResourceAppSpec from a dictionary.""" + return cls( + name=d.get("name", None), permission=_enum(d, "permission", AppManifestAppResourceAppSpecAppPermission) + ) + + +class AppManifestAppResourceAppSpecAppPermission(Enum): + + CAN_USE = "CAN_USE" + + @dataclass class AppManifestAppResourceExperimentSpec: permission: AppManifestAppResourceExperimentSpecExperimentPermission @@ -672,6 +734,51 @@ class AppManifestAppResourceJobSpecJobPermission(Enum): IS_OWNER = "IS_OWNER" +@dataclass +class AppManifestAppResourcePostgresSpec: + branch: Optional[str] = None + + database: Optional[str] = None + + permission: Optional[AppManifestAppResourcePostgresSpecPostgresPermission] = None + + def as_dict(self) -> dict: + """Serializes the AppManifestAppResourcePostgresSpec 
into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.branch is not None: + body["branch"] = self.branch + if self.database is not None: + body["database"] = self.database + if self.permission is not None: + body["permission"] = self.permission.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AppManifestAppResourcePostgresSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.branch is not None: + body["branch"] = self.branch + if self.database is not None: + body["database"] = self.database + if self.permission is not None: + body["permission"] = self.permission + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AppManifestAppResourcePostgresSpec: + """Deserializes the AppManifestAppResourcePostgresSpec from a dictionary.""" + return cls( + branch=d.get("branch", None), + database=d.get("database", None), + permission=_enum(d, "permission", AppManifestAppResourcePostgresSpecPostgresPermission), + ) + + +class AppManifestAppResourcePostgresSpecPostgresPermission(Enum): + + CAN_CONNECT_AND_CREATE = "CAN_CONNECT_AND_CREATE" + + @dataclass class AppManifestAppResourceSecretSpec: permission: AppManifestAppResourceSecretSpecSecretPermission @@ -749,6 +856,8 @@ class AppManifestAppResourceSpec: name: str """Name of the App Resource.""" + app_spec: Optional[AppManifestAppResourceAppSpec] = None + description: Optional[str] = None """Description of the App Resource.""" @@ -756,6 +865,8 @@ class AppManifestAppResourceSpec: job_spec: Optional[AppManifestAppResourceJobSpec] = None + postgres_spec: Optional[AppManifestAppResourcePostgresSpec] = None + secret_spec: Optional[AppManifestAppResourceSecretSpec] = None serving_endpoint_spec: Optional[AppManifestAppResourceServingEndpointSpec] = None @@ -767,6 +878,8 @@ class AppManifestAppResourceSpec: def as_dict(self) -> dict: """Serializes the AppManifestAppResourceSpec into a dictionary suitable for use as a JSON 
request body.""" body = {} + if self.app_spec: + body["app_spec"] = self.app_spec.as_dict() if self.description is not None: body["description"] = self.description if self.experiment_spec: @@ -775,6 +888,8 @@ def as_dict(self) -> dict: body["job_spec"] = self.job_spec.as_dict() if self.name is not None: body["name"] = self.name + if self.postgres_spec: + body["postgres_spec"] = self.postgres_spec.as_dict() if self.secret_spec: body["secret_spec"] = self.secret_spec.as_dict() if self.serving_endpoint_spec: @@ -788,6 +903,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the AppManifestAppResourceSpec into a shallow dictionary of its immediate attributes.""" body = {} + if self.app_spec: + body["app_spec"] = self.app_spec if self.description is not None: body["description"] = self.description if self.experiment_spec: @@ -796,6 +913,8 @@ def as_shallow_dict(self) -> dict: body["job_spec"] = self.job_spec if self.name is not None: body["name"] = self.name + if self.postgres_spec: + body["postgres_spec"] = self.postgres_spec if self.secret_spec: body["secret_spec"] = self.secret_spec if self.serving_endpoint_spec: @@ -810,10 +929,12 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> AppManifestAppResourceSpec: """Deserializes the AppManifestAppResourceSpec from a dictionary.""" return cls( + app_spec=_from_dict(d, "app_spec", AppManifestAppResourceAppSpec), description=d.get("description", None), experiment_spec=_from_dict(d, "experiment_spec", AppManifestAppResourceExperimentSpec), job_spec=_from_dict(d, "job_spec", AppManifestAppResourceJobSpec), name=d.get("name", None), + postgres_spec=_from_dict(d, "postgres_spec", AppManifestAppResourcePostgresSpec), secret_spec=_from_dict(d, "secret_spec", AppManifestAppResourceSecretSpec), serving_endpoint_spec=_from_dict(d, "serving_endpoint_spec", AppManifestAppResourceServingEndpointSpec), sql_warehouse_spec=_from_dict(d, "sql_warehouse_spec", 
AppManifestAppResourceSqlWarehouseSpec), @@ -891,6 +1012,7 @@ class AppManifestAppResourceUcSecurableSpecUcSecurablePermission(Enum): EXECUTE = "EXECUTE" MANAGE = "MANAGE" + MODIFY = "MODIFY" READ_VOLUME = "READ_VOLUME" SELECT = "SELECT" USE_CONNECTION = "USE_CONNECTION" @@ -1131,20 +1253,37 @@ def from_dict(cls, d: Dict[str, Any]) -> AppResource: @dataclass class AppResourceApp: + name: Optional[str] = None + + permission: Optional[AppResourceAppAppPermission] = None + def as_dict(self) -> dict: """Serializes the AppResourceApp into a dictionary suitable for use as a JSON request body.""" body = {} + if self.name is not None: + body["name"] = self.name + if self.permission is not None: + body["permission"] = self.permission.value return body def as_shallow_dict(self) -> dict: """Serializes the AppResourceApp into a shallow dictionary of its immediate attributes.""" body = {} + if self.name is not None: + body["name"] = self.name + if self.permission is not None: + body["permission"] = self.permission return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppResourceApp: """Deserializes the AppResourceApp from a dictionary.""" - return cls() + return cls(name=d.get("name", None), permission=_enum(d, "permission", AppResourceAppAppPermission)) + + +class AppResourceAppAppPermission(Enum): + + CAN_USE = "CAN_USE" @dataclass @@ -1696,6 +1835,9 @@ class ApplicationStatus: message: Optional[str] = None """Application status message""" + running_instances: Optional[int] = None + """The number of running instances of this application.""" + state: Optional[ApplicationState] = None """State of the application.""" @@ -1704,6 +1846,8 @@ def as_dict(self) -> dict: body = {} if self.message is not None: body["message"] = self.message + if self.running_instances is not None: + body["running_instances"] = self.running_instances if self.state is not None: body["state"] = self.state.value return body @@ -1713,6 +1857,8 @@ def as_shallow_dict(self) -> dict: body = {} if 
self.message is not None: body["message"] = self.message + if self.running_instances is not None: + body["running_instances"] = self.running_instances if self.state is not None: body["state"] = self.state return body @@ -1720,12 +1866,17 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> ApplicationStatus: """Deserializes the ApplicationStatus from a dictionary.""" - return cls(message=d.get("message", None), state=_enum(d, "state", ApplicationState)) + return cls( + message=d.get("message", None), + running_instances=d.get("running_instances", None), + state=_enum(d, "state", ApplicationState), + ) class ComputeSize(Enum): LARGE = "LARGE" + LIQUID = "LIQUID" MEDIUM = "MEDIUM" diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py index ddd02c457..20455a8f2 100755 --- a/databricks/sdk/service/billing.py +++ b/databricks/sdk/service/billing.py @@ -1034,6 +1034,50 @@ def from_dict(cls, d: Dict[str, Any]) -> ListBudgetPoliciesResponse: ) +@dataclass +class ListUsagePoliciesResponse: + """A list of usage policies.""" + + next_page_token: Optional[str] = None + """A token that can be sent as `page_token` to retrieve the next page.""" + + policies: Optional[List[UsagePolicy]] = None + + previous_page_token: Optional[str] = None + """A token that can be sent as `page_token` to retrieve the previous page.""" + + def as_dict(self) -> dict: + """Serializes the ListUsagePoliciesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.policies: + body["policies"] = [v.as_dict() for v in self.policies] + if self.previous_page_token is not None: + body["previous_page_token"] = self.previous_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListUsagePoliciesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if 
self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.policies: + body["policies"] = self.policies + if self.previous_page_token is not None: + body["previous_page_token"] = self.previous_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListUsagePoliciesResponse: + """Deserializes the ListUsagePoliciesResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + policies=_repeated_dict(d, "policies", UsagePolicy), + previous_page_token=d.get("previous_page_token", None), + ) + + class LogDeliveryConfigStatus(Enum): """* Log Delivery Status @@ -1440,6 +1484,59 @@ class UsageDashboardType(Enum): USAGE_DASHBOARD_TYPE_WORKSPACE = "USAGE_DASHBOARD_TYPE_WORKSPACE" +@dataclass +class UsagePolicy: + """Contains the UsagePolicy details (same structure as BudgetPolicy)""" + + binding_workspace_ids: Optional[List[int]] = None + """List of workspaces that this usage policy will be exclusively bound to.""" + + custom_tags: Optional[List[compute.CustomPolicyTag]] = None + """A list of tags defined by the customer. At most 20 entries are allowed per policy.""" + + policy_id: Optional[str] = None + """The Id of the policy. 
This field is generated by Databricks and globally unique.""" + + policy_name: Optional[str] = None + """The name of the policy.""" + + def as_dict(self) -> dict: + """Serializes the UsagePolicy into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.binding_workspace_ids: + body["binding_workspace_ids"] = [v for v in self.binding_workspace_ids] + if self.custom_tags: + body["custom_tags"] = [v.as_dict() for v in self.custom_tags] + if self.policy_id is not None: + body["policy_id"] = self.policy_id + if self.policy_name is not None: + body["policy_name"] = self.policy_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UsagePolicy into a shallow dictionary of its immediate attributes.""" + body = {} + if self.binding_workspace_ids: + body["binding_workspace_ids"] = self.binding_workspace_ids + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.policy_id is not None: + body["policy_id"] = self.policy_id + if self.policy_name is not None: + body["policy_name"] = self.policy_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UsagePolicy: + """Deserializes the UsagePolicy from a dictionary.""" + return cls( + binding_workspace_ids=d.get("binding_workspace_ids", None), + custom_tags=_repeated_dict(d, "custom_tags", compute.CustomPolicyTag), + policy_id=d.get("policy_id", None), + policy_name=d.get("policy_name", None), + ) + + @dataclass class WrappedLogDeliveryConfiguration: log_delivery_configuration: Optional[LogDeliveryConfiguration] = None @@ -1675,7 +1772,12 @@ def list( query["page_token"] = json["next_page_token"] def update( - self, policy_id: str, policy: BudgetPolicy, *, limit_config: Optional[LimitConfig] = None + self, + policy_id: str, + policy: BudgetPolicy, + *, + limit_config: Optional[LimitConfig] = None, + update_mask: Optional[str] = None, ) -> BudgetPolicy: """Updates a policy @@ -1686,6 +1788,8 @@ def update( specified even if not changed. 
The `policy_id` is used to identify the policy to update. :param limit_config: :class:`LimitConfig` (optional) DEPRECATED. This is redundant field as LimitConfig is part of the BudgetPolicy + :param update_mask: str (optional) + Field mask specifying which fields to update. When not provided, all fields are updated. :returns: :class:`BudgetPolicy` """ @@ -1694,6 +1798,8 @@ def update( query = {} if limit_config is not None: query["limit_config"] = limit_config.as_dict() + if update_mask is not None: + query["update_mask"] = update_mask headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -2088,3 +2194,148 @@ def get( res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/dashboard", query=query, headers=headers) return GetBillingUsageDashboardResponse.from_dict(res) + + +class UsagePolicyAPI: + """A service serves REST API about Usage policies""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, *, policy: Optional[UsagePolicy] = None, request_id: Optional[str] = None) -> UsagePolicy: + """Creates a new usage policy. + + :param policy: :class:`UsagePolicy` (optional) + The policy to create. `policy_id` needs to be empty as it will be generated + :param request_id: str (optional) + A unique identifier for this request. Restricted to 36 ASCII characters. + + :returns: :class:`UsagePolicy` + """ + + if request_id is None or request_id == "": + request_id = str(uuid.uuid4()) + body = {} + if policy is not None: + body["policy"] = policy.as_dict() + if request_id is not None: + body["request_id"] = request_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.1/accounts/{self._api.account_id}/usage-policies", body=body, headers=headers + ) + return UsagePolicy.from_dict(res) + + def delete(self, policy_id: str): + """Deletes a usage policy + + :param policy_id: str + The Id of the policy. 
+ + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.1/accounts/{self._api.account_id}/usage-policies/{policy_id}", headers=headers) + + def get(self, policy_id: str) -> UsagePolicy: + """Retrieves a usage policy by it's ID. + + :param policy_id: str + The Id of the policy. + + :returns: :class:`UsagePolicy` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.1/accounts/{self._api.account_id}/usage-policies/{policy_id}", headers=headers + ) + return UsagePolicy.from_dict(res) + + def list( + self, + *, + filter_by: Optional[Filter] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + sort_spec: Optional[SortSpec] = None, + ) -> Iterator[UsagePolicy]: + """Lists all usage policies. Policies are returned in the alphabetically ascending order of their names. + + :param filter_by: :class:`Filter` (optional) + A filter to apply to the list of policies. + :param page_size: int (optional) + The maximum number of usage policies to return. + :param page_token: str (optional) + A page token, received from a previous `ListUsagePolicies` call. + :param sort_spec: :class:`SortSpec` (optional) + The sort specification. 
+ + :returns: Iterator over :class:`UsagePolicy` + """ + + query = {} + if filter_by is not None: + query["filter_by"] = filter_by.as_dict() + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + if sort_spec is not None: + query["sort_spec"] = sort_spec.as_dict() + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do( + "GET", f"/api/2.1/accounts/{self._api.account_id}/usage-policies", query=query, headers=headers + ) + if "policies" in json: + for v in json["policies"]: + yield UsagePolicy.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update(self, policy_id: str, policy: UsagePolicy, *, limit_config: Optional[LimitConfig] = None) -> UsagePolicy: + """Updates a usage policy + + :param policy_id: str + The Id of the policy. This field is generated by Databricks and globally unique. + :param policy: :class:`UsagePolicy` + The policy to update. `creator_user_id` cannot be specified in the request. + :param limit_config: :class:`LimitConfig` (optional) + DEPRECATED. 
This is a redundant field, as LimitConfig is part of the UsagePolicy
body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict() if self.enable_predictive_optimization is not None: @@ -1192,10 +1202,14 @@ def as_shallow_dict(self) -> dict: body["comment"] = self.comment if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.conversion_info: + body["conversion_info"] = self.conversion_info if self.created_at is not None: body["created_at"] = self.created_at if self.created_by is not None: body["created_by"] = self.created_by + if self.dr_replication_info: + body["dr_replication_info"] = self.dr_replication_info if self.effective_predictive_optimization_flag: body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag if self.enable_predictive_optimization is not None: @@ -1240,8 +1254,10 @@ def from_dict(cls, d: Dict[str, Any]) -> CatalogInfo: catalog_type=_enum(d, "catalog_type", CatalogType), comment=d.get("comment", None), connection_name=d.get("connection_name", None), + conversion_info=_from_dict(d, "conversion_info", ConversionInfo), created_at=d.get("created_at", None), created_by=d.get("created_by", None), + dr_replication_info=_from_dict(d, "dr_replication_info", DrReplicationInfo), effective_predictive_optimization_flag=_from_dict( d, "effective_predictive_optimization_flag", EffectivePredictiveOptimizationFlag ), @@ -1588,12 +1604,56 @@ class ColumnTypeName(Enum): STRING = "STRING" STRUCT = "STRUCT" TABLE_TYPE = "TABLE_TYPE" + TIME = "TIME" TIMESTAMP = "TIMESTAMP" TIMESTAMP_NTZ = "TIMESTAMP_NTZ" USER_DEFINED_TYPE = "USER_DEFINED_TYPE" VARIANT = "VARIANT" +@dataclass +class ConditionalDisplay: + """Defines when an option should be hidden based on another option's value. For example, for + pre-created OAuth connections, some options are conditionally hidden. 
This field works in + conjunction with OptionSpec.is_hidden: - If OptionSpec.is_hidden is true, the option is always + hidden regardless of ConditionalDisplay. - If OptionSpec.is_hidden is false (or unset), + ConditionalDisplay determines visibility: - If depends_on_option matches any value in + hidden_when_values, hide this option. - Otherwise, show this option.""" + + depends_on_option: Optional[str] = None + """The name of the option whose value determines visibility of this option.""" + + hidden_when_values: Optional[List[str]] = None + """The values of the depends_on_option that will hide this option. If empty or not set, this option + follows default visibility (shown unless is_hidden is true). If depends_on_option has any of + these values, this option is hidden.""" + + def as_dict(self) -> dict: + """Serializes the ConditionalDisplay into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.depends_on_option is not None: + body["depends_on_option"] = self.depends_on_option + if self.hidden_when_values: + body["hidden_when_values"] = [v for v in self.hidden_when_values] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ConditionalDisplay into a shallow dictionary of its immediate attributes.""" + body = {} + if self.depends_on_option is not None: + body["depends_on_option"] = self.depends_on_option + if self.hidden_when_values: + body["hidden_when_values"] = self.hidden_when_values + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ConditionalDisplay: + """Deserializes the ConditionalDisplay from a dictionary.""" + return cls( + depends_on_option=d.get("depends_on_option", None), hidden_when_values=d.get("hidden_when_values", None) + ) + + @dataclass class ConnectionDependency: """A connection that is dependent on a SQL object.""" @@ -1643,6 +1703,9 @@ class ConnectionInfo: credential_type: Optional[CredentialType] = None """The type of credential.""" + environment_settings: 
Optional[EnvironmentSettings] = None + """[Create,Update:OPT] Connection environment settings as EnvironmentSettings object.""" + full_name: Optional[str] = None """Full name of connection.""" @@ -1692,6 +1755,8 @@ def as_dict(self) -> dict: body["created_by"] = self.created_by if self.credential_type is not None: body["credential_type"] = self.credential_type.value + if self.environment_settings: + body["environment_settings"] = self.environment_settings.as_dict() if self.full_name is not None: body["full_name"] = self.full_name if self.metastore_id is not None: @@ -1733,6 +1798,8 @@ def as_shallow_dict(self) -> dict: body["created_by"] = self.created_by if self.credential_type is not None: body["credential_type"] = self.credential_type + if self.environment_settings: + body["environment_settings"] = self.environment_settings if self.full_name is not None: body["full_name"] = self.full_name if self.metastore_id is not None: @@ -1769,6 +1836,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ConnectionInfo: created_at=d.get("created_at", None), created_by=d.get("created_by", None), credential_type=_enum(d, "credential_type", CredentialType), + environment_settings=_from_dict(d, "environment_settings", EnvironmentSettings), full_name=d.get("full_name", None), metastore_id=d.get("metastore_id", None), name=d.get("name", None), @@ -1795,6 +1863,7 @@ class ConnectionType(Enum): HTTP = "HTTP" MYSQL = "MYSQL" ORACLE = "ORACLE" + PALANTIR = "PALANTIR" POSTGRESQL = "POSTGRESQL" POWER_BI = "POWER_BI" REDSHIFT = "REDSHIFT" @@ -1857,6 +1926,39 @@ def from_dict(cls, d: Dict[str, Any]) -> ContinuousUpdateStatus: ) +@dataclass +class ConversionInfo: + """Status of conversion of FOREIGN entity into UC Native entity.""" + + state: Optional[ConversionInfoState] = None + """The conversion state of the resource.""" + + def as_dict(self) -> dict: + """Serializes the ConversionInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.state is not None: + 
body["state"] = self.state.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ConversionInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.state is not None: + body["state"] = self.state + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ConversionInfo: + """Deserializes the ConversionInfo from a dictionary.""" + return cls(state=_enum(d, "state", ConversionInfoState)) + + +class ConversionInfoState(Enum): + + COMPLETED = "COMPLETED" + IN_PROGRESS = "IN_PROGRESS" + + @dataclass class CreateAccessRequest: behalf_of: Optional[Principal] = None @@ -2951,6 +3053,32 @@ class DeltaSharingScopeEnum(Enum): INTERNAL_AND_EXTERNAL = "INTERNAL_AND_EXTERNAL" +@dataclass +class DenyOptions: + privileges: List[str] + """List of privileges to deny. When any of these privileges are requested, the policy will deny + access if the principal and condition match. Required on create and update.""" + + def as_dict(self) -> dict: + """Serializes the DenyOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.privileges: + body["privileges"] = [v for v in self.privileges] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DenyOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.privileges: + body["privileges"] = self.privileges + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DenyOptions: + """Deserializes the DenyOptions from a dictionary.""" + return cls(privileges=d.get("privileges", None)) + + @dataclass class Dependency: """A dependency of a SQL object. 
One of the following fields must be defined: __table__, @@ -3055,6 +3183,47 @@ def from_dict(cls, d: Dict[str, Any]) -> DisableResponse: return cls() +@dataclass +class DrReplicationInfo: + """Metadata related to Disaster Recovery.""" + + replicated_entities: Optional[str] = None + """See https://docs.google.com/document/d/1X0A_3hMhzuS2V1E3zB0x5wxPsFx70bVYK5rHep2AjW8.""" + + status: Optional[DrReplicationStatus] = None + + def as_dict(self) -> dict: + """Serializes the DrReplicationInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.replicated_entities is not None: + body["replicated_entities"] = self.replicated_entities + if self.status is not None: + body["status"] = self.status.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DrReplicationInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.replicated_entities is not None: + body["replicated_entities"] = self.replicated_entities + if self.status is not None: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DrReplicationInfo: + """Deserializes the DrReplicationInfo from a dictionary.""" + return cls( + replicated_entities=d.get("replicated_entities", None), status=_enum(d, "status", DrReplicationStatus) + ) + + +class DrReplicationStatus(Enum): + + DR_REPLICATION_STATUS_PRIMARY = "DR_REPLICATION_STATUS_PRIMARY" + DR_REPLICATION_STATUS_SECONDARY = "DR_REPLICATION_STATUS_SECONDARY" + + @dataclass class EffectivePermissionsList: next_page_token: Optional[str] = None @@ -3285,6 +3454,9 @@ class EntityTagAssignment: """The type of the entity to which the tag is assigned. 
Allowed values are: catalogs, schemas, tables, columns, volumes.""" + inherited: Optional[bool] = None + """Boolean which indicates whether this tag is inherited.""" + source_type: Optional[TagAssignmentSourceType] = None """The source type of the tag assignment, e.g., user-assigned or system-assigned""" @@ -3304,6 +3476,8 @@ def as_dict(self) -> dict: body["entity_name"] = self.entity_name if self.entity_type is not None: body["entity_type"] = self.entity_type + if self.inherited is not None: + body["inherited"] = self.inherited if self.source_type is not None: body["source_type"] = self.source_type.value if self.tag_key is not None: @@ -3323,6 +3497,8 @@ def as_shallow_dict(self) -> dict: body["entity_name"] = self.entity_name if self.entity_type is not None: body["entity_type"] = self.entity_type + if self.inherited is not None: + body["inherited"] = self.inherited if self.source_type is not None: body["source_type"] = self.source_type if self.tag_key is not None: @@ -3341,6 +3517,7 @@ def from_dict(cls, d: Dict[str, Any]) -> EntityTagAssignment: return cls( entity_name=d.get("entity_name", None), entity_type=d.get("entity_type", None), + inherited=d.get("inherited", None), source_type=_enum(d, "source_type", TagAssignmentSourceType), tag_key=d.get("tag_key", None), tag_value=d.get("tag_value", None), @@ -3349,6 +3526,38 @@ def from_dict(cls, d: Dict[str, Any]) -> EntityTagAssignment: ) +@dataclass +class EnvironmentSettings: + environment_version: Optional[str] = None + + java_dependencies: Optional[List[str]] = None + + def as_dict(self) -> dict: + """Serializes the EnvironmentSettings into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.environment_version is not None: + body["environment_version"] = self.environment_version + if self.java_dependencies: + body["java_dependencies"] = [v for v in self.java_dependencies] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the EnvironmentSettings into a shallow 
dictionary of its immediate attributes.""" + body = {} + if self.environment_version is not None: + body["environment_version"] = self.environment_version + if self.java_dependencies: + body["java_dependencies"] = self.java_dependencies + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> EnvironmentSettings: + """Deserializes the EnvironmentSettings from a dictionary.""" + return cls( + environment_version=d.get("environment_version", None), java_dependencies=d.get("java_dependencies", None) + ) + + @dataclass class ExternalLineageExternalMetadata: name: Optional[str] = None @@ -4928,6 +5137,8 @@ class GenerateTemporaryPathCredentialResponse: r2_temp_credentials: Optional[R2Credentials] = None + uc_encrypted_token: Optional[UcEncryptedToken] = None + url: Optional[str] = None """The URL of the storage path accessible by the temporary credential.""" @@ -4946,6 +5157,8 @@ def as_dict(self) -> dict: body["gcp_oauth_token"] = self.gcp_oauth_token.as_dict() if self.r2_temp_credentials: body["r2_temp_credentials"] = self.r2_temp_credentials.as_dict() + if self.uc_encrypted_token: + body["uc_encrypted_token"] = self.uc_encrypted_token.as_dict() if self.url is not None: body["url"] = self.url return body @@ -4965,6 +5178,8 @@ def as_shallow_dict(self) -> dict: body["gcp_oauth_token"] = self.gcp_oauth_token if self.r2_temp_credentials: body["r2_temp_credentials"] = self.r2_temp_credentials + if self.uc_encrypted_token: + body["uc_encrypted_token"] = self.uc_encrypted_token if self.url is not None: body["url"] = self.url return body @@ -4979,6 +5194,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryPathCredentialResponse expiration_time=d.get("expiration_time", None), gcp_oauth_token=_from_dict(d, "gcp_oauth_token", GcpOauthToken), r2_temp_credentials=_from_dict(d, "r2_temp_credentials", R2Credentials), + uc_encrypted_token=_from_dict(d, "uc_encrypted_token", UcEncryptedToken), url=d.get("url", None), ) @@ -5057,6 +5273,8 @@ class 
GenerateTemporaryTableCredentialResponse: r2_temp_credentials: Optional[R2Credentials] = None + uc_encrypted_token: Optional[UcEncryptedToken] = None + url: Optional[str] = None """The URL of the storage path accessible by the temporary credential.""" @@ -5075,6 +5293,8 @@ def as_dict(self) -> dict: body["gcp_oauth_token"] = self.gcp_oauth_token.as_dict() if self.r2_temp_credentials: body["r2_temp_credentials"] = self.r2_temp_credentials.as_dict() + if self.uc_encrypted_token: + body["uc_encrypted_token"] = self.uc_encrypted_token.as_dict() if self.url is not None: body["url"] = self.url return body @@ -5094,6 +5314,8 @@ def as_shallow_dict(self) -> dict: body["gcp_oauth_token"] = self.gcp_oauth_token if self.r2_temp_credentials: body["r2_temp_credentials"] = self.r2_temp_credentials + if self.uc_encrypted_token: + body["uc_encrypted_token"] = self.uc_encrypted_token if self.url is not None: body["url"] = self.url return body @@ -5108,6 +5330,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryTableCredentialRespons expiration_time=d.get("expiration_time", None), gcp_oauth_token=_from_dict(d, "gcp_oauth_token", GcpOauthToken), r2_temp_credentials=_from_dict(d, "r2_temp_credentials", R2Credentials), + uc_encrypted_token=_from_dict(d, "uc_encrypted_token", UcEncryptedToken), url=d.get("url", None), ) @@ -5853,6 +6076,41 @@ def from_dict(cls, d: Dict[str, Any]) -> ListPoliciesResponse: return cls(next_page_token=d.get("next_page_token", None), policies=_repeated_dict(d, "policies", PolicyInfo)) +@dataclass +class ListPrivilegeAssignmentsResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" + + privilege_assignments: Optional[List[PrivilegeAssignment]] = None + + def as_dict(self) -> dict: + """Serializes the ListPrivilegeAssignmentsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.privilege_assignments: + body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListPrivilegeAssignmentsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.privilege_assignments: + body["privilege_assignments"] = self.privilege_assignments + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListPrivilegeAssignmentsResponse: + """Deserializes the ListPrivilegeAssignmentsResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment), + ) + + @dataclass class ListQuotasResponse: next_page_token: Optional[str] = None @@ -7563,6 +7821,10 @@ class OptionSpec: """For drop down / radio button selections, UI will want to know the possible input values, it can also be used by other option types to limit input selections.""" + conditional_display: Optional[ConditionalDisplay] = None + """Conditional display configuration. 
Specifies when this option should be hidden based on another + option's value.""" + default_value: Optional[str] = None """The default value of the option, for example, value '443' for 'port' option.""" @@ -7609,6 +7871,8 @@ def as_dict(self) -> dict: body = {} if self.allowed_values: body["allowed_values"] = [v for v in self.allowed_values] + if self.conditional_display: + body["conditional_display"] = self.conditional_display.as_dict() if self.default_value is not None: body["default_value"] = self.default_value if self.description is not None: @@ -7642,6 +7906,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.allowed_values: body["allowed_values"] = self.allowed_values + if self.conditional_display: + body["conditional_display"] = self.conditional_display if self.default_value is not None: body["default_value"] = self.default_value if self.description is not None: @@ -7675,6 +7941,7 @@ def from_dict(cls, d: Dict[str, Any]) -> OptionSpec: """Deserializes the OptionSpec from a dictionary.""" return cls( allowed_values=d.get("allowed_values", None), + conditional_display=_from_dict(d, "conditional_display", ConditionalDisplay), default_value=d.get("default_value", None), description=d.get("description", None), hint=d.get("hint", None), @@ -7713,6 +7980,7 @@ class OptionSpecOptionType(Enum): OPTION_MULTILINE_STRING = "OPTION_MULTILINE_STRING" OPTION_NUMBER = "OPTION_NUMBER" OPTION_SERVICE_CREDENTIAL = "OPTION_SERVICE_CREDENTIAL" + OPTION_STORAGE_CREDENTIAL = "OPTION_STORAGE_CREDENTIAL" OPTION_STRING = "OPTION_STRING" @@ -7732,6 +8000,15 @@ class PermissionsChange: """The principal whose privileges we are changing. Only one of principal or principal_id should be specified, never both at the same time.""" + principal_id: Optional[int] = None + """An opaque internal ID that identifies the principal whose privileges should be removed. + + This field is intended for removing privileges associated with a deleted user. 
When set, only + the entries specified in the remove field are processed; any entries in the add field will be + rejected. + + Only one of principal or principal_id should be specified, never both at the same time.""" + remove: Optional[List[Privilege]] = None """The set of privileges to remove.""" @@ -7742,6 +8019,8 @@ def as_dict(self) -> dict: body["add"] = [v.value for v in self.add] if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.remove: body["remove"] = [v.value for v in self.remove] return body @@ -7753,6 +8032,8 @@ def as_shallow_dict(self) -> dict: body["add"] = self.add if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.remove: body["remove"] = self.remove return body @@ -7763,6 +8044,7 @@ def from_dict(cls, d: Dict[str, Any]) -> PermissionsChange: return cls( add=_repeated_enum(d, "add", Privilege), principal=d.get("principal", None), + principal_id=d.get("principal_id", None), remove=_repeated_enum(d, "remove", Privilege), ) @@ -7891,6 +8173,11 @@ class PolicyInfo: created_by: Optional[str] = None """Username of the user who created the policy. Output only.""" + deny: Optional[DenyOptions] = None + """Options for deny policies. Valid only if `policy_type` is `POLICY_TYPE_DENY`. Required on create + and optional on update. When specified on update, the new options will replace the existing + options as a whole.""" + except_principals: Optional[List[str]] = None """Optional list of user or group names that should be excluded from the policy.""" @@ -7924,6 +8211,13 @@ class PolicyInfo: updated_by: Optional[str] = None """Username of the user who last modified the policy. Output only.""" + use_session_identity: Optional[bool] = None + """Temporary for migrating customers to session identity. 
Customers not currently using ABAC will + not be able to set this field to false and all new policies will have this field default to + true. Existing customers will have this field default to false, but can set it to true to opt in + to session identity. after a grace period, this field will be removed and all policies will use + session identity. Only for row filter and column mask policies. Not applicable to deny policies.""" + when_condition: Optional[str] = None """Optional condition when the policy should take effect.""" @@ -7938,6 +8232,8 @@ def as_dict(self) -> dict: body["created_at"] = self.created_at if self.created_by is not None: body["created_by"] = self.created_by + if self.deny: + body["deny"] = self.deny.as_dict() if self.except_principals: body["except_principals"] = [v for v in self.except_principals] if self.for_securable_type is not None: @@ -7962,6 +8258,8 @@ def as_dict(self) -> dict: body["updated_at"] = self.updated_at if self.updated_by is not None: body["updated_by"] = self.updated_by + if self.use_session_identity is not None: + body["use_session_identity"] = self.use_session_identity if self.when_condition is not None: body["when_condition"] = self.when_condition return body @@ -7977,6 +8275,8 @@ def as_shallow_dict(self) -> dict: body["created_at"] = self.created_at if self.created_by is not None: body["created_by"] = self.created_by + if self.deny: + body["deny"] = self.deny if self.except_principals: body["except_principals"] = self.except_principals if self.for_securable_type is not None: @@ -8001,6 +8301,8 @@ def as_shallow_dict(self) -> dict: body["updated_at"] = self.updated_at if self.updated_by is not None: body["updated_by"] = self.updated_by + if self.use_session_identity is not None: + body["use_session_identity"] = self.use_session_identity if self.when_condition is not None: body["when_condition"] = self.when_condition return body @@ -8013,6 +8315,7 @@ def from_dict(cls, d: Dict[str, Any]) -> PolicyInfo: 
comment=d.get("comment", None), created_at=d.get("created_at", None), created_by=d.get("created_by", None), + deny=_from_dict(d, "deny", DenyOptions), except_principals=d.get("except_principals", None), for_securable_type=_enum(d, "for_securable_type", SecurableType), id=d.get("id", None), @@ -8025,6 +8328,7 @@ def from_dict(cls, d: Dict[str, Any]) -> PolicyInfo: to_principals=d.get("to_principals", None), updated_at=d.get("updated_at", None), updated_by=d.get("updated_by", None), + use_session_identity=d.get("use_session_identity", None), when_condition=d.get("when_condition", None), ) @@ -8032,6 +8336,7 @@ def from_dict(cls, d: Dict[str, Any]) -> PolicyInfo: class PolicyType(Enum): POLICY_TYPE_COLUMN_MASK = "POLICY_TYPE_COLUMN_MASK" + POLICY_TYPE_DENY = "POLICY_TYPE_DENY" POLICY_TYPE_ROW_FILTER = "POLICY_TYPE_ROW_FILTER" @@ -8152,11 +8457,14 @@ class Privilege(Enum): CREATE_TABLE = "CREATE_TABLE" CREATE_VIEW = "CREATE_VIEW" CREATE_VOLUME = "CREATE_VOLUME" + DELETE = "DELETE" EXECUTE = "EXECUTE" EXECUTE_CLEAN_ROOM_TASK = "EXECUTE_CLEAN_ROOM_TASK" EXTERNAL_USE_SCHEMA = "EXTERNAL_USE_SCHEMA" + INSERT = "INSERT" MANAGE = "MANAGE" MANAGE_ALLOWLIST = "MANAGE_ALLOWLIST" + MANAGE_GRANTS = "MANAGE_GRANTS" MODIFY = "MODIFY" MODIFY_CLEAN_ROOM = "MODIFY_CLEAN_ROOM" READ_FILES = "READ_FILES" @@ -8165,6 +8473,7 @@ class Privilege(Enum): REFRESH = "REFRESH" SELECT = "SELECT" SET_SHARE_PERMISSION = "SET_SHARE_PERMISSION" + UPDATE = "UPDATE" USAGE = "USAGE" USE_CATALOG = "USE_CATALOG" USE_CONNECTION = "USE_CONNECTION" @@ -8173,6 +8482,10 @@ class Privilege(Enum): USE_RECIPIENT = "USE_RECIPIENT" USE_SCHEMA = "USE_SCHEMA" USE_SHARE = "USE_SHARE" + USE_VOLUME = "USE_VOLUME" + VIEW_ADMIN_METADATA = "VIEW_ADMIN_METADATA" + VIEW_METADATA = "VIEW_METADATA" + VIEW_OBJECT = "VIEW_OBJECT" WRITE_FILES = "WRITE_FILES" WRITE_PRIVATE_FILES = "WRITE_PRIVATE_FILES" WRITE_VOLUME = "WRITE_VOLUME" @@ -8184,6 +8497,10 @@ class PrivilegeAssignment: """The principal (user email address or group name). 
For deleted principals, `principal` is empty while `principal_id` is populated.""" + principal_id: Optional[int] = None + """Unique identifier of the principal. For active principals, both `principal` and `principal_id` + are present.""" + privileges: Optional[List[Privilege]] = None """The privileges assigned to the principal.""" @@ -8192,6 +8509,8 @@ def as_dict(self) -> dict: body = {} if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.privileges: body["privileges"] = [v.value for v in self.privileges] return body @@ -8201,6 +8520,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.privileges: body["privileges"] = self.privileges return body @@ -8208,7 +8529,11 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> PrivilegeAssignment: """Deserializes the PrivilegeAssignment from a dictionary.""" - return cls(principal=d.get("principal", None), privileges=_repeated_enum(d, "privileges", Privilege)) + return cls( + principal=d.get("principal", None), + principal_id=d.get("principal_id", None), + privileges=_repeated_enum(d, "privileges", Privilege), + ) @dataclass @@ -8914,6 +9239,7 @@ class SecurableKind(Enum): TABLE_FOREIGN_MYSQL = "TABLE_FOREIGN_MYSQL" TABLE_FOREIGN_NETSUITE = "TABLE_FOREIGN_NETSUITE" TABLE_FOREIGN_ORACLE = "TABLE_FOREIGN_ORACLE" + TABLE_FOREIGN_PALANTIR = "TABLE_FOREIGN_PALANTIR" TABLE_FOREIGN_POSTGRESQL = "TABLE_FOREIGN_POSTGRESQL" TABLE_FOREIGN_REDSHIFT = "TABLE_FOREIGN_REDSHIFT" TABLE_FOREIGN_SALESFORCE = "TABLE_FOREIGN_SALESFORCE" @@ -9968,6 +10294,34 @@ def from_dict(cls, d: Dict[str, Any]) -> TriggeredUpdateStatus: ) +@dataclass +class UcEncryptedToken: + """Encrypted token used when we cannot downscope the cloud provider token appropriately See: + 
https://docs.google.com/document/d/1hEKDnSckuU5PIS798CtfqBElrMR6OJuR2wgz_BjhMSY""" + + encrypted_payload: Optional[str] = None + """Stores encrypted ScopedCloudToken as a base64-encoded string""" + + def as_dict(self) -> dict: + """Serializes the UcEncryptedToken into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.encrypted_payload is not None: + body["encrypted_payload"] = self.encrypted_payload + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UcEncryptedToken into a shallow dictionary of its immediate attributes.""" + body = {} + if self.encrypted_payload is not None: + body["encrypted_payload"] = self.encrypted_payload + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UcEncryptedToken: + """Deserializes the UcEncryptedToken from a dictionary.""" + return cls(encrypted_payload=d.get("encrypted_payload", None)) + + @dataclass class UnassignResponse: def as_dict(self) -> dict: @@ -11219,6 +11573,8 @@ def create( *, comment: Optional[str] = None, connection_name: Optional[str] = None, + conversion_info: Optional[ConversionInfo] = None, + dr_replication_info: Optional[DrReplicationInfo] = None, options: Optional[Dict[str, str]] = None, properties: Optional[Dict[str, str]] = None, provider_name: Optional[str] = None, @@ -11234,6 +11590,10 @@ def create( User-provided free-form text description. :param connection_name: str (optional) The name of the connection to an external data source. + :param conversion_info: :class:`ConversionInfo` (optional) + Status of conversion of FOREIGN catalog to UC Native catalog. + :param dr_replication_info: :class:`DrReplicationInfo` (optional) + Disaster Recovery replication state snapshot. :param options: Dict[str,str] (optional) A map of key-value properties attached to the securable. 
:param properties: Dict[str,str] (optional) @@ -11255,6 +11615,10 @@ def create( body["comment"] = comment if connection_name is not None: body["connection_name"] = connection_name + if conversion_info is not None: + body["conversion_info"] = conversion_info.as_dict() + if dr_replication_info is not None: + body["dr_replication_info"] = dr_replication_info.as_dict() if name is not None: body["name"] = name if options is not None: @@ -11404,6 +11768,8 @@ def update( name: str, *, comment: Optional[str] = None, + conversion_info: Optional[ConversionInfo] = None, + dr_replication_info: Optional[DrReplicationInfo] = None, enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None, isolation_mode: Optional[CatalogIsolationMode] = None, new_name: Optional[str] = None, @@ -11418,6 +11784,10 @@ def update( The name of the catalog. :param comment: str (optional) User-provided free-form text description. + :param conversion_info: :class:`ConversionInfo` (optional) + Status of conversion of FOREIGN catalog to UC Native catalog. + :param dr_replication_info: :class:`DrReplicationInfo` (optional) + Disaster Recovery replication state snapshot. :param enable_predictive_optimization: :class:`EnablePredictiveOptimization` (optional) Whether predictive optimization should be enabled for this object and objects under it. 
:param isolation_mode: :class:`CatalogIsolationMode` (optional) @@ -11437,6 +11807,10 @@ def update( body = {} if comment is not None: body["comment"] = comment + if conversion_info is not None: + body["conversion_info"] = conversion_info.as_dict() + if dr_replication_info is not None: + body["dr_replication_info"] = dr_replication_info.as_dict() if enable_predictive_optimization is not None: body["enable_predictive_optimization"] = enable_predictive_optimization.value if isolation_mode is not None: @@ -11482,6 +11856,7 @@ def create( options: Dict[str, str], *, comment: Optional[str] = None, + environment_settings: Optional[EnvironmentSettings] = None, properties: Optional[Dict[str, str]] = None, read_only: Optional[bool] = None, ) -> ConnectionInfo: @@ -11498,6 +11873,8 @@ def create( A map of key-value properties attached to the securable. :param comment: str (optional) User-provided free-form text description. + :param environment_settings: :class:`EnvironmentSettings` (optional) + [Create,Update:OPT] Connection environment settings as EnvironmentSettings object. :param properties: Dict[str,str] (optional) A map of key-value properties attached to the securable. 
:param read_only: bool (optional) @@ -11511,6 +11888,8 @@ def create( body["comment"] = comment if connection_type is not None: body["connection_type"] = connection_type.value + if environment_settings is not None: + body["environment_settings"] = environment_settings.as_dict() if name is not None: body["name"] = name if options is not None: @@ -11616,7 +11995,13 @@ def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = query["page_token"] = json["next_page_token"] def update( - self, name: str, options: Dict[str, str], *, new_name: Optional[str] = None, owner: Optional[str] = None + self, + name: str, + options: Dict[str, str], + *, + environment_settings: Optional[EnvironmentSettings] = None, + new_name: Optional[str] = None, + owner: Optional[str] = None, ) -> ConnectionInfo: """Updates the connection that matches the supplied name. @@ -11624,6 +12009,8 @@ def update( Name of the connection. :param options: Dict[str,str] A map of key-value properties attached to the securable. + :param environment_settings: :class:`EnvironmentSettings` (optional) + [Create,Update:OPT] Connection environment settings as EnvironmentSettings object. :param new_name: str (optional) New name for the connection. :param owner: str (optional) @@ -11633,6 +12020,8 @@ def update( """ body = {} + if environment_settings is not None: + body["environment_settings"] = environment_settings.as_dict() if new_name is not None: body["new_name"] = new_name if options is not None: @@ -12119,7 +12508,9 @@ def delete(self, entity_type: str, entity_name: str, tag_key: str): headers=headers, ) - def get(self, entity_type: str, entity_name: str, tag_key: str) -> EntityTagAssignment: + def get( + self, entity_type: str, entity_name: str, tag_key: str, *, include_inherited: Optional[bool] = None + ) -> EntityTagAssignment: """Gets a tag assignment for an Unity Catalog entity by tag key. 
:param entity_type: str @@ -12129,10 +12520,15 @@ def get(self, entity_type: str, entity_name: str, tag_key: str) -> EntityTagAssi The fully qualified name of the entity to which the tag is assigned :param tag_key: str Required. The key of the tag + :param include_inherited: bool (optional) + Boolean which indicates whether this tag is inherited. :returns: :class:`EntityTagAssignment` """ + query = {} + if include_inherited is not None: + query["include_inherited"] = include_inherited headers = { "Accept": "application/json", } @@ -12144,12 +12540,19 @@ def get(self, entity_type: str, entity_name: str, tag_key: str) -> EntityTagAssi res = self._api.do( "GET", f"/api/2.1/unity-catalog/entity-tag-assignments/{entity_type}/{entity_name}/tags/{tag_key}", + query=query, headers=headers, ) return EntityTagAssignment.from_dict(res) def list( - self, entity_type: str, entity_name: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None + self, + entity_type: str, + entity_name: str, + *, + include_inherited: Optional[bool] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None, ) -> Iterator[EntityTagAssignment]: """List tag assignments for an Unity Catalog entity @@ -12162,6 +12565,8 @@ def list( columns, volumes. :param entity_name: str The fully qualified name of the entity to which the tag is assigned + :param include_inherited: bool (optional) + Boolean which indicates whether this tag is inherited. :param max_results: int (optional) Optional. 
Maximum number of tag assignments to return in a single page :param page_token: str (optional) @@ -12171,6 +12576,8 @@ def list( """ query = {} + if include_inherited is not None: + query["include_inherited"] = include_inherited if max_results is not None: query["max_results"] = max_results if page_token is not None: @@ -13099,6 +13506,7 @@ def get( securable_type: str, full_name: str, *, + include_deleted_principals: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None, principal: Optional[str] = None, @@ -13116,6 +13524,8 @@ def get( Type of securable. :param full_name: str Full name of securable. + :param include_deleted_principals: bool (optional) + Optional. If true, also return privilege assignments whose principals have been deleted. :param max_results: int (optional) Specifies the maximum number of privileges to return (page length). Every PrivilegeAssignment present in a single page response is guaranteed to contain all the privileges granted on the @@ -13135,6 +13545,8 @@ def get( """ query = {} + if include_deleted_principals is not None: + query["include_deleted_principals"] = include_deleted_principals if max_results is not None: query["max_results"] = max_results if page_token is not None: @@ -13220,6 +13632,74 @@ def get_effective( ) return EffectivePermissionsList.from_dict(res) + def list( + self, + securable_type: str, + full_name: str, + *, + include_deleted_principals: Optional[bool] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + principal: Optional[str] = None, + ) -> Iterator[PrivilegeAssignment]: + """Lists the privilege assignments for a securable. Does not include inherited privileges. Paginated + version of Get Permissions API. + + :param securable_type: str + Type of securable. + :param full_name: str + Full name of securable. + :param include_deleted_principals: bool (optional) + Optional. If true, also return privilege assignments whose principals have been deleted. 
+ :param page_size: int (optional) + Specifies the maximum number of privileges to return (page length). Every PrivilegeAssignment + present in a single page response is guaranteed to contain all the privileges granted on the + requested Securable for the respective principal. + + If not set, page length is the server configured value. If set to - lesser than 0: invalid parameter + error - 0: page length is set to a server configured value - lesser than 150 but greater than 0: + invalid parameter error (this is to ensure that server is able to return at least one complete + PrivilegeAssignment in a single page response) - greater than (or equal to) 150: page length is the + minimum of this value and a server configured value + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. + :param principal: str (optional) + If provided, only the permissions for the specified principal (user or group) are returned. + + :returns: Iterator over :class:`PrivilegeAssignment` + """ + + query = {} + if include_deleted_principals is not None: + query["include_deleted_principals"] = include_deleted_principals + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + if principal is not None: + query["principal"] = principal + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + while True: + json = self._api.do( + "GET", + f"/api/2.1/unity-catalog/privilege-assignments/{securable_type}/{full_name}", + query=query, + headers=headers, + ) + if "privilege_assignments" in json: + for v in json["privilege_assignments"]: + yield PrivilegeAssignment.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + def update( self, securable_type: str, full_name: 
str, *, changes: Optional[List[PermissionsChange]] = None ) -> UpdatePermissionsResponse: diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py index ab08c8855..29c24b9d5 100755 --- a/databricks/sdk/service/cleanrooms.py +++ b/databricks/sdk/service/cleanrooms.py @@ -33,6 +33,11 @@ class CleanRoom: created_at: Optional[int] = None """When the clean room was created, in epoch milliseconds.""" + enable_shared_output: Optional[bool] = None + """Whether allow task to write to shared output schema. When enabled, clean room task runs + triggered by the current collaborator can write to the run-scoped shared output schema which is + accessible by all collaborators. TODO: deprecate this field once shared output PrPr is finalized""" + local_collaborator_alias: Optional[str] = None """The alias of the collaborator tied to the local clean room.""" @@ -68,6 +73,8 @@ def as_dict(self) -> dict: body["comment"] = self.comment if self.created_at is not None: body["created_at"] = self.created_at + if self.enable_shared_output is not None: + body["enable_shared_output"] = self.enable_shared_output if self.local_collaborator_alias is not None: body["local_collaborator_alias"] = self.local_collaborator_alias if self.name is not None: @@ -93,6 +100,8 @@ def as_shallow_dict(self) -> dict: body["comment"] = self.comment if self.created_at is not None: body["created_at"] = self.created_at + if self.enable_shared_output is not None: + body["enable_shared_output"] = self.enable_shared_output if self.local_collaborator_alias is not None: body["local_collaborator_alias"] = self.local_collaborator_alias if self.name is not None: @@ -116,6 +125,7 @@ def from_dict(cls, d: Dict[str, Any]) -> CleanRoom: access_restricted=_enum(d, "access_restricted", CleanRoomAccessRestricted), comment=d.get("comment", None), created_at=d.get("created_at", None), + enable_shared_output=d.get("enable_shared_output", None), local_collaborator_alias=d.get("local_collaborator_alias", 
None), name=d.get("name", None), output_catalog=_from_dict(d, "output_catalog", CleanRoomOutputCatalog), @@ -810,6 +820,13 @@ class CleanRoomNotebookTaskRun: run_duration: Optional[int] = None """Duration of the task run, in milliseconds.""" + shared_output_schema_expiration_time: Optional[int] = None + """Expiration time of the shared output schema of the task run (if any), in epoch milliseconds.""" + + shared_output_schema_name: Optional[str] = None + """Name of the shared output schema associated with the clean rooms notebook task run. This schema + is accessible by all collaborators when enable_shared_output is true.""" + start_time: Optional[int] = None """When the task run started, in epoch milliseconds.""" @@ -832,6 +849,10 @@ def as_dict(self) -> dict: body["output_schema_name"] = self.output_schema_name if self.run_duration is not None: body["run_duration"] = self.run_duration + if self.shared_output_schema_expiration_time is not None: + body["shared_output_schema_expiration_time"] = self.shared_output_schema_expiration_time + if self.shared_output_schema_name is not None: + body["shared_output_schema_name"] = self.shared_output_schema_name if self.start_time is not None: body["start_time"] = self.start_time return body @@ -855,6 +876,10 @@ def as_shallow_dict(self) -> dict: body["output_schema_name"] = self.output_schema_name if self.run_duration is not None: body["run_duration"] = self.run_duration + if self.shared_output_schema_expiration_time is not None: + body["shared_output_schema_expiration_time"] = self.shared_output_schema_expiration_time + if self.shared_output_schema_name is not None: + body["shared_output_schema_name"] = self.shared_output_schema_name if self.start_time is not None: body["start_time"] = self.start_time return body @@ -871,6 +896,8 @@ def from_dict(cls, d: Dict[str, Any]) -> CleanRoomNotebookTaskRun: output_schema_expiration_time=d.get("output_schema_expiration_time", None), output_schema_name=d.get("output_schema_name", None), 
run_duration=d.get("run_duration", None), + shared_output_schema_expiration_time=d.get("shared_output_schema_expiration_time", None), + shared_output_schema_name=d.get("shared_output_schema_name", None), start_time=d.get("start_time", None), ) diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index 5a613ac31..ea4669e84 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -5,6 +5,7 @@ import logging import random import time +import uuid from dataclasses import dataclass from datetime import timedelta from enum import Enum @@ -328,6 +329,13 @@ class AzureAvailability(Enum): SPOT_WITH_FALLBACK_AZURE = "SPOT_WITH_FALLBACK_AZURE" +class BaseEnvironmentType(Enum): + """If changed, also update estore/namespaces/defaultbaseenvironments/latest.proto""" + + CPU = "CPU" + GPU = "GPU" + + @dataclass class CancelResponse: def as_dict(self) -> dict: @@ -2454,6 +2462,15 @@ def from_dict(cls, d: Dict[str, Any]) -> CommandStatusResponse: ) +class ConfidentialComputeType(Enum): + """Confidential computing technology for GCP instances. Aligns with gcloud's + --confidential-compute-type flag and the REST API's + confidentialInstanceConfig.confidentialInstanceType field. See: + https://cloud.google.com/confidential-computing/confidential-vm/docs/create-a-confidential-vm-instance""" + + SEV_SNP = "SEV_SNP" + + class ContextStatus(Enum): ERROR = "Error" @@ -2766,6 +2783,184 @@ def from_dict(cls, d: Dict[str, Any]) -> DbfsStorageInfo: return cls(destination=d.get("destination", None)) +@dataclass +class DefaultBaseEnvironment: + base_environment_cache: Optional[List[DefaultBaseEnvironmentCache]] = None + + base_environment_type: Optional[BaseEnvironmentType] = None + + created_timestamp: Optional[int] = None + + creator_user_id: Optional[int] = None + + environment: Optional[Environment] = None + """Note: we made `environment` non-internal because we need to expose its `client` field. 
All other + fields should be treated as internal.""" + + filepath: Optional[str] = None + + id: Optional[str] = None + + is_default: Optional[bool] = None + + last_updated_timestamp: Optional[int] = None + + last_updated_user_id: Optional[int] = None + + message: Optional[str] = None + + name: Optional[str] = None + + principal_ids: Optional[List[int]] = None + + status: Optional[DefaultBaseEnvironmentCacheStatus] = None + + def as_dict(self) -> dict: + """Serializes the DefaultBaseEnvironment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.base_environment_cache: + body["base_environment_cache"] = [v.as_dict() for v in self.base_environment_cache] + if self.base_environment_type is not None: + body["base_environment_type"] = self.base_environment_type.value + if self.created_timestamp is not None: + body["created_timestamp"] = self.created_timestamp + if self.creator_user_id is not None: + body["creator_user_id"] = self.creator_user_id + if self.environment: + body["environment"] = self.environment.as_dict() + if self.filepath is not None: + body["filepath"] = self.filepath + if self.id is not None: + body["id"] = self.id + if self.is_default is not None: + body["is_default"] = self.is_default + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.last_updated_user_id is not None: + body["last_updated_user_id"] = self.last_updated_user_id + if self.message is not None: + body["message"] = self.message + if self.name is not None: + body["name"] = self.name + if self.principal_ids: + body["principal_ids"] = [v for v in self.principal_ids] + if self.status is not None: + body["status"] = self.status.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DefaultBaseEnvironment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.base_environment_cache: + body["base_environment_cache"] = self.base_environment_cache + if 
self.base_environment_type is not None: + body["base_environment_type"] = self.base_environment_type + if self.created_timestamp is not None: + body["created_timestamp"] = self.created_timestamp + if self.creator_user_id is not None: + body["creator_user_id"] = self.creator_user_id + if self.environment: + body["environment"] = self.environment + if self.filepath is not None: + body["filepath"] = self.filepath + if self.id is not None: + body["id"] = self.id + if self.is_default is not None: + body["is_default"] = self.is_default + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.last_updated_user_id is not None: + body["last_updated_user_id"] = self.last_updated_user_id + if self.message is not None: + body["message"] = self.message + if self.name is not None: + body["name"] = self.name + if self.principal_ids: + body["principal_ids"] = self.principal_ids + if self.status is not None: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DefaultBaseEnvironment: + """Deserializes the DefaultBaseEnvironment from a dictionary.""" + return cls( + base_environment_cache=_repeated_dict(d, "base_environment_cache", DefaultBaseEnvironmentCache), + base_environment_type=_enum(d, "base_environment_type", BaseEnvironmentType), + created_timestamp=d.get("created_timestamp", None), + creator_user_id=d.get("creator_user_id", None), + environment=_from_dict(d, "environment", Environment), + filepath=d.get("filepath", None), + id=d.get("id", None), + is_default=d.get("is_default", None), + last_updated_timestamp=d.get("last_updated_timestamp", None), + last_updated_user_id=d.get("last_updated_user_id", None), + message=d.get("message", None), + name=d.get("name", None), + principal_ids=d.get("principal_ids", None), + status=_enum(d, "status", DefaultBaseEnvironmentCacheStatus), + ) + + +@dataclass +class DefaultBaseEnvironmentCache: + 
indefinite_materialized_environment: Optional[MaterializedEnvironment] = None + + materialized_environment: Optional[MaterializedEnvironment] = None + + message: Optional[str] = None + + status: Optional[DefaultBaseEnvironmentCacheStatus] = None + + def as_dict(self) -> dict: + """Serializes the DefaultBaseEnvironmentCache into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.indefinite_materialized_environment: + body["indefinite_materialized_environment"] = self.indefinite_materialized_environment.as_dict() + if self.materialized_environment: + body["materialized_environment"] = self.materialized_environment.as_dict() + if self.message is not None: + body["message"] = self.message + if self.status is not None: + body["status"] = self.status.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DefaultBaseEnvironmentCache into a shallow dictionary of its immediate attributes.""" + body = {} + if self.indefinite_materialized_environment: + body["indefinite_materialized_environment"] = self.indefinite_materialized_environment + if self.materialized_environment: + body["materialized_environment"] = self.materialized_environment + if self.message is not None: + body["message"] = self.message + if self.status is not None: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DefaultBaseEnvironmentCache: + """Deserializes the DefaultBaseEnvironmentCache from a dictionary.""" + return cls( + indefinite_materialized_environment=_from_dict( + d, "indefinite_materialized_environment", MaterializedEnvironment + ), + materialized_environment=_from_dict(d, "materialized_environment", MaterializedEnvironment), + message=d.get("message", None), + status=_enum(d, "status", DefaultBaseEnvironmentCacheStatus), + ) + + +class DefaultBaseEnvironmentCacheStatus(Enum): + + CREATED = "CREATED" + EXPIRED = "EXPIRED" + FAILED = "FAILED" + INVALID = "INVALID" + PENDING = "PENDING" + 
REFRESHING = "REFRESHING" + + @dataclass class DeleteClusterResponse: def as_dict(self) -> dict: @@ -3440,6 +3635,8 @@ class EventType(Enum): DBFS_DOWN = "DBFS_DOWN" DECOMMISSION_ENDED = "DECOMMISSION_ENDED" DECOMMISSION_STARTED = "DECOMMISSION_STARTED" + DEFERRED_POLICY_ENFORCEMENT_FAILED = "DEFERRED_POLICY_ENFORCEMENT_FAILED" + DEFERRED_POLICY_ENFORCEMENT_SCHEDULED = "DEFERRED_POLICY_ENFORCEMENT_SCHEDULED" DID_NOT_EXPAND_DISK = "DID_NOT_EXPAND_DISK" DRIVER_HEALTHY = "DRIVER_HEALTHY" DRIVER_NOT_RESPONDING = "DRIVER_NOT_RESPONDING" @@ -3476,6 +3673,10 @@ class GcpAttributes: boot_disk_size: Optional[int] = None """Boot disk size in GB""" + confidential_compute_type: Optional[ConfidentialComputeType] = None + """The confidential computing technology for this cluster's instances. Currently only SEV_SNP is + supported, and only on N2D instance types. When not set, no confidential computing is applied.""" + first_on_demand: Optional[int] = None """The first `first_on_demand` nodes of the cluster will be placed on on-demand instances. 
This value should be greater than 0, to make sure the cluster driver node is placed on an on-demand @@ -3516,6 +3717,8 @@ def as_dict(self) -> dict: body["availability"] = self.availability.value if self.boot_disk_size is not None: body["boot_disk_size"] = self.boot_disk_size + if self.confidential_compute_type is not None: + body["confidential_compute_type"] = self.confidential_compute_type.value if self.first_on_demand is not None: body["first_on_demand"] = self.first_on_demand if self.google_service_account is not None: @@ -3535,6 +3738,8 @@ def as_shallow_dict(self) -> dict: body["availability"] = self.availability if self.boot_disk_size is not None: body["boot_disk_size"] = self.boot_disk_size + if self.confidential_compute_type is not None: + body["confidential_compute_type"] = self.confidential_compute_type if self.first_on_demand is not None: body["first_on_demand"] = self.first_on_demand if self.google_service_account is not None: @@ -3553,6 +3758,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GcpAttributes: return cls( availability=_enum(d, "availability", GcpAvailability), boot_disk_size=d.get("boot_disk_size", None), + confidential_compute_type=_enum(d, "confidential_compute_type", ConfidentialComputeType), first_on_demand=d.get("first_on_demand", None), google_service_account=d.get("google_service_account", None), local_ssd_count=d.get("local_ssd_count", None), @@ -3889,6 +4095,12 @@ class GetInstancePool: disk_spec: Optional[DiskSpec] = None """Defines the specification of the disks that will be attached to all spark containers.""" + enable_auto_alternate_node_types: Optional[bool] = None + """For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids + are enabled. This field should not be true if node_type_flexibility is set. DEPRECATED: This + field was deprecated before entering PuPr and should no longer be used. 
TODO(CJ-71514): Remove + this field after sufficient time has passed for all clients to migrate.""" + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space. In AWS, this feature @@ -3965,6 +4177,8 @@ def as_dict(self) -> dict: body["default_tags"] = self.default_tags if self.disk_spec: body["disk_spec"] = self.disk_spec.as_dict() + if self.enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = self.enable_auto_alternate_node_types if self.enable_elastic_disk is not None: body["enable_elastic_disk"] = self.enable_elastic_disk if self.gcp_attributes: @@ -4012,6 +4226,8 @@ def as_shallow_dict(self) -> dict: body["default_tags"] = self.default_tags if self.disk_spec: body["disk_spec"] = self.disk_spec + if self.enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = self.enable_auto_alternate_node_types if self.enable_elastic_disk is not None: body["enable_elastic_disk"] = self.enable_elastic_disk if self.gcp_attributes: @@ -4055,6 +4271,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GetInstancePool: custom_tags=d.get("custom_tags", None), default_tags=d.get("default_tags", None), disk_spec=_from_dict(d, "disk_spec", DiskSpec), + enable_auto_alternate_node_types=d.get("enable_auto_alternate_node_types", None), enable_elastic_disk=d.get("enable_elastic_disk", None), gcp_attributes=_from_dict(d, "gcp_attributes", InstancePoolGcpAttributes), idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None), @@ -4726,6 +4943,12 @@ class InstancePoolAndStats: disk_spec: Optional[DiskSpec] = None """Defines the specification of the disks that will be attached to all spark containers.""" + enable_auto_alternate_node_types: Optional[bool] = None + """For pools with node type flexibility (Fleet-V2), whether auto generated 
alternate node type ids + are enabled. This field should not be true if node_type_flexibility is set. DEPRECATED: This + field was deprecated before entering PuPr and should no longer be used. TODO(CJ-71514): Remove + this field after sufficient time has passed for all clients to migrate.""" + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space. In AWS, this feature @@ -4805,6 +5028,8 @@ def as_dict(self) -> dict: body["default_tags"] = self.default_tags if self.disk_spec: body["disk_spec"] = self.disk_spec.as_dict() + if self.enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = self.enable_auto_alternate_node_types if self.enable_elastic_disk is not None: body["enable_elastic_disk"] = self.enable_elastic_disk if self.gcp_attributes: @@ -4852,6 +5077,8 @@ def as_shallow_dict(self) -> dict: body["default_tags"] = self.default_tags if self.disk_spec: body["disk_spec"] = self.disk_spec + if self.enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = self.enable_auto_alternate_node_types if self.enable_elastic_disk is not None: body["enable_elastic_disk"] = self.enable_elastic_disk if self.gcp_attributes: @@ -4895,6 +5122,7 @@ def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAndStats: custom_tags=d.get("custom_tags", None), default_tags=d.get("default_tags", None), disk_spec=_from_dict(d, "disk_spec", DiskSpec), + enable_auto_alternate_node_types=d.get("enable_auto_alternate_node_types", None), enable_elastic_disk=d.get("enable_elastic_disk", None), gcp_attributes=_from_dict(d, "gcp_attributes", InstancePoolGcpAttributes), idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None), @@ -5783,6 +6011,39 @@ class ListClustersSortByField(Enum): DEFAULT = "DEFAULT" +@dataclass +class 
ListDefaultBaseEnvironmentsResponse: + default_base_environments: Optional[List[DefaultBaseEnvironment]] = None + + next_page_token: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the ListDefaultBaseEnvironmentsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.default_base_environments: + body["default_base_environments"] = [v.as_dict() for v in self.default_base_environments] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListDefaultBaseEnvironmentsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.default_base_environments: + body["default_base_environments"] = self.default_base_environments + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListDefaultBaseEnvironmentsResponse: + """Deserializes the ListDefaultBaseEnvironmentsResponse from a dictionary.""" + return cls( + default_base_environments=_repeated_dict(d, "default_base_environments", DefaultBaseEnvironment), + next_page_token=d.get("next_page_token", None), + ) + + @dataclass class ListGlobalInitScriptsResponse: scripts: Optional[List[GlobalInitScriptDetails]] = None @@ -6051,6 +6312,44 @@ def from_dict(cls, d: Dict[str, Any]) -> LogSyncStatus: MapAny = Dict[str, Any] +@dataclass +class MaterializedEnvironment: + """Materialized Environment information enables environment sharing and reuse via Environment + Caching during library installations. Currently this feature is only supported for Python + libraries. + + - If the env cache entry in LMv2 DB doesn't exist or invalid, library installations and + environment materialization will occur. 
A new Materialized Environment metadata will be sent + from DP upon successful library installations and env materialization, and is persisted into + database by LMv2. - If the env cache entry in LMv2 DB is valid, the Materialized Environment + will be sent to DP by LMv2, and DP will restore the cached environment from a store instead of + reinstalling libraries from scratch. + + If changed, also update estore/namespaces/defaultbaseenvironments/latest.proto with new version""" + + last_updated_timestamp: Optional[int] = None + """The timestamp (in epoch milliseconds) when the materialized env is updated.""" + + def as_dict(self) -> dict: + """Serializes the MaterializedEnvironment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + return body + + def as_shallow_dict(self) -> dict: + """Serializes the MaterializedEnvironment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> MaterializedEnvironment: + """Deserializes the MaterializedEnvironment from a dictionary.""" + return cls(last_updated_timestamp=d.get("last_updated_timestamp", None)) + + @dataclass class MavenLibrary: coordinates: str @@ -6685,6 +6984,24 @@ def from_dict(cls, d: Dict[str, Any]) -> RCranLibrary: return cls(package=d.get("package", None), repo=d.get("repo", None)) +@dataclass +class RefreshDefaultBaseEnvironmentsResponse: + def as_dict(self) -> dict: + """Serializes the RefreshDefaultBaseEnvironmentsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RefreshDefaultBaseEnvironmentsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + 
return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RefreshDefaultBaseEnvironmentsResponse: + """Deserializes the RefreshDefaultBaseEnvironmentsResponse from a dictionary.""" + return cls() + + @dataclass class RemoveResponse: def as_dict(self) -> dict: @@ -7237,6 +7554,7 @@ class TerminationReasonCode(Enum): DOCKER_IMAGE_PULL_FAILURE = "DOCKER_IMAGE_PULL_FAILURE" DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION = "DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION" DOCKER_INVALID_OS_EXCEPTION = "DOCKER_INVALID_OS_EXCEPTION" + DRIVER_DNS_RESOLUTION_FAILURE = "DRIVER_DNS_RESOLUTION_FAILURE" DRIVER_EVICTION = "DRIVER_EVICTION" DRIVER_LAUNCH_TIMEOUT = "DRIVER_LAUNCH_TIMEOUT" DRIVER_NODE_UNREACHABLE = "DRIVER_NODE_UNREACHABLE" @@ -7317,6 +7635,8 @@ class TerminationReasonCode(Enum): NETWORK_CHECK_STORAGE_FAILURE_DUE_TO_MISCONFIG = "NETWORK_CHECK_STORAGE_FAILURE_DUE_TO_MISCONFIG" NETWORK_CONFIGURATION_FAILURE = "NETWORK_CONFIGURATION_FAILURE" NFS_MOUNT_FAILURE = "NFS_MOUNT_FAILURE" + NO_ACTIVATED_K8S = "NO_ACTIVATED_K8S" + NO_ACTIVATED_K8S_TESTING_TAG = "NO_ACTIVATED_K8S_TESTING_TAG" NO_MATCHED_K8S = "NO_MATCHED_K8S" NO_MATCHED_K8S_TESTING_TAG = "NO_MATCHED_K8S_TESTING_TAG" NPIP_TUNNEL_SETUP_FAILURE = "NPIP_TUNNEL_SETUP_FAILURE" @@ -7330,6 +7650,7 @@ class TerminationReasonCode(Enum): SECRET_CREATION_FAILURE = "SECRET_CREATION_FAILURE" SECRET_PERMISSION_DENIED = "SECRET_PERMISSION_DENIED" SECRET_RESOLUTION_ERROR = "SECRET_RESOLUTION_ERROR" + SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION = "SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION" SECURITY_DAEMON_REGISTRATION_EXCEPTION = "SECURITY_DAEMON_REGISTRATION_EXCEPTION" SELF_BOOTSTRAP_FAILURE = "SELF_BOOTSTRAP_FAILURE" SERVERLESS_LONG_RUNNING_TERMINATED = "SERVERLESS_LONG_RUNNING_TERMINATED" @@ -10211,6 +10532,7 @@ def create( azure_attributes: Optional[InstancePoolAzureAttributes] = None, custom_tags: Optional[Dict[str, str]] = None, disk_spec: Optional[DiskSpec] = None, + enable_auto_alternate_node_types: 
Optional[bool] = None, enable_elastic_disk: Optional[bool] = None, gcp_attributes: Optional[InstancePoolGcpAttributes] = None, idle_instance_autotermination_minutes: Optional[int] = None, @@ -10245,6 +10567,11 @@ def create( - Currently, Databricks allows at most 45 custom tags :param disk_spec: :class:`DiskSpec` (optional) Defines the specification of the disks that will be attached to all spark containers. + :param enable_auto_alternate_node_types: bool (optional) + For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids are + enabled. This field should not be true if node_type_flexibility is set. DEPRECATED: This field was + deprecated before entering PuPr and should no longer be used. TODO(CJ-71514): Remove this field + after sufficient time has passed for all clients to migrate. :param enable_elastic_disk: bool (optional) Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space. In AWS, this feature @@ -10291,6 +10618,8 @@ def create( body["custom_tags"] = custom_tags if disk_spec is not None: body["disk_spec"] = disk_spec.as_dict() + if enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = enable_auto_alternate_node_types if enable_elastic_disk is not None: body["enable_elastic_disk"] = enable_elastic_disk if gcp_attributes is not None: @@ -10357,6 +10686,7 @@ def edit( node_type_id: str, *, custom_tags: Optional[Dict[str, str]] = None, + enable_auto_alternate_node_types: Optional[bool] = None, idle_instance_autotermination_minutes: Optional[int] = None, max_capacity: Optional[int] = None, min_idle_instances: Optional[int] = None, @@ -10380,6 +10710,11 @@ def edit( EBS volumes) with these tags in addition to `default_tags`. 
Notes: - Currently, Databricks allows at most 45 custom tags + :param enable_auto_alternate_node_types: bool (optional) + For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids are + enabled. This field should not be true if node_type_flexibility is set. DEPRECATED: This field was + deprecated before entering PuPr and should no longer be used. TODO(CJ-71514): Remove this field + after sufficient time has passed for all clients to migrate. :param idle_instance_autotermination_minutes: int (optional) Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met. If not set, the extra pool instances @@ -10405,6 +10740,8 @@ def edit( body = {} if custom_tags is not None: body["custom_tags"] = custom_tags + if enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = enable_auto_alternate_node_types if idle_instance_autotermination_minutes is not None: body["idle_instance_autotermination_minutes"] = idle_instance_autotermination_minutes if instance_pool_id is not None: @@ -10816,6 +11153,94 @@ def cluster_status(self, cluster_id: str) -> Iterator[LibraryFullStatus]: parsed = ClusterLibraryStatuses.from_dict(json).library_statuses return parsed if parsed is not None else [] + def create_default_base_environment( + self, + default_base_environment: DefaultBaseEnvironment, + *, + request_id: Optional[str] = None, + workspace_base_environment_id: Optional[str] = None, + ) -> DefaultBaseEnvironment: + """Create a default base environment within workspaces to define the environment version and a list of + dependencies to be used in serverless notebooks and jobs. This process will asynchronously generate a + cache to optimize dependency resolution. + + :param default_base_environment: :class:`DefaultBaseEnvironment` + :param request_id: str (optional) + A unique identifier for this request. 
A random UUID is recommended. This request is only idempotent + if a `request_id` is provided. + :param workspace_base_environment_id: str (optional) + + :returns: :class:`DefaultBaseEnvironment` + """ + + if request_id is None or request_id == "": + request_id = str(uuid.uuid4()) + body = {} + if default_base_environment is not None: + body["default_base_environment"] = default_base_environment.as_dict() + if request_id is not None: + body["request_id"] = request_id + if workspace_base_environment_id is not None: + body["workspace_base_environment_id"] = workspace_base_environment_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("POST", "/api/2.0/default-base-environments", body=body, headers=headers) + return DefaultBaseEnvironment.from_dict(res) + + def delete_default_base_environment(self, id: str): + """Delete the default base environment given an ID. The default base environment may be used by + downstream workloads. Please ensure that the deletion is intentional. + + :param id: str + + + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + self._api.do("DELETE", f"/api/2.0/default-base-environments/{id}", headers=headers) + + def get_default_base_environment(self, id: str, *, trace_id: Optional[str] = None) -> DefaultBaseEnvironment: + """Return the default base environment details for a given ID. 
+ + :param id: str + :param trace_id: str (optional) + Deprecated: use ctx.requestId instead + + :returns: :class:`DefaultBaseEnvironment` + """ + + query = {} + if id is not None: + query["id"] = id + if trace_id is not None: + query["trace_id"] = trace_id + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do( + "GET", "/api/2.0/default-base-environments:getDefaultBaseEnvironment", query=query, headers=headers + ) + return DefaultBaseEnvironment.from_dict(res) + def install(self, cluster_id: str, libraries: List[Library]): """Add libraries to install on a cluster. The installation is asynchronous; it happens in the background after the completion of this request. @@ -10844,6 +11269,62 @@ def install(self, cluster_id: str, libraries: List[Library]): self._api.do("POST", "/api/2.0/libraries/install", body=body, headers=headers) + def list_default_base_environments( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[DefaultBaseEnvironment]: + """List default base environments defined in the workspaces for the requested user. 
+ + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`DefaultBaseEnvironment` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + while True: + json = self._api.do("GET", "/api/2.0/default-base-environments", query=query, headers=headers) + if "default_base_environments" in json: + for v in json["default_base_environments"]: + yield DefaultBaseEnvironment.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def refresh_default_base_environments(self, ids: List[str]): + """Refresh the cached default base environments for the given IDs. This process will asynchronously + regenerate the caches. The existing caches remains available until it expires. + + :param ids: List[str] + + + """ + + body = {} + if ids is not None: + body["ids"] = [v for v in ids] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + self._api.do("POST", "/api/2.0/default-base-environments/refresh", body=body, headers=headers) + def uninstall(self, cluster_id: str, libraries: List[Library]): """Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster is restarted. A request to uninstall a library that is not currently installed is ignored. 
@@ -10872,6 +11353,62 @@ def uninstall(self, cluster_id: str, libraries: List[Library]): self._api.do("POST", "/api/2.0/libraries/uninstall", body=body, headers=headers) + def update_default_base_environment( + self, id: str, default_base_environment: DefaultBaseEnvironment + ) -> DefaultBaseEnvironment: + """Update the default base environment for the given ID. This process will asynchronously regenerate the + cache. The existing cache remains available until it expires. + + :param id: str + :param default_base_environment: :class:`DefaultBaseEnvironment` + + :returns: :class:`DefaultBaseEnvironment` + """ + + body = {} + if default_base_environment is not None: + body["default_base_environment"] = default_base_environment.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("PATCH", f"/api/2.0/default-base-environments/{id}", body=body, headers=headers) + return DefaultBaseEnvironment.from_dict(res) + + def update_default_default_base_environment( + self, *, base_environment_type: Optional[BaseEnvironmentType] = None, id: Optional[str] = None + ) -> DefaultBaseEnvironment: + """Set the default base environment for the workspace. This marks the specified DBE as the workspace + default. 
+ + :param base_environment_type: :class:`BaseEnvironmentType` (optional) + :param id: str (optional) + + :returns: :class:`DefaultBaseEnvironment` + """ + + body = {} + if base_environment_type is not None: + body["base_environment_type"] = base_environment_type.value + if id is not None: + body["id"] = id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("POST", "/api/2.0/default-base-environments:setDefault", body=body, headers=headers) + return DefaultBaseEnvironment.from_dict(res) + class PolicyComplianceForClustersAPI: """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index 7da50818f..339f4b43f 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -13,7 +13,7 @@ from databricks.sdk.client_types import HostType from databricks.sdk.service import sql from databricks.sdk.service._internal import (Wait, _enum, _from_dict, - _repeated_dict) + _repeated_dict, _repeated_enum) from ..errors import OperationFailed @@ -104,6 +104,72 @@ def from_dict(cls, d: Dict[str, Any]) -> AuthorizationDetailsGrantRule: return cls(permission_set=d.get("permission_set", None)) +@dataclass +class CancelQueryExecutionResponse: + status: Optional[List[CancelQueryExecutionResponseStatus]] = None + + def as_dict(self) -> dict: + """Serializes the CancelQueryExecutionResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.status: + body["status"] = [v.as_dict() for v in self.status] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CancelQueryExecutionResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.status: + 
body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CancelQueryExecutionResponse: + """Deserializes the CancelQueryExecutionResponse from a dictionary.""" + return cls(status=_repeated_dict(d, "status", CancelQueryExecutionResponseStatus)) + + +@dataclass +class CancelQueryExecutionResponseStatus: + data_token: str + """The token to poll for result asynchronously Example: + EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ""" + + pending: Optional[Empty] = None + + success: Optional[Empty] = None + + def as_dict(self) -> dict: + """Serializes the CancelQueryExecutionResponseStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + if self.pending: + body["pending"] = self.pending.as_dict() + if self.success: + body["success"] = self.success.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CancelQueryExecutionResponseStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + if self.pending: + body["pending"] = self.pending + if self.success: + body["success"] = self.success + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CancelQueryExecutionResponseStatus: + """Deserializes the CancelQueryExecutionResponseStatus from a dictionary.""" + return cls( + data_token=d.get("data_token", None), + pending=_from_dict(d, "pending", Empty), + success=_from_dict(d, "success", Empty), + ) + + @dataclass class CronSchedule: quartz_cron_expression: str @@ -255,6 +321,55 @@ class DashboardView(Enum): DASHBOARD_VIEW_BASIC = "DASHBOARD_VIEW_BASIC" +@dataclass +class Empty: + """Represents an empty message, similar to google.protobuf.Empty, which is not available in the + firm right now.""" + + def as_dict(self) -> dict: + """Serializes the Empty into a 
dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Empty into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Empty: + """Deserializes the Empty from a dictionary.""" + return cls() + + +class EvaluationStatusType(Enum): + + DONE = "DONE" + EVALUATION_CANCELLED = "EVALUATION_CANCELLED" + EVALUATION_FAILED = "EVALUATION_FAILED" + EVALUATION_TIMEOUT = "EVALUATION_TIMEOUT" + NOT_STARTED = "NOT_STARTED" + RUNNING = "RUNNING" + + +@dataclass +class ExecuteQueryResponse: + def as_dict(self) -> dict: + """Serializes the ExecuteQueryResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExecuteQueryResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ExecuteQueryResponse: + """Deserializes the ExecuteQueryResponse from a dictionary.""" + return cls() + + @dataclass class GenieAttachment: """Genie AI Response""" @@ -423,16 +538,340 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieConversationSummary: ) +class GenieEvalAssessment(Enum): + + BAD = "BAD" + GOOD = "GOOD" + NEEDS_REVIEW = "NEEDS_REVIEW" + + +@dataclass +class GenieEvalResponse: + response: Optional[str] = None + """The response content (either text or SQL query).""" + + response_type: Optional[GenieEvalResponseType] = None + """Type of response""" + + sql_execution_result: Optional[sql.StatementResponse] = None + """SQL Statement Execution response.""" + + def as_dict(self) -> dict: + """Serializes the GenieEvalResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.response is not None: + body["response"] = self.response + if self.response_type is not None: + body["response_type"] = 
self.response_type.value + if self.sql_execution_result: + body["sql_execution_result"] = self.sql_execution_result.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenieEvalResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.response is not None: + body["response"] = self.response + if self.response_type is not None: + body["response_type"] = self.response_type + if self.sql_execution_result: + body["sql_execution_result"] = self.sql_execution_result + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenieEvalResponse: + """Deserializes the GenieEvalResponse from a dictionary.""" + return cls( + response=d.get("response", None), + response_type=_enum(d, "response_type", GenieEvalResponseType), + sql_execution_result=_from_dict(d, "sql_execution_result", sql.StatementResponse), + ) + + +class GenieEvalResponseType(Enum): + + SQL = "SQL" + TEXT = "TEXT" + + +@dataclass +class GenieEvalResult: + """Shows summary information for an evaluation result. 
For detailed information including SQL + execution results, actual/expected responses, and assessment scores, use + GenieGetEvalResultDetails.""" + + result_id: str + """Unique identifier for this evaluation result.""" + + space_id: str + """The ID of the space the evaluation result belongs to.""" + + benchmark_question_id: str + """The ID of the benchmark question that was evaluated.""" + + benchmark_answer: Optional[str] = None + """Stored snapshot of original benchmark answer text.""" + + created_by_user: Optional[int] = None + """User ID who created evaluation result.""" + + question: Optional[str] = None + """Stored snapshot of original benchmark question text.""" + + status: Optional[EvaluationStatusType] = None + """Current status of this evaluation result.""" + + def as_dict(self) -> dict: + """Serializes the GenieEvalResult into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.benchmark_answer is not None: + body["benchmark_answer"] = self.benchmark_answer + if self.benchmark_question_id is not None: + body["benchmark_question_id"] = self.benchmark_question_id + if self.created_by_user is not None: + body["created_by_user"] = self.created_by_user + if self.question is not None: + body["question"] = self.question + if self.result_id is not None: + body["result_id"] = self.result_id + if self.space_id is not None: + body["space_id"] = self.space_id + if self.status is not None: + body["status"] = self.status.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenieEvalResult into a shallow dictionary of its immediate attributes.""" + body = {} + if self.benchmark_answer is not None: + body["benchmark_answer"] = self.benchmark_answer + if self.benchmark_question_id is not None: + body["benchmark_question_id"] = self.benchmark_question_id + if self.created_by_user is not None: + body["created_by_user"] = self.created_by_user + if self.question is not None: + body["question"] = self.question + if 
self.result_id is not None: + body["result_id"] = self.result_id + if self.space_id is not None: + body["space_id"] = self.space_id + if self.status is not None: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenieEvalResult: + """Deserializes the GenieEvalResult from a dictionary.""" + return cls( + benchmark_answer=d.get("benchmark_answer", None), + benchmark_question_id=d.get("benchmark_question_id", None), + created_by_user=d.get("created_by_user", None), + question=d.get("question", None), + result_id=d.get("result_id", None), + space_id=d.get("space_id", None), + status=_enum(d, "status", EvaluationStatusType), + ) + + +@dataclass +class GenieEvalResultDetails: + """Shows detailed information for an evaluation result.""" + + result_id: str + """The unique identifier for the evaluation result.""" + + space_id: str + """The ID of the space the evaluation result belongs to.""" + + benchmark_question_id: str + """The ID of the benchmark question that was evaluated.""" + + actual_response: Optional[List[GenieEvalResponse]] = None + """The actual response generated by Genie.""" + + assessment: Optional[GenieEvalAssessment] = None + """Assessment of the evaluation result: good, bad, or needs review""" + + assessment_reasons: Optional[List[ScoreReason]] = None + """Reasons for the assessment score.""" + + eval_run_status: Optional[EvaluationStatusType] = None + """Current status of the evaluation run.""" + + expected_response: Optional[List[GenieEvalResponse]] = None + """The expected responses from the benchmark.""" + + manual_assessment: Optional[bool] = None + """Whether this evaluation was manually assessed.""" + + def as_dict(self) -> dict: + """Serializes the GenieEvalResultDetails into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.actual_response: + body["actual_response"] = [v.as_dict() for v in self.actual_response] + if self.assessment is not None: + 
body["assessment"] = self.assessment.value + if self.assessment_reasons: + body["assessment_reasons"] = [v.value for v in self.assessment_reasons] + if self.benchmark_question_id is not None: + body["benchmark_question_id"] = self.benchmark_question_id + if self.eval_run_status is not None: + body["eval_run_status"] = self.eval_run_status.value + if self.expected_response: + body["expected_response"] = [v.as_dict() for v in self.expected_response] + if self.manual_assessment is not None: + body["manual_assessment"] = self.manual_assessment + if self.result_id is not None: + body["result_id"] = self.result_id + if self.space_id is not None: + body["space_id"] = self.space_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenieEvalResultDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.actual_response: + body["actual_response"] = self.actual_response + if self.assessment is not None: + body["assessment"] = self.assessment + if self.assessment_reasons: + body["assessment_reasons"] = self.assessment_reasons + if self.benchmark_question_id is not None: + body["benchmark_question_id"] = self.benchmark_question_id + if self.eval_run_status is not None: + body["eval_run_status"] = self.eval_run_status + if self.expected_response: + body["expected_response"] = self.expected_response + if self.manual_assessment is not None: + body["manual_assessment"] = self.manual_assessment + if self.result_id is not None: + body["result_id"] = self.result_id + if self.space_id is not None: + body["space_id"] = self.space_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenieEvalResultDetails: + """Deserializes the GenieEvalResultDetails from a dictionary.""" + return cls( + actual_response=_repeated_dict(d, "actual_response", GenieEvalResponse), + assessment=_enum(d, "assessment", GenieEvalAssessment), + assessment_reasons=_repeated_enum(d, "assessment_reasons", ScoreReason), + 
benchmark_question_id=d.get("benchmark_question_id", None), + eval_run_status=_enum(d, "eval_run_status", EvaluationStatusType), + expected_response=_repeated_dict(d, "expected_response", GenieEvalResponse), + manual_assessment=d.get("manual_assessment", None), + result_id=d.get("result_id", None), + space_id=d.get("space_id", None), + ) + + +@dataclass +class GenieEvalRunResponse: + eval_run_id: str + """The unique identifier for the evaluation run.""" + + created_timestamp: Optional[int] = None + """Timestamp when the evaluation run was created (milliseconds since epoch).""" + + eval_run_status: Optional[EvaluationStatusType] = None + """Current status of the evaluation run.""" + + last_updated_timestamp: Optional[int] = None + """Timestamp when the evaluation run was last updated (milliseconds since epoch).""" + + num_correct: Optional[int] = None + """Number of questions answered correctly.""" + + num_done: Optional[int] = None + """Number of questions that have been completed.""" + + num_needs_review: Optional[int] = None + """Number of questions that need manual review.""" + + num_questions: Optional[int] = None + """Total number of questions in the evaluation run.""" + + run_by_user: Optional[int] = None + """User ID who initiated the evaluation run.""" + + def as_dict(self) -> dict: + """Serializes the GenieEvalRunResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.created_timestamp is not None: + body["created_timestamp"] = self.created_timestamp + if self.eval_run_id is not None: + body["eval_run_id"] = self.eval_run_id + if self.eval_run_status is not None: + body["eval_run_status"] = self.eval_run_status.value + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.num_correct is not None: + body["num_correct"] = self.num_correct + if self.num_done is not None: + body["num_done"] = self.num_done + if self.num_needs_review is not None: + 
body["num_needs_review"] = self.num_needs_review + if self.num_questions is not None: + body["num_questions"] = self.num_questions + if self.run_by_user is not None: + body["run_by_user"] = self.run_by_user + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenieEvalRunResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.created_timestamp is not None: + body["created_timestamp"] = self.created_timestamp + if self.eval_run_id is not None: + body["eval_run_id"] = self.eval_run_id + if self.eval_run_status is not None: + body["eval_run_status"] = self.eval_run_status + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.num_correct is not None: + body["num_correct"] = self.num_correct + if self.num_done is not None: + body["num_done"] = self.num_done + if self.num_needs_review is not None: + body["num_needs_review"] = self.num_needs_review + if self.num_questions is not None: + body["num_questions"] = self.num_questions + if self.run_by_user is not None: + body["run_by_user"] = self.run_by_user + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenieEvalRunResponse: + """Deserializes the GenieEvalRunResponse from a dictionary.""" + return cls( + created_timestamp=d.get("created_timestamp", None), + eval_run_id=d.get("eval_run_id", None), + eval_run_status=_enum(d, "eval_run_status", EvaluationStatusType), + last_updated_timestamp=d.get("last_updated_timestamp", None), + num_correct=d.get("num_correct", None), + num_done=d.get("num_done", None), + num_needs_review=d.get("num_needs_review", None), + num_questions=d.get("num_questions", None), + run_by_user=d.get("run_by_user", None), + ) + + @dataclass class GenieFeedback: """Feedback containing rating and optional comment""" + comment: Optional[str] = None + """Optional feedback comment text""" + rating: Optional[GenieFeedbackRating] = None """The feedback rating""" def 
as_dict(self) -> dict: """Serializes the GenieFeedback into a dictionary suitable for use as a JSON request body.""" body = {} + if self.comment is not None: + body["comment"] = self.comment if self.rating is not None: body["rating"] = self.rating.value return body @@ -440,6 +879,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the GenieFeedback into a shallow dictionary of its immediate attributes.""" body = {} + if self.comment is not None: + body["comment"] = self.comment if self.rating is not None: body["rating"] = self.rating return body @@ -447,7 +888,7 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieFeedback: """Deserializes the GenieFeedback from a dictionary.""" - return cls(rating=_enum(d, "rating", GenieFeedbackRating)) + return cls(comment=d.get("comment", None), rating=_enum(d, "rating", GenieFeedbackRating)) class GenieFeedbackRating(Enum): @@ -542,6 +983,40 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieGetMessageQueryResultResponse: return cls(statement_response=_from_dict(d, "statement_response", sql.StatementResponse)) +@dataclass +class GenieListConversationCommentsResponse: + comments: Optional[List[GenieMessageComment]] = None + """List of comments in the conversation.""" + + next_page_token: Optional[str] = None + """Token to get the next page of results.""" + + def as_dict(self) -> dict: + """Serializes the GenieListConversationCommentsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.comments: + body["comments"] = [v.as_dict() for v in self.comments] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenieListConversationCommentsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comments: + body["comments"] = self.comments + if self.next_page_token is not None: + 
body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenieListConversationCommentsResponse: + """Deserializes the GenieListConversationCommentsResponse from a dictionary.""" + return cls( + comments=_repeated_dict(d, "comments", GenieMessageComment), next_page_token=d.get("next_page_token", None) + ) + + @dataclass class GenieListConversationMessagesResponse: messages: Optional[List[GenieMessage]] = None @@ -609,6 +1084,100 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieListConversationsResponse: ) +@dataclass +class GenieListEvalResultsResponse: + eval_results: Optional[List[GenieEvalResult]] = None + """List of evaluation results for the specified run.""" + + def as_dict(self) -> dict: + """Serializes the GenieListEvalResultsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.eval_results: + body["eval_results"] = [v.as_dict() for v in self.eval_results] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenieListEvalResultsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.eval_results: + body["eval_results"] = self.eval_results + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenieListEvalResultsResponse: + """Deserializes the GenieListEvalResultsResponse from a dictionary.""" + return cls(eval_results=_repeated_dict(d, "eval_results", GenieEvalResult)) + + +@dataclass +class GenieListEvalRunsResponse: + eval_runs: Optional[List[GenieEvalRunResponse]] = None + """List of evaluation runs for a space on provided page token and page size""" + + next_page_token: Optional[str] = None + """The token to use for retrieving the next page of results.""" + + def as_dict(self) -> dict: + """Serializes the GenieListEvalRunsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.eval_runs: + body["eval_runs"] = [v.as_dict() for v in 
self.eval_runs] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenieListEvalRunsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.eval_runs: + body["eval_runs"] = self.eval_runs + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenieListEvalRunsResponse: + """Deserializes the GenieListEvalRunsResponse from a dictionary.""" + return cls( + eval_runs=_repeated_dict(d, "eval_runs", GenieEvalRunResponse), + next_page_token=d.get("next_page_token", None), + ) + + +@dataclass +class GenieListMessageCommentsResponse: + comments: Optional[List[GenieMessageComment]] = None + """List of comments on the message.""" + + next_page_token: Optional[str] = None + """Token to get the next page of results.""" + + def as_dict(self) -> dict: + """Serializes the GenieListMessageCommentsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.comments: + body["comments"] = [v.as_dict() for v in self.comments] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenieListMessageCommentsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comments: + body["comments"] = self.comments + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenieListMessageCommentsResponse: + """Deserializes the GenieListMessageCommentsResponse from a dictionary.""" + return cls( + comments=_repeated_dict(d, "comments", GenieMessageComment), next_page_token=d.get("next_page_token", None) + ) + + @dataclass class GenieListSpacesResponse: next_page_token: Optional[str] 
= None @@ -764,6 +1333,83 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieMessage: ) +@dataclass +class GenieMessageComment: + """A comment on a Genie conversation message.""" + + space_id: str + """Genie space ID""" + + conversation_id: str + """Conversation ID""" + + message_id: str + """Message ID""" + + message_comment_id: str + """Comment ID""" + + content: str + """Comment text content""" + + created_timestamp: Optional[int] = None + """Timestamp when the comment was created""" + + user_id: Optional[int] = None + """ID of the user who created the comment""" + + def as_dict(self) -> dict: + """Serializes the GenieMessageComment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.content is not None: + body["content"] = self.content + if self.conversation_id is not None: + body["conversation_id"] = self.conversation_id + if self.created_timestamp is not None: + body["created_timestamp"] = self.created_timestamp + if self.message_comment_id is not None: + body["message_comment_id"] = self.message_comment_id + if self.message_id is not None: + body["message_id"] = self.message_id + if self.space_id is not None: + body["space_id"] = self.space_id + if self.user_id is not None: + body["user_id"] = self.user_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenieMessageComment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.content is not None: + body["content"] = self.content + if self.conversation_id is not None: + body["conversation_id"] = self.conversation_id + if self.created_timestamp is not None: + body["created_timestamp"] = self.created_timestamp + if self.message_comment_id is not None: + body["message_comment_id"] = self.message_comment_id + if self.message_id is not None: + body["message_id"] = self.message_id + if self.space_id is not None: + body["space_id"] = self.space_id + if self.user_id is not None: + body["user_id"] = self.user_id + return body + + 
@classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenieMessageComment: + """Deserializes the GenieMessageComment from a dictionary.""" + return cls( + content=d.get("content", None), + conversation_id=d.get("conversation_id", None), + created_timestamp=d.get("created_timestamp", None), + message_comment_id=d.get("message_comment_id", None), + message_id=d.get("message_id", None), + space_id=d.get("space_id", None), + user_id=d.get("user_id", None), + ) + + @dataclass class GenieQueryAttachment: description: Optional[str] = None @@ -889,6 +1535,9 @@ class GenieSpace: description: Optional[str] = None """Description of the Genie Space""" + parent_path: Optional[str] = None + """Parent folder path of the Genie Space""" + serialized_space: Optional[str] = None """The contents of the Genie Space in serialized string form. This field is excluded in List Genie spaces responses. Use the [Get Genie Space](:method:genie/getspace) API to retrieve an example @@ -903,6 +1552,8 @@ def as_dict(self) -> dict: body = {} if self.description is not None: body["description"] = self.description + if self.parent_path is not None: + body["parent_path"] = self.parent_path if self.serialized_space is not None: body["serialized_space"] = self.serialized_space if self.space_id is not None: @@ -918,6 +1569,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.description is not None: body["description"] = self.description + if self.parent_path is not None: + body["parent_path"] = self.parent_path if self.serialized_space is not None: body["serialized_space"] = self.serialized_space if self.space_id is not None: @@ -933,6 +1586,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieSpace: """Deserializes the GenieSpace from a dictionary.""" return cls( description=d.get("description", None), + parent_path=d.get("parent_path", None), serialized_space=d.get("serialized_space", None), space_id=d.get("space_id", None), title=d.get("title", None), @@ -1016,6 +1670,24 @@ def from_dict(cls, d: 
Dict[str, Any]) -> GenieSuggestedQuestionsAttachment: return cls(questions=d.get("questions", None)) +@dataclass +class GetPublishedDashboardEmbeddedResponse: + def as_dict(self) -> dict: + """Serializes the GetPublishedDashboardEmbeddedResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GetPublishedDashboardEmbeddedResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GetPublishedDashboardEmbeddedResponse: + """Deserializes the GetPublishedDashboardEmbeddedResponse from a dictionary.""" + return cls() + + @dataclass class GetPublishedDashboardTokenInfoResponse: authorization_details: Optional[List[AuthorizationDetails]] = None @@ -1232,6 +1904,7 @@ class MessageErrorType(Enum): INTERNAL_CATALOG_PATH_OVERLAP_EXCEPTION = "INTERNAL_CATALOG_PATH_OVERLAP_EXCEPTION" INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION = "INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION" INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION = "INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION" + INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION = "INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION" INVALID_CHAT_COMPLETION_JSON_EXCEPTION = "INVALID_CHAT_COMPLETION_JSON_EXCEPTION" INVALID_COMPLETION_REQUEST_EXCEPTION = "INVALID_COMPLETION_REQUEST_EXCEPTION" INVALID_FUNCTION_CALL_EXCEPTION = "INVALID_FUNCTION_CALL_EXCEPTION" @@ -1292,6 +1965,80 @@ class MessageStatus(Enum): SUBMITTED = "SUBMITTED" +@dataclass +class PendingStatus: + data_token: str + """The token to poll for result asynchronously Example: + EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ""" + + def as_dict(self) -> dict: + """Serializes the PendingStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + return body + + def 
as_shallow_dict(self) -> dict: + """Serializes the PendingStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PendingStatus: + """Deserializes the PendingStatus from a dictionary.""" + return cls(data_token=d.get("data_token", None)) + + +@dataclass +class PollQueryStatusResponse: + data: Optional[List[PollQueryStatusResponseData]] = None + + def as_dict(self) -> dict: + """Serializes the PollQueryStatusResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data: + body["data"] = [v.as_dict() for v in self.data] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PollQueryStatusResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data: + body["data"] = self.data + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PollQueryStatusResponse: + """Deserializes the PollQueryStatusResponse from a dictionary.""" + return cls(data=_repeated_dict(d, "data", PollQueryStatusResponseData)) + + +@dataclass +class PollQueryStatusResponseData: + status: QueryResponseStatus + + def as_dict(self) -> dict: + """Serializes the PollQueryStatusResponseData into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.status: + body["status"] = self.status.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PollQueryStatusResponseData into a shallow dictionary of its immediate attributes.""" + body = {} + if self.status: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PollQueryStatusResponseData: + """Deserializes the PollQueryStatusResponseData from a dictionary.""" + return cls(status=_from_dict(d, "status", QueryResponseStatus)) + + @dataclass class PublishedDashboard: display_name: 
Optional[str] = None @@ -1379,6 +2126,69 @@ def from_dict(cls, d: Dict[str, Any]) -> QueryAttachmentParameter: return cls(keyword=d.get("keyword", None), sql_type=d.get("sql_type", None), value=d.get("value", None)) +@dataclass +class QueryResponseStatus: + canceled: Optional[Empty] = None + + closed: Optional[Empty] = None + + pending: Optional[PendingStatus] = None + + statement_id: Optional[str] = None + """The statement id in format(01eef5da-c56e-1f36-bafa-21906587d6ba) The statement_id should be + identical to data_token in SuccessStatus and PendingStatus. This field is created for audit + logging purpose to record the statement_id of all QueryResponseStatus.""" + + success: Optional[SuccessStatus] = None + + def as_dict(self) -> dict: + """Serializes the QueryResponseStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.canceled: + body["canceled"] = self.canceled.as_dict() + if self.closed: + body["closed"] = self.closed.as_dict() + if self.pending: + body["pending"] = self.pending.as_dict() + if self.statement_id is not None: + body["statement_id"] = self.statement_id + if self.success: + body["success"] = self.success.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the QueryResponseStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.canceled: + body["canceled"] = self.canceled + if self.closed: + body["closed"] = self.closed + if self.pending: + body["pending"] = self.pending + if self.statement_id is not None: + body["statement_id"] = self.statement_id + if self.success: + body["success"] = self.success + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> QueryResponseStatus: + """Deserializes the QueryResponseStatus from a dictionary.""" + return cls( + canceled=_from_dict(d, "canceled", Empty), + closed=_from_dict(d, "closed", Empty), + pending=_from_dict(d, "pending", PendingStatus), + statement_id=d.get("statement_id", None), + 
success=_from_dict(d, "success", SuccessStatus), + ) + + +class ResponsePhase(Enum): + + RESPONSE_PHASE_THINKING = "RESPONSE_PHASE_THINKING" + RESPONSE_PHASE_VERIFYING = "RESPONSE_PHASE_VERIFYING" + + @dataclass class Result: is_truncated: Optional[bool] = None @@ -1530,6 +2340,37 @@ class SchedulePauseStatus(Enum): UNPAUSED = "UNPAUSED" +class ScoreReason(Enum): + + COLUMN_TYPE_DIFFERENCE = "COLUMN_TYPE_DIFFERENCE" + EMPTY_GOOD_SQL = "EMPTY_GOOD_SQL" + EMPTY_RESULT = "EMPTY_RESULT" + LLM_JUDGE_FORMATTING_ERROR = "LLM_JUDGE_FORMATTING_ERROR" + LLM_JUDGE_INCOMPLETE_OR_PARTIAL_OUTPUT = "LLM_JUDGE_INCOMPLETE_OR_PARTIAL_OUTPUT" + LLM_JUDGE_INCORRECT_FUNCTION_USAGE = "LLM_JUDGE_INCORRECT_FUNCTION_USAGE" + LLM_JUDGE_INCORRECT_METRIC_CALCULATION = "LLM_JUDGE_INCORRECT_METRIC_CALCULATION" + LLM_JUDGE_INCORRECT_TABLE_OR_FIELD_USAGE = "LLM_JUDGE_INCORRECT_TABLE_OR_FIELD_USAGE" + LLM_JUDGE_INSTRUCTION_COMPLIANCE_OR_MISSING_BUSINESS_LOGIC = ( + "LLM_JUDGE_INSTRUCTION_COMPLIANCE_OR_MISSING_BUSINESS_LOGIC" + ) + LLM_JUDGE_MISINTERPRETATION_OF_USER_REQUEST = "LLM_JUDGE_MISINTERPRETATION_OF_USER_REQUEST" + LLM_JUDGE_MISSING_JOIN = "LLM_JUDGE_MISSING_JOIN" + LLM_JUDGE_MISSING_OR_INCORRECT_AGGREGATION = "LLM_JUDGE_MISSING_OR_INCORRECT_AGGREGATION" + LLM_JUDGE_MISSING_OR_INCORRECT_FILTER = "LLM_JUDGE_MISSING_OR_INCORRECT_FILTER" + LLM_JUDGE_MISSING_OR_INCORRECT_JOIN = "LLM_JUDGE_MISSING_OR_INCORRECT_JOIN" + LLM_JUDGE_OTHER = "LLM_JUDGE_OTHER" + LLM_JUDGE_SEMANTIC_ERROR = "LLM_JUDGE_SEMANTIC_ERROR" + LLM_JUDGE_SYNTAX_ERROR = "LLM_JUDGE_SYNTAX_ERROR" + LLM_JUDGE_WRONG_AGGREGATION = "LLM_JUDGE_WRONG_AGGREGATION" + LLM_JUDGE_WRONG_COLUMNS = "LLM_JUDGE_WRONG_COLUMNS" + LLM_JUDGE_WRONG_FILTER = "LLM_JUDGE_WRONG_FILTER" + RESULT_EXTRA_COLUMNS = "RESULT_EXTRA_COLUMNS" + RESULT_EXTRA_ROWS = "RESULT_EXTRA_ROWS" + RESULT_MISSING_COLUMNS = "RESULT_MISSING_COLUMNS" + RESULT_MISSING_ROWS = "RESULT_MISSING_ROWS" + SINGLE_CELL_DIFFERENCE = "SINGLE_CELL_DIFFERENCE" + + @dataclass class Subscriber: 
destination_subscriber: Optional[SubscriptionSubscriberDestination] = None @@ -1711,6 +2552,39 @@ def from_dict(cls, d: Dict[str, Any]) -> SubscriptionSubscriberUser: return cls(user_id=d.get("user_id", None)) +@dataclass +class SuccessStatus: + data_token: str + """The token to poll for result asynchronously Example: + EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ""" + + truncated: Optional[bool] = None + """Whether the query result is truncated (either by byte limit or row limit)""" + + def as_dict(self) -> dict: + """Serializes the SuccessStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + if self.truncated is not None: + body["truncated"] = self.truncated + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SuccessStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + if self.truncated is not None: + body["truncated"] = self.truncated + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SuccessStatus: + """Deserializes the SuccessStatus from a dictionary.""" + return cls(data_token=d.get("data_token", None), truncated=d.get("truncated", None)) + + @dataclass class TextAttachment: content: Optional[str] = None @@ -1718,9 +2592,14 @@ class TextAttachment: id: Optional[str] = None + phase: Optional[ResponsePhase] = None + purpose: Optional[TextAttachmentPurpose] = None """Purpose/intent of this text attachment""" + verification_metadata: Optional[VerificationMetadata] = None + """Metadata for verification phase attachments. 
Only set when phase = RESPONSE_PHASE_VERIFYING.""" + def as_dict(self) -> dict: """Serializes the TextAttachment into a dictionary suitable for use as a JSON request body.""" body = {} @@ -1728,8 +2607,12 @@ def as_dict(self) -> dict: body["content"] = self.content if self.id is not None: body["id"] = self.id + if self.phase is not None: + body["phase"] = self.phase.value if self.purpose is not None: body["purpose"] = self.purpose.value + if self.verification_metadata: + body["verification_metadata"] = self.verification_metadata.as_dict() return body def as_shallow_dict(self) -> dict: @@ -1739,15 +2622,23 @@ def as_shallow_dict(self) -> dict: body["content"] = self.content if self.id is not None: body["id"] = self.id + if self.phase is not None: + body["phase"] = self.phase if self.purpose is not None: body["purpose"] = self.purpose + if self.verification_metadata: + body["verification_metadata"] = self.verification_metadata return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TextAttachment: """Deserializes the TextAttachment from a dictionary.""" return cls( - content=d.get("content", None), id=d.get("id", None), purpose=_enum(d, "purpose", TextAttachmentPurpose) + content=d.get("content", None), + id=d.get("id", None), + phase=_enum(d, "phase", ResponsePhase), + purpose=_enum(d, "purpose", TextAttachmentPurpose), + verification_metadata=_from_dict(d, "verification_metadata", VerificationMetadata), ) @@ -1778,19 +2669,62 @@ def from_dict(cls, d: Dict[str, Any]) -> TrashDashboardResponse: @dataclass class UnpublishDashboardResponse: def as_dict(self) -> dict: - """Serializes the UnpublishDashboardResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the UnpublishDashboardResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UnpublishDashboardResponse into a shallow dictionary of its immediate attributes.""" + body = {} 
+ return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UnpublishDashboardResponse: + """Deserializes the UnpublishDashboardResponse from a dictionary.""" + return cls() + + +@dataclass +class VerificationMetadata: + """Metadata for verification phase attachments""" + + index: Optional[int] = None + """Optional index to help order attachments within the same section""" + + section: Optional[VerificationSection] = None + + def as_dict(self) -> dict: + """Serializes the VerificationMetadata into a dictionary suitable for use as a JSON request body.""" body = {} + if self.index is not None: + body["index"] = self.index + if self.section is not None: + body["section"] = self.section.value return body def as_shallow_dict(self) -> dict: - """Serializes the UnpublishDashboardResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the VerificationMetadata into a shallow dictionary of its immediate attributes.""" body = {} + if self.index is not None: + body["index"] = self.index + if self.section is not None: + body["section"] = self.section return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UnpublishDashboardResponse: - """Deserializes the UnpublishDashboardResponse from a dictionary.""" - return cls() + def from_dict(cls, d: Dict[str, Any]) -> VerificationMetadata: + """Deserializes the VerificationMetadata from a dictionary.""" + return cls(index=d.get("index", None), section=_enum(d, "section", VerificationSection)) + + +class VerificationSection(Enum): + """Verification workflow section - indicates which stage of verification this attachment belongs to + These sections are used for grouping and ordering attachments in the frontend UI""" + + VERIFICATION_SECTION_FINAL_DECISION = "VERIFICATION_SECTION_FINAL_DECISION" + VERIFICATION_SECTION_PROPOSED_IMPROVEMENT = "VERIFICATION_SECTION_PROPOSED_IMPROVEMENT" + VERIFICATION_SECTION_SQL_EXAMPLES_VALIDATION = "VERIFICATION_SECTION_SQL_EXAMPLES_VALIDATION" + 
VERIFICATION_SECTION_VERIFICATION_QUERIES = "VERIFICATION_SECTION_VERIFICATION_QUERIES" class GenieAPI: @@ -1885,6 +2819,43 @@ def create_message_and_wait( timeout=timeout ) + def create_message_comment( + self, space_id: str, conversation_id: str, message_id: str, content: str + ) -> GenieMessageComment: + """Create a comment on a conversation message. + + :param space_id: str + The ID associated with the Genie space. + :param conversation_id: str + The ID associated with the conversation. + :param message_id: str + The ID associated with the message. + :param content: str + Comment text content. + + :returns: :class:`GenieMessageComment` + """ + + body = {} + if content is not None: + body["content"] = content + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do( + "POST", + f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/comments", + body=body, + headers=headers, + ) + return GenieMessageComment.from_dict(res) + def create_space( self, warehouse_id: str, @@ -2099,6 +3070,135 @@ def generate_download_full_query_result( ) return GenieGenerateDownloadFullQueryResultResponse.from_dict(res) + def genie_create_eval_run( + self, space_id: str, *, benchmark_question_ids: Optional[List[str]] = None + ) -> GenieEvalRunResponse: + """Create and run evaluations for multiple benchmark questions in a Genie space. + + :param space_id: str + The ID associated with the Genie space where the evaluations will be executed. + :param benchmark_question_ids: List[str] (optional) + List of benchmark question IDs to evaluate. These questions must exist in the specified Genie space. + If none are specified, then all benchmark questions are evaluated. 
+ + :returns: :class:`GenieEvalRunResponse` + """ + + body = {} + if benchmark_question_ids is not None: + body["benchmark_question_ids"] = [v for v in benchmark_question_ids] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("POST", f"/api/2.0/genie/spaces/{space_id}/eval-runs", body=body, headers=headers) + return GenieEvalRunResponse.from_dict(res) + + def genie_get_eval_result_details(self, space_id: str, eval_run_id: str, result_id: str) -> GenieEvalResultDetails: + """Get details for evaluation results + + :param space_id: str + The ID associated with the Genie space where the evaluation run is located. + :param eval_run_id: str + The unique identifier for the evaluation run. + :param result_id: str + The unique identifier for the evaluation result. + + :returns: :class:`GenieEvalResultDetails` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do( + "GET", f"/api/2.0/genie/spaces/{space_id}/eval-runs/{eval_run_id}/results/{result_id}", headers=headers + ) + return GenieEvalResultDetails.from_dict(res) + + def genie_get_eval_run(self, space_id: str, eval_run_id: str) -> GenieEvalRunResponse: + """Get evaluation run details + + :param space_id: str + The ID associated with the Genie space where the evaluation run is located. 
+ :param eval_run_id: str + + :returns: :class:`GenieEvalRunResponse` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/2.0/genie/spaces/{space_id}/eval-runs/{eval_run_id}", headers=headers) + return GenieEvalRunResponse.from_dict(res) + + def genie_list_eval_results(self, space_id: str, eval_run_id: str) -> GenieListEvalResultsResponse: + """List evaluation results for a specific evaluation run + + :param space_id: str + The ID associated with the Genie space where the evaluation run is located. + :param eval_run_id: str + The unique identifier for the evaluation run. + + :returns: :class:`GenieListEvalResultsResponse` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/2.0/genie/spaces/{space_id}/eval-runs/{eval_run_id}/results", headers=headers) + return GenieListEvalResultsResponse.from_dict(res) + + def genie_list_eval_runs( + self, space_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> GenieListEvalRunsResponse: + """Lists all evaluation runs in a space + + :param space_id: str + The ID associated with the Genie space where the evaluation run is located. 
+ :param page_size: int (optional) + Maximum number of eval runs to return per page + :param page_token: str (optional) + Token to get the next page of results + + :returns: :class:`GenieListEvalRunsResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/2.0/genie/spaces/{space_id}/eval-runs", query=query, headers=headers) + return GenieListEvalRunsResponse.from_dict(res) + def get_download_full_query_result( self, space_id: str, @@ -2317,6 +3417,44 @@ def get_space(self, space_id: str, *, include_serialized_space: Optional[bool] = res = self._api.do("GET", f"/api/2.0/genie/spaces/{space_id}", query=query, headers=headers) return GenieSpace.from_dict(res) + def list_conversation_comments( + self, space_id: str, conversation_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> GenieListConversationCommentsResponse: + """List all comments across all messages in a conversation. + + :param space_id: str + The ID associated with the Genie space. + :param conversation_id: str + The ID associated with the conversation. + :param page_size: int (optional) + Maximum number of comments to return per page. + :param page_token: str (optional) + Pagination token for getting the next page of results. 
+ + :returns: :class:`GenieListConversationCommentsResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do( + "GET", + f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/list-comments", + query=query, + headers=headers, + ) + return GenieListConversationCommentsResponse.from_dict(res) + def list_conversation_messages( self, space_id: str, conversation_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> GenieListConversationMessagesResponse: @@ -2396,6 +3534,52 @@ def list_conversations( res = self._api.do("GET", f"/api/2.0/genie/spaces/{space_id}/conversations", query=query, headers=headers) return GenieListConversationsResponse.from_dict(res) + def list_message_comments( + self, + space_id: str, + conversation_id: str, + message_id: str, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + ) -> GenieListMessageCommentsResponse: + """List comments on a specific conversation message. + + :param space_id: str + The ID associated with the Genie space. + :param conversation_id: str + The ID associated with the conversation. + :param message_id: str + The ID associated with the message. + :param page_size: int (optional) + Maximum number of comments to return per page. + :param page_token: str (optional) + Pagination token for getting the next page of results. 
+ + :returns: :class:`GenieListMessageCommentsResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do( + "GET", + f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/comments", + query=query, + headers=headers, + ) + return GenieListMessageCommentsResponse.from_dict(res) + def list_spaces( self, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> GenieListSpacesResponse: @@ -2425,7 +3609,15 @@ def list_spaces( res = self._api.do("GET", "/api/2.0/genie/spaces", query=query, headers=headers) return GenieListSpacesResponse.from_dict(res) - def send_message_feedback(self, space_id: str, conversation_id: str, message_id: str, rating: GenieFeedbackRating): + def send_message_feedback( + self, + space_id: str, + conversation_id: str, + message_id: str, + rating: GenieFeedbackRating, + *, + comment: Optional[str] = None, + ): """Send feedback for a message. :param space_id: str @@ -2436,11 +3628,15 @@ def send_message_feedback(self, space_id: str, conversation_id: str, message_id: The ID associated with the message to provide feedback for. :param rating: :class:`GenieFeedbackRating` The rating (POSITIVE, NEGATIVE, or NONE). + :param comment: str (optional) + Optional text feedback that will be stored as a comment. """ body = {} + if comment is not None: + body["comment"] = comment if rating is not None: body["rating"] = rating.value headers = { @@ -3155,6 +4351,25 @@ class LakeviewEmbeddedAPI: def __init__(self, api_client): self._api = api_client + def get_published_dashboard_embedded(self, dashboard_id: str): + """Get the current published dashboard within an embedded context. 
class QueryExecutionAPI:
    """Query execution APIs for AI / BI Dashboards.

    Executes, polls and cancels dataset queries for published (embedded)
    dashboards via the ``/api/2.0/lakeview-query/query/published`` endpoints.
    """

    def __init__(self, api_client):
        # Thin wrapper around the shared API client; every request goes
        # through self._api.do().
        self._api = api_client

    def cancel_published_query_execution(
        self, dashboard_name: str, dashboard_revision_id: str, *, tokens: Optional[List[str]] = None
    ) -> CancelQueryExecutionResponse:
        """Cancel the results for a query for a published, embedded dashboard.

        :param dashboard_name: str
        :param dashboard_revision_id: str
        :param tokens: List[str] (optional)
          Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ

        :returns: :class:`CancelQueryExecutionResponse`
        """

        # Only include parameters that were actually provided in the query string.
        query = {}
        if dashboard_name is not None:
            query["dashboard_name"] = dashboard_name
        if dashboard_revision_id is not None:
            query["dashboard_revision_id"] = dashboard_revision_id
        if tokens is not None:
            query["tokens"] = [v for v in tokens]
        headers = {
            "Accept": "application/json",
        }

        # On unified hosts with a configured workspace, pin the request to that
        # workspace via the org-id header.
        cfg = self._api._cfg
        if cfg.host_type == HostType.UNIFIED and cfg.workspace_id:
            headers["X-Databricks-Org-Id"] = cfg.workspace_id

        res = self._api.do("DELETE", "/api/2.0/lakeview-query/query/published", query=query, headers=headers)
        return CancelQueryExecutionResponse.from_dict(res)

    def execute_published_dashboard_query(
        self, dashboard_name: str, dashboard_revision_id: str, *, override_warehouse_id: Optional[str] = None
    ):
        """Execute a query for a published dashboard.

        :param dashboard_name: str
          Dashboard name and revision_id is required to retrieve PublishedDatasetDataModel which contains the
          list of datasets, warehouse_id, and embedded_credentials
        :param dashboard_revision_id: str
        :param override_warehouse_id: str (optional)
          A dashboard schedule can override the warehouse used as compute for processing the published
          dashboard queries


        """

        # Build the JSON request body from the provided arguments only.
        body = {}
        if dashboard_name is not None:
            body["dashboard_name"] = dashboard_name
        if dashboard_revision_id is not None:
            body["dashboard_revision_id"] = dashboard_revision_id
        if override_warehouse_id is not None:
            body["override_warehouse_id"] = override_warehouse_id
        headers = {
            "Accept": "application/json",
            "Content-Type": "application/json",
        }

        # On unified hosts with a configured workspace, pin the request to that
        # workspace via the org-id header.
        cfg = self._api._cfg
        if cfg.host_type == HostType.UNIFIED and cfg.workspace_id:
            headers["X-Databricks-Org-Id"] = cfg.workspace_id

        # Fire-and-forget: the endpoint returns no response body to parse.
        self._api.do("POST", "/api/2.0/lakeview-query/query/published", body=body, headers=headers)

    def poll_published_query_status(
        self, dashboard_name: str, dashboard_revision_id: str, *, tokens: Optional[List[str]] = None
    ) -> PollQueryStatusResponse:
        """Poll the results for a query for a published, embedded dashboard. Supports both GET and POST
        methods. POST is recommended for polling many tokens to avoid URL length limitations.

        :param dashboard_name: str
        :param dashboard_revision_id: str
        :param tokens: List[str] (optional)
          Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ

        :returns: :class:`PollQueryStatusResponse`
        """

        # Only include parameters that were actually provided in the query string.
        query = {}
        if dashboard_name is not None:
            query["dashboard_name"] = dashboard_name
        if dashboard_revision_id is not None:
            query["dashboard_revision_id"] = dashboard_revision_id
        if tokens is not None:
            query["tokens"] = [v for v in tokens]
        headers = {
            "Accept": "application/json",
        }

        # On unified hosts with a configured workspace, pin the request to that
        # workspace via the org-id header.
        cfg = self._api._cfg
        if cfg.host_type == HostType.UNIFIED and cfg.workspace_id:
            headers["X-Databricks-Org-Id"] = cfg.workspace_id

        res = self._api.do("GET", "/api/2.0/lakeview-query/query/published", query=query, headers=headers)
        return PollQueryStatusResponse.from_dict(res)
self.name is not None: body["name"] = self.name if self.uid is not None: @@ -88,10 +98,14 @@ def as_shallow_dict(self) -> dict: body = {} if self.create_database_if_not_exists is not None: body["create_database_if_not_exists"] = self.create_database_if_not_exists + if self.database_branch_id is not None: + body["database_branch_id"] = self.database_branch_id if self.database_instance_name is not None: body["database_instance_name"] = self.database_instance_name if self.database_name is not None: body["database_name"] = self.database_name + if self.database_project_id is not None: + body["database_project_id"] = self.database_project_id if self.name is not None: body["name"] = self.name if self.uid is not None: @@ -103,8 +117,10 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseCatalog: """Deserializes the DatabaseCatalog from a dictionary.""" return cls( create_database_if_not_exists=d.get("create_database_if_not_exists", None), + database_branch_id=d.get("database_branch_id", None), database_instance_name=d.get("database_instance_name", None), database_name=d.get("database_name", None), + database_project_id=d.get("database_project_id", None), name=d.get("name", None), uid=d.get("uid", None), ) @@ -632,6 +648,9 @@ class DatabaseTable: Registration of database tables via /database/tables is currently only supported in standard catalogs.""" + table_serving_url: Optional[str] = None + """Data serving REST API URL for this table""" + def as_dict(self) -> dict: """Serializes the DatabaseTable into a dictionary suitable for use as a JSON request body.""" body = {} @@ -641,6 +660,8 @@ def as_dict(self) -> dict: body["logical_database_name"] = self.logical_database_name if self.name is not None: body["name"] = self.name + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url return body def as_shallow_dict(self) -> dict: @@ -652,6 +673,8 @@ def as_shallow_dict(self) -> dict: body["logical_database_name"] = self.logical_database_name 
if self.name is not None: body["name"] = self.name + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url return body @classmethod @@ -661,6 +684,7 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseTable: database_instance_name=d.get("database_instance_name", None), logical_database_name=d.get("logical_database_name", None), name=d.get("name", None), + table_serving_url=d.get("table_serving_url", None), ) @@ -988,6 +1012,9 @@ class SyncedDatabaseTable: data_synchronization_status: Optional[SyncedTableStatus] = None """Synced Table data synchronization status""" + database_branch_id: Optional[str] = None + """The branch_id of the database branch associated with the table.""" + database_instance_name: Optional[str] = None """Name of the target database instance. This is required when creating synced database tables in standard catalogs. This is optional when creating synced database tables in registered catalogs. @@ -995,6 +1022,14 @@ class SyncedDatabaseTable: database instance name MUST match that of the registered catalog (or the request will be rejected).""" + database_project_id: Optional[str] = None + """The project_id of the database project associated with the table.""" + + effective_database_branch_id: Optional[str] = None + """The branch_id of the database branch associated with the table. This is an output only field + that contains the value computed from the input field combined with server side defaults. Use + the field without the effective_ prefix to set the value.""" + effective_database_instance_name: Optional[str] = None """The name of the database instance that this table is registered to. This field is always returned, and for tables inside database catalogs is inferred database instance associated with @@ -1002,6 +1037,11 @@ class SyncedDatabaseTable: combined with server side defaults. 
Use the field without the effective_ prefix to set the value.""" + effective_database_project_id: Optional[str] = None + """The project_id of the database project associated with the table. This is an output only field + that contains the value computed from the input field combined with server side defaults. Use + the field without the effective_ prefix to set the value.""" + effective_logical_database_name: Optional[str] = None """The name of the logical database that this table is registered to. This is an output only field that contains the value computed from the input field combined with server side defaults. Use @@ -1021,6 +1061,9 @@ class SyncedDatabaseTable: spec: Optional[SyncedTableSpec] = None + table_serving_url: Optional[str] = None + """Data serving REST API URL for this table""" + unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None """The provisioning state of the synced table entity in Unity Catalog. This is distinct from the state of the data synchronization pipeline (i.e. 
the table may be in "ACTIVE" but the pipeline @@ -1031,10 +1074,18 @@ def as_dict(self) -> dict: body = {} if self.data_synchronization_status: body["data_synchronization_status"] = self.data_synchronization_status.as_dict() + if self.database_branch_id is not None: + body["database_branch_id"] = self.database_branch_id if self.database_instance_name is not None: body["database_instance_name"] = self.database_instance_name + if self.database_project_id is not None: + body["database_project_id"] = self.database_project_id + if self.effective_database_branch_id is not None: + body["effective_database_branch_id"] = self.effective_database_branch_id if self.effective_database_instance_name is not None: body["effective_database_instance_name"] = self.effective_database_instance_name + if self.effective_database_project_id is not None: + body["effective_database_project_id"] = self.effective_database_project_id if self.effective_logical_database_name is not None: body["effective_logical_database_name"] = self.effective_logical_database_name if self.logical_database_name is not None: @@ -1043,6 +1094,8 @@ def as_dict(self) -> dict: body["name"] = self.name if self.spec: body["spec"] = self.spec.as_dict() + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url if self.unity_catalog_provisioning_state is not None: body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state.value return body @@ -1052,10 +1105,18 @@ def as_shallow_dict(self) -> dict: body = {} if self.data_synchronization_status: body["data_synchronization_status"] = self.data_synchronization_status + if self.database_branch_id is not None: + body["database_branch_id"] = self.database_branch_id if self.database_instance_name is not None: body["database_instance_name"] = self.database_instance_name + if self.database_project_id is not None: + body["database_project_id"] = self.database_project_id + if self.effective_database_branch_id is not None: + 
body["effective_database_branch_id"] = self.effective_database_branch_id if self.effective_database_instance_name is not None: body["effective_database_instance_name"] = self.effective_database_instance_name + if self.effective_database_project_id is not None: + body["effective_database_project_id"] = self.effective_database_project_id if self.effective_logical_database_name is not None: body["effective_logical_database_name"] = self.effective_logical_database_name if self.logical_database_name is not None: @@ -1064,6 +1125,8 @@ def as_shallow_dict(self) -> dict: body["name"] = self.name if self.spec: body["spec"] = self.spec + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url if self.unity_catalog_provisioning_state is not None: body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state return body @@ -1073,12 +1136,17 @@ def from_dict(cls, d: Dict[str, Any]) -> SyncedDatabaseTable: """Deserializes the SyncedDatabaseTable from a dictionary.""" return cls( data_synchronization_status=_from_dict(d, "data_synchronization_status", SyncedTableStatus), + database_branch_id=d.get("database_branch_id", None), database_instance_name=d.get("database_instance_name", None), + database_project_id=d.get("database_project_id", None), + effective_database_branch_id=d.get("effective_database_branch_id", None), effective_database_instance_name=d.get("effective_database_instance_name", None), + effective_database_project_id=d.get("effective_database_project_id", None), effective_logical_database_name=d.get("effective_logical_database_name", None), logical_database_name=d.get("logical_database_name", None), name=d.get("name", None), spec=_from_dict(d, "spec", SyncedTableSpec), + table_serving_url=d.get("table_serving_url", None), unity_catalog_provisioning_state=_enum(d, "unity_catalog_provisioning_state", ProvisioningInfoState), ) @@ -1328,6 +1396,10 @@ class SyncedTableSchedulingPolicy(Enum): class SyncedTableSpec: 
"""Specification of a synced database table.""" + accelerated_sync: Optional[bool] = None + """When true, enables accelerated sync mode for the initial data load. This significantly improves + performance for large tables. Requires workspace-level enablement.""" + create_database_objects_if_missing: Optional[bool] = None """If true, the synced table's logical database and schema resources in PG will be created if they do not already exist.""" @@ -1363,6 +1435,8 @@ class SyncedTableSpec: def as_dict(self) -> dict: """Serializes the SyncedTableSpec into a dictionary suitable for use as a JSON request body.""" body = {} + if self.accelerated_sync is not None: + body["accelerated_sync"] = self.accelerated_sync if self.create_database_objects_if_missing is not None: body["create_database_objects_if_missing"] = self.create_database_objects_if_missing if self.existing_pipeline_id is not None: @@ -1382,6 +1456,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the SyncedTableSpec into a shallow dictionary of its immediate attributes.""" body = {} + if self.accelerated_sync is not None: + body["accelerated_sync"] = self.accelerated_sync if self.create_database_objects_if_missing is not None: body["create_database_objects_if_missing"] = self.create_database_objects_if_missing if self.existing_pipeline_id is not None: @@ -1402,6 +1478,7 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> SyncedTableSpec: """Deserializes the SyncedTableSpec from a dictionary.""" return cls( + accelerated_sync=d.get("accelerated_sync", None), create_database_objects_if_missing=d.get("create_database_objects_if_missing", None), existing_pipeline_id=d.get("existing_pipeline_id", None), new_pipeline_spec=_from_dict(d, "new_pipeline_spec", NewPipelineSpec), @@ -1854,6 +1931,33 @@ def delete_synced_database_table(self, name: str, *, purge_data: Optional[bool] self._api.do("DELETE", f"/api/2.0/database/synced_tables/{name}", query=query, 
    def failover_database_instance(
        self, name: str, *, failover_target_database_instance_name: Optional[str] = None
    ) -> DatabaseInstance:
        """Failover the primary node of a Database Instance to a secondary.

        :param name: str
          Name of the instance to failover.
        :param failover_target_database_instance_name: str (optional)

        :returns: :class:`DatabaseInstance`
        """

        # Only send the optional target when the caller supplied one; otherwise
        # the body is an empty JSON object.
        body = {}
        if failover_target_database_instance_name is not None:
            body["failover_target_database_instance_name"] = failover_target_database_instance_name
        headers = {
            "Accept": "application/json",
            "Content-Type": "application/json",
        }

        # On unified hosts with a configured workspace, pin the request to that
        # workspace via the org-id header.
        cfg = self._api._cfg
        if cfg.host_type == HostType.UNIFIED and cfg.workspace_id:
            headers["X-Databricks-Org-Id"] = cfg.workspace_id

        res = self._api.do("POST", f"/api/2.0/database/instances/{name}/failover", body=body, headers=headers)
        return DatabaseInstance.from_dict(res)
@dataclass
class AutoTaggingConfig:
    """Auto-tagging configuration for a classification tag.

    When enabled, columns detected for the classification tag are automatically
    tagged with Unity Catalog tags.
    """

    # The Classification Tag (e.g., "class.name", "class.location").
    classification_tag: str

    # Whether auto-tagging is enabled or disabled for this classification tag.
    auto_tagging_mode: AutoTaggingConfigAutoTaggingMode

    def _serialize(self, *, flatten_enums: bool) -> dict:
        # Shared serializer: both dict forms emit the same keys in the same
        # order; only the enum representation differs.
        payload = {}
        mode = self.auto_tagging_mode
        if mode is not None:
            payload["auto_tagging_mode"] = mode.value if flatten_enums else mode
        tag = self.classification_tag
        if tag is not None:
            payload["classification_tag"] = tag
        return payload

    def as_dict(self) -> dict:
        """Serializes the AutoTaggingConfig into a dictionary suitable for use as a JSON request body."""
        return self._serialize(flatten_enums=True)

    def as_shallow_dict(self) -> dict:
        """Serializes the AutoTaggingConfig into a shallow dictionary of its immediate attributes."""
        return self._serialize(flatten_enums=False)

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> AutoTaggingConfig:
        """Deserializes the AutoTaggingConfig from a dictionary."""
        return cls(
            auto_tagging_mode=_enum(d, "auto_tagging_mode", AutoTaggingConfigAutoTaggingMode),
            classification_tag=d.get("classification_tag", None),
        )
It combines aspects of both singleton and CRUD resources: - Full CRUD operations are + supported: Create enables Data Classification, Delete disables it - Like a singleton, it has no + unique identifier of its own and uses its parent catalog's identifier (catalog_name)""" + + auto_tag_configs: Optional[List[AutoTaggingConfig]] = None + """List of auto-tagging configurations for this catalog. Empty list means no auto-tagging is + enabled.""" + + included_schemas: Optional[CatalogConfigSchemaNames] = None + + name: Optional[str] = None + """Resource name in the format: catalogs/{catalog_name}/config Set by server in responses. Required + in Update requests to identify the resource.""" + + def as_dict(self) -> dict: + """Serializes the CatalogConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.auto_tag_configs: + body["auto_tag_configs"] = [v.as_dict() for v in self.auto_tag_configs] + if self.included_schemas: + body["included_schemas"] = self.included_schemas.as_dict() + if self.name is not None: + body["name"] = self.name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CatalogConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.auto_tag_configs: + body["auto_tag_configs"] = self.auto_tag_configs + if self.included_schemas: + body["included_schemas"] = self.included_schemas + if self.name is not None: + body["name"] = self.name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CatalogConfig: + """Deserializes the CatalogConfig from a dictionary.""" + return cls( + auto_tag_configs=_repeated_dict(d, "auto_tag_configs", AutoTaggingConfig), + included_schemas=_from_dict(d, "included_schemas", CatalogConfigSchemaNames), + name=d.get("name", None), + ) + + +@dataclass +class CatalogConfigSchemaNames: + """Wrapper message for a list of schema names. 
Required because proto2 doesn't allow repeated + fields directly in oneof.""" + + names: List[str] + + def as_dict(self) -> dict: + """Serializes the CatalogConfigSchemaNames into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.names: + body["names"] = [v for v in self.names] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CatalogConfigSchemaNames into a shallow dictionary of its immediate attributes.""" + body = {} + if self.names: + body["names"] = self.names + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CatalogConfigSchemaNames: + """Deserializes the CatalogConfigSchemaNames from a dictionary.""" + return cls(names=d.get("names", None)) + + +class DataClassificationAPI: + """Manage data classification for Unity Catalog catalogs. Data classification automatically identifies and + tags sensitive data (PII) in Unity Catalog tables. Each catalog can have at most one configuration + resource that controls scanning behavior and auto-tagging rules.""" + + def __init__(self, api_client): + self._api = api_client + + def create_catalog_config(self, parent: str, catalog_config: CatalogConfig) -> CatalogConfig: + """Create Data Classification configuration for a catalog. + + Creates a new config resource, which enables Data Classification for the specified catalog. The config + must not already exist. + + :param parent: str + Parent resource in the format: catalogs/{catalog_name} + :param catalog_config: :class:`CatalogConfig` + The configuration to create. 
+ + :returns: :class:`CatalogConfig` + """ + + body = catalog_config.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("POST", f"/api/data-classification/v1/{parent}/config", body=body, headers=headers) + return CatalogConfig.from_dict(res) + + def delete_catalog_config(self, name: str): + """Delete Data Classification configuration for a catalog. + + Deletes the config resource, which disables Data Classification for the specified catalog. + + This operation is idempotent: deleting a non-existent config succeeds. However, if the catalog itself + doesn't exist, returns RESOURCE_DOES_NOT_EXIST. + + :param name: str + Resource name in the format: catalogs/{catalog_name}/config + + + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + self._api.do("DELETE", f"/api/data-classification/v1/{name}", headers=headers) + + def get_catalog_config(self, name: str) -> CatalogConfig: + """Get the Data Classification configuration for a catalog. + + Returns the CatalogConfig resource for the specified catalog. If the config doesn't exist (Data + Classification not enabled), returns RESOURCE_DOES_NOT_EXIST error. 
+ + :param name: str + Resource name in the format: catalogs/{catalog_name}/config + + :returns: :class:`CatalogConfig` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/data-classification/v1/{name}", headers=headers) + return CatalogConfig.from_dict(res) + + def update_catalog_config(self, name: str, catalog_config: CatalogConfig, update_mask: FieldMask) -> CatalogConfig: + """Update the Data Classification configuration for a catalog. + + Updates fields specified in the update_mask. The config must already exist. Use field mask to perform + partial updates of the configuration. + + :param name: str + Resource name in the format: catalogs/{catalog_name}/config Set by server in responses. Required in + Update requests to identify the resource. + :param catalog_config: :class:`CatalogConfig` + The configuration to apply to the catalog. The name field in catalog_config identifies which + resource to update. + :param update_mask: FieldMask + Field mask specifying which fields to update. 
+ + :returns: :class:`CatalogConfig` + """ + + body = catalog_config.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask.ToJsonString() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("PATCH", f"/api/data-classification/v1/{name}", query=query, body=body, headers=headers) + return CatalogConfig.from_dict(res) diff --git a/databricks/sdk/service/dataquality.py b/databricks/sdk/service/dataquality.py index 77c55fb9e..eb70a326e 100644 --- a/databricks/sdk/service/dataquality.py +++ b/databricks/sdk/service/dataquality.py @@ -36,27 +36,72 @@ class AggregationGranularity(Enum): class AnomalyDetectionConfig: """Anomaly Detection Configurations.""" + anomaly_detection_workflow_id: Optional[int] = None + """The id of the workflow that detects the anomaly. This field will only be returned in the + Get/Update response, if the request comes from the workspace where this anomaly detection job is + created.""" + excluded_table_full_names: Optional[List[str]] = None """List of fully qualified table names to exclude from anomaly detection.""" + job_type: Optional[AnomalyDetectionJobType] = None + """The type of the last run of the workflow.""" + + publish_health_indicator: Optional[bool] = None + """If the health indicator should be shown.""" + + validity_check_configurations: Optional[List[ValidityCheckConfiguration]] = None + """Validity check configurations for anomaly detection.""" + def as_dict(self) -> dict: """Serializes the AnomalyDetectionConfig into a dictionary suitable for use as a JSON request body.""" body = {} + if self.anomaly_detection_workflow_id is not None: + body["anomaly_detection_workflow_id"] = self.anomaly_detection_workflow_id if self.excluded_table_full_names: body["excluded_table_full_names"] = [v for v in 
self.excluded_table_full_names] + if self.job_type is not None: + body["job_type"] = self.job_type.value + if self.publish_health_indicator is not None: + body["publish_health_indicator"] = self.publish_health_indicator + if self.validity_check_configurations: + body["validity_check_configurations"] = [v.as_dict() for v in self.validity_check_configurations] return body def as_shallow_dict(self) -> dict: """Serializes the AnomalyDetectionConfig into a shallow dictionary of its immediate attributes.""" body = {} + if self.anomaly_detection_workflow_id is not None: + body["anomaly_detection_workflow_id"] = self.anomaly_detection_workflow_id if self.excluded_table_full_names: body["excluded_table_full_names"] = self.excluded_table_full_names + if self.job_type is not None: + body["job_type"] = self.job_type + if self.publish_health_indicator is not None: + body["publish_health_indicator"] = self.publish_health_indicator + if self.validity_check_configurations: + body["validity_check_configurations"] = self.validity_check_configurations return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AnomalyDetectionConfig: """Deserializes the AnomalyDetectionConfig from a dictionary.""" - return cls(excluded_table_full_names=d.get("excluded_table_full_names", None)) + return cls( + anomaly_detection_workflow_id=d.get("anomaly_detection_workflow_id", None), + excluded_table_full_names=d.get("excluded_table_full_names", None), + job_type=_enum(d, "job_type", AnomalyDetectionJobType), + publish_health_indicator=d.get("publish_health_indicator", None), + validity_check_configurations=_repeated_dict( + d, "validity_check_configurations", ValidityCheckConfiguration + ), + ) + + +class AnomalyDetectionJobType(Enum): + """Anomaly Detection job type.""" + + ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN = "ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN" + ANOMALY_DETECTION_JOB_TYPE_NORMAL = "ANOMALY_DETECTION_JOB_TYPE_NORMAL" @dataclass @@ -438,6 +483,9 @@ class InferenceLogConfig: 
@dataclass
class PercentNullValidityCheck:
    """Validity check on the percentage of null values in a set of columns."""

    # List of column names to check for null percentage.
    column_names: Optional[List[str]] = None

    # Optional upper bound; auto-determined bounds are used when unset.
    upper_bound: Optional[float] = None

    def as_dict(self) -> dict:
        """Serializes the PercentNullValidityCheck into a dictionary suitable for use as a JSON request body."""
        payload = {}
        names = self.column_names
        if names:
            # Copy the list so the serialized body is independent of the instance.
            payload["column_names"] = list(names)
        bound = self.upper_bound
        if bound is not None:
            payload["upper_bound"] = bound
        return payload

    def as_shallow_dict(self) -> dict:
        """Serializes the PercentNullValidityCheck into a shallow dictionary of its immediate attributes."""
        payload = {}
        names = self.column_names
        if names:
            payload["column_names"] = names
        bound = self.upper_bound
        if bound is not None:
            payload["upper_bound"] = bound
        return payload

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> PercentNullValidityCheck:
        """Deserializes the PercentNullValidityCheck from a dictionary."""
        return cls(column_names=d.get("column_names"), upper_bound=d.get("upper_bound"))
Dict[str, Any]) -> RangeValidityCheck: + """Deserializes the RangeValidityCheck from a dictionary.""" + return cls( + column_names=d.get("column_names", None), + lower_bound=d.get("lower_bound", None), + upper_bound=d.get("upper_bound", None), + ) + + @dataclass class Refresh: """The Refresh object gives information on a refresh of the data quality monitoring pipeline.""" @@ -848,6 +976,79 @@ def from_dict(cls, d: Dict[str, Any]) -> TimeSeriesConfig: ) +@dataclass +class UniquenessValidityCheck: + column_names: Optional[List[str]] = None + """List of column names to check for uniqueness""" + + def as_dict(self) -> dict: + """Serializes the UniquenessValidityCheck into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.column_names: + body["column_names"] = [v for v in self.column_names] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UniquenessValidityCheck into a shallow dictionary of its immediate attributes.""" + body = {} + if self.column_names: + body["column_names"] = self.column_names + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UniquenessValidityCheck: + """Deserializes the UniquenessValidityCheck from a dictionary.""" + return cls(column_names=d.get("column_names", None)) + + +@dataclass +class ValidityCheckConfiguration: + name: Optional[str] = None + """Can be set by system. 
Does not need to be user facing.""" + + percent_null_validity_check: Optional[PercentNullValidityCheck] = None + + range_validity_check: Optional[RangeValidityCheck] = None + + uniqueness_validity_check: Optional[UniquenessValidityCheck] = None + + def as_dict(self) -> dict: + """Serializes the ValidityCheckConfiguration into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.name is not None: + body["name"] = self.name + if self.percent_null_validity_check: + body["percent_null_validity_check"] = self.percent_null_validity_check.as_dict() + if self.range_validity_check: + body["range_validity_check"] = self.range_validity_check.as_dict() + if self.uniqueness_validity_check: + body["uniqueness_validity_check"] = self.uniqueness_validity_check.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ValidityCheckConfiguration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: + body["name"] = self.name + if self.percent_null_validity_check: + body["percent_null_validity_check"] = self.percent_null_validity_check + if self.range_validity_check: + body["range_validity_check"] = self.range_validity_check + if self.uniqueness_validity_check: + body["uniqueness_validity_check"] = self.uniqueness_validity_check + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ValidityCheckConfiguration: + """Deserializes the ValidityCheckConfiguration from a dictionary.""" + return cls( + name=d.get("name", None), + percent_null_validity_check=_from_dict(d, "percent_null_validity_check", PercentNullValidityCheck), + range_validity_check=_from_dict(d, "range_validity_check", RangeValidityCheck), + uniqueness_validity_check=_from_dict(d, "uniqueness_validity_check", UniquenessValidityCheck), + ) + + class DataQualityAPI: """Manage the data quality of Unity Catalog objects (currently support `schema` and `table`)""" diff --git a/databricks/sdk/service/environments.py 
class BaseEnvironmentType(Enum):
    """Compute type of a base environment. If changed, also update
    estore/namespaces/defaultbaseenvironments/latest.proto"""

    CPU = "CPU"
    GPU = "GPU"


@dataclass
class DatabricksServiceExceptionWithDetailsProto:
    """Databricks Error that is returned by all Databricks APIs."""

    details: Optional[List[dict]] = None

    error_code: Optional[ErrorCode] = None

    message: Optional[str] = None

    stack_trace: Optional[str] = None

    def as_dict(self) -> dict:
        """Serializes the DatabricksServiceExceptionWithDetailsProto into a dictionary suitable for use as a JSON request body."""
        out = {}
        if self.details:
            out["details"] = list(self.details)
        if self.error_code is not None:
            out["error_code"] = self.error_code.value
        if self.message is not None:
            out["message"] = self.message
        if self.stack_trace is not None:
            out["stack_trace"] = self.stack_trace
        return out

    def as_shallow_dict(self) -> dict:
        """Serializes the DatabricksServiceExceptionWithDetailsProto into a shallow dictionary of its immediate attributes."""
        out = {}
        if self.details:
            out["details"] = self.details
        if self.error_code is not None:
            out["error_code"] = self.error_code
        if self.message is not None:
            out["message"] = self.message
        if self.stack_trace is not None:
            out["stack_trace"] = self.stack_trace
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DatabricksServiceExceptionWithDetailsProto:
        """Deserializes the DatabricksServiceExceptionWithDetailsProto from a dictionary."""
        return cls(
            details=d.get("details", None),
            error_code=_enum(d, "error_code", ErrorCode),
            message=d.get("message", None),
            stack_trace=d.get("stack_trace", None),
        )


@dataclass
class DefaultWorkspaceBaseEnvironment:
    """A singleton resource representing the default workspace base environment configuration. This
    resource contains the workspace base environments that are used as defaults for serverless
    notebooks and jobs in the workspace, for both CPU and GPU compute types."""

    cpu_workspace_base_environment: Optional[str] = None
    """The default workspace base environment for CPU compute. Format:
    workspace-base-environments/{workspace_base_environment}"""

    gpu_workspace_base_environment: Optional[str] = None
    """The default workspace base environment for GPU compute. Format:
    workspace-base-environments/{workspace_base_environment}"""

    name: Optional[str] = None
    """The resource name of this singleton resource. Format: default-workspace-base-environment"""

    def as_dict(self) -> dict:
        """Serializes the DefaultWorkspaceBaseEnvironment into a dictionary suitable for use as a JSON request body."""
        out = {}
        if self.cpu_workspace_base_environment is not None:
            out["cpu_workspace_base_environment"] = self.cpu_workspace_base_environment
        if self.gpu_workspace_base_environment is not None:
            out["gpu_workspace_base_environment"] = self.gpu_workspace_base_environment
        if self.name is not None:
            out["name"] = self.name
        return out

    def as_shallow_dict(self) -> dict:
        """Serializes the DefaultWorkspaceBaseEnvironment into a shallow dictionary of its immediate attributes."""
        out = {}
        if self.cpu_workspace_base_environment is not None:
            out["cpu_workspace_base_environment"] = self.cpu_workspace_base_environment
        if self.gpu_workspace_base_environment is not None:
            out["gpu_workspace_base_environment"] = self.gpu_workspace_base_environment
        if self.name is not None:
            out["name"] = self.name
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DefaultWorkspaceBaseEnvironment:
        """Deserializes the DefaultWorkspaceBaseEnvironment from a dictionary."""
        return cls(
            cpu_workspace_base_environment=d.get("cpu_workspace_base_environment", None),
            gpu_workspace_base_environment=d.get("gpu_workspace_base_environment", None),
            name=d.get("name", None),
        )


class ErrorCode(Enum):
    """Error codes returned by Databricks APIs to indicate specific failure conditions."""

    ABORTED = "ABORTED"
    ALREADY_EXISTS = "ALREADY_EXISTS"
    BAD_REQUEST = "BAD_REQUEST"
    CANCELLED = "CANCELLED"
    CATALOG_ALREADY_EXISTS = "CATALOG_ALREADY_EXISTS"
    CATALOG_DOES_NOT_EXIST = "CATALOG_DOES_NOT_EXIST"
    CATALOG_NOT_EMPTY = "CATALOG_NOT_EMPTY"
    COULD_NOT_ACQUIRE_LOCK = "COULD_NOT_ACQUIRE_LOCK"
    CUSTOMER_UNAUTHORIZED = "CUSTOMER_UNAUTHORIZED"
    DAC_ALREADY_EXISTS = "DAC_ALREADY_EXISTS"
    DAC_DOES_NOT_EXIST = "DAC_DOES_NOT_EXIST"
    DATA_LOSS = "DATA_LOSS"
    DEADLINE_EXCEEDED = "DEADLINE_EXCEEDED"
    DEPLOYMENT_TIMEOUT = "DEPLOYMENT_TIMEOUT"
    DIRECTORY_NOT_EMPTY = "DIRECTORY_NOT_EMPTY"
    DIRECTORY_PROTECTED = "DIRECTORY_PROTECTED"
    DRY_RUN_FAILED = "DRY_RUN_FAILED"
    ENDPOINT_NOT_FOUND = "ENDPOINT_NOT_FOUND"
    EXTERNAL_LOCATION_ALREADY_EXISTS = "EXTERNAL_LOCATION_ALREADY_EXISTS"
    EXTERNAL_LOCATION_DOES_NOT_EXIST = "EXTERNAL_LOCATION_DOES_NOT_EXIST"
    FEATURE_DISABLED = "FEATURE_DISABLED"
    GIT_CONFLICT = "GIT_CONFLICT"
    GIT_REMOTE_ERROR = "GIT_REMOTE_ERROR"
    GIT_SENSITIVE_TOKEN_DETECTED = "GIT_SENSITIVE_TOKEN_DETECTED"
    GIT_UNKNOWN_REF = "GIT_UNKNOWN_REF"
    GIT_URL_NOT_ON_ALLOW_LIST = "GIT_URL_NOT_ON_ALLOW_LIST"
    INSECURE_PARTNER_RESPONSE = "INSECURE_PARTNER_RESPONSE"
    INTERNAL_ERROR = "INTERNAL_ERROR"
    INVALID_PARAMETER_VALUE = "INVALID_PARAMETER_VALUE"
    INVALID_STATE = "INVALID_STATE"
    INVALID_STATE_TRANSITION = "INVALID_STATE_TRANSITION"
    IO_ERROR = "IO_ERROR"
    IPYNB_FILE_IN_REPO = "IPYNB_FILE_IN_REPO"
    MALFORMED_PARTNER_RESPONSE = "MALFORMED_PARTNER_RESPONSE"
    MALFORMED_REQUEST = "MALFORMED_REQUEST"
    MANAGED_RESOURCE_GROUP_DOES_NOT_EXIST = "MANAGED_RESOURCE_GROUP_DOES_NOT_EXIST"
    MAX_BLOCK_SIZE_EXCEEDED = "MAX_BLOCK_SIZE_EXCEEDED"
    MAX_CHILD_NODE_SIZE_EXCEEDED = "MAX_CHILD_NODE_SIZE_EXCEEDED"
    MAX_LIST_SIZE_EXCEEDED = "MAX_LIST_SIZE_EXCEEDED"
    MAX_NOTEBOOK_SIZE_EXCEEDED = "MAX_NOTEBOOK_SIZE_EXCEEDED"
    MAX_READ_SIZE_EXCEEDED = "MAX_READ_SIZE_EXCEEDED"
    METASTORE_ALREADY_EXISTS = "METASTORE_ALREADY_EXISTS"
    METASTORE_DOES_NOT_EXIST = "METASTORE_DOES_NOT_EXIST"
    METASTORE_NOT_EMPTY = "METASTORE_NOT_EMPTY"
    NOT_FOUND = "NOT_FOUND"
    NOT_IMPLEMENTED = "NOT_IMPLEMENTED"
    PARTIAL_DELETE = "PARTIAL_DELETE"
    PERMISSION_DENIED = "PERMISSION_DENIED"
    PERMISSION_NOT_PROPAGATED = "PERMISSION_NOT_PROPAGATED"
    PRINCIPAL_DOES_NOT_EXIST = "PRINCIPAL_DOES_NOT_EXIST"
    PROJECTS_OPERATION_TIMEOUT = "PROJECTS_OPERATION_TIMEOUT"
    PROVIDER_ALREADY_EXISTS = "PROVIDER_ALREADY_EXISTS"
    PROVIDER_DOES_NOT_EXIST = "PROVIDER_DOES_NOT_EXIST"
    PROVIDER_SHARE_NOT_ACCESSIBLE = "PROVIDER_SHARE_NOT_ACCESSIBLE"
    QUOTA_EXCEEDED = "QUOTA_EXCEEDED"
    RECIPIENT_ALREADY_EXISTS = "RECIPIENT_ALREADY_EXISTS"
    RECIPIENT_DOES_NOT_EXIST = "RECIPIENT_DOES_NOT_EXIST"
    REQUEST_LIMIT_EXCEEDED = "REQUEST_LIMIT_EXCEEDED"
    RESOURCE_ALREADY_EXISTS = "RESOURCE_ALREADY_EXISTS"
    RESOURCE_CONFLICT = "RESOURCE_CONFLICT"
    RESOURCE_DOES_NOT_EXIST = "RESOURCE_DOES_NOT_EXIST"
    RESOURCE_EXHAUSTED = "RESOURCE_EXHAUSTED"
    RESOURCE_LIMIT_EXCEEDED = "RESOURCE_LIMIT_EXCEEDED"
    SCHEMA_ALREADY_EXISTS = "SCHEMA_ALREADY_EXISTS"
    SCHEMA_DOES_NOT_EXIST = "SCHEMA_DOES_NOT_EXIST"
    SCHEMA_NOT_EMPTY = "SCHEMA_NOT_EMPTY"
    SEARCH_QUERY_TOO_LONG = "SEARCH_QUERY_TOO_LONG"
    SEARCH_QUERY_TOO_SHORT = "SEARCH_QUERY_TOO_SHORT"
    SERVICE_UNDER_MAINTENANCE = "SERVICE_UNDER_MAINTENANCE"
    SHARE_ALREADY_EXISTS = "SHARE_ALREADY_EXISTS"
    SHARE_DOES_NOT_EXIST = "SHARE_DOES_NOT_EXIST"
    STORAGE_CREDENTIAL_ALREADY_EXISTS = "STORAGE_CREDENTIAL_ALREADY_EXISTS"
    STORAGE_CREDENTIAL_DOES_NOT_EXIST = "STORAGE_CREDENTIAL_DOES_NOT_EXIST"
    TABLE_ALREADY_EXISTS = "TABLE_ALREADY_EXISTS"
    TABLE_DOES_NOT_EXIST = "TABLE_DOES_NOT_EXIST"
    TEMPORARILY_UNAVAILABLE = "TEMPORARILY_UNAVAILABLE"
    UNAUTHENTICATED = "UNAUTHENTICATED"
    UNAVAILABLE = "UNAVAILABLE"
    UNKNOWN = "UNKNOWN"
    UNPARSEABLE_HTTP_ERROR = "UNPARSEABLE_HTTP_ERROR"
    WORKSPACE_TEMPORARILY_UNAVAILABLE = "WORKSPACE_TEMPORARILY_UNAVAILABLE"
@dataclass
class Operation:
    """This resource represents a long-running operation that is the result of a network API call."""

    done: Optional[bool] = None
    """If the value is `false`, it means the operation is still in progress. If `true`, the operation
    is completed, and either `error` or `response` is available."""

    error: Optional[DatabricksServiceExceptionWithDetailsProto] = None
    """The error result of the operation in case of failure or cancellation."""

    metadata: Optional[dict] = None
    """Service-specific metadata associated with the operation. It typically contains progress
    information and common metadata such as create time. Some services might not provide such
    metadata."""

    name: Optional[str] = None
    """The server-assigned name, which is only unique within the same service that originally returns
    it. If you use the default HTTP mapping, the `name` should be a resource name ending with
    `operations/{unique_id}`."""

    response: Optional[dict] = None
    """The normal, successful response of the operation."""

    def as_dict(self) -> dict:
        """Serializes the Operation into a dictionary suitable for use as a JSON request body."""
        out = {}
        if self.done is not None:
            out["done"] = self.done
        if self.error:
            out["error"] = self.error.as_dict()
        if self.metadata:
            out["metadata"] = self.metadata
        if self.name is not None:
            out["name"] = self.name
        if self.response:
            out["response"] = self.response
        return out

    def as_shallow_dict(self) -> dict:
        """Serializes the Operation into a shallow dictionary of its immediate attributes."""
        out = {}
        if self.done is not None:
            out["done"] = self.done
        if self.error:
            out["error"] = self.error
        if self.metadata:
            out["metadata"] = self.metadata
        if self.name is not None:
            out["name"] = self.name
        if self.response:
            out["response"] = self.response
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> Operation:
        """Deserializes the Operation from a dictionary."""
        return cls(
            done=d.get("done", None),
            error=_from_dict(d, "error", DatabricksServiceExceptionWithDetailsProto),
            metadata=d.get("metadata", None),
            name=d.get("name", None),
            response=d.get("response", None),
        )


@dataclass
class WorkspaceBaseEnvironment:
    """A WorkspaceBaseEnvironment defines a workspace-level environment configuration consisting of an
    environment version and a list of dependencies."""

    display_name: str
    """Human-readable display name for the workspace base environment."""

    base_environment_cache: Optional[List[WorkspaceBaseEnvironmentCache]] = None
    """List of materialized environments for different architectures."""

    base_environment_type: Optional[BaseEnvironmentType] = None
    """The type of base environment (CPU or GPU)."""

    create_time: Optional[Timestamp] = None
    """Timestamp when the environment was created."""

    creator_user_id: Optional[str] = None
    """User ID of the creator."""

    filepath: Optional[str] = None
    """The WSFS or UC Volumes path to the environment YAML file."""

    is_default: Optional[bool] = None
    """Whether this is the default environment for the workspace."""

    last_updated_user_id: Optional[str] = None
    """User ID of the last user who updated the environment."""

    message: Optional[str] = None
    """Status message providing additional details about the environment status."""

    name: Optional[str] = None
    """The resource name of the workspace base environment. Format:
    workspace-base-environments/{workspace-base-environment}"""

    status: Optional[WorkspaceBaseEnvironmentCacheStatus] = None
    """The status of the materialized workspace base environment."""

    update_time: Optional[Timestamp] = None
    """Timestamp when the environment was last updated."""

    def as_dict(self) -> dict:
        """Serializes the WorkspaceBaseEnvironment into a dictionary suitable for use as a JSON request body."""
        out = {}
        if self.base_environment_cache:
            out["base_environment_cache"] = [v.as_dict() for v in self.base_environment_cache]
        if self.base_environment_type is not None:
            out["base_environment_type"] = self.base_environment_type.value
        if self.create_time is not None:
            out["create_time"] = self.create_time.ToJsonString()
        if self.creator_user_id is not None:
            out["creator_user_id"] = self.creator_user_id
        if self.display_name is not None:
            out["display_name"] = self.display_name
        if self.filepath is not None:
            out["filepath"] = self.filepath
        if self.is_default is not None:
            out["is_default"] = self.is_default
        if self.last_updated_user_id is not None:
            out["last_updated_user_id"] = self.last_updated_user_id
        if self.message is not None:
            out["message"] = self.message
        if self.name is not None:
            out["name"] = self.name
        if self.status is not None:
            out["status"] = self.status.value
        if self.update_time is not None:
            out["update_time"] = self.update_time.ToJsonString()
        return out

    def as_shallow_dict(self) -> dict:
        """Serializes the WorkspaceBaseEnvironment into a shallow dictionary of its immediate attributes."""
        out = {}
        if self.base_environment_cache:
            out["base_environment_cache"] = self.base_environment_cache
        if self.base_environment_type is not None:
            out["base_environment_type"] = self.base_environment_type
        if self.create_time is not None:
            out["create_time"] = self.create_time
        if self.creator_user_id is not None:
            out["creator_user_id"] = self.creator_user_id
        if self.display_name is not None:
            out["display_name"] = self.display_name
        if self.filepath is not None:
            out["filepath"] = self.filepath
        if self.is_default is not None:
            out["is_default"] = self.is_default
        if self.last_updated_user_id is not None:
            out["last_updated_user_id"] = self.last_updated_user_id
        if self.message is not None:
            out["message"] = self.message
        if self.name is not None:
            out["name"] = self.name
        if self.status is not None:
            out["status"] = self.status
        if self.update_time is not None:
            out["update_time"] = self.update_time
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> WorkspaceBaseEnvironment:
        """Deserializes the WorkspaceBaseEnvironment from a dictionary."""
        return cls(
            base_environment_cache=_repeated_dict(d, "base_environment_cache", WorkspaceBaseEnvironmentCache),
            base_environment_type=_enum(d, "base_environment_type", BaseEnvironmentType),
            create_time=_timestamp(d, "create_time"),
            creator_user_id=d.get("creator_user_id", None),
            display_name=d.get("display_name", None),
            filepath=d.get("filepath", None),
            is_default=d.get("is_default", None),
            last_updated_user_id=d.get("last_updated_user_id", None),
            message=d.get("message", None),
            name=d.get("name", None),
            status=_enum(d, "status", WorkspaceBaseEnvironmentCacheStatus),
            update_time=_timestamp(d, "update_time"),
        )


@dataclass
class WorkspaceBaseEnvironmentCache:
    """Materialized environment information for a WorkspaceBaseEnvironment."""

    status: Optional[WorkspaceBaseEnvironmentCacheStatus] = None
    """Status of this materialized environment entry."""

    def as_dict(self) -> dict:
        """Serializes the WorkspaceBaseEnvironmentCache into a dictionary suitable for use as a JSON request body."""
        out = {}
        if self.status is not None:
            out["status"] = self.status.value
        return out

    def as_shallow_dict(self) -> dict:
        """Serializes the WorkspaceBaseEnvironmentCache into a shallow dictionary of its immediate attributes."""
        out = {}
        if self.status is not None:
            out["status"] = self.status
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> WorkspaceBaseEnvironmentCache:
        """Deserializes the WorkspaceBaseEnvironmentCache from a dictionary."""
        return cls(status=_enum(d, "status", WorkspaceBaseEnvironmentCacheStatus))


class WorkspaceBaseEnvironmentCacheStatus(Enum):
    """Status of the environment materialization."""

    CREATED = "CREATED"
    EXPIRED = "EXPIRED"
    FAILED = "FAILED"
    INVALID = "INVALID"
    PENDING = "PENDING"
    REFRESHING = "REFRESHING"
This message tracks the + progress of the workspace base environment long-running process.""" + + def as_dict(self) -> dict: + """Serializes the WorkspaceBaseEnvironmentOperationMetadata into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the WorkspaceBaseEnvironmentOperationMetadata into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> WorkspaceBaseEnvironmentOperationMetadata: + """Deserializes the WorkspaceBaseEnvironmentOperationMetadata from a dictionary.""" + return cls() + + +class EnvironmentsAPI: + """APIs to manage environment resources. + + The Environments API provides management capabilities for different types of environments including + workspace-level base environments that define the environment version and dependencies to be used in + serverless notebooks and jobs.""" + + def __init__(self, api_client): + self._api = api_client + + def create_workspace_base_environment( + self, + workspace_base_environment: WorkspaceBaseEnvironment, + *, + request_id: Optional[str] = None, + workspace_base_environment_id: Optional[str] = None, + ) -> CreateWorkspaceBaseEnvironmentOperation: + """Creates a new WorkspaceBaseEnvironment. This is a long-running operation. The operation will + asynchronously generate a materialized environment to optimize dependency resolution and is only + marked as done when the materialized environment has been successfully generated or has failed. + + :param workspace_base_environment: :class:`WorkspaceBaseEnvironment` + Required. The workspace base environment to create. + :param request_id: str (optional) + A unique identifier for this request. A random UUID is recommended. This request is only idempotent + if a request_id is provided. 
+ :param workspace_base_environment_id: str (optional) + The ID to use for the workspace base environment, which will become the final component of the + resource name. This value should be 4-63 characters, and valid characters are /[a-z][0-9]-/. + + :returns: :class:`Operation` + """ + + if request_id is None or request_id == "": + request_id = str(uuid.uuid4()) + body = workspace_base_environment.as_dict() + query = {} + if request_id is not None: + query["request_id"] = request_id + if workspace_base_environment_id is not None: + query["workspace_base_environment_id"] = workspace_base_environment_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do( + "POST", "/api/environments/v1/workspace-base-environments", query=query, body=body, headers=headers + ) + operation = Operation.from_dict(res) + return CreateWorkspaceBaseEnvironmentOperation(self, operation) + + def delete_workspace_base_environment(self, name: str): + """Deletes a WorkspaceBaseEnvironment. Deleting a base environment may impact linked notebooks and jobs. + This operation is irreversible and should be performed only when you are certain the environment is no + longer needed. + + :param name: str + Required. The resource name of the workspace base environment to delete. Format: + workspace-base-environments/{workspace_base_environment} + + + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + self._api.do("DELETE", f"/api/environments/v1/{name}", headers=headers) + + def get_default_workspace_base_environment(self, name: str) -> DefaultWorkspaceBaseEnvironment: + """Gets the default WorkspaceBaseEnvironment configuration for the workspace. 
Returns the current default + base environment settings for both CPU and GPU compute. + + :param name: str + A static resource name of the default workspace base environment. Format: + default-workspace-base-environment + + :returns: :class:`DefaultWorkspaceBaseEnvironment` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/environments/v1/{name}", headers=headers) + return DefaultWorkspaceBaseEnvironment.from_dict(res) + + def get_operation(self, name: str) -> Operation: + """Gets the status of a long-running operation. Clients can use this method to poll the operation result. + + :param name: str + The name of the operation resource. + + :returns: :class:`Operation` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/environments/v1/{name}", headers=headers) + return Operation.from_dict(res) + + def get_workspace_base_environment(self, name: str) -> WorkspaceBaseEnvironment: + """Retrieves a WorkspaceBaseEnvironment by its name. + + :param name: str + Required. The resource name of the workspace base environment to retrieve. 
Format: + workspace-base-environments/{workspace_base_environment} + + :returns: :class:`WorkspaceBaseEnvironment` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/environments/v1/{name}", headers=headers) + return WorkspaceBaseEnvironment.from_dict(res) + + def list_workspace_base_environments( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[WorkspaceBaseEnvironment]: + """Lists all WorkspaceBaseEnvironments in the workspace. + + :param page_size: int (optional) + The maximum number of environments to return per page. Default is 1000. + :param page_token: str (optional) + Page token for pagination. Received from a previous ListWorkspaceBaseEnvironments call. + + :returns: Iterator over :class:`WorkspaceBaseEnvironment` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + while True: + json = self._api.do("GET", "/api/environments/v1/workspace-base-environments", query=query, headers=headers) + if "workspace_base_environments" in json: + for v in json["workspace_base_environments"]: + yield WorkspaceBaseEnvironment.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def refresh_workspace_base_environment(self, name: str) -> RefreshWorkspaceBaseEnvironmentOperation: + """Refreshes the materialized environment for a WorkspaceBaseEnvironment. This is a long-running + operation. 
The operation will asynchronously regenerate the materialized environment and is only + marked as done when the materialized environment has been successfully generated or has failed. The + existing materialized environment remains available until it expires. + + :param name: str + Required. The resource name of the workspace base environment to delete. Format: + workspace-base-environments/{workspace_base_environment} + + :returns: :class:`Operation` + """ + + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("POST", f"/api/environments/v1/{name}/refresh", headers=headers) + operation = Operation.from_dict(res) + return RefreshWorkspaceBaseEnvironmentOperation(self, operation) + + def update_default_workspace_base_environment( + self, name: str, default_workspace_base_environment: DefaultWorkspaceBaseEnvironment, update_mask: FieldMask + ) -> DefaultWorkspaceBaseEnvironment: + """Updates the default WorkspaceBaseEnvironment configuration for the workspace. Sets the specified base + environments as the workspace defaults for CPU and/or GPU compute. + + :param name: str + The resource name of this singleton resource. Format: default-workspace-base-environment + :param default_workspace_base_environment: :class:`DefaultWorkspaceBaseEnvironment` + Required. The default workspace base environment configuration to update. + :param update_mask: FieldMask + Field mask specifying which fields to update. To specify multiple fields in the field mask, use + comma as the separator (no space). The special value '*' indicate that all fields should be updated + (full replacement). 
Valid field paths: cpu_workspace_base_environment, + gpu_workspace_base_environment + + :returns: :class:`DefaultWorkspaceBaseEnvironment` + """ + + body = default_workspace_base_environment.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask.ToJsonString() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("PATCH", f"/api/environments/v1/{name}", query=query, body=body, headers=headers) + return DefaultWorkspaceBaseEnvironment.from_dict(res) + + def update_workspace_base_environment( + self, name: str, workspace_base_environment: WorkspaceBaseEnvironment + ) -> UpdateWorkspaceBaseEnvironmentOperation: + """Updates an existing WorkspaceBaseEnvironment. This is a long-running operation. The operation will + asynchronously regenerate the materialized environment and is only marked as done when the + materialized environment has been successfully generated or has failed. The existing materialized + environment remains available until it expires. + + :param name: str + :param workspace_base_environment: :class:`WorkspaceBaseEnvironment` + Required. The workspace base environment with updated fields. The name field is used to identify the + environment to update. 
+ + :returns: :class:`UpdateWorkspaceBaseEnvironmentOperation` + """ + + body = workspace_base_environment.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("PATCH", f"/api/environments/v1/{name}", body=body, headers=headers) + operation = Operation.from_dict(res) + return UpdateWorkspaceBaseEnvironmentOperation(self, operation) + + +class CreateWorkspaceBaseEnvironmentOperation: + """Long-running operation for create_workspace_base_environment""" + + def __init__(self, impl: EnvironmentsAPI, operation: Operation): + self._impl = impl + self._operation = operation + + def wait(self, opts: Optional[lro.LroOptions] = None) -> WorkspaceBaseEnvironment: + """Wait blocks until the long-running operation is completed. If no timeout is + specified, this will poll indefinitely. If a timeout is provided and the operation + didn't finish within the timeout, this function will raise an error of type + TimeoutError, otherwise returns successful response and any errors encountered. + + :param opts: :class:`LroOptions` + Timeout options (default: polls indefinitely) + + :returns: :class:`WorkspaceBaseEnvironment` + """ + + def poll_operation(): + operation = self._impl.get_operation(name=self._operation.name) + + # Update local operation state + self._operation = operation + + if not operation.done: + return None, RetryError.continues("operation still in progress") + + if operation.error: + error_msg = operation.error.message if operation.error.message else "unknown error" + if operation.error.error_code: + error_msg = f"[{operation.error.error_code}] {error_msg}" + return None, RetryError.halt(Exception(f"operation failed: {error_msg}")) + + # Operation completed successfully, unmarshal response. 
+ if operation.response is None: + return None, RetryError.halt(Exception("operation completed but no response available")) + + workspace_base_environment = WorkspaceBaseEnvironment.from_dict(operation.response) + + return workspace_base_environment, None + + return poll(poll_operation, timeout=opts.timeout if opts is not None else None) + + def name(self) -> str: + """Name returns the name of the long-running operation. The name is assigned + by the server and is unique within the service from which the operation is created. + + :returns: str + """ + return self._operation.name + + def metadata(self) -> WorkspaceBaseEnvironmentOperationMetadata: + """Metadata returns metadata associated with the long-running operation. + If the metadata is not available, the returned metadata is None. + + :returns: :class:`WorkspaceBaseEnvironmentOperationMetadata` or None + """ + if self._operation.metadata is None: + return None + + return WorkspaceBaseEnvironmentOperationMetadata.from_dict(self._operation.metadata) + + def done(self) -> bool: + """Done reports whether the long-running operation has completed. + + :returns: bool + """ + # Refresh the operation state first + operation = self._impl.get_operation(name=self._operation.name) + + # Update local operation state + self._operation = operation + + return operation.done + + +class RefreshWorkspaceBaseEnvironmentOperation: + """Long-running operation for refresh_workspace_base_environment""" + + def __init__(self, impl: EnvironmentsAPI, operation: Operation): + self._impl = impl + self._operation = operation + + def wait(self, opts: Optional[lro.LroOptions] = None) -> WorkspaceBaseEnvironment: + """Wait blocks until the long-running operation is completed. If no timeout is + specified, this will poll indefinitely. If a timeout is provided and the operation + didn't finish within the timeout, this function will raise an error of type + TimeoutError, otherwise returns successful response and any errors encountered. 
+ + :param opts: :class:`LroOptions` + Timeout options (default: polls indefinitely) + + :returns: :class:`WorkspaceBaseEnvironment` + """ + + def poll_operation(): + operation = self._impl.get_operation(name=self._operation.name) + + # Update local operation state + self._operation = operation + + if not operation.done: + return None, RetryError.continues("operation still in progress") + + if operation.error: + error_msg = operation.error.message if operation.error.message else "unknown error" + if operation.error.error_code: + error_msg = f"[{operation.error.error_code}] {error_msg}" + return None, RetryError.halt(Exception(f"operation failed: {error_msg}")) + + # Operation completed successfully, unmarshal response. + if operation.response is None: + return None, RetryError.halt(Exception("operation completed but no response available")) + + workspace_base_environment = WorkspaceBaseEnvironment.from_dict(operation.response) + + return workspace_base_environment, None + + return poll(poll_operation, timeout=opts.timeout if opts is not None else None) + + def name(self) -> str: + """Name returns the name of the long-running operation. The name is assigned + by the server and is unique within the service from which the operation is created. + + :returns: str + """ + return self._operation.name + + def metadata(self) -> WorkspaceBaseEnvironmentOperationMetadata: + """Metadata returns metadata associated with the long-running operation. + If the metadata is not available, the returned metadata is None. + + :returns: :class:`WorkspaceBaseEnvironmentOperationMetadata` or None + """ + if self._operation.metadata is None: + return None + + return WorkspaceBaseEnvironmentOperationMetadata.from_dict(self._operation.metadata) + + def done(self) -> bool: + """Done reports whether the long-running operation has completed. 
+ + :returns: bool + """ + # Refresh the operation state first + operation = self._impl.get_operation(name=self._operation.name) + + # Update local operation state + self._operation = operation + + return operation.done + + +class UpdateWorkspaceBaseEnvironmentOperation: + """Long-running operation for update_workspace_base_environment""" + + def __init__(self, impl: EnvironmentsAPI, operation: Operation): + self._impl = impl + self._operation = operation + + def wait(self, opts: Optional[lro.LroOptions] = None) -> WorkspaceBaseEnvironment: + """Wait blocks until the long-running operation is completed. If no timeout is + specified, this will poll indefinitely. If a timeout is provided and the operation + didn't finish within the timeout, this function will raise an error of type + TimeoutError, otherwise returns successful response and any errors encountered. + + :param opts: :class:`LroOptions` + Timeout options (default: polls indefinitely) + + :returns: :class:`WorkspaceBaseEnvironment` + """ + + def poll_operation(): + operation = self._impl.get_operation(name=self._operation.name) + + # Update local operation state + self._operation = operation + + if not operation.done: + return None, RetryError.continues("operation still in progress") + + if operation.error: + error_msg = operation.error.message if operation.error.message else "unknown error" + if operation.error.error_code: + error_msg = f"[{operation.error.error_code}] {error_msg}" + return None, RetryError.halt(Exception(f"operation failed: {error_msg}")) + + # Operation completed successfully, unmarshal response. 
+ if operation.response is None: + return None, RetryError.halt(Exception("operation completed but no response available")) + + workspace_base_environment = WorkspaceBaseEnvironment.from_dict(operation.response) + + return workspace_base_environment, None + + return poll(poll_operation, timeout=opts.timeout if opts is not None else None) + + def name(self) -> str: + """Name returns the name of the long-running operation. The name is assigned + by the server and is unique within the service from which the operation is created. + + :returns: str + """ + return self._operation.name + + def metadata(self) -> WorkspaceBaseEnvironmentOperationMetadata: + """Metadata returns metadata associated with the long-running operation. + If the metadata is not available, the returned metadata is None. + + :returns: :class:`WorkspaceBaseEnvironmentOperationMetadata` or None + """ + if self._operation.metadata is None: + return None + + return WorkspaceBaseEnvironmentOperationMetadata.from_dict(self._operation.metadata) + + def done(self) -> bool: + """Done reports whether the long-running operation has completed. 
+ + :returns: bool + """ + # Refresh the operation state first + operation = self._impl.get_operation(name=self._operation.name) + + # Update local operation state + self._operation = operation + + return operation.done diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index 984262c6b..0481a287a 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -1541,27 +1541,45 @@ def from_dict(cls, d: Dict[str, Any]) -> PermissionAssignment: @dataclass class PermissionAssignments: + next_page_token: Optional[str] = None + """Token to retrieve the next page of results.""" + permission_assignments: Optional[List[PermissionAssignment]] = None """Array of permissions assignments defined for a workspace.""" + prev_page_token: Optional[str] = None + """Token to retrieve the previous page of results.""" + def as_dict(self) -> dict: """Serializes the PermissionAssignments into a dictionary suitable for use as a JSON request body.""" body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token if self.permission_assignments: body["permission_assignments"] = [v.as_dict() for v in self.permission_assignments] + if self.prev_page_token is not None: + body["prev_page_token"] = self.prev_page_token return body def as_shallow_dict(self) -> dict: """Serializes the PermissionAssignments into a shallow dictionary of its immediate attributes.""" body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token if self.permission_assignments: body["permission_assignments"] = self.permission_assignments + if self.prev_page_token is not None: + body["prev_page_token"] = self.prev_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PermissionAssignments: """Deserializes the PermissionAssignments from a dictionary.""" - return cls(permission_assignments=_repeated_dict(d, "permission_assignments", PermissionAssignment)) + return cls( + 
next_page_token=d.get("next_page_token", None), + permission_assignments=_repeated_dict(d, "permission_assignments", PermissionAssignment), + prev_page_token=d.get("prev_page_token", None), + ) class PermissionLevel(Enum): @@ -3613,9 +3631,9 @@ def get(self, request_object_type: str, request_object_id: str) -> ObjectPermiss :param request_object_type: str The type of the request object. Can be one of the following: alerts, alertsv2, authorization, - clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie, - instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. + clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories, + experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, + repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. @@ -3638,9 +3656,9 @@ def get_permission_levels(self, request_object_type: str, request_object_id: str :param request_object_type: str The type of the request object. Can be one of the following: alerts, alertsv2, authorization, - clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie, - instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. + clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories, + experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, + repos, serving-endpoints, or warehouses. :param request_object_id: str :returns: :class:`GetPermissionLevelsResponse` @@ -3672,9 +3690,9 @@ def set( :param request_object_type: str The type of the request object. 
Can be one of the following: alerts, alertsv2, authorization, - clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie, - instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. + clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories, + experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, + repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) @@ -3711,9 +3729,9 @@ def update( :param request_object_type: str The type of the request object. Can be one of the following: alerts, alertsv2, authorization, - clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie, - instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. + clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories, + experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, + repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) @@ -4520,15 +4538,26 @@ def get(self, workspace_id: int) -> WorkspacePermissions: ) return WorkspacePermissions.from_dict(res) - def list(self, workspace_id: int) -> Iterator[PermissionAssignment]: + def list( + self, workspace_id: int, *, max_results: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[PermissionAssignment]: """Get the permission assignments for the specified Databricks account and Databricks workspace. :param workspace_id: int The workspace ID for the account. + :param max_results: int (optional) + Maximum number of permission assignments to return. 
+ :param page_token: str (optional) + Page token returned by previous call to retrieve the next page of results. :returns: Iterator over :class:`PermissionAssignment` """ + query = {} + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token headers = { "Accept": "application/json", } @@ -4536,6 +4565,7 @@ def list(self, workspace_id: int) -> Iterator[PermissionAssignment]: json = self._api.do( "GET", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments", + query=query, headers=headers, ) parsed = PermissionAssignments.from_dict(json).permission_assignments diff --git a/databricks/sdk/service/iamv2.py b/databricks/sdk/service/iamv2.py index 9afd1ef2a..150f18e45 100755 --- a/databricks/sdk/service/iamv2.py +++ b/databricks/sdk/service/iamv2.py @@ -8,7 +8,9 @@ from typing import Any, Dict, List, Optional from databricks.sdk.client_types import HostType -from databricks.sdk.service._internal import _enum, _from_dict, _repeated_enum +from databricks.sdk.common.types.fieldmask import FieldMask +from databricks.sdk.service._internal import (_enum, _from_dict, + _repeated_dict, _repeated_enum) _LOG = logging.getLogger("databricks.sdk") @@ -16,6 +18,127 @@ # all definitions in this file are in alphabetical order +@dataclass +class AccountAccessIdentityRule: + """An identity rule that controls which principals can access an account.""" + + action: AccountAccessRuleAction + """Currently, only DENY action is supported.""" + + external_id: str + """External ID of the principal in the customer's IdP.""" + + display_name: Optional[str] = None + """Display name of the principal.""" + + principal_type: Optional[PrincipalType] = None + """The type of the principal (user/service principal/group). 
This field is populated by the server + based on the external_id.""" + + def as_dict(self) -> dict: + """Serializes the AccountAccessIdentityRule into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.action is not None: + body["action"] = self.action.value + if self.display_name is not None: + body["display_name"] = self.display_name + if self.external_id is not None: + body["external_id"] = self.external_id + if self.principal_type is not None: + body["principal_type"] = self.principal_type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountAccessIdentityRule into a shallow dictionary of its immediate attributes.""" + body = {} + if self.action is not None: + body["action"] = self.action + if self.display_name is not None: + body["display_name"] = self.display_name + if self.external_id is not None: + body["external_id"] = self.external_id + if self.principal_type is not None: + body["principal_type"] = self.principal_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountAccessIdentityRule: + """Deserializes the AccountAccessIdentityRule from a dictionary.""" + return cls( + action=_enum(d, "action", AccountAccessRuleAction), + display_name=d.get("display_name", None), + external_id=d.get("external_id", None), + principal_type=_enum(d, "principal_type", PrincipalType), + ) + + +class AccountAccessRuleAction(Enum): + """The action type for an account access identity rule (currently DENY only).""" + + DENY = "DENY" + + +@dataclass +class DirectGroupMember: + """Represents a principal that is a direct member of a group, with its source of membership.""" + + display_name: Optional[str] = None + """Display name of the principal.""" + + external_id: Optional[str] = None + """The external ID of the principal in Databricks.""" + + membership_source: Optional[GroupMembershipSource] = None + """The source of group membership (internal or from identity provider).""" + + 
principal_id: Optional[int] = None + """Internal ID of the principal in Databricks.""" + + principal_type: Optional[PrincipalType] = None + """The type of the principal (user/service principal/group).""" + + def as_dict(self) -> dict: + """Serializes the DirectGroupMember into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.display_name is not None: + body["display_name"] = self.display_name + if self.external_id is not None: + body["external_id"] = self.external_id + if self.membership_source is not None: + body["membership_source"] = self.membership_source.value + if self.principal_id is not None: + body["principal_id"] = self.principal_id + if self.principal_type is not None: + body["principal_type"] = self.principal_type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DirectGroupMember into a shallow dictionary of its immediate attributes.""" + body = {} + if self.display_name is not None: + body["display_name"] = self.display_name + if self.external_id is not None: + body["external_id"] = self.external_id + if self.membership_source is not None: + body["membership_source"] = self.membership_source + if self.principal_id is not None: + body["principal_id"] = self.principal_id + if self.principal_type is not None: + body["principal_type"] = self.principal_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DirectGroupMember: + """Deserializes the DirectGroupMember from a dictionary.""" + return cls( + display_name=d.get("display_name", None), + external_id=d.get("external_id", None), + membership_source=_enum(d, "membership_source", GroupMembershipSource), + principal_id=d.get("principal_id", None), + principal_type=_enum(d, "principal_type", PrincipalType), + ) + + @dataclass class Group: """The details of a Group resource.""" @@ -69,6 +192,348 @@ def from_dict(cls, d: Dict[str, Any]) -> Group: ) +@dataclass +class GroupMembership: + """Represents membership of a principal 
(group/user/service principal) in a group.""" + + principal_id: int + """Internal ID of the principal (group/user/service principal) in Databricks.""" + + account_id: Optional[str] = None + """The parent account ID for the group membership in Databricks.""" + + group_id: Optional[int] = None + """Internal ID of the group in Databricks.""" + + def as_dict(self) -> dict: + """Serializes the GroupMembership into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.account_id is not None: + body["account_id"] = self.account_id + if self.group_id is not None: + body["group_id"] = self.group_id + if self.principal_id is not None: + body["principal_id"] = self.principal_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GroupMembership into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: + body["account_id"] = self.account_id + if self.group_id is not None: + body["group_id"] = self.group_id + if self.principal_id is not None: + body["principal_id"] = self.principal_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GroupMembership: + """Deserializes the GroupMembership from a dictionary.""" + return cls( + account_id=d.get("account_id", None), + group_id=d.get("group_id", None), + principal_id=d.get("principal_id", None), + ) + + +class GroupMembershipSource(Enum): + """The source of the group membership (internal or from identity provider).""" + + IDENTITY_PROVIDER = "IDENTITY_PROVIDER" + INTERNAL = "INTERNAL" + + +@dataclass +class ListAccountAccessIdentityRulesResponse: + """Response message for listing account access identity rules.""" + + account_access_identity_rules: Optional[List[AccountAccessIdentityRule]] = None + + next_page_token: Optional[str] = None + """A token, which can be sent as page_token to retrieve the next page. 
If this field is omitted, + there are no subsequent pages.""" + + def as_dict(self) -> dict: + """Serializes the ListAccountAccessIdentityRulesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.account_access_identity_rules: + body["account_access_identity_rules"] = [v.as_dict() for v in self.account_access_identity_rules] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListAccountAccessIdentityRulesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_access_identity_rules: + body["account_access_identity_rules"] = self.account_access_identity_rules + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListAccountAccessIdentityRulesResponse: + """Deserializes the ListAccountAccessIdentityRulesResponse from a dictionary.""" + return cls( + account_access_identity_rules=_repeated_dict(d, "account_access_identity_rules", AccountAccessIdentityRule), + next_page_token=d.get("next_page_token", None), + ) + + +@dataclass +class ListDirectGroupMembersResponse: + """Response message for listing direct group members.""" + + direct_group_members: Optional[List[DirectGroupMember]] = None + """The list of direct group members with their membership source type.""" + + next_page_token: Optional[str] = None + """A token, which can be sent as page_token to retrieve the next page. 
If this field is omitted, + there are no subsequent pages.""" + + def as_dict(self) -> dict: + """Serializes the ListDirectGroupMembersResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.direct_group_members: + body["direct_group_members"] = [v.as_dict() for v in self.direct_group_members] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListDirectGroupMembersResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.direct_group_members: + body["direct_group_members"] = self.direct_group_members + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListDirectGroupMembersResponse: + """Deserializes the ListDirectGroupMembersResponse from a dictionary.""" + return cls( + direct_group_members=_repeated_dict(d, "direct_group_members", DirectGroupMember), + next_page_token=d.get("next_page_token", None), + ) + + +@dataclass +class ListGroupsResponse: + """TODO: Write description later when this method is implemented""" + + groups: Optional[List[Group]] = None + + next_page_token: Optional[str] = None + """A token, which can be sent as page_token to retrieve the next page. 
If this field is omitted, + there are no subsequent pages.""" + + def as_dict(self) -> dict: + """Serializes the ListGroupsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.groups: + body["groups"] = [v.as_dict() for v in self.groups] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListGroupsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.groups: + body["groups"] = self.groups + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListGroupsResponse: + """Deserializes the ListGroupsResponse from a dictionary.""" + return cls(groups=_repeated_dict(d, "groups", Group), next_page_token=d.get("next_page_token", None)) + + +@dataclass +class ListServicePrincipalsResponse: + """TODO: Write description later when this method is implemented""" + + next_page_token: Optional[str] = None + """A token, which can be sent as page_token to retrieve the next page. 
If this field is omitted, + there are no subsequent pages.""" + + service_principals: Optional[List[ServicePrincipal]] = None + + def as_dict(self) -> dict: + """Serializes the ListServicePrincipalsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.service_principals: + body["service_principals"] = [v.as_dict() for v in self.service_principals] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListServicePrincipalsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.service_principals: + body["service_principals"] = self.service_principals + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListServicePrincipalsResponse: + """Deserializes the ListServicePrincipalsResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + service_principals=_repeated_dict(d, "service_principals", ServicePrincipal), + ) + + +@dataclass +class ListTransitiveParentGroupsResponse: + """Response message for listing all transitive parent groups of a principal.""" + + next_page_token: Optional[str] = None + """A token, which can be sent as page_token to retrieve the next page. 
If this field is omitted, + there are no subsequent pages.""" + + transitive_parent_groups: Optional[List[TransitiveParentGroup]] = None + """The list of transitive parent groups.""" + + def as_dict(self) -> dict: + """Serializes the ListTransitiveParentGroupsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.transitive_parent_groups: + body["transitive_parent_groups"] = [v.as_dict() for v in self.transitive_parent_groups] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListTransitiveParentGroupsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.transitive_parent_groups: + body["transitive_parent_groups"] = self.transitive_parent_groups + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListTransitiveParentGroupsResponse: + """Deserializes the ListTransitiveParentGroupsResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + transitive_parent_groups=_repeated_dict(d, "transitive_parent_groups", TransitiveParentGroup), + ) + + +@dataclass +class ListUsersResponse: + next_page_token: Optional[str] = None + """A token, which can be sent as page_token to retrieve the next page. 
If this field is omitted, + there are no subsequent pages.""" + + users: Optional[List[User]] = None + + def as_dict(self) -> dict: + """Serializes the ListUsersResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.users: + body["users"] = [v.as_dict() for v in self.users] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListUsersResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.users: + body["users"] = self.users + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListUsersResponse: + """Deserializes the ListUsersResponse from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), users=_repeated_dict(d, "users", User)) + + +@dataclass +class ListWorkspaceAccessDetailsResponse: + """TODO: Write description later when this method is implemented""" + + next_page_token: Optional[str] = None + """A token, which can be sent as page_token to retrieve the next page. 
If this field is omitted, + there are no subsequent pages.""" + + workspace_access_details: Optional[List[WorkspaceAccessDetail]] = None + + def as_dict(self) -> dict: + """Serializes the ListWorkspaceAccessDetailsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.workspace_access_details: + body["workspace_access_details"] = [v.as_dict() for v in self.workspace_access_details] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListWorkspaceAccessDetailsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.workspace_access_details: + body["workspace_access_details"] = self.workspace_access_details + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListWorkspaceAccessDetailsResponse: + """Deserializes the ListWorkspaceAccessDetailsResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + workspace_access_details=_repeated_dict(d, "workspace_access_details", WorkspaceAccessDetail), + ) + + +@dataclass +class ListWorkspaceAssignmentDetailsResponse: + """Response message for listing workspace assignment details.""" + + next_page_token: Optional[str] = None + """A token, which can be sent as page_token to retrieve the next page. 
If this field is omitted, + there are no subsequent pages.""" + + workspace_assignment_details: Optional[List[WorkspaceAssignmentDetail]] = None + + def as_dict(self) -> dict: + """Serializes the ListWorkspaceAssignmentDetailsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.workspace_assignment_details: + body["workspace_assignment_details"] = [v.as_dict() for v in self.workspace_assignment_details] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListWorkspaceAssignmentDetailsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.workspace_assignment_details: + body["workspace_assignment_details"] = self.workspace_assignment_details + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListWorkspaceAssignmentDetailsResponse: + """Deserializes the ListWorkspaceAssignmentDetailsResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + workspace_assignment_details=_repeated_dict(d, "workspace_assignment_details", WorkspaceAssignmentDetail), + ) + + class PrincipalType(Enum): """The type of the principal (user/sp/group).""" @@ -229,20 +694,65 @@ class State(Enum): @dataclass -class User: - """The details of a User resource.""" +class TransitiveParentGroup: + """Represents a group that is a transitive parent of a principal.""" account_id: Optional[str] = None - """The accountId parent of the user in Databricks.""" - - account_user_status: Optional[State] = None - """The activity status of a user in a Databricks account.""" + """The parent account ID for group in Databricks.""" external_id: Optional[str] = None - """ExternalId of the user in the customer's IdP.""" + """ExternalId of the group in the customer's IdP.""" internal_id: 
Optional[int] = None - """Internal userId of the user in Databricks.""" + """Internal group ID of the group in Databricks.""" + + def as_dict(self) -> dict: + """Serializes the TransitiveParentGroup into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.account_id is not None: + body["account_id"] = self.account_id + if self.external_id is not None: + body["external_id"] = self.external_id + if self.internal_id is not None: + body["internal_id"] = self.internal_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the TransitiveParentGroup into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: + body["account_id"] = self.account_id + if self.external_id is not None: + body["external_id"] = self.external_id + if self.internal_id is not None: + body["internal_id"] = self.internal_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> TransitiveParentGroup: + """Deserializes the TransitiveParentGroup from a dictionary.""" + return cls( + account_id=d.get("account_id", None), + external_id=d.get("external_id", None), + internal_id=d.get("internal_id", None), + ) + + +@dataclass +class User: + """The details of a User resource.""" + + account_id: Optional[str] = None + """The accountId parent of the user in Databricks.""" + + account_user_status: Optional[State] = None + """The activity status of a user in a Databricks account.""" + + external_id: Optional[str] = None + """ExternalId of the user in the customer's IdP.""" + + internal_id: Optional[int] = None + """Internal userId of the user in Databricks.""" name: Optional[UserName] = None @@ -415,6 +925,58 @@ class WorkspaceAccessDetailView(Enum): FULL = "FULL" +@dataclass +class WorkspaceAssignmentDetail: + """The details of a principal's assignment to a workspace.""" + + account_id: Optional[str] = None + """The account ID parent of the workspace where the principal is assigned""" + + principal_id: 
Optional[int] = None + """The internal ID of the principal (user/sp/group) in Databricks.""" + + principal_type: Optional[PrincipalType] = None + + workspace_id: Optional[int] = None + """The workspace ID where the principal is assigned""" + + def as_dict(self) -> dict: + """Serializes the WorkspaceAssignmentDetail into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.account_id is not None: + body["account_id"] = self.account_id + if self.principal_id is not None: + body["principal_id"] = self.principal_id + if self.principal_type is not None: + body["principal_type"] = self.principal_type.value + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the WorkspaceAssignmentDetail into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: + body["account_id"] = self.account_id + if self.principal_id is not None: + body["principal_id"] = self.principal_id + if self.principal_type is not None: + body["principal_type"] = self.principal_type + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> WorkspaceAssignmentDetail: + """Deserializes the WorkspaceAssignmentDetail from a dictionary.""" + return cls( + account_id=d.get("account_id", None), + principal_id=d.get("principal_id", None), + principal_type=_enum(d, "principal_type", PrincipalType), + workspace_id=d.get("workspace_id", None), + ) + + class WorkspacePermission(Enum): """The type of permission a principal has to a workspace (admin/user).""" @@ -428,82 +990,64 @@ class AccountIamV2API: def __init__(self, api_client): self._api = api_client - def get_workspace_access_detail( - self, workspace_id: int, principal_id: int, *, view: Optional[WorkspaceAccessDetailView] = None - ) -> WorkspaceAccessDetail: - """Returns the access details for a principal in 
a workspace. Allows for checking access details for any - provisioned principal (user, service principal, or group) in a workspace. * Provisioned principal here - refers to one that has been synced into Databricks from the customer's IdP or added explicitly to - Databricks via SCIM/UI. Allows for passing in a "view" parameter to control what fields are returned - (BASIC by default or FULL). + def create_account_access_identity_rule( + self, account_access_identity_rule: AccountAccessIdentityRule + ) -> AccountAccessIdentityRule: + """Creates a new account access identity rule for a given account. This allows administrators to + explicitly allow or deny specific principals from accessing the account. - :param workspace_id: int - Required. The workspace ID for which the access details are being requested. - :param principal_id: int - Required. The internal ID of the principal (user/sp/group) for which the access details are being - requested. - :param view: :class:`WorkspaceAccessDetailView` (optional) - Controls what fields are returned. + :param account_access_identity_rule: :class:`AccountAccessIdentityRule` + Required. The rule to create. - :returns: :class:`WorkspaceAccessDetail` + :returns: :class:`AccountAccessIdentityRule` """ - query = {} - if view is not None: - query["view"] = view.value + body = account_access_identity_rule.as_dict() headers = { "Accept": "application/json", + "Content-Type": "application/json", } res = self._api.do( - "GET", - f"/api/2.0/identity/accounts/{self._api.account_id}/workspaces/{workspace_id}/workspaceAccessDetails/{principal_id}", - query=query, + "POST", + f"/api/2.0/accounts/{self._api.account_id}/aim-control-policy/account-access-identity-rules", + body=body, headers=headers, ) - return WorkspaceAccessDetail.from_dict(res) + return AccountAccessIdentityRule.from_dict(res) - def resolve_group(self, external_id: str) -> ResolveGroupResponse: - """Resolves a group with the given external ID from the customer's IdP. 
If the group does not exist, it - will be created in the account. If the customer is not onboarded onto Automatic Identity Management - (AIM), this will return an error. + def create_group(self, group: Group) -> Group: + """TODO: Write description later when this method is implemented - :param external_id: str - Required. The external ID of the group in the customer's IdP. + :param group: :class:`Group` + Required. Group to be created in - :returns: :class:`ResolveGroupResponse` + :returns: :class:`Group` """ - body = {} - if external_id is not None: - body["external_id"] = external_id + body = group.as_dict() headers = { "Accept": "application/json", "Content-Type": "application/json", } res = self._api.do( - "POST", - f"/api/2.0/identity/accounts/{self._api.account_id}/groups/resolveByExternalId", - body=body, - headers=headers, + "POST", f"/api/2.0/identity/accounts/{self._api.account_id}/groups", body=body, headers=headers ) - return ResolveGroupResponse.from_dict(res) + return Group.from_dict(res) - def resolve_service_principal(self, external_id: str) -> ResolveServicePrincipalResponse: - """Resolves an SP with the given external ID from the customer's IdP. If the SP does not exist, it will - be created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will - return an error. + def create_group_membership(self, group_id: int, group_membership: GroupMembership) -> GroupMembership: + """Creates a group membership (assigns a principal to a group). - :param external_id: str - Required. The external ID of the service principal in the customer's IdP. + :param group_id: int + Required. Internal ID of the group in Databricks. + :param group_membership: :class:`GroupMembership` + Required. The group membership to create. 
- :returns: :class:`ResolveServicePrincipalResponse` + :returns: :class:`GroupMembership` """ - body = {} - if external_id is not None: - body["external_id"] = external_id + body = group_membership.as_dict() headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -511,156 +1055,1447 @@ def resolve_service_principal(self, external_id: str) -> ResolveServicePrincipal res = self._api.do( "POST", - f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals/resolveByExternalId", + f"/api/2.0/identity/accounts/{self._api.account_id}/groups/{group_id}/memberships", body=body, headers=headers, ) - return ResolveServicePrincipalResponse.from_dict(res) + return GroupMembership.from_dict(res) - def resolve_user(self, external_id: str) -> ResolveUserResponse: - """Resolves a user with the given external ID from the customer's IdP. If the user does not exist, it - will be created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will - return an error. + def create_service_principal(self, service_principal: ServicePrincipal) -> ServicePrincipal: + """TODO: Write description later when this method is implemented - :param external_id: str - Required. The external ID of the user in the customer's IdP. + :param service_principal: :class:`ServicePrincipal` + Required. 
Service principal to be created in - :returns: :class:`ResolveUserResponse` + :returns: :class:`ServicePrincipal` """ - body = {} - if external_id is not None: - body["external_id"] = external_id + body = service_principal.as_dict() headers = { "Accept": "application/json", "Content-Type": "application/json", } res = self._api.do( - "POST", - f"/api/2.0/identity/accounts/{self._api.account_id}/users/resolveByExternalId", - body=body, - headers=headers, + "POST", f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals", body=body, headers=headers ) - return ResolveUserResponse.from_dict(res) + return ServicePrincipal.from_dict(res) + def create_user(self, user: User) -> User: + """TODO: Write description later when this method is implemented -class WorkspaceIamV2API: - """These APIs are used to manage identities and the workspace access of these identities in .""" + :param user: :class:`User` + Required. User to be created in - def __init__(self, api_client): - self._api = api_client + :returns: :class:`User` + """ - def get_workspace_access_detail_local( - self, principal_id: int, *, view: Optional[WorkspaceAccessDetailView] = None - ) -> WorkspaceAccessDetail: - """Returns the access details for a principal in the current workspace. Allows for checking access - details for any provisioned principal (user, service principal, or group) in the current workspace. * - Provisioned principal here refers to one that has been synced into Databricks from the customer's IdP - or added explicitly to Databricks via SCIM/UI. Allows for passing in a "view" parameter to control - what fields are returned (BASIC by default or FULL). + body = user.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - :param principal_id: int - Required. The internal ID of the principal (user/sp/group) for which the access details are being - requested. 
- :param view: :class:`WorkspaceAccessDetailView` (optional) - Controls what fields are returned. + res = self._api.do( + "POST", f"/api/2.0/identity/accounts/{self._api.account_id}/users", body=body, headers=headers + ) + return User.from_dict(res) - :returns: :class:`WorkspaceAccessDetail` + def create_workspace_assignment_detail( + self, workspace_id: int, workspace_assignment_detail: WorkspaceAssignmentDetail + ) -> WorkspaceAssignmentDetail: + """Creates a workspace assignment detail for a principal. + + :param workspace_id: int + Required. The workspace ID for which the workspace assignment detail is being created. + :param workspace_assignment_detail: :class:`WorkspaceAssignmentDetail` + Required. Workspace assignment detail to be created in . + + :returns: :class:`WorkspaceAssignmentDetail` """ - query = {} - if view is not None: - query["view"] = view.value + body = workspace_assignment_detail.as_dict() headers = { "Accept": "application/json", + "Content-Type": "application/json", } - cfg = self._api._cfg - if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: - headers["X-Databricks-Org-Id"] = cfg.workspace_id - res = self._api.do( - "GET", f"/api/2.0/identity/workspaceAccessDetails/{principal_id}", query=query, headers=headers + "POST", + f"/api/2.0/identity/accounts/{self._api.account_id}/workspaces/{workspace_id}/workspaceAssignmentDetails", + body=body, + headers=headers, ) - return WorkspaceAccessDetail.from_dict(res) + return WorkspaceAssignmentDetail.from_dict(res) - def resolve_group_proxy(self, external_id: str) -> ResolveGroupResponse: - """Resolves a group with the given external ID from the customer's IdP. If the group does not exist, it - will be created in the account. If the customer is not onboarded onto Automatic Identity Management - (AIM), this will return an error. + def delete_account_access_identity_rule(self, external_id: str): + """Deletes an account access identity rule for a given principal. 
:param external_id: str - Required. The external ID of the group in the customer's IdP. + Required. The external ID of the principal whose rule should be deleted. + - :returns: :class:`ResolveGroupResponse` """ - body = {} - if external_id is not None: - body["external_id"] = external_id headers = { "Accept": "application/json", - "Content-Type": "application/json", } - cfg = self._api._cfg - if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: - headers["X-Databricks-Org-Id"] = cfg.workspace_id + self._api.do( + "DELETE", + f"/api/2.0/accounts/{self._api.account_id}/aim-control-policy/account-access-identity-rules/{external_id}", + headers=headers, + ) - res = self._api.do("POST", "/api/2.0/identity/groups/resolveByExternalId", body=body, headers=headers) - return ResolveGroupResponse.from_dict(res) + def delete_group(self, internal_id: int): + """TODO: Write description later when this method is implemented - def resolve_service_principal_proxy(self, external_id: str) -> ResolveServicePrincipalResponse: - """Resolves an SP with the given external ID from the customer's IdP. If the SP does not exist, it will - be created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will - return an error. + :param internal_id: int + Required. Internal ID of the group in Databricks. - :param external_id: str - Required. The external ID of the service principal in the customer's IdP. 
- :returns: :class:`ResolveServicePrincipalResponse` """ - body = {} - if external_id is not None: - body["external_id"] = external_id headers = { "Accept": "application/json", - "Content-Type": "application/json", } - cfg = self._api._cfg - if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: - headers["X-Databricks-Org-Id"] = cfg.workspace_id - - res = self._api.do( - "POST", "/api/2.0/identity/servicePrincipals/resolveByExternalId", body=body, headers=headers + self._api.do( + "DELETE", f"/api/2.0/identity/accounts/{self._api.account_id}/groups/{internal_id}", headers=headers ) - return ResolveServicePrincipalResponse.from_dict(res) - def resolve_user_proxy(self, external_id: str) -> ResolveUserResponse: - """Resolves a user with the given external ID from the customer's IdP. If the user does not exist, it - will be created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will - return an error. + def delete_group_membership(self, group_id: int, principal_id: int): + """Deletes a group membership (unassigns a principal from a group). + + :param group_id: int + Required. Internal ID of the group in Databricks. + :param principal_id: int + Required. Internal ID of the principal to be unassigned from the group. - :param external_id: str - Required. The external ID of the user in the customer's IdP. 
- :returns: :class:`ResolveUserResponse` """ - body = {} - if external_id is not None: - body["external_id"] = external_id headers = { "Accept": "application/json", - "Content-Type": "application/json", } - cfg = self._api._cfg - if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: - headers["X-Databricks-Org-Id"] = cfg.workspace_id + self._api.do( + "DELETE", + f"/api/2.0/identity/accounts/{self._api.account_id}/groups/{group_id}/memberships/{principal_id}", + headers=headers, + ) - res = self._api.do("POST", "/api/2.0/identity/users/resolveByExternalId", body=body, headers=headers) - return ResolveUserResponse.from_dict(res) + def delete_service_principal(self, internal_id: int): + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals/{internal_id}", + headers=headers, + ) + + def delete_user(self, internal_id: int): + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", f"/api/2.0/identity/accounts/{self._api.account_id}/users/{internal_id}", headers=headers + ) + + def delete_workspace_assignment_detail(self, workspace_id: int, principal_id: int): + """Deletes a workspace assignment detail for a principal. + + :param workspace_id: int + The workspace ID where the principal has access. + :param principal_id: int + Required. ID of the principal in Databricks to delete workspace assignment for. 
+ + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/identity/accounts/{self._api.account_id}/workspaces/{workspace_id}/workspaceAssignmentDetails/{principal_id}", + headers=headers, + ) + + def get_account_access_identity_rule(self, external_id: str) -> AccountAccessIdentityRule: + """Gets an account access identity rule for a given principal. + + :param external_id: str + Required. The external ID of the principal whose rule should be retrieved. + + :returns: :class:`AccountAccessIdentityRule` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/aim-control-policy/account-access-identity-rules/{external_id}", + headers=headers, + ) + return AccountAccessIdentityRule.from_dict(res) + + def get_group(self, internal_id: int) -> Group: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + + :returns: :class:`Group` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/identity/accounts/{self._api.account_id}/groups/{internal_id}", headers=headers + ) + return Group.from_dict(res) + + def get_service_principal(self, internal_id: int) -> ServicePrincipal: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + + :returns: :class:`ServicePrincipal` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals/{internal_id}", headers=headers + ) + return ServicePrincipal.from_dict(res) + + def get_user(self, internal_id: int) -> User: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. 
+ + :returns: :class:`User` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/identity/accounts/{self._api.account_id}/users/{internal_id}", headers=headers + ) + return User.from_dict(res) + + def get_workspace_access_detail( + self, workspace_id: int, principal_id: int, *, view: Optional[WorkspaceAccessDetailView] = None + ) -> WorkspaceAccessDetail: + """Returns the access details for a principal in a workspace. Allows for checking access details for any + provisioned principal (user, service principal, or group) in a workspace. * Provisioned principal here + refers to one that has been synced into Databricks from the customer's IdP or added explicitly to + Databricks via SCIM/UI. Allows for passing in a "view" parameter to control what fields are returned + (BASIC by default or FULL). + + :param workspace_id: int + Required. The workspace ID for which the access details are being requested. + :param principal_id: int + Required. The internal ID of the principal (user/sp/group) for which the access details are being + requested. + :param view: :class:`WorkspaceAccessDetailView` (optional) + Controls what fields are returned. + + :returns: :class:`WorkspaceAccessDetail` + """ + + query = {} + if view is not None: + query["view"] = view.value + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/identity/accounts/{self._api.account_id}/workspaces/{workspace_id}/workspaceAccessDetails/{principal_id}", + query=query, + headers=headers, + ) + return WorkspaceAccessDetail.from_dict(res) + + def get_workspace_assignment_detail(self, workspace_id: int, principal_id: int) -> WorkspaceAssignmentDetail: + """Returns the assignment details for a principal in a workspace. + + :param workspace_id: int + Required. The workspace ID for which the assignment details are being requested. + :param principal_id: int + Required. 
The internal ID of the principal (user/sp/group) for which the assignment details are + being requested. + + :returns: :class:`WorkspaceAssignmentDetail` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/identity/accounts/{self._api.account_id}/workspaces/{workspace_id}/workspaceAssignmentDetails/{principal_id}", + headers=headers, + ) + return WorkspaceAssignmentDetail.from_dict(res) + + def list_account_access_identity_rules( + self, *, filter: Optional[str] = None, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListAccountAccessIdentityRulesResponse: + """Lists all account access identity rules for a given account. These rules control which principals + (users, service principals, groups) from the customer's IdP are allowed or denied access to the + Databricks account. + + :param filter: str (optional) + Optional. Filter to apply to the list. Supports filtering by displayName. + :param page_size: int (optional) + Optional. The maximum number of rules to return. The service may return fewer than this value. + :param page_token: str (optional) + Optional. A page token, received from a previous call. Provide this to retrieve the subsequent page. 
+ + :returns: :class:`ListAccountAccessIdentityRulesResponse` + """ + + query = {} + if filter is not None: + query["filter"] = filter + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/aim-control-policy/account-access-identity-rules", + query=query, + headers=headers, + ) + return ListAccountAccessIdentityRulesResponse.from_dict(res) + + def list_direct_group_members( + self, group_id: int, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListDirectGroupMembersResponse: + """Lists provisioned direct members of a group with their membership source (internal or from identity + provider). + + :param group_id: int + Required. Internal ID of the group in Databricks whose direct members are being listed. + :param page_size: int (optional) + The maximum number of members to return. The service may return fewer than this value. If not + provided, defaults to 1000 (also the maximum allowed). + :param page_token: str (optional) + A page token, received from a previous ListDirectGroupMembers call. Provide this to retrieve the + subsequent page. 
+ + :returns: :class:`ListDirectGroupMembersResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/identity/accounts/{self._api.account_id}/groups/{group_id}/direct-members", + query=query, + headers=headers, + ) + return ListDirectGroupMembersResponse.from_dict(res) + + def list_groups( + self, *, filter: Optional[str] = None, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListGroupsResponse: + """TODO: Write description later when this method is implemented + + :param filter: str (optional) + Optional. Allows filtering groups by group name or external id. + :param page_size: int (optional) + The maximum number of groups to return. The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListGroups call. Provide this to retrieve the subsequent + page. + + :returns: :class:`ListGroupsResponse` + """ + + query = {} + if filter is not None: + query["filter"] = filter + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/identity/accounts/{self._api.account_id}/groups", query=query, headers=headers + ) + return ListGroupsResponse.from_dict(res) + + def list_service_principals( + self, *, filter: Optional[str] = None, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListServicePrincipalsResponse: + """TODO: Write description later when this method is implemented + + :param filter: str (optional) + Optional. Allows filtering service principals by application id or external id. + :param page_size: int (optional) + The maximum number of service principals to return. 
The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListServicePrincipals call. Provide this to retrieve the + subsequent page. + + :returns: :class:`ListServicePrincipalsResponse` + """ + + query = {} + if filter is not None: + query["filter"] = filter + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals", query=query, headers=headers + ) + return ListServicePrincipalsResponse.from_dict(res) + + def list_transitive_parent_groups( + self, principal_id: int, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListTransitiveParentGroupsResponse: + """Lists all transitive parent groups of a principal. + + :param principal_id: int + Required. Internal ID of the principal in Databricks whose transitive parent groups are being + listed. + :param page_size: int (optional) + The maximum number of parent groups to return. The service may return fewer than this value. If not + provided, defaults to 1000 (also the maximum allowed). + :param page_token: str (optional) + A page token, received from a previous ListTransitiveParentGroups call. Provide this to retrieve the + subsequent page. 
+ + :returns: :class:`ListTransitiveParentGroupsResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/identity/accounts/{self._api.account_id}/principals/{principal_id}/transitive-parent-groups", + query=query, + headers=headers, + ) + return ListTransitiveParentGroupsResponse.from_dict(res) + + def list_users( + self, *, filter: Optional[str] = None, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListUsersResponse: + """TODO: Write description later when this method is implemented + + :param filter: str (optional) + Optional. Allows filtering users by username or external id. + :param page_size: int (optional) + The maximum number of users to return. The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListUsers call. Provide this to retrieve the subsequent page. + + :returns: :class:`ListUsersResponse` + """ + + query = {} + if filter is not None: + query["filter"] = filter + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/identity/accounts/{self._api.account_id}/users", query=query, headers=headers + ) + return ListUsersResponse.from_dict(res) + + def list_workspace_access_details( + self, workspace_id: int, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListWorkspaceAccessDetailsResponse: + """TODO: Write description later when this method is implemented + + :param workspace_id: int + The workspace ID for which the workspace access details are being fetched. + :param page_size: int (optional) + The maximum number of workspace access details to return. 
The service may return fewer than this + value. + :param page_token: str (optional) + A page token, received from a previous ListWorkspaceAccessDetails call. Provide this to retrieve the + subsequent page. + + :returns: :class:`ListWorkspaceAccessDetailsResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/identity/accounts/{self._api.account_id}/workspaces/{workspace_id}/workspaceAccessDetails", + query=query, + headers=headers, + ) + return ListWorkspaceAccessDetailsResponse.from_dict(res) + + def list_workspace_assignment_details( + self, workspace_id: int, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListWorkspaceAssignmentDetailsResponse: + """Lists workspace assignment details for a workspace. + + :param workspace_id: int + Required. The workspace ID for which the workspace assignment details are being fetched. + :param page_size: int (optional) + The maximum number of workspace assignment details to return. The service may return fewer than this + value. + :param page_token: str (optional) + A page token, received from a previous ListWorkspaceAssignmentDetails call. Provide this to retrieve + the subsequent page. 
+ + :returns: :class:`ListWorkspaceAssignmentDetailsResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/identity/accounts/{self._api.account_id}/workspaces/{workspace_id}/workspaceAssignmentDetails", + query=query, + headers=headers, + ) + return ListWorkspaceAssignmentDetailsResponse.from_dict(res) + + def resolve_group(self, external_id: str) -> ResolveGroupResponse: + """Resolves a group with the given external ID from the customer's IdP. If the group does not exist, it + will be created in the account. If the customer is not onboarded onto Automatic Identity Management + (AIM), this will return an error. + + :param external_id: str + Required. The external ID of the group in the customer's IdP. + + :returns: :class:`ResolveGroupResponse` + """ + + body = {} + if external_id is not None: + body["external_id"] = external_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/identity/accounts/{self._api.account_id}/groups/resolveByExternalId", + body=body, + headers=headers, + ) + return ResolveGroupResponse.from_dict(res) + + def resolve_service_principal(self, external_id: str) -> ResolveServicePrincipalResponse: + """Resolves an SP with the given external ID from the customer's IdP. If the SP does not exist, it will + be created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will + return an error. + + :param external_id: str + Required. The external ID of the service principal in the customer's IdP. 
+ + :returns: :class:`ResolveServicePrincipalResponse` + """ + + body = {} + if external_id is not None: + body["external_id"] = external_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals/resolveByExternalId", + body=body, + headers=headers, + ) + return ResolveServicePrincipalResponse.from_dict(res) + + def resolve_user(self, external_id: str) -> ResolveUserResponse: + """Resolves a user with the given external ID from the customer's IdP. If the user does not exist, it + will be created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will + return an error. + + :param external_id: str + Required. The external ID of the user in the customer's IdP. + + :returns: :class:`ResolveUserResponse` + """ + + body = {} + if external_id is not None: + body["external_id"] = external_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/identity/accounts/{self._api.account_id}/users/resolveByExternalId", + body=body, + headers=headers, + ) + return ResolveUserResponse.from_dict(res) + + def update_group(self, internal_id: int, group: Group, update_mask: str) -> Group: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + :param group: :class:`Group` + Required. Group to be updated in + :param update_mask: str + Optional. The list of fields to update. 
+ + :returns: :class:`Group` + """ + + body = group.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/identity/accounts/{self._api.account_id}/groups/{internal_id}", + query=query, + body=body, + headers=headers, + ) + return Group.from_dict(res) + + def update_service_principal( + self, internal_id: int, service_principal: ServicePrincipal, update_mask: str + ) -> ServicePrincipal: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + :param service_principal: :class:`ServicePrincipal` + Required. Service Principal to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`ServicePrincipal` + """ + + body = service_principal.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals/{internal_id}", + query=query, + body=body, + headers=headers, + ) + return ServicePrincipal.from_dict(res) + + def update_user(self, internal_id: int, user: User, update_mask: str) -> User: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. + :param user: :class:`User` + Required. User to be updated in + :param update_mask: str + Optional. The list of fields to update. 
+ + :returns: :class:`User` + """ + + body = user.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/identity/accounts/{self._api.account_id}/users/{internal_id}", + query=query, + body=body, + headers=headers, + ) + return User.from_dict(res) + + def update_workspace_assignment_detail( + self, + workspace_id: int, + principal_id: int, + workspace_assignment_detail: WorkspaceAssignmentDetail, + update_mask: FieldMask, + ) -> WorkspaceAssignmentDetail: + """Updates a workspace assignment detail for a principal. + + :param workspace_id: int + Required. The workspace ID for which the workspace assignment detail is being updated. + :param principal_id: int + Required. ID of the principal in Databricks. + :param workspace_assignment_detail: :class:`WorkspaceAssignmentDetail` + Required. Workspace assignment detail to be updated in . + :param update_mask: FieldMask + Required. The list of fields to update. 
+ + :returns: :class:`WorkspaceAssignmentDetail` + """ + + body = workspace_assignment_detail.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask.ToJsonString() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/identity/accounts/{self._api.account_id}/workspaces/{workspace_id}/workspaceAssignmentDetails/{principal_id}", + query=query, + body=body, + headers=headers, + ) + return WorkspaceAssignmentDetail.from_dict(res) + + +class WorkspaceIamV2API: + """These APIs are used to manage identities and the workspace access of these identities in .""" + + def __init__(self, api_client): + self._api = api_client + + def create_group_membership_proxy(self, group_id: int, group_membership: GroupMembership) -> GroupMembership: + """Creates a group membership (assigns a principal to a group). + + :param group_id: int + Required. Internal ID of the group in Databricks. + :param group_membership: :class:`GroupMembership` + Required. The group membership to create. + + :returns: :class:`GroupMembership` + """ + + body = group_membership.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("POST", f"/api/2.0/identity/groups/{group_id}/memberships", body=body, headers=headers) + return GroupMembership.from_dict(res) + + def create_group_proxy(self, group: Group) -> Group: + """TODO: Write description later when this method is implemented + + :param group: :class:`Group` + Required. 
Group to be created.
+ :param principal_id: int + Required. Internal ID of the principal to be unassigned from the group. + + + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + self._api.do("DELETE", f"/api/2.0/identity/groups/{group_id}/memberships/{principal_id}", headers=headers) + + def delete_group_proxy(self, internal_id: int): + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + + + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + self._api.do("DELETE", f"/api/2.0/identity/groups/{internal_id}", headers=headers) + + def delete_service_principal_proxy(self, internal_id: int): + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + + + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + self._api.do("DELETE", f"/api/2.0/identity/servicePrincipals/{internal_id}", headers=headers) + + def delete_user_proxy(self, internal_id: int): + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. 
+ + + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + self._api.do("DELETE", f"/api/2.0/identity/users/{internal_id}", headers=headers) + + def get_group_proxy(self, internal_id: int) -> Group: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + + :returns: :class:`Group` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/2.0/identity/groups/{internal_id}", headers=headers) + return Group.from_dict(res) + + def get_service_principal_proxy(self, internal_id: int) -> ServicePrincipal: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + + :returns: :class:`ServicePrincipal` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/2.0/identity/servicePrincipals/{internal_id}", headers=headers) + return ServicePrincipal.from_dict(res) + + def get_user_proxy(self, internal_id: int) -> User: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. 
+ + :returns: :class:`User` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/2.0/identity/users/{internal_id}", headers=headers) + return User.from_dict(res) + + def get_workspace_access_detail_local( + self, principal_id: int, *, view: Optional[WorkspaceAccessDetailView] = None + ) -> WorkspaceAccessDetail: + """Returns the access details for a principal in the current workspace. Allows for checking access + details for any provisioned principal (user, service principal, or group) in the current workspace. * + Provisioned principal here refers to one that has been synced into Databricks from the customer's IdP + or added explicitly to Databricks via SCIM/UI. Allows for passing in a "view" parameter to control + what fields are returned (BASIC by default or FULL). + + :param principal_id: int + Required. The internal ID of the principal (user/sp/group) for which the access details are being + requested. + :param view: :class:`WorkspaceAccessDetailView` (optional) + Controls what fields are returned. + + :returns: :class:`WorkspaceAccessDetail` + """ + + query = {} + if view is not None: + query["view"] = view.value + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do( + "GET", f"/api/2.0/identity/workspaceAccessDetails/{principal_id}", query=query, headers=headers + ) + return WorkspaceAccessDetail.from_dict(res) + + def list_direct_group_members_proxy( + self, group_id: int, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListDirectGroupMembersResponse: + """Lists provisioned direct members of a group with their membership source (internal or from identity + provider). 
+ + :param group_id: int + Required. Internal ID of the group in Databricks whose direct members are being listed. + :param page_size: int (optional) + The maximum number of members to return. The service may return fewer than this value. If not + provided, defaults to 1000 (also the maximum allowed). + :param page_token: str (optional) + A page token, received from a previous ListDirectGroupMembersProxy call. Provide this to retrieve + the subsequent page. + + :returns: :class:`ListDirectGroupMembersResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/2.0/identity/groups/{group_id}/direct-members", query=query, headers=headers) + return ListDirectGroupMembersResponse.from_dict(res) + + def list_groups_proxy( + self, *, filter: Optional[str] = None, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListGroupsResponse: + """TODO: Write description later when this method is implemented + + :param filter: str (optional) + Optional. Allows filtering groups by group name or external id. + :param page_size: int (optional) + The maximum number of groups to return. The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListGroups call. Provide this to retrieve the subsequent + page. 
+ + :returns: :class:`ListGroupsResponse` + """ + + query = {} + if filter is not None: + query["filter"] = filter + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", "/api/2.0/identity/groups", query=query, headers=headers) + return ListGroupsResponse.from_dict(res) + + def list_service_principals_proxy( + self, *, filter: Optional[str] = None, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListServicePrincipalsResponse: + """TODO: Write description later when this method is implemented + + :param filter: str (optional) + Optional. Allows filtering service principals by application id or external id. + :param page_size: int (optional) + The maximum number of SPs to return. The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListServicePrincipals call. Provide this to retrieve the + subsequent page. 
+ + :returns: :class:`ListServicePrincipalsResponse` + """ + + query = {} + if filter is not None: + query["filter"] = filter + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", "/api/2.0/identity/servicePrincipals", query=query, headers=headers) + return ListServicePrincipalsResponse.from_dict(res) + + def list_transitive_parent_groups_proxy( + self, principal_id: int, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListTransitiveParentGroupsResponse: + """Lists all transitive parent groups of a principal. + + :param principal_id: int + Required. Internal ID of the principal in Databricks whose transitive parent groups are being + listed. + :param page_size: int (optional) + The maximum number of parent groups to return. The service may return fewer than this value. If not + provided, defaults to 1000 (also the maximum allowed). + :param page_token: str (optional) + A page token, received from a previous ListTransitiveParentGroups call. Provide this to retrieve the + subsequent page. 
+ + :returns: :class:`ListTransitiveParentGroupsResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do( + "GET", f"/api/2.0/identity/principals/{principal_id}/transitive-parent-groups", query=query, headers=headers + ) + return ListTransitiveParentGroupsResponse.from_dict(res) + + def list_users_proxy( + self, *, filter: Optional[str] = None, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListUsersResponse: + """TODO: Write description later when this method is implemented + + :param filter: str (optional) + Optional. Allows filtering users by username or external id. + :param page_size: int (optional) + The maximum number of users to return. The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListUsers call. Provide this to retrieve the subsequent page. 
+ + :returns: :class:`ListUsersResponse` + """ + + query = {} + if filter is not None: + query["filter"] = filter + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", "/api/2.0/identity/users", query=query, headers=headers) + return ListUsersResponse.from_dict(res) + + def list_workspace_access_details_local( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListWorkspaceAccessDetailsResponse: + """TODO: Write description later when this method is implemented + + :param page_size: int (optional) + The maximum number of workspace access details to return. The service may return fewer than this + value. + :param page_token: str (optional) + A page token, received from a previous ListWorkspaceAccessDetails call. Provide this to retrieve the + subsequent page. + + :returns: :class:`ListWorkspaceAccessDetailsResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", "/api/2.0/identity/workspaceAccessDetails", query=query, headers=headers) + return ListWorkspaceAccessDetailsResponse.from_dict(res) + + def resolve_group_proxy(self, external_id: str) -> ResolveGroupResponse: + """Resolves a group with the given external ID from the customer's IdP. If the group does not exist, it + will be created in the account. If the customer is not onboarded onto Automatic Identity Management + (AIM), this will return an error. 
+ + :param external_id: str + Required. The external ID of the group in the customer's IdP. + + :returns: :class:`ResolveGroupResponse` + """ + + body = {} + if external_id is not None: + body["external_id"] = external_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("POST", "/api/2.0/identity/groups/resolveByExternalId", body=body, headers=headers) + return ResolveGroupResponse.from_dict(res) + + def resolve_service_principal_proxy(self, external_id: str) -> ResolveServicePrincipalResponse: + """Resolves an SP with the given external ID from the customer's IdP. If the SP does not exist, it will + be created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will + return an error. + + :param external_id: str + Required. The external ID of the service principal in the customer's IdP. + + :returns: :class:`ResolveServicePrincipalResponse` + """ + + body = {} + if external_id is not None: + body["external_id"] = external_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do( + "POST", "/api/2.0/identity/servicePrincipals/resolveByExternalId", body=body, headers=headers + ) + return ResolveServicePrincipalResponse.from_dict(res) + + def resolve_user_proxy(self, external_id: str) -> ResolveUserResponse: + """Resolves a user with the given external ID from the customer's IdP. If the user does not exist, it + will be created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will + return an error. + + :param external_id: str + Required. The external ID of the user in the customer's IdP. 
+ + :returns: :class:`ResolveUserResponse` + """ + + body = {} + if external_id is not None: + body["external_id"] = external_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("POST", "/api/2.0/identity/users/resolveByExternalId", body=body, headers=headers) + return ResolveUserResponse.from_dict(res) + + def update_group_proxy(self, internal_id: int, group: Group, update_mask: str) -> Group: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + :param group: :class:`Group` + Required. Group to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`Group` + """ + + body = group.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("PATCH", f"/api/2.0/identity/groups/{internal_id}", query=query, body=body, headers=headers) + return Group.from_dict(res) + + def update_service_principal_proxy( + self, internal_id: int, service_principal: ServicePrincipal, update_mask: str + ) -> ServicePrincipal: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + :param service_principal: :class:`ServicePrincipal` + Required. Service principal to be updated in + :param update_mask: str + Optional. The list of fields to update. 
+ + :returns: :class:`ServicePrincipal` + """ + + body = service_principal.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do( + "PATCH", f"/api/2.0/identity/servicePrincipals/{internal_id}", query=query, body=body, headers=headers + ) + return ServicePrincipal.from_dict(res) + + def update_user_proxy(self, internal_id: int, user: User, update_mask: str) -> User: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. + :param user: :class:`User` + Required. User to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`User` + """ + + body = user.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("PATCH", f"/api/2.0/identity/users/{internal_id}", query=query, body=body, headers=headers) + return User.from_dict(res) diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 369194e71..99a5f1b07 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -23,6 +23,39 @@ # all definitions in this file are in alphabetical order +class AlertEvaluationState(Enum): + """Same alert evaluation state as in redash-v2/api/proto/alertsv2/alerts.proto""" + + ERROR = "ERROR" + OK = "OK" + TRIGGERED = "TRIGGERED" + UNKNOWN = "UNKNOWN" + + +@dataclass +class AlertTaskOutput: + alert_state: Optional[AlertEvaluationState] = None + + 
def as_dict(self) -> dict: + """Serializes the AlertTaskOutput into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.alert_state is not None: + body["alert_state"] = self.alert_state.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AlertTaskOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.alert_state is not None: + body["alert_state"] = self.alert_state + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AlertTaskOutput: + """Deserializes the AlertTaskOutput from a dictionary.""" + return cls(alert_state=_enum(d, "alert_state", AlertEvaluationState)) + + class AuthenticationMethod(Enum): OAUTH = "OAUTH" @@ -55,6 +88,10 @@ class BaseJob: job_id: Optional[int] = None """The canonical identifier for this job.""" + path: Optional[str] = None + """Path of the job object in workspace file tree, including file extension. If absent, the job + doesn't have a workspace object. Example: /Workspace/user@example.com/my_project/my_job.job.json""" + settings: Optional[JobSettings] = None """Settings for this job and all of its runs. 
These settings can be updated using the `resetJob` method.""" @@ -77,6 +114,8 @@ def as_dict(self) -> dict: body["has_more"] = self.has_more if self.job_id is not None: body["job_id"] = self.job_id + if self.path is not None: + body["path"] = self.path if self.settings: body["settings"] = self.settings.as_dict() if self.trigger_state: @@ -98,6 +137,8 @@ def as_shallow_dict(self) -> dict: body["has_more"] = self.has_more if self.job_id is not None: body["job_id"] = self.job_id + if self.path is not None: + body["path"] = self.path if self.settings: body["settings"] = self.settings if self.trigger_state: @@ -114,6 +155,7 @@ def from_dict(cls, d: Dict[str, Any]) -> BaseJob: effective_usage_policy_id=d.get("effective_usage_policy_id", None), has_more=d.get("has_more", None), job_id=d.get("job_id", None), + path=d.get("path", None), settings=_from_dict(d, "settings", JobSettings), trigger_state=_from_dict(d, "trigger_state", TriggerStateProto), ) @@ -590,6 +632,9 @@ class CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput: output_schema_info: Optional[OutputSchemaInfo] = None """Information on how to access the output schema for the clean room run""" + shared_output_schema_info: Optional[OutputSchemaInfo] = None + """Information on how to access the shared output schema for the clean room run""" + def as_dict(self) -> dict: """Serializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput into a dictionary suitable for use as a JSON request body.""" body = {} @@ -599,6 +644,8 @@ def as_dict(self) -> dict: body["notebook_output"] = self.notebook_output.as_dict() if self.output_schema_info: body["output_schema_info"] = self.output_schema_info.as_dict() + if self.shared_output_schema_info: + body["shared_output_schema_info"] = self.shared_output_schema_info.as_dict() return body def as_shallow_dict(self) -> dict: @@ -610,6 +657,8 @@ def as_shallow_dict(self) -> dict: body["notebook_output"] = self.notebook_output if self.output_schema_info: body["output_schema_info"] = 
self.output_schema_info + if self.shared_output_schema_info: + body["shared_output_schema_info"] = self.shared_output_schema_info return body @classmethod @@ -619,6 +668,7 @@ def from_dict(cls, d: Dict[str, Any]) -> CleanRoomsNotebookTaskCleanRoomsNoteboo clean_room_job_run_state=_from_dict(d, "clean_room_job_run_state", CleanRoomTaskRunState), notebook_output=_from_dict(d, "notebook_output", NotebookOutput), output_schema_info=_from_dict(d, "output_schema_info", OutputSchemaInfo), + shared_output_schema_info=_from_dict(d, "shared_output_schema_info", OutputSchemaInfo), ) @@ -941,6 +991,11 @@ class CronSchedule: pause_status: Optional[PauseStatus] = None """Indicate whether this schedule is paused or not.""" + sql_condition: Optional[SqlConditionConfiguration] = None + """SQL condition that must be satisfied before a scheduled run is triggered. The condition is + evaluated after the cron expression fires and must return a truthy result for the run to + proceed.""" + def as_dict(self) -> dict: """Serializes the CronSchedule into a dictionary suitable for use as a JSON request body.""" body = {} @@ -948,6 +1003,8 @@ def as_dict(self) -> dict: body["pause_status"] = self.pause_status.value if self.quartz_cron_expression is not None: body["quartz_cron_expression"] = self.quartz_cron_expression + if self.sql_condition: + body["sql_condition"] = self.sql_condition.as_dict() if self.timezone_id is not None: body["timezone_id"] = self.timezone_id return body @@ -959,6 +1016,8 @@ def as_shallow_dict(self) -> dict: body["pause_status"] = self.pause_status if self.quartz_cron_expression is not None: body["quartz_cron_expression"] = self.quartz_cron_expression + if self.sql_condition: + body["sql_condition"] = self.sql_condition if self.timezone_id is not None: body["timezone_id"] = self.timezone_id return body @@ -969,6 +1028,7 @@ def from_dict(cls, d: Dict[str, Any]) -> CronSchedule: return cls( pause_status=_enum(d, "pause_status", PauseStatus), 
quartz_cron_expression=d.get("quartz_cron_expression", None), + sql_condition=_from_dict(d, "sql_condition", SqlConditionConfiguration), timezone_id=d.get("timezone_id", None), ) @@ -1921,6 +1981,10 @@ class GenAiComputeTask: compute: Optional[ComputeConfig] = None + docker_image_url: Optional[str] = None + """Optional custom Docker container image URL for running the training script. Format: + organization/repository:tag (e.g., "pytorch/pytorch:2.0.1")""" + mlflow_experiment_name: Optional[str] = None """Optional string containing the name of the MLflow experiment to log the run to. If name is not found, backend will create the mlflow experiment using the name.""" @@ -1954,6 +2018,8 @@ def as_dict(self) -> dict: body["compute"] = self.compute.as_dict() if self.dl_runtime_image is not None: body["dl_runtime_image"] = self.dl_runtime_image + if self.docker_image_url is not None: + body["docker_image_url"] = self.docker_image_url if self.mlflow_experiment_name is not None: body["mlflow_experiment_name"] = self.mlflow_experiment_name if self.source is not None: @@ -1975,6 +2041,8 @@ def as_shallow_dict(self) -> dict: body["compute"] = self.compute if self.dl_runtime_image is not None: body["dl_runtime_image"] = self.dl_runtime_image + if self.docker_image_url is not None: + body["docker_image_url"] = self.docker_image_url if self.mlflow_experiment_name is not None: body["mlflow_experiment_name"] = self.mlflow_experiment_name if self.source is not None: @@ -1994,6 +2062,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GenAiComputeTask: command=d.get("command", None), compute=_from_dict(d, "compute", ComputeConfig), dl_runtime_image=d.get("dl_runtime_image", None), + docker_image_url=d.get("docker_image_url", None), mlflow_experiment_name=d.get("mlflow_experiment_name", None), source=_enum(d, "source", Source), training_script_path=d.get("training_script_path", None), @@ -2230,6 +2299,10 @@ class Job: next_page_token: Optional[str] = None """A token that can be used to 
list the next page of array properties.""" + path: Optional[str] = None + """Path of the job object in workspace file tree, including file extension. If absent, the job + doesn't have a workspace object. Example: /Workspace/user@example.com/my_project/my_job.job.json""" + run_as_user_name: Optional[str] = None """The email of an active workspace user or the application ID of a service principal that the job runs as. This value can be changed by setting the `run_as` field when creating or updating a @@ -2263,6 +2336,8 @@ def as_dict(self) -> dict: body["job_id"] = self.job_id if self.next_page_token is not None: body["next_page_token"] = self.next_page_token + if self.path is not None: + body["path"] = self.path if self.run_as_user_name is not None: body["run_as_user_name"] = self.run_as_user_name if self.settings: @@ -2288,6 +2363,8 @@ def as_shallow_dict(self) -> dict: body["job_id"] = self.job_id if self.next_page_token is not None: body["next_page_token"] = self.next_page_token + if self.path is not None: + body["path"] = self.path if self.run_as_user_name is not None: body["run_as_user_name"] = self.run_as_user_name if self.settings: @@ -2307,6 +2384,7 @@ def from_dict(cls, d: Dict[str, Any]) -> Job: has_more=d.get("has_more", None), job_id=d.get("job_id", None), next_page_token=d.get("next_page_token", None), + path=d.get("path", None), run_as_user_name=d.get("run_as_user_name", None), settings=_from_dict(d, "settings", JobSettings), trigger_state=_from_dict(d, "trigger_state", TriggerStateProto), @@ -3024,6 +3102,10 @@ class JobSettings: parameters: Optional[List[JobParameterDefinition]] = None """Job-level parameter definitions""" + parent_path: Optional[str] = None + """Path of the job parent folder in workspace file tree. If absent, the job doesn't have a + workspace object.""" + performance_target: Optional[PerformanceTarget] = None """The performance mode on a serverless job. 
This field determines the level of compute performance or cost-efficiency for the run. The performance target does not apply to tasks that run on @@ -3106,6 +3188,8 @@ def as_dict(self) -> dict: body["notification_settings"] = self.notification_settings.as_dict() if self.parameters: body["parameters"] = [v.as_dict() for v in self.parameters] + if self.parent_path is not None: + body["parent_path"] = self.parent_path if self.performance_target is not None: body["performance_target"] = self.performance_target.value if self.queue: @@ -3161,6 +3245,8 @@ def as_shallow_dict(self) -> dict: body["notification_settings"] = self.notification_settings if self.parameters: body["parameters"] = self.parameters + if self.parent_path is not None: + body["parent_path"] = self.parent_path if self.performance_target is not None: body["performance_target"] = self.performance_target if self.queue: @@ -3202,6 +3288,7 @@ def from_dict(cls, d: Dict[str, Any]) -> JobSettings: name=d.get("name", None), notification_settings=_from_dict(d, "notification_settings", JobNotificationSettings), parameters=_repeated_dict(d, "parameters", JobParameterDefinition), + parent_path=d.get("parent_path", None), performance_target=_enum(d, "performance_target", PerformanceTarget), queue=_from_dict(d, "queue", QueueSettings), run_as=_from_dict(d, "run_as", JobRunAs), @@ -5289,6 +5376,9 @@ def from_dict(cls, d: Dict[str, Any]) -> RunNowResponse: class RunOutput: """Run output was retrieved successfully.""" + alert_output: Optional[AlertTaskOutput] = None + """The output of an alert task, if available""" + clean_rooms_notebook_output: Optional[CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput] = None """The output of a clean rooms notebook task, if available""" @@ -5344,6 +5434,8 @@ class RunOutput: def as_dict(self) -> dict: """Serializes the RunOutput into a dictionary suitable for use as a JSON request body.""" body = {} + if self.alert_output: + body["alert_output"] = self.alert_output.as_dict() if 
self.clean_rooms_notebook_output: body["clean_rooms_notebook_output"] = self.clean_rooms_notebook_output.as_dict() if self.dashboard_output: @@ -5377,6 +5469,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the RunOutput into a shallow dictionary of its immediate attributes.""" body = {} + if self.alert_output: + body["alert_output"] = self.alert_output if self.clean_rooms_notebook_output: body["clean_rooms_notebook_output"] = self.clean_rooms_notebook_output if self.dashboard_output: @@ -5411,6 +5505,7 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> RunOutput: """Deserializes the RunOutput from a dictionary.""" return cls( + alert_output=_from_dict(d, "alert_output", AlertTaskOutput), clean_rooms_notebook_output=_from_dict( d, "clean_rooms_notebook_output", CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput ), @@ -5766,6 +5861,10 @@ class RunTask: disable_auto_optimization: Optional[bool] = None """An option to disable auto optimization in serverless""" + disabled: Optional[bool] = None + """An optional flag to disable the task. If set to true, the task will not run even if it is part + of a job.""" + effective_performance_target: Optional[PerformanceTarget] = None """The actual performance target used by the serverless run during execution. 
This can differ from the client-set performance target on the request depending on whether the performance mode is @@ -5946,6 +6045,8 @@ def as_dict(self) -> dict: body["description"] = self.description if self.disable_auto_optimization is not None: body["disable_auto_optimization"] = self.disable_auto_optimization + if self.disabled is not None: + body["disabled"] = self.disabled if self.effective_performance_target is not None: body["effective_performance_target"] = self.effective_performance_target.value if self.email_notifications: @@ -6053,6 +6154,8 @@ def as_shallow_dict(self) -> dict: body["description"] = self.description if self.disable_auto_optimization is not None: body["disable_auto_optimization"] = self.disable_auto_optimization + if self.disabled is not None: + body["disabled"] = self.disabled if self.effective_performance_target is not None: body["effective_performance_target"] = self.effective_performance_target if self.email_notifications: @@ -6148,6 +6251,7 @@ def from_dict(cls, d: Dict[str, Any]) -> RunTask: depends_on=_repeated_dict(d, "depends_on", TaskDependency), description=d.get("description", None), disable_auto_optimization=d.get("disable_auto_optimization", None), + disabled=d.get("disabled", None), effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget), email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), end_time=d.get("end_time", None), @@ -6457,6 +6561,126 @@ class SqlAlertState(Enum): UNKNOWN = "UNKNOWN" +@dataclass +class SqlConditionConfiguration: + sql_query_id: str + """The ID of the SQL query to evaluate as the trigger condition.""" + + warehouse_id: str + """The canonical identifier of the SQL warehouse to run the condition query against.""" + + def as_dict(self) -> dict: + """Serializes the SqlConditionConfiguration into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.sql_query_id is not None: + body["sql_query_id"] = 
self.sql_query_id + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SqlConditionConfiguration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.sql_query_id is not None: + body["sql_query_id"] = self.sql_query_id + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SqlConditionConfiguration: + """Deserializes the SqlConditionConfiguration from a dictionary.""" + return cls(sql_query_id=d.get("sql_query_id", None), warehouse_id=d.get("warehouse_id", None)) + + +@dataclass +class SqlConditionRunInfoDetails: + """SQL condition evaluation details captured at the time the run was triggered""" + + condition_evaluation_satisfied: Optional[bool] = None + """Whether the last condition evaluation was satisfied (query returned truthy result).""" + + condition_evaluation_sql_session_id: Optional[str] = None + """The ID of the SQL session, used by UI to track session context.""" + + condition_evaluation_sql_statement_id: Optional[str] = None + """The ID of the SQL statement execution, used by UI to track query execution and warehouse info.""" + + def as_dict(self) -> dict: + """Serializes the SqlConditionRunInfoDetails into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.condition_evaluation_satisfied is not None: + body["condition_evaluation_satisfied"] = self.condition_evaluation_satisfied + if self.condition_evaluation_sql_session_id is not None: + body["condition_evaluation_sql_session_id"] = self.condition_evaluation_sql_session_id + if self.condition_evaluation_sql_statement_id is not None: + body["condition_evaluation_sql_statement_id"] = self.condition_evaluation_sql_statement_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SqlConditionRunInfoDetails into a shallow dictionary of its 
immediate attributes.""" + body = {} + if self.condition_evaluation_satisfied is not None: + body["condition_evaluation_satisfied"] = self.condition_evaluation_satisfied + if self.condition_evaluation_sql_session_id is not None: + body["condition_evaluation_sql_session_id"] = self.condition_evaluation_sql_session_id + if self.condition_evaluation_sql_statement_id is not None: + body["condition_evaluation_sql_statement_id"] = self.condition_evaluation_sql_statement_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SqlConditionRunInfoDetails: + """Deserializes the SqlConditionRunInfoDetails from a dictionary.""" + return cls( + condition_evaluation_satisfied=d.get("condition_evaluation_satisfied", None), + condition_evaluation_sql_session_id=d.get("condition_evaluation_sql_session_id", None), + condition_evaluation_sql_statement_id=d.get("condition_evaluation_sql_statement_id", None), + ) + + +@dataclass +class SqlConditionState: + latest_condition_evaluation_satisfied: Optional[bool] = None + """Whether the last condition evaluation was satisfied (query returned truthy result).""" + + latest_condition_evaluation_sql_session_id: Optional[str] = None + """The ID of the SQL session, used by UI to track session context.""" + + latest_condition_evaluation_sql_statement_id: Optional[str] = None + """The ID of the SQL statement execution, used by UI to track query execution and warehouse info.""" + + def as_dict(self) -> dict: + """Serializes the SqlConditionState into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.latest_condition_evaluation_satisfied is not None: + body["latest_condition_evaluation_satisfied"] = self.latest_condition_evaluation_satisfied + if self.latest_condition_evaluation_sql_session_id is not None: + body["latest_condition_evaluation_sql_session_id"] = self.latest_condition_evaluation_sql_session_id + if self.latest_condition_evaluation_sql_statement_id is not None: + 
body["latest_condition_evaluation_sql_statement_id"] = self.latest_condition_evaluation_sql_statement_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SqlConditionState into a shallow dictionary of its immediate attributes.""" + body = {} + if self.latest_condition_evaluation_satisfied is not None: + body["latest_condition_evaluation_satisfied"] = self.latest_condition_evaluation_satisfied + if self.latest_condition_evaluation_sql_session_id is not None: + body["latest_condition_evaluation_sql_session_id"] = self.latest_condition_evaluation_sql_session_id + if self.latest_condition_evaluation_sql_statement_id is not None: + body["latest_condition_evaluation_sql_statement_id"] = self.latest_condition_evaluation_sql_statement_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SqlConditionState: + """Deserializes the SqlConditionState from a dictionary.""" + return cls( + latest_condition_evaluation_satisfied=d.get("latest_condition_evaluation_satisfied", None), + latest_condition_evaluation_sql_session_id=d.get("latest_condition_evaluation_sql_session_id", None), + latest_condition_evaluation_sql_statement_id=d.get("latest_condition_evaluation_sql_statement_id", None), + ) + + @dataclass class SqlDashboardOutput: warehouse_id: Optional[str] = None @@ -7066,6 +7290,10 @@ class SubmitTask: disable_auto_optimization: Optional[bool] = None """An option to disable auto optimization in serverless""" + disabled: Optional[bool] = None + """An optional flag to disable the task. If set to true, the task will not run even if it is part + of a job.""" + email_notifications: Optional[JobEmailNotifications] = None """An optional set of email addresses notified when the task run begins or completes. 
The default behavior is to not send any emails.""" @@ -7179,6 +7407,8 @@ def as_dict(self) -> dict: body["description"] = self.description if self.disable_auto_optimization is not None: body["disable_auto_optimization"] = self.disable_auto_optimization + if self.disabled is not None: + body["disabled"] = self.disabled if self.email_notifications: body["email_notifications"] = self.email_notifications.as_dict() if self.environment_key is not None: @@ -7254,6 +7484,8 @@ def as_shallow_dict(self) -> dict: body["description"] = self.description if self.disable_auto_optimization is not None: body["disable_auto_optimization"] = self.disable_auto_optimization + if self.disabled is not None: + body["disabled"] = self.disabled if self.email_notifications: body["email_notifications"] = self.email_notifications if self.environment_key is not None: @@ -7320,6 +7552,7 @@ def from_dict(cls, d: Dict[str, Any]) -> SubmitTask: depends_on=_repeated_dict(d, "depends_on", TaskDependency), description=d.get("description", None), disable_auto_optimization=d.get("disable_auto_optimization", None), + disabled=d.get("disabled", None), email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), environment_key=d.get("environment_key", None), existing_cluster_id=d.get("existing_cluster_id", None), @@ -8197,11 +8430,16 @@ class TriggerInfo: run_id: Optional[int] = None """The run id of the Run Job task run""" + sql_condition: Optional[SqlConditionRunInfoDetails] = None + """SQL condition evaluation details for this run""" + def as_dict(self) -> dict: """Serializes the TriggerInfo into a dictionary suitable for use as a JSON request body.""" body = {} if self.run_id is not None: body["run_id"] = self.run_id + if self.sql_condition: + body["sql_condition"] = self.sql_condition.as_dict() return body def as_shallow_dict(self) -> dict: @@ -8209,12 +8447,16 @@ def as_shallow_dict(self) -> dict: body = {} if self.run_id is not None: body["run_id"] = self.run_id + if 
self.sql_condition: + body["sql_condition"] = self.sql_condition return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TriggerInfo: """Deserializes the TriggerInfo from a dictionary.""" - return cls(run_id=d.get("run_id", None)) + return cls( + run_id=d.get("run_id", None), sql_condition=_from_dict(d, "sql_condition", SqlConditionRunInfoDetails) + ) @dataclass @@ -8230,6 +8472,10 @@ class TriggerSettings: periodic: Optional[PeriodicTriggerConfiguration] = None """Periodic trigger settings.""" + sql_condition: Optional[SqlConditionConfiguration] = None + """SQL condition that must be satisfied for the trigger to fire. Can be used in combination with + other trigger types and runs *after* other trigger types conditions are evaluated.""" + table_update: Optional[TableUpdateTriggerConfiguration] = None def as_dict(self) -> dict: @@ -8243,6 +8489,8 @@ def as_dict(self) -> dict: body["pause_status"] = self.pause_status.value if self.periodic: body["periodic"] = self.periodic.as_dict() + if self.sql_condition: + body["sql_condition"] = self.sql_condition.as_dict() if self.table_update: body["table_update"] = self.table_update.as_dict() return body @@ -8258,6 +8506,8 @@ def as_shallow_dict(self) -> dict: body["pause_status"] = self.pause_status if self.periodic: body["periodic"] = self.periodic + if self.sql_condition: + body["sql_condition"] = self.sql_condition if self.table_update: body["table_update"] = self.table_update return body @@ -8270,6 +8520,7 @@ def from_dict(cls, d: Dict[str, Any]) -> TriggerSettings: model=_from_dict(d, "model", ModelTriggerConfiguration), pause_status=_enum(d, "pause_status", PauseStatus), periodic=_from_dict(d, "periodic", PeriodicTriggerConfiguration), + sql_condition=_from_dict(d, "sql_condition", SqlConditionConfiguration), table_update=_from_dict(d, "table_update", TableUpdateTriggerConfiguration), ) @@ -8278,6 +8529,9 @@ def from_dict(cls, d: Dict[str, Any]) -> TriggerSettings: class TriggerStateProto: file_arrival: 
Optional[FileArrivalTriggerState] = None + sql_condition: Optional[SqlConditionState] = None + """State for SQL condition evaluation, can coexist with other trigger states.""" + table: Optional[TableTriggerState] = None def as_dict(self) -> dict: @@ -8285,6 +8539,8 @@ def as_dict(self) -> dict: body = {} if self.file_arrival: body["file_arrival"] = self.file_arrival.as_dict() + if self.sql_condition: + body["sql_condition"] = self.sql_condition.as_dict() if self.table: body["table"] = self.table.as_dict() return body @@ -8294,6 +8550,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.file_arrival: body["file_arrival"] = self.file_arrival + if self.sql_condition: + body["sql_condition"] = self.sql_condition if self.table: body["table"] = self.table return body @@ -8303,6 +8561,7 @@ def from_dict(cls, d: Dict[str, Any]) -> TriggerStateProto: """Deserializes the TriggerStateProto from a dictionary.""" return cls( file_arrival=_from_dict(d, "file_arrival", FileArrivalTriggerState), + sql_condition=_from_dict(d, "sql_condition", SqlConditionState), table=_from_dict(d, "table", TableTriggerState), ) @@ -8635,6 +8894,7 @@ def create( name: Optional[str] = None, notification_settings: Optional[JobNotificationSettings] = None, parameters: Optional[List[JobParameterDefinition]] = None, + parent_path: Optional[str] = None, performance_target: Optional[PerformanceTarget] = None, queue: Optional[QueueSettings] = None, run_as: Optional[JobRunAs] = None, @@ -8707,6 +8967,9 @@ def create( `email_notifications` and `webhook_notifications` for this job. :param parameters: List[:class:`JobParameterDefinition`] (optional) Job-level parameter definitions + :param parent_path: str (optional) + Path of the job parent folder in workspace file tree. If absent, the job doesn't have a workspace + object. :param performance_target: :class:`PerformanceTarget` (optional) The performance mode on a serverless job. 
This field determines the level of compute performance or cost-efficiency for the run. The performance target does not apply to tasks that run on Serverless @@ -8783,6 +9046,8 @@ def create( body["notification_settings"] = notification_settings.as_dict() if parameters is not None: body["parameters"] = [v.as_dict() for v in parameters] + if parent_path is not None: + body["parent_path"] = parent_path if performance_target is not None: body["performance_target"] = performance_target.value if queue is not None: diff --git a/databricks/sdk/service/knowledgeassistants.py b/databricks/sdk/service/knowledgeassistants.py new file mode 100755 index 000000000..29f17928b --- /dev/null +++ b/databricks/sdk/service/knowledgeassistants.py @@ -0,0 +1,732 @@ +# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +from __future__ import annotations + +import logging +from dataclasses import dataclass +from enum import Enum +from typing import Any, Dict, Iterator, List, Optional + +from google.protobuf.timestamp_pb2 import Timestamp + +from databricks.sdk.client_types import HostType +from databricks.sdk.common.types.fieldmask import FieldMask +from databricks.sdk.service._internal import (_enum, _from_dict, + _repeated_dict, _timestamp) + +_LOG = logging.getLogger("databricks.sdk") + + +# all definitions in this file are in alphabetical order + + +@dataclass +class FileTableSpec: + """FileTableSpec specifies a file table source configuration.""" + + table_name: str + """Full UC name of the table, in the format of {CATALOG}.{SCHEMA}.{TABLE_NAME}.""" + + file_col: str + """The name of the column containing BINARY file content to be indexed.""" + + def as_dict(self) -> dict: + """Serializes the FileTableSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.file_col is not None: + body["file_col"] = self.file_col + if self.table_name is not None: + body["table_name"] = self.table_name + return body + + def 
as_shallow_dict(self) -> dict: + """Serializes the FileTableSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.file_col is not None: + body["file_col"] = self.file_col + if self.table_name is not None: + body["table_name"] = self.table_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> FileTableSpec: + """Deserializes the FileTableSpec from a dictionary.""" + return cls(file_col=d.get("file_col", None), table_name=d.get("table_name", None)) + + +@dataclass +class FilesSpec: + """FilesSpec specifies a files source configuration.""" + + path: str + """A UC volume path that includes a list of files.""" + + def as_dict(self) -> dict: + """Serializes the FilesSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.path is not None: + body["path"] = self.path + return body + + def as_shallow_dict(self) -> dict: + """Serializes the FilesSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.path is not None: + body["path"] = self.path + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> FilesSpec: + """Deserializes the FilesSpec from a dictionary.""" + return cls(path=d.get("path", None)) + + +@dataclass +class IndexSpec: + """IndexSpec specifies a vector search index source configuration.""" + + index_name: str + """Full UC name of the vector search index, in the format of {CATALOG}.{SCHEMA}.{INDEX_NAME}.""" + + text_col: str + """The column that includes the document text for retrieval.""" + + doc_uri_col: str + """The column that specifies a link or reference to where the information came from.""" + + def as_dict(self) -> dict: + """Serializes the IndexSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.doc_uri_col is not None: + body["doc_uri_col"] = self.doc_uri_col + if self.index_name is not None: + body["index_name"] = self.index_name + if self.text_col is not None: + body["text_col"] 
= self.text_col + return body + + def as_shallow_dict(self) -> dict: + """Serializes the IndexSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.doc_uri_col is not None: + body["doc_uri_col"] = self.doc_uri_col + if self.index_name is not None: + body["index_name"] = self.index_name + if self.text_col is not None: + body["text_col"] = self.text_col + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> IndexSpec: + """Deserializes the IndexSpec from a dictionary.""" + return cls( + doc_uri_col=d.get("doc_uri_col", None), + index_name=d.get("index_name", None), + text_col=d.get("text_col", None), + ) + + +@dataclass +class KnowledgeAssistant: + """Entity message that represents a knowledge assistant. Note: REQUIRED annotations below represent + create-time requirements. For updates, required fields are determined by the update mask.""" + + display_name: str + """The display name of the Knowledge Assistant, unique at workspace level. Required when creating a + Knowledge Assistant. When updating a Knowledge Assistant, optional unless included in + update_mask.""" + + description: str + """Description of what this agent can do (user-facing). Required when creating a Knowledge + Assistant. When updating a Knowledge Assistant, optional unless included in update_mask.""" + + create_time: Optional[Timestamp] = None + """Creation timestamp.""" + + creator: Optional[str] = None + """The creator of the Knowledge Assistant.""" + + endpoint_name: Optional[str] = None + """The name of the knowledge assistant agent endpoint.""" + + error_info: Optional[str] = None + """Error details when the Knowledge Assistant is in FAILED state.""" + + experiment_id: Optional[str] = None + """The MLflow experiment ID.""" + + id: Optional[str] = None + """The universally unique identifier (UUID) of the Knowledge Assistant.""" + + instructions: Optional[str] = None + """Additional global instructions on how the agent should generate answers. 
Optional on create and + update. When updating a Knowledge Assistant, include this field in update_mask to modify it.""" + + name: Optional[str] = None + """The resource name of the Knowledge Assistant. Format: + knowledge-assistants/{knowledge_assistant_id}""" + + state: Optional[KnowledgeAssistantState] = None + """State of the Knowledge Assistant. Not returned in List responses.""" + + def as_dict(self) -> dict: + """Serializes the KnowledgeAssistant into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.create_time is not None: + body["create_time"] = self.create_time.ToJsonString() + if self.creator is not None: + body["creator"] = self.creator + if self.description is not None: + body["description"] = self.description + if self.display_name is not None: + body["display_name"] = self.display_name + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.error_info is not None: + body["error_info"] = self.error_info + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.id is not None: + body["id"] = self.id + if self.instructions is not None: + body["instructions"] = self.instructions + if self.name is not None: + body["name"] = self.name + if self.state is not None: + body["state"] = self.state.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the KnowledgeAssistant into a shallow dictionary of its immediate attributes.""" + body = {} + if self.create_time is not None: + body["create_time"] = self.create_time + if self.creator is not None: + body["creator"] = self.creator + if self.description is not None: + body["description"] = self.description + if self.display_name is not None: + body["display_name"] = self.display_name + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.error_info is not None: + body["error_info"] = self.error_info + if self.experiment_id is not None: + body["experiment_id"] 
= self.experiment_id + if self.id is not None: + body["id"] = self.id + if self.instructions is not None: + body["instructions"] = self.instructions + if self.name is not None: + body["name"] = self.name + if self.state is not None: + body["state"] = self.state + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> KnowledgeAssistant: + """Deserializes the KnowledgeAssistant from a dictionary.""" + return cls( + create_time=_timestamp(d, "create_time"), + creator=d.get("creator", None), + description=d.get("description", None), + display_name=d.get("display_name", None), + endpoint_name=d.get("endpoint_name", None), + error_info=d.get("error_info", None), + experiment_id=d.get("experiment_id", None), + id=d.get("id", None), + instructions=d.get("instructions", None), + name=d.get("name", None), + state=_enum(d, "state", KnowledgeAssistantState), + ) + + +class KnowledgeAssistantState(Enum): + + ACTIVE = "ACTIVE" + CREATING = "CREATING" + FAILED = "FAILED" + + +@dataclass +class KnowledgeSource: + """KnowledgeSource represents a source of knowledge for the KnowledgeAssistant. Used in + create/update requests and returned in Get/List responses. Note: REQUIRED annotations below + represent create-time requirements. For updates, required fields are determined by the update + mask.""" + + display_name: str + """Human-readable display name of the knowledge source. Required when creating a Knowledge Source. + When updating a Knowledge Source, optional unless included in update_mask.""" + + description: str + """Description of the knowledge source. Required when creating a Knowledge Source. When updating a + Knowledge Source, optional unless included in update_mask.""" + + source_type: str + """The type of the source: "index", "files", or "file_table". Required when creating a Knowledge + Source. 
When updating a Knowledge Source, this field is ignored.""" + + create_time: Optional[Timestamp] = None + """Timestamp when this knowledge source was created.""" + + file_table: Optional[FileTableSpec] = None + + files: Optional[FilesSpec] = None + + id: Optional[str] = None + + index: Optional[IndexSpec] = None + + knowledge_cutoff_time: Optional[Timestamp] = None + """Timestamp representing the cutoff before which content in this knowledge source is being + ingested.""" + + name: Optional[str] = None + """Full resource name: + knowledge-assistants/{knowledge_assistant_id}/knowledge-sources/{knowledge_source_id}""" + + state: Optional[KnowledgeSourceState] = None + + def as_dict(self) -> dict: + """Serializes the KnowledgeSource into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.create_time is not None: + body["create_time"] = self.create_time.ToJsonString() + if self.description is not None: + body["description"] = self.description + if self.display_name is not None: + body["display_name"] = self.display_name + if self.file_table: + body["file_table"] = self.file_table.as_dict() + if self.files: + body["files"] = self.files.as_dict() + if self.id is not None: + body["id"] = self.id + if self.index: + body["index"] = self.index.as_dict() + if self.knowledge_cutoff_time is not None: + body["knowledge_cutoff_time"] = self.knowledge_cutoff_time.ToJsonString() + if self.name is not None: + body["name"] = self.name + if self.source_type is not None: + body["source_type"] = self.source_type + if self.state is not None: + body["state"] = self.state.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the KnowledgeSource into a shallow dictionary of its immediate attributes.""" + body = {} + if self.create_time is not None: + body["create_time"] = self.create_time + if self.description is not None: + body["description"] = self.description + if self.display_name is not None: + body["display_name"] = self.display_name + 
if self.file_table: + body["file_table"] = self.file_table + if self.files: + body["files"] = self.files + if self.id is not None: + body["id"] = self.id + if self.index: + body["index"] = self.index + if self.knowledge_cutoff_time is not None: + body["knowledge_cutoff_time"] = self.knowledge_cutoff_time + if self.name is not None: + body["name"] = self.name + if self.source_type is not None: + body["source_type"] = self.source_type + if self.state is not None: + body["state"] = self.state + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> KnowledgeSource: + """Deserializes the KnowledgeSource from a dictionary.""" + return cls( + create_time=_timestamp(d, "create_time"), + description=d.get("description", None), + display_name=d.get("display_name", None), + file_table=_from_dict(d, "file_table", FileTableSpec), + files=_from_dict(d, "files", FilesSpec), + id=d.get("id", None), + index=_from_dict(d, "index", IndexSpec), + knowledge_cutoff_time=_timestamp(d, "knowledge_cutoff_time"), + name=d.get("name", None), + source_type=d.get("source_type", None), + state=_enum(d, "state", KnowledgeSourceState), + ) + + +class KnowledgeSourceState(Enum): + + FAILED_UPDATE = "FAILED_UPDATE" + UPDATED = "UPDATED" + UPDATING = "UPDATING" + + +@dataclass +class ListKnowledgeAssistantsResponse: + """A list of Knowledge Assistants.""" + + knowledge_assistants: Optional[List[KnowledgeAssistant]] = None + + next_page_token: Optional[str] = None + """A token that can be sent as `page_token` to retrieve the next page. 
If this field is omitted, + there are no subsequent pages.""" + + def as_dict(self) -> dict: + """Serializes the ListKnowledgeAssistantsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.knowledge_assistants: + body["knowledge_assistants"] = [v.as_dict() for v in self.knowledge_assistants] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListKnowledgeAssistantsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.knowledge_assistants: + body["knowledge_assistants"] = self.knowledge_assistants + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListKnowledgeAssistantsResponse: + """Deserializes the ListKnowledgeAssistantsResponse from a dictionary.""" + return cls( + knowledge_assistants=_repeated_dict(d, "knowledge_assistants", KnowledgeAssistant), + next_page_token=d.get("next_page_token", None), + ) + + +@dataclass +class ListKnowledgeSourcesResponse: + knowledge_sources: Optional[List[KnowledgeSource]] = None + + next_page_token: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the ListKnowledgeSourcesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.knowledge_sources: + body["knowledge_sources"] = [v.as_dict() for v in self.knowledge_sources] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListKnowledgeSourcesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.knowledge_sources: + body["knowledge_sources"] = self.knowledge_sources + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def 
from_dict(cls, d: Dict[str, Any]) -> ListKnowledgeSourcesResponse: + """Deserializes the ListKnowledgeSourcesResponse from a dictionary.""" + return cls( + knowledge_sources=_repeated_dict(d, "knowledge_sources", KnowledgeSource), + next_page_token=d.get("next_page_token", None), + ) + + +class KnowledgeAssistantsAPI: + """Manage Knowledge Assistants and related resources.""" + + def __init__(self, api_client): + self._api = api_client + + def create_knowledge_assistant(self, knowledge_assistant: KnowledgeAssistant) -> KnowledgeAssistant: + """Creates a Knowledge Assistant. + + :param knowledge_assistant: :class:`KnowledgeAssistant` + The Knowledge Assistant to create. + + :returns: :class:`KnowledgeAssistant` + """ + + body = knowledge_assistant.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("POST", "/api/2.1/knowledge-assistants", body=body, headers=headers) + return KnowledgeAssistant.from_dict(res) + + def create_knowledge_source(self, parent: str, knowledge_source: KnowledgeSource) -> KnowledgeSource: + """Creates a Knowledge Source under a Knowledge Assistant. + + :param parent: str + Parent resource where this source will be created. 
Format: + knowledge-assistants/{knowledge_assistant_id} + :param knowledge_source: :class:`KnowledgeSource` + + :returns: :class:`KnowledgeSource` + """ + + body = knowledge_source.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("POST", f"/api/2.1/{parent}/knowledge-sources", body=body, headers=headers) + return KnowledgeSource.from_dict(res) + + def delete_knowledge_assistant(self, name: str): + """Deletes a Knowledge Assistant. + + :param name: str + The resource name of the knowledge assistant to be deleted. Format: + knowledge-assistants/{knowledge_assistant_id} + + + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + self._api.do("DELETE", f"/api/2.1/{name}", headers=headers) + + def delete_knowledge_source(self, name: str): + """Deletes a Knowledge Source. + + :param name: str + The resource name of the Knowledge Source to delete. Format: + knowledge-assistants/{knowledge_assistant_id}/knowledge-sources/{knowledge_source_id} + + + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + self._api.do("DELETE", f"/api/2.1/{name}", headers=headers) + + def get_knowledge_assistant(self, name: str) -> KnowledgeAssistant: + """Gets a Knowledge Assistant. + + :param name: str + The resource name of the knowledge assistant. 
Format: knowledge-assistants/{knowledge_assistant_id} + + :returns: :class:`KnowledgeAssistant` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/2.1/{name}", headers=headers) + return KnowledgeAssistant.from_dict(res) + + def get_knowledge_source(self, name: str) -> KnowledgeSource: + """Gets a Knowledge Source. + + :param name: str + The resource name of the Knowledge Source. Format: + knowledge-assistants/{knowledge_assistant_id}/knowledge-sources/{knowledge_source_id} + + :returns: :class:`KnowledgeSource` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/2.1/{name}", headers=headers) + return KnowledgeSource.from_dict(res) + + def list_knowledge_assistants( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[KnowledgeAssistant]: + """List Knowledge Assistants + + :param page_size: int (optional) + The maximum number of knowledge assistants to return. If unspecified, at most 100 knowledge + assistants will be returned. The maximum value is 100; values above 100 will be coerced to 100. + :param page_token: str (optional) + A page token, received from a previous `ListKnowledgeAssistants` call. Provide this to retrieve the + subsequent page. If unspecified, the first page will be returned. 
+ + :returns: Iterator over :class:`KnowledgeAssistant` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + while True: + json = self._api.do("GET", "/api/2.1/knowledge-assistants", query=query, headers=headers) + if "knowledge_assistants" in json: + for v in json["knowledge_assistants"]: + yield KnowledgeAssistant.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list_knowledge_sources( + self, parent: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[KnowledgeSource]: + """Lists Knowledge Sources under a Knowledge Assistant. + + :param parent: str + Parent resource to list from. 
Format: knowledge-assistants/{knowledge_assistant_id} + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`KnowledgeSource` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + while True: + json = self._api.do("GET", f"/api/2.1/{parent}/knowledge-sources", query=query, headers=headers) + if "knowledge_sources" in json: + for v in json["knowledge_sources"]: + yield KnowledgeSource.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def sync_knowledge_sources(self, name: str): + """Sync all non-index Knowledge Sources for a Knowledge Assistant (index sources do not require sync) + + :param name: str + The resource name of the Knowledge Assistant. Format: knowledge-assistants/{knowledge_assistant_id} + + + """ + + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + self._api.do("POST", f"/api/2.1/{name}/knowledge-sources:sync", headers=headers) + + def update_knowledge_assistant( + self, name: str, knowledge_assistant: KnowledgeAssistant, update_mask: FieldMask + ) -> KnowledgeAssistant: + """Updates a Knowledge Assistant. + + :param name: str + The resource name of the Knowledge Assistant. Format: knowledge-assistants/{knowledge_assistant_id} + :param knowledge_assistant: :class:`KnowledgeAssistant` + The Knowledge Assistant update payload. Only fields listed in update_mask are updated. 
REQUIRED + annotations on Knowledge Assistant fields describe create-time requirements and do not mean all + those fields are required for update. + :param update_mask: FieldMask + Comma-delimited list of fields to update on the Knowledge Assistant. Allowed values: `display_name`, + `description`, `instructions`. Examples: - `display_name` - `description,instructions` + + :returns: :class:`KnowledgeAssistant` + """ + + body = knowledge_assistant.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask.ToJsonString() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("PATCH", f"/api/2.1/{name}", query=query, body=body, headers=headers) + return KnowledgeAssistant.from_dict(res) + + def update_knowledge_source( + self, name: str, knowledge_source: KnowledgeSource, update_mask: FieldMask + ) -> KnowledgeSource: + """Updates a Knowledge Source. + + :param name: str + The resource name of the Knowledge Source to update. Format: + knowledge-assistants/{knowledge_assistant_id}/knowledge-sources/{knowledge_source_id} + :param knowledge_source: :class:`KnowledgeSource` + The Knowledge Source update payload. Only fields listed in update_mask are updated. REQUIRED + annotations on Knowledge Source fields describe create-time requirements and do not mean all those + fields are required for update. + :param update_mask: FieldMask + Comma-delimited list of fields to update on the Knowledge Source. Allowed values: `display_name`, + `description`. 
Examples: - `display_name` - `display_name,description` + + :returns: :class:`KnowledgeSource` + """ + + body = knowledge_source.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask.ToJsonString() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("PATCH", f"/api/2.1/{name}", query=query, body=body, headers=headers) + return KnowledgeSource.from_dict(res) diff --git a/databricks/sdk/service/marketplace.py b/databricks/sdk/service/marketplace.py index e5f194a59..0aab79d93 100755 --- a/databricks/sdk/service/marketplace.py +++ b/databricks/sdk/service/marketplace.py @@ -117,6 +117,7 @@ class Category(Enum): LOOKUP_TABLES = "LOOKUP_TABLES" MANUFACTURING = "MANUFACTURING" MEDIA = "MEDIA" + OPEN_SOURCE = "OPEN_SOURCE" OTHER = "OTHER" PUBLIC_SECTOR = "PUBLIC_SECTOR" RETAIL = "RETAIL" diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py index 07edd4930..f5426573b 100755 --- a/databricks/sdk/service/ml.py +++ b/databricks/sdk/service/ml.py @@ -2089,6 +2089,31 @@ def from_dict(cls, d: Dict[str, Any]) -> GetLoggedModelResponse: return cls(model=_from_dict(d, "model", LoggedModel)) +@dataclass +class GetLoggedModelsRequestResponse: + models: Optional[List[LoggedModel]] = None + """The retrieved logged models.""" + + def as_dict(self) -> dict: + """Serializes the GetLoggedModelsRequestResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.models: + body["models"] = [v.as_dict() for v in self.models] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GetLoggedModelsRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.models: + body["models"] = self.models + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) 
-> GetLoggedModelsRequestResponse: + """Deserializes the GetLoggedModelsRequestResponse from a dictionary.""" + return cls(models=_repeated_dict(d, "models", LoggedModel)) + + @dataclass class GetMetricHistoryResponse: metrics: Optional[List[Metric]] = None @@ -6570,6 +6595,29 @@ def get_logged_model(self, model_id: str) -> GetLoggedModelResponse: res = self._api.do("GET", f"/api/2.0/mlflow/logged-models/{model_id}", headers=headers) return GetLoggedModelResponse.from_dict(res) + def get_logged_models(self, *, model_ids: Optional[List[str]] = None) -> GetLoggedModelsRequestResponse: + """Batch endpoint for getting logged models from a list of model IDs + + :param model_ids: List[str] (optional) + The IDs of the logged models to retrieve. Max threshold is 100. + + :returns: :class:`GetLoggedModelsRequestResponse` + """ + + query = {} + if model_ids is not None: + query["model_ids"] = [v for v in model_ids] + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", "/api/2.0/mlflow/logged-models:batchGet", query=query, headers=headers) + return GetLoggedModelsRequestResponse.from_dict(res) + def get_permission_levels(self, experiment_id: str) -> GetExperimentPermissionLevelsResponse: """Gets the permission levels that a user can have on an object. 
diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index 744dcdd4a..716e9dcbd 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -23,6 +23,24 @@ # all definitions in this file are in alphabetical order +@dataclass +class ApplyEnvironmentRequestResponse: + def as_dict(self) -> dict: + """Serializes the ApplyEnvironmentRequestResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ApplyEnvironmentRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ApplyEnvironmentRequestResponse: + """Deserializes the ApplyEnvironmentRequestResponse from a dictionary.""" + return cls() + + @dataclass class AutoFullRefreshPolicy: """Policy for auto full refresh.""" @@ -117,6 +135,71 @@ def from_dict(cls, d: Dict[str, Any]) -> ConnectionParameters: return cls(source_catalog=d.get("source_catalog", None)) +@dataclass +class ConnectorOptions: + """Wrapper message for source-specific options to support multiple connector types""" + + gdrive_options: Optional[GoogleDriveOptions] = None + + google_ads_options: Optional[GoogleAdsOptions] = None + + outlook_options: Optional[OutlookOptions] = None + + sharepoint_options: Optional[SharepointOptions] = None + + tiktok_ads_options: Optional[TikTokAdsOptions] = None + + def as_dict(self) -> dict: + """Serializes the ConnectorOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.gdrive_options: + body["gdrive_options"] = self.gdrive_options.as_dict() + if self.google_ads_options: + body["google_ads_options"] = self.google_ads_options.as_dict() + if self.outlook_options: + body["outlook_options"] = self.outlook_options.as_dict() + if self.sharepoint_options: + body["sharepoint_options"] = self.sharepoint_options.as_dict() + if 
self.tiktok_ads_options: + body["tiktok_ads_options"] = self.tiktok_ads_options.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ConnectorOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.gdrive_options: + body["gdrive_options"] = self.gdrive_options + if self.google_ads_options: + body["google_ads_options"] = self.google_ads_options + if self.outlook_options: + body["outlook_options"] = self.outlook_options + if self.sharepoint_options: + body["sharepoint_options"] = self.sharepoint_options + if self.tiktok_ads_options: + body["tiktok_ads_options"] = self.tiktok_ads_options + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ConnectorOptions: + """Deserializes the ConnectorOptions from a dictionary.""" + return cls( + gdrive_options=_from_dict(d, "gdrive_options", GoogleDriveOptions), + google_ads_options=_from_dict(d, "google_ads_options", GoogleAdsOptions), + outlook_options=_from_dict(d, "outlook_options", OutlookOptions), + sharepoint_options=_from_dict(d, "sharepoint_options", SharepointOptions), + tiktok_ads_options=_from_dict(d, "tiktok_ads_options", TikTokAdsOptions), + ) + + +class ConnectorType(Enum): + """For certain database sources LakeFlow Connect offers both query based and cdc ingestion, + ConnectorType can be used to convey the type of ingestion.
If connection_name is provided for + database sources, we default to Query Based ingestion""" + + CDC = "CDC" + QUERY_BASED = "QUERY_BASED" + + @dataclass class CreatePipelineResponse: effective_settings: Optional[PipelineSpec] = None @@ -213,6 +296,55 @@ def from_dict(cls, d: Dict[str, Any]) -> DataPlaneId: return cls(instance=d.get("instance", None), seq_no=d.get("seq_no", None)) +@dataclass +class DataStagingOptions: + """Location of staged data storage""" + + catalog_name: str + """(Required, Immutable) The name of the catalog for the connector's staging storage location.""" + + schema_name: str + """(Required, Immutable) The name of the schema for the connector's staging storage location.""" + + volume_name: Optional[str] = None + """(Optional) The Unity Catalog-compatible name for the storage location. This is the volume to use + for the data that is extracted by the connector. Spark Declarative Pipelines system will + automatically create the volume under the catalog and schema. For Combined Cdc Managed Ingestion + pipelines default name for the volume would be : + __databricks_ingestion_gateway_staging_data-$pipelineId""" + + def as_dict(self) -> dict: + """Serializes the DataStagingOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.volume_name is not None: + body["volume_name"] = self.volume_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DataStagingOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.volume_name is not None: + body["volume_name"] = self.volume_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> 
DataStagingOptions: + """Deserializes the DataStagingOptions from a dictionary.""" + return cls( + catalog_name=d.get("catalog_name", None), + schema_name=d.get("schema_name", None), + volume_name=d.get("volume_name", None), + ) + + class DayOfWeek(Enum): """Days of week in which the window is allowed to happen. If not specified all days of the week will be used.""" @@ -351,6 +483,182 @@ def from_dict(cls, d: Dict[str, Any]) -> EventLogSpec: return cls(catalog=d.get("catalog", None), name=d.get("name", None), schema=d.get("schema", None)) +@dataclass +class FileFilter: + modified_after: Optional[str] = None + """Include files with modification times occurring after the specified time. Timestamp format: + YYYY-MM-DDTHH:mm:ss (e.g. 2020-06-01T13:00:00) Based on + https://spark.apache.org/docs/latest/sql-data-sources-generic-options.html#modification-time-path-filters""" + + modified_before: Optional[str] = None + """Include files with modification times occurring before the specified time. Timestamp format: + YYYY-MM-DDTHH:mm:ss (e.g. 
2020-06-01T13:00:00) Based on + https://spark.apache.org/docs/latest/sql-data-sources-generic-options.html#modification-time-path-filters""" + + path_filter: Optional[str] = None + """Include files with file names matching the pattern Based on + https://spark.apache.org/docs/latest/sql-data-sources-generic-options.html#path-glob-filter""" + + def as_dict(self) -> dict: + """Serializes the FileFilter into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.modified_after is not None: + body["modified_after"] = self.modified_after + if self.modified_before is not None: + body["modified_before"] = self.modified_before + if self.path_filter is not None: + body["path_filter"] = self.path_filter + return body + + def as_shallow_dict(self) -> dict: + """Serializes the FileFilter into a shallow dictionary of its immediate attributes.""" + body = {} + if self.modified_after is not None: + body["modified_after"] = self.modified_after + if self.modified_before is not None: + body["modified_before"] = self.modified_before + if self.path_filter is not None: + body["path_filter"] = self.path_filter + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> FileFilter: + """Deserializes the FileFilter from a dictionary.""" + return cls( + modified_after=d.get("modified_after", None), + modified_before=d.get("modified_before", None), + path_filter=d.get("path_filter", None), + ) + + +@dataclass +class FileIngestionOptions: + corrupt_record_column: Optional[str] = None + + file_filters: Optional[List[FileFilter]] = None + """Generic options""" + + format: Optional[FileIngestionOptionsFileFormat] = None + """required for TableSpec""" + + format_options: Optional[Dict[str, str]] = None + """Format-specific options Based on + https://docs.databricks.com/aws/en/ingestion/cloud-object-storage/auto-loader/options#file-format-options""" + + ignore_corrupt_files: Optional[bool] = None + + infer_column_types: Optional[bool] = None + + 
reader_case_sensitive: Optional[bool] = None + """Column name case sensitivity + https://docs.databricks.com/aws/en/ingestion/cloud-object-storage/auto-loader/schema#change-case-sensitive-behavior""" + + rescued_data_column: Optional[str] = None + + schema_evolution_mode: Optional[FileIngestionOptionsSchemaEvolutionMode] = None + + schema_hints: Optional[str] = None + """Override inferred schema of specific columns Based on + https://docs.databricks.com/aws/en/ingestion/cloud-object-storage/auto-loader/schema#override-schema-inference-with-schema-hints""" + + single_variant_column: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the FileIngestionOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.corrupt_record_column is not None: + body["corrupt_record_column"] = self.corrupt_record_column + if self.file_filters: + body["file_filters"] = [v.as_dict() for v in self.file_filters] + if self.format is not None: + body["format"] = self.format.value + if self.format_options: + body["format_options"] = self.format_options + if self.ignore_corrupt_files is not None: + body["ignore_corrupt_files"] = self.ignore_corrupt_files + if self.infer_column_types is not None: + body["infer_column_types"] = self.infer_column_types + if self.reader_case_sensitive is not None: + body["reader_case_sensitive"] = self.reader_case_sensitive + if self.rescued_data_column is not None: + body["rescued_data_column"] = self.rescued_data_column + if self.schema_evolution_mode is not None: + body["schema_evolution_mode"] = self.schema_evolution_mode.value + if self.schema_hints is not None: + body["schema_hints"] = self.schema_hints + if self.single_variant_column is not None: + body["single_variant_column"] = self.single_variant_column + return body + + def as_shallow_dict(self) -> dict: + """Serializes the FileIngestionOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.corrupt_record_column is not 
None: + body["corrupt_record_column"] = self.corrupt_record_column + if self.file_filters: + body["file_filters"] = self.file_filters + if self.format is not None: + body["format"] = self.format + if self.format_options: + body["format_options"] = self.format_options + if self.ignore_corrupt_files is not None: + body["ignore_corrupt_files"] = self.ignore_corrupt_files + if self.infer_column_types is not None: + body["infer_column_types"] = self.infer_column_types + if self.reader_case_sensitive is not None: + body["reader_case_sensitive"] = self.reader_case_sensitive + if self.rescued_data_column is not None: + body["rescued_data_column"] = self.rescued_data_column + if self.schema_evolution_mode is not None: + body["schema_evolution_mode"] = self.schema_evolution_mode + if self.schema_hints is not None: + body["schema_hints"] = self.schema_hints + if self.single_variant_column is not None: + body["single_variant_column"] = self.single_variant_column + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> FileIngestionOptions: + """Deserializes the FileIngestionOptions from a dictionary.""" + return cls( + corrupt_record_column=d.get("corrupt_record_column", None), + file_filters=_repeated_dict(d, "file_filters", FileFilter), + format=_enum(d, "format", FileIngestionOptionsFileFormat), + format_options=d.get("format_options", None), + ignore_corrupt_files=d.get("ignore_corrupt_files", None), + infer_column_types=d.get("infer_column_types", None), + reader_case_sensitive=d.get("reader_case_sensitive", None), + rescued_data_column=d.get("rescued_data_column", None), + schema_evolution_mode=_enum(d, "schema_evolution_mode", FileIngestionOptionsSchemaEvolutionMode), + schema_hints=d.get("schema_hints", None), + single_variant_column=d.get("single_variant_column", None), + ) + + +class FileIngestionOptionsFileFormat(Enum): + + AVRO = "AVRO" + BINARYFILE = "BINARYFILE" + CSV = "CSV" + EXCEL = "EXCEL" + JSON = "JSON" + ORC = "ORC" + PARQUET = "PARQUET" + 
XML = "XML" + + +class FileIngestionOptionsSchemaEvolutionMode(Enum): + """Based on + https://docs.databricks.com/aws/en/ingestion/cloud-object-storage/auto-loader/schema#how-does-auto-loader-schema-evolution-work + """ + + ADD_NEW_COLUMNS = "ADD_NEW_COLUMNS" + ADD_NEW_COLUMNS_WITH_TYPE_WIDENING = "ADD_NEW_COLUMNS_WITH_TYPE_WIDENING" + FAIL_ON_NEW_COLUMNS = "FAIL_ON_NEW_COLUMNS" + NONE = "NONE" + RESCUE = "RESCUE" + + @dataclass class FileLibrary: path: Optional[str] = None @@ -450,6 +758,9 @@ class GetPipelineResponse: effective_publishing_mode: Optional[PublishingMode] = None """Publishing mode of the pipeline""" + effective_usage_policy_id: Optional[str] = None + """Serverless usage policy ID of the pipeline.""" + health: Optional[GetPipelineResponseHealth] = None """The health of a pipeline.""" @@ -492,6 +803,8 @@ def as_dict(self) -> dict: body["effective_budget_policy_id"] = self.effective_budget_policy_id if self.effective_publishing_mode is not None: body["effective_publishing_mode"] = self.effective_publishing_mode.value + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.health is not None: body["health"] = self.health.value if self.last_modified is not None: @@ -525,6 +838,8 @@ def as_shallow_dict(self) -> dict: body["effective_budget_policy_id"] = self.effective_budget_policy_id if self.effective_publishing_mode is not None: body["effective_publishing_mode"] = self.effective_publishing_mode + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.health is not None: body["health"] = self.health if self.last_modified is not None: @@ -554,6 +869,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GetPipelineResponse: creator_user_name=d.get("creator_user_name", None), effective_budget_policy_id=d.get("effective_budget_policy_id", None), effective_publishing_mode=_enum(d, "effective_publishing_mode", PublishingMode), + 
effective_usage_policy_id=d.get("effective_usage_policy_id", None), health=_enum(d, "health", GetPipelineResponseHealth), last_modified=d.get("last_modified", None), latest_updates=_repeated_dict(d, "latest_updates", UpdateStateInfo), @@ -598,6 +914,104 @@ def from_dict(cls, d: Dict[str, Any]) -> GetUpdateResponse: return cls(update=_from_dict(d, "update", UpdateInfo)) +@dataclass +class GoogleAdsOptions: + """Google Ads specific options for ingestion""" + + manager_account_id: str + """(Required) Manager Account ID (also called MCC Account ID) used to list and access customer + accounts under this manager account. This is required for fetching the list of customer accounts + during source selection.""" + + lookback_window_days: Optional[int] = None + """(Optional) Number of days to look back for report tables to capture late-arriving data. If not + specified, defaults to 30 days.""" + + sync_start_date: Optional[str] = None + """(Optional) Start date for the initial sync of report tables in YYYY-MM-DD format. This + determines the earliest date from which to sync historical data. 
If not specified, defaults to 2 + years of historical data.""" + + def as_dict(self) -> dict: + """Serializes the GoogleAdsOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.lookback_window_days is not None: + body["lookback_window_days"] = self.lookback_window_days + if self.manager_account_id is not None: + body["manager_account_id"] = self.manager_account_id + if self.sync_start_date is not None: + body["sync_start_date"] = self.sync_start_date + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GoogleAdsOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.lookback_window_days is not None: + body["lookback_window_days"] = self.lookback_window_days + if self.manager_account_id is not None: + body["manager_account_id"] = self.manager_account_id + if self.sync_start_date is not None: + body["sync_start_date"] = self.sync_start_date + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GoogleAdsOptions: + """Deserializes the GoogleAdsOptions from a dictionary.""" + return cls( + lookback_window_days=d.get("lookback_window_days", None), + manager_account_id=d.get("manager_account_id", None), + sync_start_date=d.get("sync_start_date", None), + ) + + +@dataclass +class GoogleDriveOptions: + entity_type: Optional[GoogleDriveOptionsGoogleDriveEntityType] = None + + file_ingestion_options: Optional[FileIngestionOptions] = None + + url: Optional[str] = None + """Required. 
Google Drive URL.""" + + def as_dict(self) -> dict: + """Serializes the GoogleDriveOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.entity_type is not None: + body["entity_type"] = self.entity_type.value + if self.file_ingestion_options: + body["file_ingestion_options"] = self.file_ingestion_options.as_dict() + if self.url is not None: + body["url"] = self.url + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GoogleDriveOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.entity_type is not None: + body["entity_type"] = self.entity_type + if self.file_ingestion_options: + body["file_ingestion_options"] = self.file_ingestion_options + if self.url is not None: + body["url"] = self.url + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GoogleDriveOptions: + """Deserializes the GoogleDriveOptions from a dictionary.""" + return cls( + entity_type=_enum(d, "entity_type", GoogleDriveOptionsGoogleDriveEntityType), + file_ingestion_options=_from_dict(d, "file_ingestion_options", FileIngestionOptions), + url=d.get("url", None), + ) + + +class GoogleDriveOptionsGoogleDriveEntityType(Enum): + + FILE = "FILE" + FILE_METADATA = "FILE_METADATA" + PERMISSION = "PERMISSION" + + @dataclass class IngestionConfig: report: Optional[ReportSpec] = None @@ -724,6 +1138,15 @@ class IngestionPipelineDefinition: ingestion_gateway_id to change the connector to Cdc Managed Ingestion Pipeline with Gateway pipeline.""" + connector_type: Optional[ConnectorType] = None + """(Optional) Connector Type for sources. Ex: CDC, Query Based.""" + + data_staging_options: Optional[DataStagingOptions] = None + """(Optional) Location of staged data storage. This is required for migration from Cdc Managed + Ingestion Pipeline with Gateway pipeline to Combined Cdc Managed Ingestion Pipeline. 
If not + specified, the volume for staged data will be created in catalog and schema/target specified in + the top level pipeline definition.""" + full_refresh_window: Optional[OperationTimeWindow] = None """(Optional) A window that specifies a set of time ranges for snapshot queries in CDC.""" @@ -761,6 +1184,10 @@ def as_dict(self) -> dict: body = {} if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.connector_type is not None: + body["connector_type"] = self.connector_type.value + if self.data_staging_options: + body["data_staging_options"] = self.data_staging_options.as_dict() if self.full_refresh_window: body["full_refresh_window"] = self.full_refresh_window.as_dict() if self.ingest_from_uc_foreign_catalog is not None: @@ -784,6 +1211,10 @@ def as_shallow_dict(self) -> dict: body = {} if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.connector_type is not None: + body["connector_type"] = self.connector_type + if self.data_staging_options: + body["data_staging_options"] = self.data_staging_options if self.full_refresh_window: body["full_refresh_window"] = self.full_refresh_window if self.ingest_from_uc_foreign_catalog is not None: @@ -807,6 +1238,8 @@ def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinition: """Deserializes the IngestionPipelineDefinition from a dictionary.""" return cls( connection_name=d.get("connection_name", None), + connector_type=_enum(d, "connector_type", ConnectorType), + data_staging_options=_from_dict(d, "data_staging_options", DataStagingOptions), full_refresh_window=_from_dict(d, "full_refresh_window", OperationTimeWindow), ingest_from_uc_foreign_catalog=d.get("ingest_from_uc_foreign_catalog", None), ingestion_gateway_id=d.get("ingestion_gateway_id", None), @@ -818,6 +1251,56 @@ def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinition: ) +@dataclass +class IngestionPipelineDefinitionConfluenceOptions: + 
include_confluence_spaces: Optional[List[str]] = None + """(Optional) Spaces to filter confluence data on""" + + def as_dict(self) -> dict: + """Serializes the IngestionPipelineDefinitionConfluenceOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.include_confluence_spaces: + body["include_confluence_spaces"] = [v for v in self.include_confluence_spaces] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the IngestionPipelineDefinitionConfluenceOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.include_confluence_spaces: + body["include_confluence_spaces"] = self.include_confluence_spaces + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinitionConfluenceOptions: + """Deserializes the IngestionPipelineDefinitionConfluenceOptions from a dictionary.""" + return cls(include_confluence_spaces=d.get("include_confluence_spaces", None)) + + +@dataclass +class IngestionPipelineDefinitionJiraOptions: + include_jira_spaces: Optional[List[str]] = None + """(Optional) Projects/spaces to filter jira data on""" + + def as_dict(self) -> dict: + """Serializes the IngestionPipelineDefinitionJiraOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.include_jira_spaces: + body["include_jira_spaces"] = [v for v in self.include_jira_spaces] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the IngestionPipelineDefinitionJiraOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.include_jira_spaces: + body["include_jira_spaces"] = self.include_jira_spaces + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinitionJiraOptions: + """Deserializes the IngestionPipelineDefinitionJiraOptions from a dictionary.""" + return cls(include_jira_spaces=d.get("include_jira_spaces", None)) + + @dataclass class 
IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig: """Configurations that are only applicable for query-based ingestion connectors.""" @@ -964,21 +1447,54 @@ def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinitionWorkdayRepor class IngestionSourceType(Enum): + ADOBE_CAMPAIGNS = "ADOBE_CAMPAIGNS" + AKAMAI_WAF = "AKAMAI_WAF" BIGQUERY = "BIGQUERY" + BING_ADS = "BING_ADS" + CONFLUENCE = "CONFLUENCE" + CROWDSTRIKE_EVENT_STREAM = "CROWDSTRIKE_EVENT_STREAM" DYNAMICS365 = "DYNAMICS365" FOREIGN_CATALOG = "FOREIGN_CATALOG" GA4_RAW_DATA = "GA4_RAW_DATA" + GITHUB = "GITHUB" + GOOGLE_ADS = "GOOGLE_ADS" + GOOGLE_SEARCH_CONSOLE = "GOOGLE_SEARCH_CONSOLE" + GUIDEWIRE = "GUIDEWIRE" + HUBSPOT = "HUBSPOT" + LINKEDIN_ADS = "LINKEDIN_ADS" + M365_AUDIT_LOGS = "M365_AUDIT_LOGS" MANAGED_POSTGRESQL = "MANAGED_POSTGRESQL" + META_MARKETING = "META_MARKETING" + MICROSOFT_TEAMS = "MICROSOFT_TEAMS" MYSQL = "MYSQL" NETSUITE = "NETSUITE" + OKTA_SYSTEM_LOGS = "OKTA_SYSTEM_LOGS" + ONE_PASSWORD_EVENT_LOGS = "ONE_PASSWORD_EVENT_LOGS" ORACLE = "ORACLE" + OUTLOOK = "OUTLOOK" + PINTEREST_ADS = "PINTEREST_ADS" POSTGRESQL = "POSTGRESQL" + PROOFPOINT_SIEM = "PROOFPOINT_SIEM" + REDDIT_ADS = "REDDIT_ADS" + REDSHIFT = "REDSHIFT" SALESFORCE = "SALESFORCE" + SALESFORCE_MARKETING_CLOUD = "SALESFORCE_MARKETING_CLOUD" SERVICENOW = "SERVICENOW" SHAREPOINT = "SHAREPOINT" + SLACK_AUDIT_LOGS = "SLACK_AUDIT_LOGS" + SMARTSHEET = "SMARTSHEET" + SQLDW = "SQLDW" SQLSERVER = "SQLSERVER" TERADATA = "TERADATA" + TIKTOK_ADS = "TIKTOK_ADS" + VEEVA = "VEEVA" + VEEVA_VAULT = "VEEVA_VAULT" + WIZ_AUDIT_LOGS = "WIZ_AUDIT_LOGS" + WORKDAY_ACTIVITY_LOGGING = "WORKDAY_ACTIVITY_LOGGING" + WORKDAY_HCM = "WORKDAY_HCM" WORKDAY_RAAS = "WORKDAY_RAAS" + X_ADS = "X_ADS" + ZENDESK = "ZENDESK" @dataclass @@ -1258,9 +1774,31 @@ class Origin: flow_name: Optional[str] = None """The name of the flow. 
Not unique.""" + graph_id: Optional[str] = None + """The UUID of the graph associated with this event, corresponding to a GRAPH_UPDATED event.""" + host: Optional[str] = None + """The optional host name where the event was triggered""" + ingestion_source_catalog_name: Optional[str] = None + """The name of the source catalog (if known) whose data ingestion is described by this + event.""" + ingestion_source_connection_name: Optional[str] = None + """The name of the source UC connection (if known) whose data ingestion is described by this + event.""" + ingestion_source_schema_name: Optional[str] = None + """The name of the source schema (if known) whose data ingestion is described by this + event.""" + ingestion_source_table_name: Optional[str] = None + """The name of the source table (if known) whose data ingestion is described by this + event.""" + ingestion_source_table_version: Optional[str] = None + """An optional implementation-defined source table version of a dataset being (re)ingested.""" + maintenance_id: Optional[str] = None + """The id of a maintenance run. 
Globally unique.""" @@ -1306,8 +1844,20 @@ def as_dict(self) -> dict: body["flow_id"] = self.flow_id if self.flow_name is not None: body["flow_name"] = self.flow_name + if self.graph_id is not None: + body["graph_id"] = self.graph_id if self.host is not None: body["host"] = self.host + if self.ingestion_source_catalog_name is not None: + body["ingestion_source_catalog_name"] = self.ingestion_source_catalog_name + if self.ingestion_source_connection_name is not None: + body["ingestion_source_connection_name"] = self.ingestion_source_connection_name + if self.ingestion_source_schema_name is not None: + body["ingestion_source_schema_name"] = self.ingestion_source_schema_name + if self.ingestion_source_table_name is not None: + body["ingestion_source_table_name"] = self.ingestion_source_table_name + if self.ingestion_source_table_version is not None: + body["ingestion_source_table_version"] = self.ingestion_source_table_version if self.maintenance_id is not None: body["maintenance_id"] = self.maintenance_id if self.materialization_name is not None: @@ -1345,8 +1895,20 @@ def as_shallow_dict(self) -> dict: body["flow_id"] = self.flow_id if self.flow_name is not None: body["flow_name"] = self.flow_name + if self.graph_id is not None: + body["graph_id"] = self.graph_id if self.host is not None: body["host"] = self.host + if self.ingestion_source_catalog_name is not None: + body["ingestion_source_catalog_name"] = self.ingestion_source_catalog_name + if self.ingestion_source_connection_name is not None: + body["ingestion_source_connection_name"] = self.ingestion_source_connection_name + if self.ingestion_source_schema_name is not None: + body["ingestion_source_schema_name"] = self.ingestion_source_schema_name + if self.ingestion_source_table_name is not None: + body["ingestion_source_table_name"] = self.ingestion_source_table_name + if self.ingestion_source_table_version is not None: + body["ingestion_source_table_version"] = self.ingestion_source_table_version if 
self.maintenance_id is not None: body["maintenance_id"] = self.maintenance_id if self.materialization_name is not None: @@ -1379,7 +1941,13 @@ def from_dict(cls, d: Dict[str, Any]) -> Origin: dataset_name=d.get("dataset_name", None), flow_id=d.get("flow_id", None), flow_name=d.get("flow_name", None), + graph_id=d.get("graph_id", None), host=d.get("host", None), + ingestion_source_catalog_name=d.get("ingestion_source_catalog_name", None), + ingestion_source_connection_name=d.get("ingestion_source_connection_name", None), + ingestion_source_schema_name=d.get("ingestion_source_schema_name", None), + ingestion_source_table_name=d.get("ingestion_source_table_name", None), + ingestion_source_table_version=d.get("ingestion_source_table_version", None), maintenance_id=d.get("maintenance_id", None), materialization_name=d.get("materialization_name", None), org_id=d.get("org_id", None), @@ -1393,6 +1961,66 @@ def from_dict(cls, d: Dict[str, Any]) -> Origin: ) +@dataclass +class OutlookOptions: + """Outlook specific options for ingestion""" + + folder_filter: Optional[List[str]] = None + """(Optional) Filter mail folders to include in the sync. If not specified, all folders will be + synced. Examples: Inbox, Sent Items, Custom_Folder""" + + sender_filter: Optional[List[str]] = None + """(Optional) Filter emails by sender address. Uses contains matching (substring). Wildcards are + not supported. Examples: vendor.com, alerts@system.io, noreply@ If not specified, emails from + all senders will be synced.""" + + start_date: Optional[str] = None + """(Optional) Start date for the initial sync in ISO 8601 format. Format: YYYY-MM-DDTHH:MM:SSZ + (e.g., 2024-01-01T00:00:00Z) This determines the earliest date from which to sync historical + data. If not specified, complete history is ingested.""" + + subject_filter: Optional[List[str]] = None + """(Optional) Filter emails by subject line. Uses contains matching (substring). Wildcards are not + supported. 
Examples: Support Ticket, Invoice, URGENT If not specified, emails with all subjects + will be synced.""" + + def as_dict(self) -> dict: + """Serializes the OutlookOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.folder_filter: + body["folder_filter"] = [v for v in self.folder_filter] + if self.sender_filter: + body["sender_filter"] = [v for v in self.sender_filter] + if self.start_date is not None: + body["start_date"] = self.start_date + if self.subject_filter: + body["subject_filter"] = [v for v in self.subject_filter] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the OutlookOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.folder_filter: + body["folder_filter"] = self.folder_filter + if self.sender_filter: + body["sender_filter"] = self.sender_filter + if self.start_date is not None: + body["start_date"] = self.start_date + if self.subject_filter: + body["subject_filter"] = self.subject_filter + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> OutlookOptions: + """Deserializes the OutlookOptions from a dictionary.""" + return cls( + folder_filter=d.get("folder_filter", None), + sender_filter=d.get("sender_filter", None), + start_date=d.get("start_date", None), + subject_filter=d.get("subject_filter", None), + ) + + @dataclass class PathPattern: include: Optional[str] = None @@ -2768,6 +3396,24 @@ def from_dict(cls, d: Dict[str, Any]) -> RestartWindow: ) +@dataclass +class RestorePipelineRequestResponse: + def as_dict(self) -> dict: + """Serializes the RestorePipelineRequestResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RestorePipelineRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RestorePipelineRequestResponse: + 
"""Deserializes the RestorePipelineRequestResponse from a dictionary.""" + return cls() + + @dataclass class RewindDatasetSpec: """Configuration for rewinding a specific dataset.""" @@ -2912,6 +3558,13 @@ class SchemaSpec: are created in this destination schema. The pipeline fails If a table with the same name already exists.""" + confluence_options: Optional[IngestionPipelineDefinitionConfluenceOptions] = None + + connector_options: Optional[ConnectorOptions] = None + """(Optional) Source Specific Connector Options""" + + jira_options: Optional[IngestionPipelineDefinitionJiraOptions] = None + source_catalog: Optional[str] = None """The source catalog name. Might be optional depending on the type of source.""" @@ -2923,10 +3576,16 @@ class SchemaSpec: def as_dict(self) -> dict: """Serializes the SchemaSpec into a dictionary suitable for use as a JSON request body.""" body = {} + if self.confluence_options: + body["confluence_options"] = self.confluence_options.as_dict() + if self.connector_options: + body["connector_options"] = self.connector_options.as_dict() if self.destination_catalog is not None: body["destination_catalog"] = self.destination_catalog if self.destination_schema is not None: body["destination_schema"] = self.destination_schema + if self.jira_options: + body["jira_options"] = self.jira_options.as_dict() if self.source_catalog is not None: body["source_catalog"] = self.source_catalog if self.source_schema is not None: @@ -2938,10 +3597,16 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the SchemaSpec into a shallow dictionary of its immediate attributes.""" body = {} + if self.confluence_options: + body["confluence_options"] = self.confluence_options + if self.connector_options: + body["connector_options"] = self.connector_options if self.destination_catalog is not None: body["destination_catalog"] = self.destination_catalog if self.destination_schema is not None: body["destination_schema"] = self.destination_schema + 
if self.jira_options: + body["jira_options"] = self.jira_options if self.source_catalog is not None: body["source_catalog"] = self.source_catalog if self.source_schema is not None: @@ -2954,8 +3619,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> SchemaSpec: """Deserializes the SchemaSpec from a dictionary.""" return cls( + confluence_options=_from_dict(d, "confluence_options", IngestionPipelineDefinitionConfluenceOptions), + connector_options=_from_dict(d, "connector_options", ConnectorOptions), destination_catalog=d.get("destination_catalog", None), destination_schema=d.get("destination_schema", None), + jira_options=_from_dict(d, "jira_options", IngestionPipelineDefinitionJiraOptions), source_catalog=d.get("source_catalog", None), source_schema=d.get("source_schema", None), table_configuration=_from_dict(d, "table_configuration", TableSpecificConfig), @@ -3040,6 +3708,57 @@ def from_dict(cls, d: Dict[str, Any]) -> SerializedException: ) +@dataclass +class SharepointOptions: + entity_type: Optional[SharepointOptionsSharepointEntityType] = None + """(Optional) The type of SharePoint entity to ingest. If not specified, defaults to FILE.""" + + file_ingestion_options: Optional[FileIngestionOptions] = None + """(Optional) File ingestion options for processing files.""" + + url: Optional[str] = None + """Required. 
The SharePoint URL.""" + + def as_dict(self) -> dict: + """Serializes the SharepointOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.entity_type is not None: + body["entity_type"] = self.entity_type.value + if self.file_ingestion_options: + body["file_ingestion_options"] = self.file_ingestion_options.as_dict() + if self.url is not None: + body["url"] = self.url + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SharepointOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.entity_type is not None: + body["entity_type"] = self.entity_type + if self.file_ingestion_options: + body["file_ingestion_options"] = self.file_ingestion_options + if self.url is not None: + body["url"] = self.url + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SharepointOptions: + """Deserializes the SharepointOptions from a dictionary.""" + return cls( + entity_type=_enum(d, "entity_type", SharepointOptionsSharepointEntityType), + file_ingestion_options=_from_dict(d, "file_ingestion_options", FileIngestionOptions), + url=d.get("url", None), + ) + + +class SharepointOptionsSharepointEntityType(Enum): + + FILE = "FILE" + FILE_METADATA = "FILE_METADATA" + LIST = "LIST" + PERMISSION = "PERMISSION" + + @dataclass class SourceCatalogConfig: """SourceCatalogConfig contains catalog-level custom configuration parameters for each source""" @@ -3217,10 +3936,17 @@ class TableSpec: destination_schema: str """Required. Destination schema to store table.""" + confluence_options: Optional[IngestionPipelineDefinitionConfluenceOptions] = None + + connector_options: Optional[ConnectorOptions] = None + """(Optional) Source Specific Connector Options""" + destination_table: Optional[str] = None """Optional. Destination table name. The pipeline fails if a table with that name already exists. 
If not set, the source table name is used.""" + jira_options: Optional[IngestionPipelineDefinitionJiraOptions] = None + source_catalog: Optional[str] = None """Source catalog name. Might be optional depending on the type of source.""" @@ -3234,12 +3960,18 @@ class TableSpec: def as_dict(self) -> dict: """Serializes the TableSpec into a dictionary suitable for use as a JSON request body.""" body = {} + if self.confluence_options: + body["confluence_options"] = self.confluence_options.as_dict() + if self.connector_options: + body["connector_options"] = self.connector_options.as_dict() if self.destination_catalog is not None: body["destination_catalog"] = self.destination_catalog if self.destination_schema is not None: body["destination_schema"] = self.destination_schema if self.destination_table is not None: body["destination_table"] = self.destination_table + if self.jira_options: + body["jira_options"] = self.jira_options.as_dict() if self.source_catalog is not None: body["source_catalog"] = self.source_catalog if self.source_schema is not None: @@ -3253,12 +3985,18 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the TableSpec into a shallow dictionary of its immediate attributes.""" body = {} + if self.confluence_options: + body["confluence_options"] = self.confluence_options + if self.connector_options: + body["connector_options"] = self.connector_options if self.destination_catalog is not None: body["destination_catalog"] = self.destination_catalog if self.destination_schema is not None: body["destination_schema"] = self.destination_schema if self.destination_table is not None: body["destination_table"] = self.destination_table + if self.jira_options: + body["jira_options"] = self.jira_options if self.source_catalog is not None: body["source_catalog"] = self.source_catalog if self.source_schema is not None: @@ -3273,9 +4011,12 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> TableSpec: """Deserializes 
the TableSpec from a dictionary.""" return cls( + confluence_options=_from_dict(d, "confluence_options", IngestionPipelineDefinitionConfluenceOptions), + connector_options=_from_dict(d, "connector_options", ConnectorOptions), destination_catalog=d.get("destination_catalog", None), destination_schema=d.get("destination_schema", None), destination_table=d.get("destination_table", None), + jira_options=_from_dict(d, "jira_options", IngestionPipelineDefinitionJiraOptions), source_catalog=d.get("source_catalog", None), source_schema=d.get("source_schema", None), source_table=d.get("source_table", None), @@ -3292,6 +4033,20 @@ class TableSpecificConfig: "auto_full_refresh_policy": { "enabled": true, "min_interval_hours": 23, } } If unspecified, auto full refresh is disabled.""" + clustering_columns: Optional[List[str]] = None + """List of column names to use for clustering the destination table. When specified, the + destination Delta table will be clustered by these columns. This can improve query performance + when filtering on these columns. Note: clustering_columns in table specific configuration will + override the pipeline definition. Note: we can only provide enable_auto_clustering or + clustering_columns, added as separate fields as we cannot have repeated field in oneof.""" + + enable_auto_clustering: Optional[bool] = None + """Whether to enable auto clustering on the destination table. When enabled, Delta will + automatically optimize the data layout based on the clustering columns for improved query + performance. Note: enable_auto_clustering in table specific configuration will override the + pipeline definition. Note: we can only provide enable_auto_clustering or clustering_columns, + added as separate fields as we cannot have repeated field in oneof.""" + exclude_columns: Optional[List[str]] = None """A list of column names to be excluded for the ingestion. When not specified, include_columns fully controls what columns to be ingested. 
When specified, all other columns including future @@ -3326,6 +4081,13 @@ class TableSpecificConfig: """The column names specifying the logical order of events in the source data. Spark Declarative Pipelines uses this sequencing to handle change events that arrive out of order.""" + table_properties: Optional[Dict[str, str]] = None + """Table properties to set on the destination table. These are key-value pairs that configure + various Delta table behaviors or any user defined properties. Example: + {"delta.feature.variantType": "supported", "delta.enableTypeWidening": "true"} Note: + table_properties in table specific configuration will override the table_properties of the + pipeline definition.""" + workday_report_parameters: Optional[IngestionPipelineDefinitionWorkdayReportParameters] = None """(Optional) Additional custom parameters for Workday Report""" @@ -3334,6 +4096,10 @@ def as_dict(self) -> dict: body = {} if self.auto_full_refresh_policy: body["auto_full_refresh_policy"] = self.auto_full_refresh_policy.as_dict() + if self.clustering_columns: + body["clustering_columns"] = [v for v in self.clustering_columns] + if self.enable_auto_clustering is not None: + body["enable_auto_clustering"] = self.enable_auto_clustering if self.exclude_columns: body["exclude_columns"] = [v for v in self.exclude_columns] if self.include_columns: @@ -3350,6 +4116,8 @@ def as_dict(self) -> dict: body["scd_type"] = self.scd_type.value if self.sequence_by: body["sequence_by"] = [v for v in self.sequence_by] + if self.table_properties: + body["table_properties"] = self.table_properties if self.workday_report_parameters: body["workday_report_parameters"] = self.workday_report_parameters.as_dict() return body @@ -3359,6 +4127,10 @@ def as_shallow_dict(self) -> dict: body = {} if self.auto_full_refresh_policy: body["auto_full_refresh_policy"] = self.auto_full_refresh_policy + if self.clustering_columns: + body["clustering_columns"] = self.clustering_columns + if 
self.enable_auto_clustering is not None: + body["enable_auto_clustering"] = self.enable_auto_clustering if self.exclude_columns: body["exclude_columns"] = self.exclude_columns if self.include_columns: @@ -3375,6 +4147,8 @@ def as_shallow_dict(self) -> dict: body["scd_type"] = self.scd_type if self.sequence_by: body["sequence_by"] = self.sequence_by + if self.table_properties: + body["table_properties"] = self.table_properties if self.workday_report_parameters: body["workday_report_parameters"] = self.workday_report_parameters return body @@ -3384,6 +4158,8 @@ def from_dict(cls, d: Dict[str, Any]) -> TableSpecificConfig: """Deserializes the TableSpecificConfig from a dictionary.""" return cls( auto_full_refresh_policy=_from_dict(d, "auto_full_refresh_policy", AutoFullRefreshPolicy), + clustering_columns=d.get("clustering_columns", None), + enable_auto_clustering=d.get("enable_auto_clustering", None), exclude_columns=d.get("exclude_columns", None), include_columns=d.get("include_columns", None), primary_keys=d.get("primary_keys", None), @@ -3396,6 +4172,7 @@ def from_dict(cls, d: Dict[str, Any]) -> TableSpecificConfig: salesforce_include_formula_fields=d.get("salesforce_include_formula_fields", None), scd_type=_enum(d, "scd_type", TableSpecificConfigScdType), sequence_by=d.get("sequence_by", None), + table_properties=d.get("table_properties", None), workday_report_parameters=_from_dict( d, "workday_report_parameters", IngestionPipelineDefinitionWorkdayReportParameters ), @@ -3410,6 +4187,110 @@ class TableSpecificConfigScdType(Enum): SCD_TYPE_2 = "SCD_TYPE_2" +@dataclass +class TikTokAdsOptions: + """TikTok Ads specific options for ingestion""" + + data_level: Optional[TikTokAdsOptionsTikTokDataLevel] = None + """(Optional) Data level for the report. If not specified, defaults to AUCTION_CAMPAIGN.""" + + dimensions: Optional[List[str]] = None + """(Optional) Dimensions to include in the report. 
Examples: "campaign_id", "adgroup_id", "ad_id", + "stat_time_day", "stat_time_hour" If not specified, defaults to campaign_id.""" + + lookback_window_days: Optional[int] = None + """(Optional) Number of days to look back for report tables during incremental sync to capture + late-arriving conversions and attribution data. If not specified, defaults to 7 days.""" + + metrics: Optional[List[str]] = None + """(Optional) Metrics to include in the report. Examples: "spend", "impressions", "clicks", + "conversion", "cpc" If not specified, defaults to basic metrics (spend, impressions, clicks, + etc.)""" + + query_lifetime: Optional[bool] = None + """(Optional) Whether to request lifetime metrics (all-time aggregated data). When true, the report + returns all-time data. If not specified, defaults to false.""" + + report_type: Optional[TikTokAdsOptionsTikTokReportType] = None + """(Optional) Report type for the TikTok Ads API. If not specified, defaults to BASIC.""" + + sync_start_date: Optional[str] = None + """(Optional) Start date for the initial sync of report tables in YYYY-MM-DD format. This + determines the earliest date from which to sync historical data. 
If not specified, defaults to 1 + year of historical data for daily reports and 30 days for hourly reports.""" + + def as_dict(self) -> dict: + """Serializes the TikTokAdsOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data_level is not None: + body["data_level"] = self.data_level.value + if self.dimensions: + body["dimensions"] = [v for v in self.dimensions] + if self.lookback_window_days is not None: + body["lookback_window_days"] = self.lookback_window_days + if self.metrics: + body["metrics"] = [v for v in self.metrics] + if self.query_lifetime is not None: + body["query_lifetime"] = self.query_lifetime + if self.report_type is not None: + body["report_type"] = self.report_type.value + if self.sync_start_date is not None: + body["sync_start_date"] = self.sync_start_date + return body + + def as_shallow_dict(self) -> dict: + """Serializes the TikTokAdsOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_level is not None: + body["data_level"] = self.data_level + if self.dimensions: + body["dimensions"] = self.dimensions + if self.lookback_window_days is not None: + body["lookback_window_days"] = self.lookback_window_days + if self.metrics: + body["metrics"] = self.metrics + if self.query_lifetime is not None: + body["query_lifetime"] = self.query_lifetime + if self.report_type is not None: + body["report_type"] = self.report_type + if self.sync_start_date is not None: + body["sync_start_date"] = self.sync_start_date + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> TikTokAdsOptions: + """Deserializes the TikTokAdsOptions from a dictionary.""" + return cls( + data_level=_enum(d, "data_level", TikTokAdsOptionsTikTokDataLevel), + dimensions=d.get("dimensions", None), + lookback_window_days=d.get("lookback_window_days", None), + metrics=d.get("metrics", None), + query_lifetime=d.get("query_lifetime", None), + report_type=_enum(d, "report_type", 
TikTokAdsOptionsTikTokReportType), + sync_start_date=d.get("sync_start_date", None), + ) + + +class TikTokAdsOptionsTikTokDataLevel(Enum): + """Data level for TikTok Ads report aggregation.""" + + AUCTION_AD = "AUCTION_AD" + AUCTION_ADGROUP = "AUCTION_ADGROUP" + AUCTION_ADVERTISER = "AUCTION_ADVERTISER" + AUCTION_CAMPAIGN = "AUCTION_CAMPAIGN" + + +class TikTokAdsOptionsTikTokReportType(Enum): + """Report type for TikTok Ads API.""" + + AUDIENCE = "AUDIENCE" + BASIC = "BASIC" + BUSINESS_CENTER = "BUSINESS_CENTER" + DSA = "DSA" + GMV_MAX = "GMV_MAX" + PLAYABLE_AD = "PLAYABLE_AD" + + @dataclass class Truncation: """Information about truncations applied to this event.""" @@ -3487,6 +4368,10 @@ class UpdateInfo: full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means that the states of the table will be reset before the refresh.""" + mode: Optional[UpdateMode] = None + """Indicates whether the update is either part of a continuous job run, or running in legacy + continuous pipeline mode.""" + parameters: Optional[Dict[str, str]] = None """Key/value map of parameters used to initiate the update""" @@ -3523,6 +4408,8 @@ def as_dict(self) -> dict: body["full_refresh"] = self.full_refresh if self.full_refresh_selection: body["full_refresh_selection"] = [v for v in self.full_refresh_selection] + if self.mode is not None: + body["mode"] = self.mode.value if self.parameters: body["parameters"] = self.parameters if self.pipeline_id is not None: @@ -3552,6 +4439,8 @@ def as_shallow_dict(self) -> dict: body["full_refresh"] = self.full_refresh if self.full_refresh_selection: body["full_refresh_selection"] = self.full_refresh_selection + if self.mode is not None: + body["mode"] = self.mode if self.parameters: body["parameters"] = self.parameters if self.pipeline_id is not None: @@ -3576,6 +4465,7 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateInfo: creation_time=d.get("creation_time", None), full_refresh=d.get("full_refresh", None), 
full_refresh_selection=d.get("full_refresh_selection", None), + mode=_enum(d, "mode", UpdateMode), parameters=d.get("parameters", None), pipeline_id=d.get("pipeline_id", None), refresh_selection=d.get("refresh_selection", None), @@ -3613,6 +4503,12 @@ class UpdateInfoState(Enum): WAITING_FOR_RESOURCES = "WAITING_FOR_RESOURCES" +class UpdateMode(Enum): + + CONTINUOUS = "CONTINUOUS" + DEFAULT = "DEFAULT" + + @dataclass class UpdateStateInfo: creation_time: Optional[str] = None @@ -3717,6 +4613,26 @@ def wait_get_pipeline_idle( attempt += 1 raise TimeoutError(f"timed out after {timeout}: {status_message}") + def apply_environment(self, pipeline_id: str) -> ApplyEnvironmentRequestResponse: + """* Applies the current pipeline environment onto the pipeline compute. The environment applied can be + used by subsequent dev-mode updates. + + :param pipeline_id: str + + :returns: :class:`ApplyEnvironmentRequestResponse` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("POST", f"/api/2.0/pipelines/{pipeline_id}/environment/apply", headers=headers) + return ApplyEnvironmentRequestResponse.from_dict(res) + def clone( self, pipeline_id: str, @@ -4094,11 +5010,23 @@ def create( res = self._api.do("POST", "/api/2.0/pipelines", body=body, headers=headers) return CreatePipelineResponse.from_dict(res) - def delete(self, pipeline_id: str, *, force: Optional[bool] = None): + def delete( + self, + pipeline_id: str, + *, + cascade: Optional[bool] = None, + delete_datasets: Optional[bool] = None, + force: Optional[bool] = None, + ): """Deletes a pipeline. If the pipeline publishes to Unity Catalog, pipeline deletion will cascade to all pipeline tables. Please reach out to Databricks support for assistance to undo this action. 
:param pipeline_id: str + :param cascade: bool (optional) + If false, pipeline deletion will not cascade to its datasets (MVs, STs, Views). By default, this + parameter will be true and all tables will be deleted with the pipeline. + :param delete_datasets: bool (optional) + Deprecated: Use `cascade` instead. :param force: bool (optional) If true, deletion will proceed even if resource cleanup fails. By default, deletion will fail if resources cleanup is required but fails. @@ -4107,6 +5035,10 @@ def delete(self, pipeline_id: str, *, force: Optional[bool] = None): """ query = {} + if cascade is not None: + query["cascade"] = cascade + if delete_datasets is not None: + query["delete_datasets"] = delete_datasets if force is not None: query["force"] = force headers = { @@ -4359,6 +5291,27 @@ def list_updates( res = self._api.do("GET", f"/api/2.0/pipelines/{pipeline_id}/updates", query=query, headers=headers) return ListUpdatesResponse.from_dict(res) + def restore_pipeline(self, pipeline_id: str) -> RestorePipelineRequestResponse: + """* Restores a pipeline that was previously deleted, if within the restoration window. All tables + deleted at pipeline deletion will be undropped as well. 
+ + :param pipeline_id: str + The ID of the pipeline to restore + + :returns: :class:`RestorePipelineRequestResponse` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("POST", f"/api/2.0/pipelines/{pipeline_id}/restore", headers=headers) + return RestorePipelineRequestResponse.from_dict(res) + def set_permissions( self, pipeline_id: str, *, access_control_list: Optional[List[PipelineAccessControlRequest]] = None ) -> PipelinePermissions: diff --git a/databricks/sdk/service/postgres.py b/databricks/sdk/service/postgres.py index d9c360ea2..4bf3a7e41 100755 --- a/databricks/sdk/service/postgres.py +++ b/databricks/sdk/service/postgres.py @@ -202,6 +202,14 @@ def from_dict(cls, d: Dict[str, Any]) -> BranchSpec: @dataclass class BranchStatus: + branch_id: Optional[str] = None + """The short identifier of the branch, suitable for showing to the users. For a branch with name + `projects/my-project/branches/my-branch`, the branch_id is `my-branch`. + + Use this field when building UI components that display branches to users (e.g., a drop-down + selector). 
Prefer showing `branch_id` instead of the full resource name from `Branch.name`, + which follows the `projects/{project_id}/branches/{branch_id}` format and is not user-friendly.""" + current_state: Optional[BranchStatusState] = None """The branch's state, indicating if it is initializing, ready for use, or archived.""" @@ -236,6 +244,8 @@ class BranchStatus: def as_dict(self) -> dict: """Serializes the BranchStatus into a dictionary suitable for use as a JSON request body.""" body = {} + if self.branch_id is not None: + body["branch_id"] = self.branch_id if self.current_state is not None: body["current_state"] = self.current_state.value if self.default is not None: @@ -261,6 +271,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the BranchStatus into a shallow dictionary of its immediate attributes.""" body = {} + if self.branch_id is not None: + body["branch_id"] = self.branch_id if self.current_state is not None: body["current_state"] = self.current_state if self.default is not None: @@ -287,6 +299,7 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> BranchStatus: """Deserializes the BranchStatus from a dictionary.""" return cls( + branch_id=d.get("branch_id", None), current_state=_enum(d, "current_state", BranchStatusState), default=d.get("default", None), expire_time=_timestamp(d, "expire_time"), @@ -310,6 +323,173 @@ class BranchStatusState(Enum): RESETTING = "RESETTING" +@dataclass +class Catalog: + name: str + """The name of the catalog in UC.""" + + database: str + """The project and branch scoped database associated with this catalog. Must be in the format: + projects/{project_id}/branches/{branch_id}/databases/{database_id} where database_name is the + name of the logical database in Postgres.""" + + branch: Optional[str] = None + """The resource path of the branch associated with the catalog. 
Format: + projects/{project_id}/branches/{branch_id}.""" + + create_database_if_not_exists: Optional[bool] = None + + project: Optional[str] = None + """The resource path of the project associated with the catalog. Format: projects/{project_id}.""" + + uid: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the Catalog into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.branch is not None: + body["branch"] = self.branch + if self.create_database_if_not_exists is not None: + body["create_database_if_not_exists"] = self.create_database_if_not_exists + if self.database is not None: + body["database"] = self.database + if self.name is not None: + body["name"] = self.name + if self.project is not None: + body["project"] = self.project + if self.uid is not None: + body["uid"] = self.uid + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Catalog into a shallow dictionary of its immediate attributes.""" + body = {} + if self.branch is not None: + body["branch"] = self.branch + if self.create_database_if_not_exists is not None: + body["create_database_if_not_exists"] = self.create_database_if_not_exists + if self.database is not None: + body["database"] = self.database + if self.name is not None: + body["name"] = self.name + if self.project is not None: + body["project"] = self.project + if self.uid is not None: + body["uid"] = self.uid + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Catalog: + """Deserializes the Catalog from a dictionary.""" + return cls( + branch=d.get("branch", None), + create_database_if_not_exists=d.get("create_database_if_not_exists", None), + database=d.get("database", None), + name=d.get("name", None), + project=d.get("project", None), + uid=d.get("uid", None), + ) + + +@dataclass +class ComputeInstance: + name: str + """The fully qualified name for this compute instance. 
Format: + projects/*/branches/*/endpoints/*/compute-instances/*""" + + compute_instance_id: str + """The unique ID for this compute.""" + + compute_host: Optional[str] = None + """A host scoped directly to the enclosing compute. This host is guaranteed to resolve to the + specific compute instance.""" + + current_state: Optional[ComputeInstanceComputeState] = None + """The current state of the compute.""" + + pending_state: Optional[ComputeInstanceComputeState] = None + """The desired pending state of the compute, if a state transition is in progress.""" + + role: Optional[ComputeInstanceComputeType] = None + """The role of this compute within the endpoint.""" + + start_time: Optional[Timestamp] = None + """A timestamp indicating when the compute was last started.""" + + suspend_time: Optional[Timestamp] = None + """A timestamp indicating when the compute was last suspended.""" + + def as_dict(self) -> dict: + """Serializes the ComputeInstance into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.compute_host is not None: + body["compute_host"] = self.compute_host + if self.compute_instance_id is not None: + body["compute_instance_id"] = self.compute_instance_id + if self.current_state is not None: + body["current_state"] = self.current_state.value + if self.name is not None: + body["name"] = self.name + if self.pending_state is not None: + body["pending_state"] = self.pending_state.value + if self.role is not None: + body["role"] = self.role.value + if self.start_time is not None: + body["start_time"] = self.start_time.ToJsonString() + if self.suspend_time is not None: + body["suspend_time"] = self.suspend_time.ToJsonString() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ComputeInstance into a shallow dictionary of its immediate attributes.""" + body = {} + if self.compute_host is not None: + body["compute_host"] = self.compute_host + if self.compute_instance_id is not None: + body["compute_instance_id"] = 
self.compute_instance_id + if self.current_state is not None: + body["current_state"] = self.current_state + if self.name is not None: + body["name"] = self.name + if self.pending_state is not None: + body["pending_state"] = self.pending_state + if self.role is not None: + body["role"] = self.role + if self.start_time is not None: + body["start_time"] = self.start_time + if self.suspend_time is not None: + body["suspend_time"] = self.suspend_time + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ComputeInstance: + """Deserializes the ComputeInstance from a dictionary.""" + return cls( + compute_host=d.get("compute_host", None), + compute_instance_id=d.get("compute_instance_id", None), + current_state=_enum(d, "current_state", ComputeInstanceComputeState), + name=d.get("name", None), + pending_state=_enum(d, "pending_state", ComputeInstanceComputeState), + role=_enum(d, "role", ComputeInstanceComputeType), + start_time=_timestamp(d, "start_time"), + suspend_time=_timestamp(d, "suspend_time"), + ) + + +class ComputeInstanceComputeState(Enum): + + ACTIVE = "ACTIVE" + IDLE = "IDLE" + INIT = "INIT" + + +class ComputeInstanceComputeType(Enum): + + HOT_STANDBY = "HOT_STANDBY" + READ_ONLY = "READ_ONLY" + READ_WRITE = "READ_WRITE" + + @dataclass class Database: """Database represents a Postgres database within a Branch.""" @@ -555,6 +735,71 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabricksServiceExceptionWithDetailsPr ) +@dataclass +class DeltaTableSyncInfo: + """Copied from database_table_statuses.proto to decouple SDK packages.""" + + delta_commit_timestamp: Optional[Timestamp] = None + """The timestamp when the above Delta version was committed in the source Delta table. 
Note: This + is the Delta commit time, not the time the data was written to the synced table.""" + + delta_commit_version: Optional[int] = None + """The Delta Lake commit version that was last successfully synced.""" + + def as_dict(self) -> dict: + """Serializes the DeltaTableSyncInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.delta_commit_timestamp is not None: + body["delta_commit_timestamp"] = self.delta_commit_timestamp.ToJsonString() + if self.delta_commit_version is not None: + body["delta_commit_version"] = self.delta_commit_version + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DeltaTableSyncInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.delta_commit_timestamp is not None: + body["delta_commit_timestamp"] = self.delta_commit_timestamp + if self.delta_commit_version is not None: + body["delta_commit_version"] = self.delta_commit_version + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeltaTableSyncInfo: + """Deserializes the DeltaTableSyncInfo from a dictionary.""" + return cls( + delta_commit_timestamp=_timestamp(d, "delta_commit_timestamp"), + delta_commit_version=d.get("delta_commit_version", None), + ) + + +@dataclass +class DisableForwardEtlResponse: + """Response to disable Forward ETL""" + + disabled: Optional[bool] = None + """Whether Forward ETL was successfully disabled.""" + + def as_dict(self) -> dict: + """Serializes the DisableForwardEtlResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.disabled is not None: + body["disabled"] = self.disabled + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DisableForwardEtlResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.disabled is not None: + body["disabled"] = self.disabled + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DisableForwardEtlResponse: 
+ """Deserializes the DisableForwardEtlResponse from a dictionary.""" + return cls(disabled=d.get("disabled", None)) + + @dataclass class Endpoint: create_time: Optional[Timestamp] = None @@ -742,6 +987,15 @@ class EndpointHosts: attribute is defined if the enclosing endpoint is a group with greater than 1 computes configured, and has readable secondaries enabled.""" + read_only_pooled_host: Optional[str] = None + """The read-only hostname of the compute endpoint, with pooling. This attribute is always defined + for read-only endpoints, and may be defined for read-write endpoints if configured with read + replicas and allow read-only connections.""" + + read_write_pooled_host: Optional[str] = None + """The read-write hostname of the compute endpoint, with pooling. This attribute is only defined + for read-write endpoints.""" + def as_dict(self) -> dict: """Serializes the EndpointHosts into a dictionary suitable for use as a JSON request body.""" body = {} @@ -749,6 +1003,10 @@ def as_dict(self) -> dict: body["host"] = self.host if self.read_only_host is not None: body["read_only_host"] = self.read_only_host + if self.read_only_pooled_host is not None: + body["read_only_pooled_host"] = self.read_only_pooled_host + if self.read_write_pooled_host is not None: + body["read_write_pooled_host"] = self.read_write_pooled_host return body def as_shallow_dict(self) -> dict: @@ -758,12 +1016,21 @@ def as_shallow_dict(self) -> dict: body["host"] = self.host if self.read_only_host is not None: body["read_only_host"] = self.read_only_host + if self.read_only_pooled_host is not None: + body["read_only_pooled_host"] = self.read_only_pooled_host + if self.read_write_pooled_host is not None: + body["read_write_pooled_host"] = self.read_write_pooled_host return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointHosts: """Deserializes the EndpointHosts from a dictionary.""" - return cls(host=d.get("host", None), read_only_host=d.get("read_only_host", None)) + return 
cls( + host=d.get("host", None), + read_only_host=d.get("read_only_host", None), + read_only_pooled_host=d.get("read_only_pooled_host", None), + read_write_pooled_host=d.get("read_write_pooled_host", None), + ) @dataclass @@ -923,6 +1190,9 @@ class EndpointStatus: hosts: Optional[EndpointHosts] = None """Contains host information for connecting to the endpoint.""" + last_active_time: Optional[Timestamp] = None + """A timestamp indicating when the compute endpoint was last active.""" + pending_state: Optional[EndpointStatusState] = None settings: Optional[EndpointSettings] = None @@ -947,6 +1217,8 @@ def as_dict(self) -> dict: body["group"] = self.group.as_dict() if self.hosts: body["hosts"] = self.hosts.as_dict() + if self.last_active_time is not None: + body["last_active_time"] = self.last_active_time.ToJsonString() if self.pending_state is not None: body["pending_state"] = self.pending_state.value if self.settings: @@ -972,6 +1244,8 @@ def as_shallow_dict(self) -> dict: body["group"] = self.group if self.hosts: body["hosts"] = self.hosts + if self.last_active_time is not None: + body["last_active_time"] = self.last_active_time if self.pending_state is not None: body["pending_state"] = self.pending_state if self.settings: @@ -991,6 +1265,7 @@ def from_dict(cls, d: Dict[str, Any]) -> EndpointStatus: endpoint_type=_enum(d, "endpoint_type", EndpointType), group=_from_dict(d, "group", EndpointGroupStatus), hosts=_from_dict(d, "hosts", EndpointHosts), + last_active_time=_timestamp(d, "last_active_time"), pending_state=_enum(d, "pending_state", EndpointStatusState), settings=_from_dict(d, "settings", EndpointSettings), suspend_timeout_duration=_duration(d, "suspend_timeout_duration"), @@ -1100,189 +1375,582 @@ class ErrorCode(Enum): @dataclass -class InitialEndpointSpec: - group: Optional[EndpointGroupSpec] = None - """Settings for HA configuration of the endpoint""" +class ForwardEtlConfig: + """Forward ETL configuration""" + + create_time_millis: Optional[int] = None 
+ """Configuration creation timestamp in milliseconds since epoch.""" + + enabled: Optional[bool] = None + """Whether Forward ETL is enabled.""" + + pg_database_oid: Optional[int] = None + """PostgreSQL database OID.""" + + pg_schema_oid: Optional[int] = None + """PostgreSQL schema OID.""" + + tenant_id: Optional[str] = None + """Tenant ID (dashless UUID format).""" + + timeline_id: Optional[str] = None + """Timeline ID (dashless UUID format).""" + + uc_catalog_id: Optional[str] = None + """Unity Catalog catalog ID.""" + + uc_schema_id: Optional[str] = None + """Unity Catalog schema ID.""" + + update_time_millis: Optional[int] = None + """Configuration last update timestamp in milliseconds since epoch.""" + + workspace_id: Optional[int] = None + """Workspace ID.""" def as_dict(self) -> dict: - """Serializes the InitialEndpointSpec into a dictionary suitable for use as a JSON request body.""" + """Serializes the ForwardEtlConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group: - body["group"] = self.group.as_dict() + if self.create_time_millis is not None: + body["create_time_millis"] = self.create_time_millis + if self.enabled is not None: + body["enabled"] = self.enabled + if self.pg_database_oid is not None: + body["pg_database_oid"] = self.pg_database_oid + if self.pg_schema_oid is not None: + body["pg_schema_oid"] = self.pg_schema_oid + if self.tenant_id is not None: + body["tenant_id"] = self.tenant_id + if self.timeline_id is not None: + body["timeline_id"] = self.timeline_id + if self.uc_catalog_id is not None: + body["uc_catalog_id"] = self.uc_catalog_id + if self.uc_schema_id is not None: + body["uc_schema_id"] = self.uc_schema_id + if self.update_time_millis is not None: + body["update_time_millis"] = self.update_time_millis + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body def as_shallow_dict(self) -> dict: - """Serializes the InitialEndpointSpec into a shallow dictionary of 
its immediate attributes.""" + """Serializes the ForwardEtlConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.group: - body["group"] = self.group + if self.create_time_millis is not None: + body["create_time_millis"] = self.create_time_millis + if self.enabled is not None: + body["enabled"] = self.enabled + if self.pg_database_oid is not None: + body["pg_database_oid"] = self.pg_database_oid + if self.pg_schema_oid is not None: + body["pg_schema_oid"] = self.pg_schema_oid + if self.tenant_id is not None: + body["tenant_id"] = self.tenant_id + if self.timeline_id is not None: + body["timeline_id"] = self.timeline_id + if self.uc_catalog_id is not None: + body["uc_catalog_id"] = self.uc_catalog_id + if self.uc_schema_id is not None: + body["uc_schema_id"] = self.uc_schema_id + if self.update_time_millis is not None: + body["update_time_millis"] = self.update_time_millis + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> InitialEndpointSpec: - """Deserializes the InitialEndpointSpec from a dictionary.""" - return cls(group=_from_dict(d, "group", EndpointGroupSpec)) + def from_dict(cls, d: Dict[str, Any]) -> ForwardEtlConfig: + """Deserializes the ForwardEtlConfig from a dictionary.""" + return cls( + create_time_millis=d.get("create_time_millis", None), + enabled=d.get("enabled", None), + pg_database_oid=d.get("pg_database_oid", None), + pg_schema_oid=d.get("pg_schema_oid", None), + tenant_id=d.get("tenant_id", None), + timeline_id=d.get("timeline_id", None), + uc_catalog_id=d.get("uc_catalog_id", None), + uc_schema_id=d.get("uc_schema_id", None), + update_time_millis=d.get("update_time_millis", None), + workspace_id=d.get("workspace_id", None), + ) @dataclass -class ListBranchesResponse: - branches: Optional[List[Branch]] = None - """List of branches in the project.""" +class ForwardEtlDatabase: + """Database metadata""" - next_page_token: 
Optional[str] = None - """Token to request the next page of branches.""" + name: Optional[str] = None + """Database name.""" + + oid: Optional[int] = None + """PostgreSQL database OID.""" def as_dict(self) -> dict: - """Serializes the ListBranchesResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ForwardEtlDatabase into a dictionary suitable for use as a JSON request body.""" body = {} - if self.branches: - body["branches"] = [v.as_dict() for v in self.branches] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.name is not None: + body["name"] = self.name + if self.oid is not None: + body["oid"] = self.oid return body def as_shallow_dict(self) -> dict: - """Serializes the ListBranchesResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ForwardEtlDatabase into a shallow dictionary of its immediate attributes.""" body = {} - if self.branches: - body["branches"] = self.branches - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.name is not None: + body["name"] = self.name + if self.oid is not None: + body["oid"] = self.oid return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListBranchesResponse: - """Deserializes the ListBranchesResponse from a dictionary.""" - return cls(branches=_repeated_dict(d, "branches", Branch), next_page_token=d.get("next_page_token", None)) + def from_dict(cls, d: Dict[str, Any]) -> ForwardEtlDatabase: + """Deserializes the ForwardEtlDatabase from a dictionary.""" + return cls(name=d.get("name", None), oid=d.get("oid", None)) @dataclass -class ListDatabasesResponse: - databases: Optional[List[Database]] = None - """List of databases.""" +class ForwardEtlMetadata: + """Forward ETL metadata response""" - next_page_token: Optional[str] = None - """Pagination token to request the next page of databases.""" + databases: Optional[List[ForwardEtlDatabase]] = None + 
"""List of databases with their PostgreSQL OIDs.""" + + schemas: Optional[List[ForwardEtlSchema]] = None + """List of schemas with their PostgreSQL OIDs.""" def as_dict(self) -> dict: - """Serializes the ListDatabasesResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ForwardEtlMetadata into a dictionary suitable for use as a JSON request body.""" body = {} if self.databases: body["databases"] = [v.as_dict() for v in self.databases] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.schemas: + body["schemas"] = [v.as_dict() for v in self.schemas] return body def as_shallow_dict(self) -> dict: - """Serializes the ListDatabasesResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ForwardEtlMetadata into a shallow dictionary of its immediate attributes.""" body = {} if self.databases: body["databases"] = self.databases - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.schemas: + body["schemas"] = self.schemas return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListDatabasesResponse: - """Deserializes the ListDatabasesResponse from a dictionary.""" - return cls(databases=_repeated_dict(d, "databases", Database), next_page_token=d.get("next_page_token", None)) + def from_dict(cls, d: Dict[str, Any]) -> ForwardEtlMetadata: + """Deserializes the ForwardEtlMetadata from a dictionary.""" + return cls( + databases=_repeated_dict(d, "databases", ForwardEtlDatabase), + schemas=_repeated_dict(d, "schemas", ForwardEtlSchema), + ) @dataclass -class ListEndpointsResponse: - endpoints: Optional[List[Endpoint]] = None - """List of compute endpoints in the branch.""" +class ForwardEtlSchema: + """Schema metadata""" - next_page_token: Optional[str] = None - """Token to request the next page of compute endpoints.""" + name: Optional[str] = None + """Schema name.""" + + oid: Optional[int] = None + 
"""PostgreSQL schema OID.""" def as_dict(self) -> dict: - """Serializes the ListEndpointsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ForwardEtlSchema into a dictionary suitable for use as a JSON request body.""" body = {} - if self.endpoints: - body["endpoints"] = [v.as_dict() for v in self.endpoints] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.name is not None: + body["name"] = self.name + if self.oid is not None: + body["oid"] = self.oid return body def as_shallow_dict(self) -> dict: - """Serializes the ListEndpointsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ForwardEtlSchema into a shallow dictionary of its immediate attributes.""" body = {} - if self.endpoints: - body["endpoints"] = self.endpoints - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.name is not None: + body["name"] = self.name + if self.oid is not None: + body["oid"] = self.oid return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListEndpointsResponse: - """Deserializes the ListEndpointsResponse from a dictionary.""" - return cls(endpoints=_repeated_dict(d, "endpoints", Endpoint), next_page_token=d.get("next_page_token", None)) + def from_dict(cls, d: Dict[str, Any]) -> ForwardEtlSchema: + """Deserializes the ForwardEtlSchema from a dictionary.""" + return cls(name=d.get("name", None), oid=d.get("oid", None)) @dataclass -class ListProjectsResponse: - next_page_token: Optional[str] = None - """Token to request the next page of projects.""" +class ForwardEtlStatus: + """Forward ETL status response""" - projects: Optional[List[Project]] = None - """List of all projects in the workspace that the user has permission to access.""" + configurations: Optional[List[ForwardEtlConfig]] = None + """List of Forward ETL configurations.""" + + table_mappings: Optional[List[ForwardEtlTableMapping]] = None + 
"""Per-table replication mappings.""" def as_dict(self) -> dict: - """Serializes the ListProjectsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ForwardEtlStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.projects: - body["projects"] = [v.as_dict() for v in self.projects] + if self.configurations: + body["configurations"] = [v.as_dict() for v in self.configurations] + if self.table_mappings: + body["table_mappings"] = [v.as_dict() for v in self.table_mappings] return body def as_shallow_dict(self) -> dict: - """Serializes the ListProjectsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ForwardEtlStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.projects: - body["projects"] = self.projects + if self.configurations: + body["configurations"] = self.configurations + if self.table_mappings: + body["table_mappings"] = self.table_mappings return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListProjectsResponse: - """Deserializes the ListProjectsResponse from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), projects=_repeated_dict(d, "projects", Project)) + def from_dict(cls, d: Dict[str, Any]) -> ForwardEtlStatus: + """Deserializes the ForwardEtlStatus from a dictionary.""" + return cls( + configurations=_repeated_dict(d, "configurations", ForwardEtlConfig), + table_mappings=_repeated_dict(d, "table_mappings", ForwardEtlTableMapping), + ) @dataclass -class ListRolesResponse: - next_page_token: Optional[str] = None - """Token to request the next page of Postgres roles.""" +class ForwardEtlTableMapping: + """Per-table replication mapping""" - roles: Optional[List[Role]] = None - """List of Postgres roles 
in the branch.""" + enabled: Optional[bool] = None + """Whether replication is enabled for this table.""" + + last_synced_lsn: Optional[str] = None + """Last synced LSN (Log Sequence Number) for this table.""" + + pg_table_name: Optional[str] = None + """PostgreSQL table name.""" + + pg_table_oid: Optional[int] = None + """PostgreSQL table OID.""" + + uc_table_id: Optional[str] = None + """Unity Catalog table ID.""" + + uc_table_name: Optional[str] = None + """Unity Catalog table name.""" def as_dict(self) -> dict: - """Serializes the ListRolesResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ForwardEtlTableMapping into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.roles: - body["roles"] = [v.as_dict() for v in self.roles] + if self.enabled is not None: + body["enabled"] = self.enabled + if self.last_synced_lsn is not None: + body["last_synced_lsn"] = self.last_synced_lsn + if self.pg_table_name is not None: + body["pg_table_name"] = self.pg_table_name + if self.pg_table_oid is not None: + body["pg_table_oid"] = self.pg_table_oid + if self.uc_table_id is not None: + body["uc_table_id"] = self.uc_table_id + if self.uc_table_name is not None: + body["uc_table_name"] = self.uc_table_name return body def as_shallow_dict(self) -> dict: - """Serializes the ListRolesResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ForwardEtlTableMapping into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.roles: - body["roles"] = self.roles + if self.enabled is not None: + body["enabled"] = self.enabled + if self.last_synced_lsn is not None: + body["last_synced_lsn"] = self.last_synced_lsn + if self.pg_table_name is not None: + body["pg_table_name"] = self.pg_table_name + if 
self.pg_table_oid is not None: + body["pg_table_oid"] = self.pg_table_oid + if self.uc_table_id is not None: + body["uc_table_id"] = self.uc_table_id + if self.uc_table_name is not None: + body["uc_table_name"] = self.uc_table_name return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListRolesResponse: - """Deserializes the ListRolesResponse from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), roles=_repeated_dict(d, "roles", Role)) - + def from_dict(cls, d: Dict[str, Any]) -> ForwardEtlTableMapping: + """Deserializes the ForwardEtlTableMapping from a dictionary.""" + return cls( + enabled=d.get("enabled", None), + last_synced_lsn=d.get("last_synced_lsn", None), + pg_table_name=d.get("pg_table_name", None), + pg_table_oid=d.get("pg_table_oid", None), + uc_table_id=d.get("uc_table_id", None), + uc_table_name=d.get("uc_table_name", None), + ) + + +@dataclass +class InitialEndpointSpec: + group: Optional[EndpointGroupSpec] = None + """Settings for HA configuration of the endpoint""" + + def as_dict(self) -> dict: + """Serializes the InitialEndpointSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.group: + body["group"] = self.group.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the InitialEndpointSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group: + body["group"] = self.group + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> InitialEndpointSpec: + """Deserializes the InitialEndpointSpec from a dictionary.""" + return cls(group=_from_dict(d, "group", EndpointGroupSpec)) + + +@dataclass +class ListBranchesResponse: + branches: Optional[List[Branch]] = None + """List of branches in the project.""" + + next_page_token: Optional[str] = None + """Token to request the next page of branches.""" + + def as_dict(self) -> dict: + """Serializes the ListBranchesResponse into a dictionary suitable for use 
as a JSON request body.""" + body = {} + if self.branches: + body["branches"] = [v.as_dict() for v in self.branches] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListBranchesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.branches: + body["branches"] = self.branches + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListBranchesResponse: + """Deserializes the ListBranchesResponse from a dictionary.""" + return cls(branches=_repeated_dict(d, "branches", Branch), next_page_token=d.get("next_page_token", None)) + + +@dataclass +class ListComputeInstancesResponse: + compute_instances: Optional[List[ComputeInstance]] = None + """The compute instances from the specified endpoint.""" + + next_page_token: Optional[str] = None + """A token, which can be sent as `page_token` to retrieve the next page. 
If this field is omitted, + there are no subsequent pages.""" + + def as_dict(self) -> dict: + """Serializes the ListComputeInstancesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.compute_instances: + body["compute_instances"] = [v.as_dict() for v in self.compute_instances] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListComputeInstancesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.compute_instances: + body["compute_instances"] = self.compute_instances + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListComputeInstancesResponse: + """Deserializes the ListComputeInstancesResponse from a dictionary.""" + return cls( + compute_instances=_repeated_dict(d, "compute_instances", ComputeInstance), + next_page_token=d.get("next_page_token", None), + ) + + +@dataclass +class ListDatabasesResponse: + databases: Optional[List[Database]] = None + """List of databases.""" + + next_page_token: Optional[str] = None + """Pagination token to request the next page of databases.""" + + def as_dict(self) -> dict: + """Serializes the ListDatabasesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.databases: + body["databases"] = [v.as_dict() for v in self.databases] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListDatabasesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.databases: + body["databases"] = self.databases + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> 
ListDatabasesResponse: + """Deserializes the ListDatabasesResponse from a dictionary.""" + return cls(databases=_repeated_dict(d, "databases", Database), next_page_token=d.get("next_page_token", None)) + + +@dataclass +class ListEndpointsResponse: + endpoints: Optional[List[Endpoint]] = None + """List of compute endpoints in the branch.""" + + next_page_token: Optional[str] = None + """Token to request the next page of compute endpoints.""" + + def as_dict(self) -> dict: + """Serializes the ListEndpointsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.endpoints: + body["endpoints"] = [v.as_dict() for v in self.endpoints] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListEndpointsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.endpoints: + body["endpoints"] = self.endpoints + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListEndpointsResponse: + """Deserializes the ListEndpointsResponse from a dictionary.""" + return cls(endpoints=_repeated_dict(d, "endpoints", Endpoint), next_page_token=d.get("next_page_token", None)) + + +@dataclass +class ListProjectsResponse: + next_page_token: Optional[str] = None + """Token to request the next page of projects.""" + + projects: Optional[List[Project]] = None + """List of all projects in the workspace that the user has permission to access.""" + + def as_dict(self) -> dict: + """Serializes the ListProjectsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.projects: + body["projects"] = [v.as_dict() for v in self.projects] + return body + + def as_shallow_dict(self) -> dict: + """Serializes 
the ListProjectsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.projects: + body["projects"] = self.projects + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListProjectsResponse: + """Deserializes the ListProjectsResponse from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), projects=_repeated_dict(d, "projects", Project)) + + +@dataclass +class ListRolesResponse: + next_page_token: Optional[str] = None + """Token to request the next page of Postgres roles.""" + + roles: Optional[List[Role]] = None + """List of Postgres roles in the branch.""" + + def as_dict(self) -> dict: + """Serializes the ListRolesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.roles: + body["roles"] = [v.as_dict() for v in self.roles] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListRolesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.roles: + body["roles"] = self.roles + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListRolesResponse: + """Deserializes the ListRolesResponse from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), roles=_repeated_dict(d, "roles", Role)) + + +@dataclass +class NewPipelineSpec: + budget_policy_id: Optional[str] = None + """Budget policy to set on the newly created pipeline.""" + + storage_catalog: Optional[str] = None + """UC catalog for the pipeline to store intermediate files (checkpoints, event logs etc). 
This + needs to be a standard catalog where the user has permissions to create Delta tables.""" + + storage_schema: Optional[str] = None + """UC schema for the pipeline to store intermediate files (checkpoints, event logs etc). This needs + to be in the standard catalog where the user has permissions to create Delta tables.""" + + def as_dict(self) -> dict: + """Serializes the NewPipelineSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.storage_catalog is not None: + body["storage_catalog"] = self.storage_catalog + if self.storage_schema is not None: + body["storage_schema"] = self.storage_schema + return body + + def as_shallow_dict(self) -> dict: + """Serializes the NewPipelineSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.storage_catalog is not None: + body["storage_catalog"] = self.storage_catalog + if self.storage_schema is not None: + body["storage_schema"] = self.storage_schema + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> NewPipelineSpec: + """Deserializes the NewPipelineSpec from a dictionary.""" + return cls( + budget_policy_id=d.get("budget_policy_id", None), + storage_catalog=d.get("storage_catalog", None), + storage_schema=d.get("storage_schema", None), + ) + @dataclass class Operation: @@ -1556,6 +2224,10 @@ class ProjectSpec: preserve existing tags, omit this field from the update_mask (or use wildcard "*" which auto-excludes empty tags).""" + default_branch: Optional[str] = None + """The full resource path for the default branch of the project Format: + projects/{project_id}/branches/{branch_id}""" + default_endpoint_settings: Optional[ProjectDefaultEndpointSettings] = None display_name: Optional[str] = None @@ -1571,6 +2243,13 @@ class ProjectSpec: pg_version: 
Optional[int] = None """The major Postgres version number. Supported versions are 16 and 17.""" + workspace_key_encrypted: Optional[bool] = None + """Indicates if this project should be created with workspace-scoped customer managed key (CMK) + encryption enabled. Since we need to do an end to end perf bench using BSS API to A/B test the + performance impact of CMK encryption, we need to be able to control this flag in the API. This + flag will be removed once we find a better way to separate the tenants or enforce + workspace-level CMK encryption or migrate everyone to CMK.""" + def as_dict(self) -> dict: """Serializes the ProjectSpec into a dictionary suitable for use as a JSON request body.""" body = {} @@ -1578,6 +2257,8 @@ def as_dict(self) -> dict: body["budget_policy_id"] = self.budget_policy_id if self.custom_tags: body["custom_tags"] = [v.as_dict() for v in self.custom_tags] + if self.default_branch is not None: + body["default_branch"] = self.default_branch if self.default_endpoint_settings: body["default_endpoint_settings"] = self.default_endpoint_settings.as_dict() if self.display_name is not None: @@ -1588,6 +2269,8 @@ def as_dict(self) -> dict: body["history_retention_duration"] = self.history_retention_duration.ToJsonString() if self.pg_version is not None: body["pg_version"] = self.pg_version + if self.workspace_key_encrypted is not None: + body["workspace_key_encrypted"] = self.workspace_key_encrypted return body def as_shallow_dict(self) -> dict: @@ -1597,6 +2280,8 @@ def as_shallow_dict(self) -> dict: body["budget_policy_id"] = self.budget_policy_id if self.custom_tags: body["custom_tags"] = self.custom_tags + if self.default_branch is not None: + body["default_branch"] = self.default_branch if self.default_endpoint_settings: body["default_endpoint_settings"] = self.default_endpoint_settings if self.display_name is not None: @@ -1607,6 +2292,8 @@ def as_shallow_dict(self) -> dict: body["history_retention_duration"] = 
self.history_retention_duration if self.pg_version is not None: body["pg_version"] = self.pg_version + if self.workspace_key_encrypted is not None: + body["workspace_key_encrypted"] = self.workspace_key_encrypted return body @classmethod @@ -1615,11 +2302,13 @@ def from_dict(cls, d: Dict[str, Any]) -> ProjectSpec: return cls( budget_policy_id=d.get("budget_policy_id", None), custom_tags=_repeated_dict(d, "custom_tags", ProjectCustomTag), + default_branch=d.get("default_branch", None), default_endpoint_settings=_from_dict(d, "default_endpoint_settings", ProjectDefaultEndpointSettings), display_name=d.get("display_name", None), enable_pg_native_login=d.get("enable_pg_native_login", None), history_retention_duration=_duration(d, "history_retention_duration"), pg_version=d.get("pg_version", None), + workspace_key_encrypted=d.get("workspace_key_encrypted", None), ) @@ -1631,9 +2320,15 @@ class ProjectStatus: budget_policy_id: Optional[str] = None """The budget policy that is applied to the project.""" + compute_last_active_time: Optional[Timestamp] = None + """The most recent time when any endpoint of this project was active.""" + custom_tags: Optional[List[ProjectCustomTag]] = None """The effective custom tags associated with the project.""" + default_branch: Optional[str] = None + """The full resource path of the default branch of the project""" + default_endpoint_settings: Optional[ProjectDefaultEndpointSettings] = None """The effective default endpoint settings.""" @@ -1662,8 +2357,12 @@ def as_dict(self) -> dict: body["branch_logical_size_limit_bytes"] = self.branch_logical_size_limit_bytes if self.budget_policy_id is not None: body["budget_policy_id"] = self.budget_policy_id + if self.compute_last_active_time is not None: + body["compute_last_active_time"] = self.compute_last_active_time.ToJsonString() if self.custom_tags: body["custom_tags"] = [v.as_dict() for v in self.custom_tags] + if self.default_branch is not None: + body["default_branch"] = 
self.default_branch if self.default_endpoint_settings: body["default_endpoint_settings"] = self.default_endpoint_settings.as_dict() if self.display_name is not None: @@ -1687,8 +2386,12 @@ def as_shallow_dict(self) -> dict: body["branch_logical_size_limit_bytes"] = self.branch_logical_size_limit_bytes if self.budget_policy_id is not None: body["budget_policy_id"] = self.budget_policy_id + if self.compute_last_active_time is not None: + body["compute_last_active_time"] = self.compute_last_active_time if self.custom_tags: body["custom_tags"] = self.custom_tags + if self.default_branch is not None: + body["default_branch"] = self.default_branch if self.default_endpoint_settings: body["default_endpoint_settings"] = self.default_endpoint_settings if self.display_name is not None: @@ -1711,7 +2414,9 @@ def from_dict(cls, d: Dict[str, Any]) -> ProjectStatus: return cls( branch_logical_size_limit_bytes=d.get("branch_logical_size_limit_bytes", None), budget_policy_id=d.get("budget_policy_id", None), + compute_last_active_time=_timestamp(d, "compute_last_active_time"), custom_tags=_repeated_dict(d, "custom_tags", ProjectCustomTag), + default_branch=d.get("default_branch", None), default_endpoint_settings=_from_dict(d, "default_endpoint_settings", ProjectDefaultEndpointSettings), display_name=d.get("display_name", None), enable_pg_native_login=d.get("enable_pg_native_login", None), @@ -1722,6 +2427,24 @@ def from_dict(cls, d: Dict[str, Any]) -> ProjectStatus: ) +class ProvisioningInfoState(Enum): + + ACTIVE = "ACTIVE" + DEGRADED = "DEGRADED" + DELETING = "DELETING" + FAILED = "FAILED" + PROVISIONING = "PROVISIONING" + UPDATING = "UPDATING" + + +class ProvisioningPhase(Enum): + """Copied from database_table_statuses.proto to decouple SDK packages.""" + + PROVISIONING_PHASE_INDEX_SCAN = "PROVISIONING_PHASE_INDEX_SCAN" + PROVISIONING_PHASE_INDEX_SORT = "PROVISIONING_PHASE_INDEX_SORT" + PROVISIONING_PHASE_MAIN = "PROVISIONING_PHASE_MAIN" + + @dataclass class RequestedClaims: 
permission_set: Optional[RequestedClaimsPermissionSet] = None @@ -2025,6 +2748,9 @@ def from_dict(cls, d: Dict[str, Any]) -> RoleRoleSpec: @dataclass class RoleRoleStatus: + attributes: Optional[RoleAttributes] = None + """The PG role attributes associated with the role.""" + auth_method: Optional[RoleAuthMethod] = None identity_type: Optional[RoleIdentityType] = None @@ -2039,6 +2765,8 @@ class RoleRoleStatus: def as_dict(self) -> dict: """Serializes the RoleRoleStatus into a dictionary suitable for use as a JSON request body.""" body = {} + if self.attributes: + body["attributes"] = self.attributes.as_dict() if self.auth_method is not None: body["auth_method"] = self.auth_method.value if self.identity_type is not None: @@ -2052,6 +2780,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the RoleRoleStatus into a shallow dictionary of its immediate attributes.""" body = {} + if self.attributes: + body["attributes"] = self.attributes if self.auth_method is not None: body["auth_method"] = self.auth_method if self.identity_type is not None: @@ -2066,6 +2796,7 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> RoleRoleStatus: """Deserializes the RoleRoleStatus from a dictionary.""" return cls( + attributes=_from_dict(d, "attributes", RoleAttributes), auth_method=_enum(d, "auth_method", RoleAuthMethod), identity_type=_enum(d, "identity_type", RoleIdentityType), membership_roles=_repeated_enum(d, "membership_roles", RoleMembershipRole), @@ -2073,9 +2804,641 @@ def from_dict(cls, d: Dict[str, Any]) -> RoleRoleStatus: ) -class PostgresAPI: - """Use the Postgres API to create and manage Lakebase Autoscaling Postgres infrastructure, including - projects, branches, compute endpoints, and roles. +@dataclass +class SyncedTable: + name: str + """Full three-part (catalog, schema, table) name of the table.""" + + branch: Optional[str] = None + """The resource path of the branch associated with the table. 
Format: + projects/{project_id}/branches/{branch_id}.""" + + data_synchronization_status: Optional[SyncedTableStatus] = None + """Synced Table data synchronization status""" + + database: Optional[str] = None + """The project and branch scoped database to which this table lives in. Of the format: + projects/{project_id}/branches/{branch_id}/databases/{database_id} where database_id is the name + of the logical database in Postgres. When creating a synced table in a database catalog this + field is optional, and if specified must match the database, project, and branch of the catalog. + When creating a synced table in a standard catalog, this field is required.""" + + project: Optional[str] = None + """The resource path of the project associated with the table. Format: projects/{project_id}.""" + + spec: Optional[SyncedTableSpec] = None + + table_serving_url: Optional[str] = None + """REST API URL for serving data from this table.""" + + unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None + """The provisioning state of the synced table entity in Unity Catalog.""" + + def as_dict(self) -> dict: + """Serializes the SyncedTable into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.branch is not None: + body["branch"] = self.branch + if self.data_synchronization_status: + body["data_synchronization_status"] = self.data_synchronization_status.as_dict() + if self.database is not None: + body["database"] = self.database + if self.name is not None: + body["name"] = self.name + if self.project is not None: + body["project"] = self.project + if self.spec: + body["spec"] = self.spec.as_dict() + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url + if self.unity_catalog_provisioning_state is not None: + body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncedTable into a shallow 
dictionary of its immediate attributes.""" + body = {} + if self.branch is not None: + body["branch"] = self.branch + if self.data_synchronization_status: + body["data_synchronization_status"] = self.data_synchronization_status + if self.database is not None: + body["database"] = self.database + if self.name is not None: + body["name"] = self.name + if self.project is not None: + body["project"] = self.project + if self.spec: + body["spec"] = self.spec + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url + if self.unity_catalog_provisioning_state is not None: + body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncedTable: + """Deserializes the SyncedTable from a dictionary.""" + return cls( + branch=d.get("branch", None), + data_synchronization_status=_from_dict(d, "data_synchronization_status", SyncedTableStatus), + database=d.get("database", None), + name=d.get("name", None), + project=d.get("project", None), + spec=_from_dict(d, "spec", SyncedTableSpec), + table_serving_url=d.get("table_serving_url", None), + unity_catalog_provisioning_state=_enum(d, "unity_catalog_provisioning_state", ProvisioningInfoState), + ) + + +@dataclass +class SyncedTableContinuousUpdateStatus: + """Detailed status of a synced table. Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE + or the SYNCED_UPDATING_PIPELINE_RESOURCES state. 
Copied from database_table_statuses.proto to + decouple SDK packages.""" + + initial_pipeline_sync_progress: Optional[SyncedTablePipelineProgress] = None + """Progress of the initial data synchronization.""" + + last_processed_commit_version: Optional[int] = None + """The last source table Delta version that was successfully synced to the synced table.""" + + timestamp: Optional[Timestamp] = None + """The end timestamp of the last time any data was synchronized from the source table to the synced + table. This is when the data is available in the synced table.""" + + def as_dict(self) -> dict: + """Serializes the SyncedTableContinuousUpdateStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.initial_pipeline_sync_progress: + body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress.as_dict() + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp.ToJsonString() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncedTableContinuousUpdateStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.initial_pipeline_sync_progress: + body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncedTableContinuousUpdateStatus: + """Deserializes the SyncedTableContinuousUpdateStatus from a dictionary.""" + return cls( + initial_pipeline_sync_progress=_from_dict(d, "initial_pipeline_sync_progress", SyncedTablePipelineProgress), + last_processed_commit_version=d.get("last_processed_commit_version", None), + timestamp=_timestamp(d, 
"timestamp"), + ) + + +@dataclass +class SyncedTableFailedStatus: + """Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the + SYNCED_PIPELINE_FAILED state. Copied from database_table_statuses.proto to decouple SDK + packages.""" + + last_processed_commit_version: Optional[int] = None + """The last source table Delta version that was successfully synced to the synced table. The last + source table Delta version that was synced to the synced table. Only populated if the table is + still synced and available for serving.""" + + timestamp: Optional[Timestamp] = None + """The end timestamp of the last time any data was synchronized from the source table to the synced + table. Only populated if the table is still synced and available for serving.""" + + def as_dict(self) -> dict: + """Serializes the SyncedTableFailedStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp.ToJsonString() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncedTableFailedStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncedTableFailedStatus: + """Deserializes the SyncedTableFailedStatus from a dictionary.""" + return cls( + last_processed_commit_version=d.get("last_processed_commit_version", None), + timestamp=_timestamp(d, "timestamp"), + ) + + +@dataclass +class SyncedTablePipelineProgress: + """Progress information of the Synced Table data synchronization pipeline. 
Copied from + database_table_statuses.proto to decouple SDK packages.""" + + estimated_completion_time_seconds: Optional[float] = None + """The estimated time remaining to complete this update in seconds.""" + + latest_version_currently_processing: Optional[int] = None + """The source table Delta version that was last processed by the pipeline. The pipeline may not + have completely processed this version yet.""" + + provisioning_phase: Optional[ProvisioningPhase] = None + """The current phase of the data synchronization pipeline.""" + + sync_progress_completion: Optional[float] = None + """The completion ratio of this update. This is a number between 0 and 1.""" + + synced_row_count: Optional[int] = None + """The number of rows that have been synced in this update.""" + + total_row_count: Optional[int] = None + """The total number of rows that need to be synced in this update. This number may be an estimate.""" + + def as_dict(self) -> dict: + """Serializes the SyncedTablePipelineProgress into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.estimated_completion_time_seconds is not None: + body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds + if self.latest_version_currently_processing is not None: + body["latest_version_currently_processing"] = self.latest_version_currently_processing + if self.provisioning_phase is not None: + body["provisioning_phase"] = self.provisioning_phase.value + if self.sync_progress_completion is not None: + body["sync_progress_completion"] = self.sync_progress_completion + if self.synced_row_count is not None: + body["synced_row_count"] = self.synced_row_count + if self.total_row_count is not None: + body["total_row_count"] = self.total_row_count + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncedTablePipelineProgress into a shallow dictionary of its immediate attributes.""" + body = {} + if self.estimated_completion_time_seconds is not None: + 
body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds + if self.latest_version_currently_processing is not None: + body["latest_version_currently_processing"] = self.latest_version_currently_processing + if self.provisioning_phase is not None: + body["provisioning_phase"] = self.provisioning_phase + if self.sync_progress_completion is not None: + body["sync_progress_completion"] = self.sync_progress_completion + if self.synced_row_count is not None: + body["synced_row_count"] = self.synced_row_count + if self.total_row_count is not None: + body["total_row_count"] = self.total_row_count + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncedTablePipelineProgress: + """Deserializes the SyncedTablePipelineProgress from a dictionary.""" + return cls( + estimated_completion_time_seconds=d.get("estimated_completion_time_seconds", None), + latest_version_currently_processing=d.get("latest_version_currently_processing", None), + provisioning_phase=_enum(d, "provisioning_phase", ProvisioningPhase), + sync_progress_completion=d.get("sync_progress_completion", None), + synced_row_count=d.get("synced_row_count", None), + total_row_count=d.get("total_row_count", None), + ) + + +@dataclass +class SyncedTablePosition: + """Copied from database_table_statuses.proto to decouple SDK packages.""" + + delta_table_sync_info: Optional[DeltaTableSyncInfo] = None + + sync_end_timestamp: Optional[Timestamp] = None + """The end timestamp of the most recent successful synchronization. This is the time when the data + is available in the synced table.""" + + sync_start_timestamp: Optional[Timestamp] = None + """The starting timestamp of the most recent successful synchronization from the source table to + the destination (synced) table. Note this is the starting timestamp of the sync operation, not + the end time. 
E.g., for a batch, this is the time when the sync operation started.""" + + def as_dict(self) -> dict: + """Serializes the SyncedTablePosition into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.delta_table_sync_info: + body["delta_table_sync_info"] = self.delta_table_sync_info.as_dict() + if self.sync_end_timestamp is not None: + body["sync_end_timestamp"] = self.sync_end_timestamp.ToJsonString() + if self.sync_start_timestamp is not None: + body["sync_start_timestamp"] = self.sync_start_timestamp.ToJsonString() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncedTablePosition into a shallow dictionary of its immediate attributes.""" + body = {} + if self.delta_table_sync_info: + body["delta_table_sync_info"] = self.delta_table_sync_info + if self.sync_end_timestamp is not None: + body["sync_end_timestamp"] = self.sync_end_timestamp + if self.sync_start_timestamp is not None: + body["sync_start_timestamp"] = self.sync_start_timestamp + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncedTablePosition: + """Deserializes the SyncedTablePosition from a dictionary.""" + return cls( + delta_table_sync_info=_from_dict(d, "delta_table_sync_info", DeltaTableSyncInfo), + sync_end_timestamp=_timestamp(d, "sync_end_timestamp"), + sync_start_timestamp=_timestamp(d, "sync_start_timestamp"), + ) + + +@dataclass +class SyncedTableProvisioningStatus: + """Detailed status of a synced table. Shown if the synced table is in the + PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state. Copied from + database_table_statuses.proto to decouple SDK packages.""" + + initial_pipeline_sync_progress: Optional[SyncedTablePipelineProgress] = None + """Details about initial data synchronization. 
Only populated when in the + PROVISIONING_INITIAL_SNAPSHOT state.""" + + def as_dict(self) -> dict: + """Serializes the SyncedTableProvisioningStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.initial_pipeline_sync_progress: + body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncedTableProvisioningStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.initial_pipeline_sync_progress: + body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncedTableProvisioningStatus: + """Deserializes the SyncedTableProvisioningStatus from a dictionary.""" + return cls( + initial_pipeline_sync_progress=_from_dict(d, "initial_pipeline_sync_progress", SyncedTablePipelineProgress) + ) + + +@dataclass +class SyncedTableSpec: + accelerated_sync: Optional[bool] = None + """When true, enables accelerated sync mode for the initial data load. This significantly improves + performance for large tables. Requires workspace-level enablement through Lakebase Accelerated + Sync preview.""" + + create_database_objects_if_missing: Optional[bool] = None + """If true, the synced table's logical database and schema resources in PG will be created if they + do not already exist.""" + + existing_pipeline_id: Optional[str] = None + """ID of an existing pipeline to bin-pack this synced table into. At most one of + existing_pipeline_id and new_pipeline_spec should be defined.""" + + new_pipeline_spec: Optional[NewPipelineSpec] = None + """Specification for creating a new pipeline. 
At most one of existing_pipeline_id and + new_pipeline_spec should be defined.""" + + primary_key_columns: Optional[List[str]] = None + """Primary Key columns to be used for data insert/update in the destination.""" + + scheduling_policy: Optional[SyncedTableSpecSyncedTableSchedulingPolicy] = None + """Scheduling policy of the underlying pipeline.""" + + source_table_full_name: Optional[str] = None + """Three-part (catalog, schema, table) name of the source Delta table.""" + + timeseries_key: Optional[str] = None + """Time series key to deduplicate (tie-break) rows with the same primary key.""" + + def as_dict(self) -> dict: + """Serializes the SyncedTableSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.accelerated_sync is not None: + body["accelerated_sync"] = self.accelerated_sync + if self.create_database_objects_if_missing is not None: + body["create_database_objects_if_missing"] = self.create_database_objects_if_missing + if self.existing_pipeline_id is not None: + body["existing_pipeline_id"] = self.existing_pipeline_id + if self.new_pipeline_spec: + body["new_pipeline_spec"] = self.new_pipeline_spec.as_dict() + if self.primary_key_columns: + body["primary_key_columns"] = [v for v in self.primary_key_columns] + if self.scheduling_policy is not None: + body["scheduling_policy"] = self.scheduling_policy.value + if self.source_table_full_name is not None: + body["source_table_full_name"] = self.source_table_full_name + if self.timeseries_key is not None: + body["timeseries_key"] = self.timeseries_key + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncedTableSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.accelerated_sync is not None: + body["accelerated_sync"] = self.accelerated_sync + if self.create_database_objects_if_missing is not None: + body["create_database_objects_if_missing"] = self.create_database_objects_if_missing + if self.existing_pipeline_id is 
not None: + body["existing_pipeline_id"] = self.existing_pipeline_id + if self.new_pipeline_spec: + body["new_pipeline_spec"] = self.new_pipeline_spec + if self.primary_key_columns: + body["primary_key_columns"] = self.primary_key_columns + if self.scheduling_policy is not None: + body["scheduling_policy"] = self.scheduling_policy + if self.source_table_full_name is not None: + body["source_table_full_name"] = self.source_table_full_name + if self.timeseries_key is not None: + body["timeseries_key"] = self.timeseries_key + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncedTableSpec: + """Deserializes the SyncedTableSpec from a dictionary.""" + return cls( + accelerated_sync=d.get("accelerated_sync", None), + create_database_objects_if_missing=d.get("create_database_objects_if_missing", None), + existing_pipeline_id=d.get("existing_pipeline_id", None), + new_pipeline_spec=_from_dict(d, "new_pipeline_spec", NewPipelineSpec), + primary_key_columns=d.get("primary_key_columns", None), + scheduling_policy=_enum(d, "scheduling_policy", SyncedTableSpecSyncedTableSchedulingPolicy), + source_table_full_name=d.get("source_table_full_name", None), + timeseries_key=d.get("timeseries_key", None), + ) + + +class SyncedTableSpecSyncedTableSchedulingPolicy(Enum): + """Scheduling policy of the synced table's underlying pipeline. These should be kept in sync with + the scheduling policy enums for SyncedDatabaseTables with DatabaseInstances""" + + CONTINUOUS = "CONTINUOUS" + SNAPSHOT = "SNAPSHOT" + TRIGGERED = "TRIGGERED" + + +class SyncedTableState(Enum): + """The state of a synced table. 
Copied from database_table_statuses.proto to decouple SDK packages.""" + + SYNCED_TABLED_OFFLINE = "SYNCED_TABLED_OFFLINE" + SYNCED_TABLE_OFFLINE_FAILED = "SYNCED_TABLE_OFFLINE_FAILED" + SYNCED_TABLE_ONLINE = "SYNCED_TABLE_ONLINE" + SYNCED_TABLE_ONLINE_CONTINUOUS_UPDATE = "SYNCED_TABLE_ONLINE_CONTINUOUS_UPDATE" + SYNCED_TABLE_ONLINE_NO_PENDING_UPDATE = "SYNCED_TABLE_ONLINE_NO_PENDING_UPDATE" + SYNCED_TABLE_ONLINE_PIPELINE_FAILED = "SYNCED_TABLE_ONLINE_PIPELINE_FAILED" + SYNCED_TABLE_ONLINE_TRIGGERED_UPDATE = "SYNCED_TABLE_ONLINE_TRIGGERED_UPDATE" + SYNCED_TABLE_ONLINE_UPDATING_PIPELINE_RESOURCES = "SYNCED_TABLE_ONLINE_UPDATING_PIPELINE_RESOURCES" + SYNCED_TABLE_PROVISIONING = "SYNCED_TABLE_PROVISIONING" + SYNCED_TABLE_PROVISIONING_INITIAL_SNAPSHOT = "SYNCED_TABLE_PROVISIONING_INITIAL_SNAPSHOT" + SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES = "SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES" + + +@dataclass +class SyncedTableStatus: + continuous_update_status: Optional[SyncedTableContinuousUpdateStatus] = None + + detailed_state: Optional[SyncedTableState] = None + """The state of the synced table.""" + + failed_status: Optional[SyncedTableFailedStatus] = None + + last_sync: Optional[SyncedTablePosition] = None + """Summary of the last successful synchronization from source to destination.""" + + message: Optional[str] = None + """A text description of the current state of the synced table.""" + + pipeline_id: Optional[str] = None + """ID of the associated pipeline.""" + + provisioning_status: Optional[SyncedTableProvisioningStatus] = None + + triggered_update_status: Optional[SyncedTableTriggeredUpdateStatus] = None + + def as_dict(self) -> dict: + """Serializes the SyncedTableStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.continuous_update_status: + body["continuous_update_status"] = self.continuous_update_status.as_dict() + if self.detailed_state is not None: + body["detailed_state"] = self.detailed_state.value + if 
self.failed_status: + body["failed_status"] = self.failed_status.as_dict() + if self.last_sync: + body["last_sync"] = self.last_sync.as_dict() + if self.message is not None: + body["message"] = self.message + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.provisioning_status: + body["provisioning_status"] = self.provisioning_status.as_dict() + if self.triggered_update_status: + body["triggered_update_status"] = self.triggered_update_status.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncedTableStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.continuous_update_status: + body["continuous_update_status"] = self.continuous_update_status + if self.detailed_state is not None: + body["detailed_state"] = self.detailed_state + if self.failed_status: + body["failed_status"] = self.failed_status + if self.last_sync: + body["last_sync"] = self.last_sync + if self.message is not None: + body["message"] = self.message + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.provisioning_status: + body["provisioning_status"] = self.provisioning_status + if self.triggered_update_status: + body["triggered_update_status"] = self.triggered_update_status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncedTableStatus: + """Deserializes the SyncedTableStatus from a dictionary.""" + return cls( + continuous_update_status=_from_dict(d, "continuous_update_status", SyncedTableContinuousUpdateStatus), + detailed_state=_enum(d, "detailed_state", SyncedTableState), + failed_status=_from_dict(d, "failed_status", SyncedTableFailedStatus), + last_sync=_from_dict(d, "last_sync", SyncedTablePosition), + message=d.get("message", None), + pipeline_id=d.get("pipeline_id", None), + provisioning_status=_from_dict(d, "provisioning_status", SyncedTableProvisioningStatus), + triggered_update_status=_from_dict(d, 
"triggered_update_status", SyncedTableTriggeredUpdateStatus), + ) + + +@dataclass +class SyncedTableTriggeredUpdateStatus: + """Detailed status of a synced table. Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE + or the SYNCED_NO_PENDING_UPDATE state. Copied from database_table_statuses.proto to decouple SDK + packages.""" + + last_processed_commit_version: Optional[int] = None + """The last source table Delta version that was successfully synced to the synced table.""" + + timestamp: Optional[Timestamp] = None + """The end timestamp of the last time any data was synchronized from the source table to the synced + table. This is when the data is available in the synced table.""" + + triggered_update_progress: Optional[SyncedTablePipelineProgress] = None + """Progress of the active data synchronization pipeline.""" + + def as_dict(self) -> dict: + """Serializes the SyncedTableTriggeredUpdateStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp.ToJsonString() + if self.triggered_update_progress: + body["triggered_update_progress"] = self.triggered_update_progress.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncedTableTriggeredUpdateStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp + if self.triggered_update_progress: + body["triggered_update_progress"] = self.triggered_update_progress + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncedTableTriggeredUpdateStatus: + """Deserializes the SyncedTableTriggeredUpdateStatus from a dictionary.""" + 
return cls( + last_processed_commit_version=d.get("last_processed_commit_version", None), + timestamp=_timestamp(d, "timestamp"), + triggered_update_progress=_from_dict(d, "triggered_update_progress", SyncedTablePipelineProgress), + ) + + +@dataclass +class Table: + """Table represents a non-synced database table in a Lakebase project. Unlike SyncedTable, this + does not have a data synchronization pipeline.""" + + name: str + """Full three-part (catalog, schema, table) name of the table.""" + + database: str + """The project and branch scoped database to which this table belongs. Of the format: + projects/{project_id}/branches/{branch_id}/databases/{database_id} where database_id is the name + of the logical database in Postgres.""" + + branch: Optional[str] = None + """The id of the database branch associated with the table. Of the format + projects/{project_id}/branches/{branch_id}.""" + + project: Optional[str] = None + """The id of the database project associated with the table. Of the format projects/{project_id}.""" + + table_serving_url: Optional[str] = None + """REST API URL for serving data from this table.""" + + def as_dict(self) -> dict: + """Serializes the Table into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.branch is not None: + body["branch"] = self.branch + if self.database is not None: + body["database"] = self.database + if self.name is not None: + body["name"] = self.name + if self.project is not None: + body["project"] = self.project + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Table into a shallow dictionary of its immediate attributes.""" + body = {} + if self.branch is not None: + body["branch"] = self.branch + if self.database is not None: + body["database"] = self.database + if self.name is not None: + body["name"] = self.name + if self.project is not None: + body["project"] = 
self.project + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Table: + """Deserializes the Table from a dictionary.""" + return cls( + branch=d.get("branch", None), + database=d.get("database", None), + name=d.get("name", None), + project=d.get("project", None), + table_serving_url=d.get("table_serving_url", None), + ) + + +class PostgresAPI: + """Use the Postgres API to create and manage Lakebase Autoscaling Postgres infrastructure, including + projects, branches, compute endpoints, and roles. This API manages database infrastructure only. To query or modify data, use the Data API or direct SQL connections. @@ -2123,6 +3486,27 @@ def create_branch(self, parent: str, branch: Branch, branch_id: str) -> CreateBr operation = Operation.from_dict(res) return CreateBranchOperation(self, operation) + def create_catalog(self, catalog: Catalog) -> Catalog: + """Register a Database in UC. + + :param catalog: :class:`Catalog` + + :returns: :class:`Catalog` + """ + + body = catalog.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("POST", "/api/2.0/postgres/catalogs", body=body, headers=headers) + return Catalog.from_dict(res) + def create_database( self, parent: str, database: Database, *, database_id: Optional[str] = None ) -> CreateDatabaseOperation: @@ -2263,6 +3647,48 @@ def create_role(self, parent: str, role: Role, *, role_id: Optional[str] = None) operation = Operation.from_dict(res) return CreateRoleOperation(self, operation) + def create_synced_table(self, synced_table: SyncedTable) -> SyncedTable: + """Create a Synced Table. 
+ + :param synced_table: :class:`SyncedTable` + + :returns: :class:`SyncedTable` + """ + + body = synced_table.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("POST", "/api/2.0/postgres/synced_tables", body=body, headers=headers) + return SyncedTable.from_dict(res) + + def create_table(self, table: Table) -> Table: + """Create a Table (non-synced database table for Autoscaling v2 Lakebase projects). + + :param table: :class:`Table` + + :returns: :class:`Table` + """ + + body = table.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("POST", "/api/2.0/postgres/tables", body=body, headers=headers) + return Table.from_dict(res) + def delete_branch(self, name: str) -> DeleteBranchOperation: """Deletes the specified database branch. @@ -2284,6 +3710,24 @@ def delete_branch(self, name: str) -> DeleteBranchOperation: operation = Operation.from_dict(res) return DeleteBranchOperation(self, operation) + def delete_catalog(self, name: str): + """Delete a Database Catalog. + + :param name: str + + + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + self._api.do("DELETE", f"/api/2.0/postgres/{name}", headers=headers) + def delete_database(self, name: str) -> DeleteDatabaseOperation: """Delete a Database. 
@@ -2306,14 +3750,89 @@ def delete_database(self, name: str) -> DeleteDatabaseOperation: operation = Operation.from_dict(res) return DeleteDatabaseOperation(self, operation) - def delete_endpoint(self, name: str) -> DeleteEndpointOperation: - """Deletes the specified compute endpoint. + def delete_endpoint(self, name: str) -> DeleteEndpointOperation: + """Deletes the specified compute endpoint. + + :param name: str + The full resource path of the endpoint to delete. Format: + projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id} + + :returns: :class:`Operation` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("DELETE", f"/api/2.0/postgres/{name}", headers=headers) + operation = Operation.from_dict(res) + return DeleteEndpointOperation(self, operation) + + def delete_project(self, name: str) -> DeleteProjectOperation: + """Deletes the specified database project. + + :param name: str + The full resource path of the project to delete. Format: projects/{project_id} + + :returns: :class:`Operation` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("DELETE", f"/api/2.0/postgres/{name}", headers=headers) + operation = Operation.from_dict(res) + return DeleteProjectOperation(self, operation) + + def delete_role(self, name: str, *, reassign_owned_to: Optional[str] = None) -> DeleteRoleOperation: + """Deletes the specified Postgres role. + + :param name: str + The full resource path of the role to delete. Format: + projects/{project_id}/branches/{branch_id}/roles/{role_id} + :param reassign_owned_to: str (optional) + Reassign objects. 
If this is set, all objects owned by the role are reassigned to the role specified + in this parameter. + + NOTE: setting this requires spinning up a compute to succeed, since it involves running SQL queries. + + TODO: #LKB-7187 implement reassign_owned_to on LBM side. This might end-up being a synchronous query + when this parameter is used. + + :returns: :class:`Operation` + """ + + query = {} + if reassign_owned_to is not None: + query["reassign_owned_to"] = reassign_owned_to + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("DELETE", f"/api/2.0/postgres/{name}", query=query, headers=headers) + operation = Operation.from_dict(res) + return DeleteRoleOperation(self, operation) + + def delete_synced_table(self, name: str): + """Delete a Synced Table. :param name: str - The full resource path of the endpoint to delete. Format: - projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id} + Full three-part (catalog, schema, table) name of the synced table. + - :returns: :class:`Operation` """ headers = { @@ -2324,17 +3843,15 @@ def delete_endpoint(self, name: str) -> DeleteEndpointOperation: if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: headers["X-Databricks-Org-Id"] = cfg.workspace_id - res = self._api.do("DELETE", f"/api/2.0/postgres/{name}", headers=headers) - operation = Operation.from_dict(res) - return DeleteEndpointOperation(self, operation) + self._api.do("DELETE", f"/api/2.0/postgres/synced_tables/{name}", headers=headers) - def delete_project(self, name: str) -> DeleteProjectOperation: - """Deletes the specified database project. + def delete_table(self, name: str): + """Delete a Table (non-synced database table for Autoscaling v2 Lakebase projects). :param name: str - The full resource path of the project to delete. 
Format: projects/{project_id} + Full three-part (catalog, schema, table) name of the table. + - :returns: :class:`Operation` """ headers = { @@ -2345,31 +3862,42 @@ def delete_project(self, name: str) -> DeleteProjectOperation: if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: headers["X-Databricks-Org-Id"] = cfg.workspace_id - res = self._api.do("DELETE", f"/api/2.0/postgres/{name}", headers=headers) - operation = Operation.from_dict(res) - return DeleteProjectOperation(self, operation) - - def delete_role(self, name: str, *, reassign_owned_to: Optional[str] = None) -> DeleteRoleOperation: - """Deletes the specified Postgres role. - - :param name: str - The full resource path of the role to delete. Format: - projects/{project_id}/branches/{branch_id}/roles/{role_id} - :param reassign_owned_to: str (optional) - Reassign objects. If this is set, all objects owned by the role are reassigned to the role specified - in this parameter. - - NOTE: setting this requires spinning up a compute to succeed, since it involves running SQL queries. + self._api.do("DELETE", f"/api/2.0/postgres/tables/{name}", headers=headers) - TODO: #LKB-7187 implement reassign_owned_to on LBM side. This might end-up being a synchronous query - when this parameter is used. + def disable_forward_etl( + self, + parent: str, + *, + pg_database_oid: Optional[int] = None, + pg_schema_oid: Optional[int] = None, + tenant_id: Optional[str] = None, + timeline_id: Optional[str] = None, + ) -> DisableForwardEtlResponse: + """Disable Forward ETL for a branch. - :returns: :class:`Operation` + :param parent: str + The Branch to disable Forward ETL for. Format: projects/{project_id}/branches/{branch_id} + :param pg_database_oid: int (optional) + PostgreSQL database OID to disable. + :param pg_schema_oid: int (optional) + PostgreSQL schema OID to disable. + :param tenant_id: str (optional) + Tenant ID (dashless UUID format). + :param timeline_id: str (optional) + Timeline ID (dashless UUID format). 
+ + :returns: :class:`DisableForwardEtlResponse` """ query = {} - if reassign_owned_to is not None: - query["reassign_owned_to"] = reassign_owned_to + if pg_database_oid is not None: + query["pg_database_oid"] = pg_database_oid + if pg_schema_oid is not None: + query["pg_schema_oid"] = pg_schema_oid + if tenant_id is not None: + query["tenant_id"] = tenant_id + if timeline_id is not None: + query["timeline_id"] = timeline_id headers = { "Accept": "application/json", } @@ -2378,12 +3906,17 @@ def delete_role(self, name: str, *, reassign_owned_to: Optional[str] = None) -> if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: headers["X-Databricks-Org-Id"] = cfg.workspace_id - res = self._api.do("DELETE", f"/api/2.0/postgres/{name}", query=query, headers=headers) - operation = Operation.from_dict(res) - return DeleteRoleOperation(self, operation) + res = self._api.do("DELETE", f"/api/2.0/postgres/{parent}/forward-etl", query=query, headers=headers) + return DisableForwardEtlResponse.from_dict(res) def generate_database_credential( - self, endpoint: str, *, claims: Optional[List[RequestedClaims]] = None + self, + endpoint: str, + *, + claims: Optional[List[RequestedClaims]] = None, + expire_time: Optional[Timestamp] = None, + group_name: Optional[str] = None, + ttl: Optional[Duration] = None, ) -> DatabaseCredential: """Generate OAuth credentials for a Postgres database. @@ -2392,6 +3925,14 @@ def generate_database_credential( projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id} :param claims: List[:class:`RequestedClaims`] (optional) The returned token will be scoped to UC tables with the specified permissions. + :param expire_time: Timestamp (optional) + Timestamp in UTC of when this credential should expire. Expire time should be within 1 hour of the + current time. + :param group_name: str (optional) + Databricks workspace group name. When provided, credentials are generated with permissions scoped to + this group. 
+ :param ttl: Duration (optional) + The requested time-to-live for the generated credential token. Maximum allowed duration is 1 hour. :returns: :class:`DatabaseCredential` """ @@ -2401,6 +3942,12 @@ def generate_database_credential( body["claims"] = [v.as_dict() for v in claims] if endpoint is not None: body["endpoint"] = endpoint + if expire_time is not None: + body["expire_time"] = expire_time.ToJsonString() + if group_name is not None: + body["group_name"] = group_name + if ttl is not None: + body["ttl"] = ttl.ToJsonString() headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -2433,6 +3980,47 @@ def get_branch(self, name: str) -> Branch: res = self._api.do("GET", f"/api/2.0/postgres/{name}", headers=headers) return Branch.from_dict(res) + def get_catalog(self, name: str) -> Catalog: + """Get a Database Catalog. + + :param name: str + + :returns: :class:`Catalog` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/2.0/postgres/{name}", headers=headers) + return Catalog.from_dict(res) + + def get_compute_instance(self, name: str) -> ComputeInstance: + """Lists the specific compute instance under an endpoint. Note: ComputeInstances are managed via the + parent Endpoint resource, and cannot be created, updated, or deleted directly. + + :param name: str + The full resource path of the compute instance to retrieve. 
Format: + projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id}/compute-instances/{compute_instance_id} + + :returns: :class:`ComputeInstance` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/2.0/postgres/{name}", headers=headers) + return ComputeInstance.from_dict(res) + def get_database(self, name: str) -> Database: """Get a Database. @@ -2476,6 +4064,68 @@ def get_endpoint(self, name: str) -> Endpoint: res = self._api.do("GET", f"/api/2.0/postgres/{name}", headers=headers) return Endpoint.from_dict(res) + def get_forward_etl_metadata( + self, parent: str, *, tenant_id: Optional[str] = None, timeline_id: Optional[str] = None + ) -> ForwardEtlMetadata: + """Get Forward ETL metadata (database and schema OIDs). + + :param parent: str + The Branch to get metadata for. Format: projects/{project_id}/branches/{branch_id} + :param tenant_id: str (optional) + Tenant ID (dashless UUID format). + :param timeline_id: str (optional) + Timeline ID (dashless UUID format). + + :returns: :class:`ForwardEtlMetadata` + """ + + query = {} + if tenant_id is not None: + query["tenant_id"] = tenant_id + if timeline_id is not None: + query["timeline_id"] = timeline_id + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/2.0/postgres/{parent}/forward-etl/metadata", query=query, headers=headers) + return ForwardEtlMetadata.from_dict(res) + + def get_forward_etl_status( + self, parent: str, *, tenant_id: Optional[str] = None, timeline_id: Optional[str] = None + ) -> ForwardEtlStatus: + """Get Forward ETL configuration and status for a branch. + + :param parent: str + The Branch to get Forward ETL status for. 
Format: projects/{project_id}/branches/{branch_id} + :param tenant_id: str (optional) + Tenant ID (dashless UUID format). + :param timeline_id: str (optional) + Timeline ID (dashless UUID format). + + :returns: :class:`ForwardEtlStatus` + """ + + query = {} + if tenant_id is not None: + query["tenant_id"] = tenant_id + if timeline_id is not None: + query["timeline_id"] = timeline_id + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/2.0/postgres/{parent}/forward-etl", query=query, headers=headers) + return ForwardEtlStatus.from_dict(res) + def get_operation(self, name: str) -> Operation: """Retrieves the status of a long-running operation. @@ -2538,6 +4188,46 @@ def get_role(self, name: str) -> Role: res = self._api.do("GET", f"/api/2.0/postgres/{name}", headers=headers) return Role.from_dict(res) + def get_synced_table(self, name: str) -> SyncedTable: + """Get a Synced Table. + + :param name: str + Full three-part (catalog, schema, table) name of the synced table. + + :returns: :class:`SyncedTable` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/2.0/postgres/synced_tables/{name}", headers=headers) + return SyncedTable.from_dict(res) + + def get_table(self, name: str) -> Table: + """Get a Table (non-synced database table for Autoscaling v2 Lakebase projects). + + :param name: str + Full three-part (catalog, schema, table) name of the table. 
+ + :returns: :class:`Table` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/2.0/postgres/tables/{name}", headers=headers) + return Table.from_dict(res) + def list_branches( self, parent: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[Branch]: @@ -2575,6 +4265,52 @@ def list_branches( return query["page_token"] = json["next_page_token"] + def list_compute_instances( + self, parent: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[ComputeInstance]: + """Lists all compute instances that have been created under the specified endpoint. Note: + ComputeInstances are managed via the parent Endpoint resource, and cannot be created, updated, or + deleted directly. + + :param parent: str + The parent, which owns the compute instances. + :param page_size: int (optional) + The maximum number of compute instances to return. The service may return fewer than this value. + + If unspecified, at most 50 compute instances will be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + :param page_token: str (optional) + A page token, received from a previous `ListInstances` call. Provide this to retrieve the subsequent + page. + + When paginating, all other parameters provided to `ListInstances` must match the call that provided + the page token. 
+ + :returns: Iterator over :class:`ComputeInstance` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + while True: + json = self._api.do("GET", f"/api/2.0/postgres/{parent}/compute-instances", query=query, headers=headers) + if "compute_instances" in json: + for v in json["compute_instances"]: + yield ComputeInstance.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + def list_databases( self, parent: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[Database]: @@ -2857,6 +4593,41 @@ def update_project(self, name: str, project: Project, update_mask: FieldMask) -> operation = Operation.from_dict(res) return UpdateProjectOperation(self, operation) + def update_role(self, name: str, role: Role, update_mask: FieldMask) -> UpdateRoleOperation: + """Update a role for a branch. + + :param name: str + Output only. The full resource path of the role. Format: + projects/{project_id}/branches/{branch_id}/roles/{role_id} + :param role: :class:`Role` + The Postgres Role to update. + + The role's `name` field is used to identify the role to update. Format: + projects/{project_id}/branches/{branch_id}/roles/{role_id} + :param update_mask: FieldMask + The list of fields to update in Postgres Role. If unspecified, all fields will be updated when + possible. 
+ + :returns: :class:`Operation` + """ + + body = role.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask.ToJsonString() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("PATCH", f"/api/2.0/postgres/{name}", query=query, body=body, headers=headers) + operation = Operation.from_dict(res) + return UpdateRoleOperation(self, operation) + class CreateBranchOperation: """Long-running operation for create_branch""" @@ -3924,3 +5695,80 @@ def done(self) -> bool: self._operation = operation return operation.done + + +class UpdateRoleOperation: + """Long-running operation for update_role""" + + def __init__(self, impl: PostgresAPI, operation: Operation): + self._impl = impl + self._operation = operation + + def wait(self, opts: Optional[lro.LroOptions] = None) -> Role: + """Wait blocks until the long-running operation is completed. If no timeout is + specified, this will poll indefinitely. If a timeout is provided and the operation + didn't finish within the timeout, this function will raise an error of type + TimeoutError, otherwise returns successful response and any errors encountered. 
+ + :param opts: :class:`LroOptions` + Timeout options (default: polls indefinitely) + + :returns: :class:`Role` + """ + + def poll_operation(): + operation = self._impl.get_operation(name=self._operation.name) + + # Update local operation state + self._operation = operation + + if not operation.done: + return None, RetryError.continues("operation still in progress") + + if operation.error: + error_msg = operation.error.message if operation.error.message else "unknown error" + if operation.error.error_code: + error_msg = f"[{operation.error.error_code}] {error_msg}" + return None, RetryError.halt(Exception(f"operation failed: {error_msg}")) + + # Operation completed successfully, unmarshal response. + if operation.response is None: + return None, RetryError.halt(Exception("operation completed but no response available")) + + role = Role.from_dict(operation.response) + + return role, None + + return poll(poll_operation, timeout=opts.timeout if opts is not None else None) + + def name(self) -> str: + """Name returns the name of the long-running operation. The name is assigned + by the server and is unique within the service from which the operation is created. + + :returns: str + """ + return self._operation.name + + def metadata(self) -> RoleOperationMetadata: + """Metadata returns metadata associated with the long-running operation. + If the metadata is not available, the returned metadata is None. + + :returns: :class:`RoleOperationMetadata` or None + """ + if self._operation.metadata is None: + return None + + return RoleOperationMetadata.from_dict(self._operation.metadata) + + def done(self) -> bool: + """Done reports whether the long-running operation has completed. 
+ + :returns: bool + """ + # Refresh the operation state first + operation = self._impl.get_operation(name=self._operation.name) + + # Update local operation state + self._operation = operation + + return operation.done diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py index ad03b23d5..3764fe41b 100755 --- a/databricks/sdk/service/provisioning.py +++ b/databricks/sdk/service/provisioning.py @@ -2531,6 +2531,7 @@ def create( self, *, aws_region: Optional[str] = None, + azure_workspace_info: Optional[AzureWorkspaceInfo] = None, cloud: Optional[str] = None, cloud_resource_container: Optional[CloudResourceContainer] = None, compute_mode: Optional[CustomerFacingComputeMode] = None, @@ -2580,6 +2581,7 @@ def create( [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html :param aws_region: str (optional) + :param azure_workspace_info: :class:`AzureWorkspaceInfo` (optional) :param cloud: str (optional) DEPRECATED: This field is being ignored by the server and will be removed in the future. The cloud name. This field always has the value `gcp`. 
@@ -2654,6 +2656,8 @@ def create( body = {} if aws_region is not None: body["aws_region"] = aws_region + if azure_workspace_info is not None: + body["azure_workspace_info"] = azure_workspace_info.as_dict() if cloud is not None: body["cloud"] = cloud if cloud_resource_container is not None: @@ -2706,6 +2710,7 @@ def create_and_wait( self, *, aws_region: Optional[str] = None, + azure_workspace_info: Optional[AzureWorkspaceInfo] = None, cloud: Optional[str] = None, cloud_resource_container: Optional[CloudResourceContainer] = None, compute_mode: Optional[CustomerFacingComputeMode] = None, @@ -2727,6 +2732,7 @@ def create_and_wait( ) -> Workspace: return self.create( aws_region=aws_region, + azure_workspace_info=azure_workspace_info, cloud=cloud, cloud_resource_container=cloud_resource_container, compute_mode=compute_mode, diff --git a/databricks/sdk/service/qualitymonitorv2.py b/databricks/sdk/service/qualitymonitorv2.py index 717d1fd7f..17bbbf5bb 100755 --- a/databricks/sdk/service/qualitymonitorv2.py +++ b/databricks/sdk/service/qualitymonitorv2.py @@ -18,47 +18,77 @@ @dataclass class AnomalyDetectionConfig: + custom_check_configurations: Optional[List[CustomCheckConfiguration]] = None + excluded_table_full_names: Optional[List[str]] = None """List of fully qualified table names to exclude from anomaly detection.""" + job_type: Optional[AnomalyDetectionJobType] = None + """The type of the last run of the workflow.""" + last_run_id: Optional[str] = None """Run id of the last run of the workflow""" latest_run_status: Optional[AnomalyDetectionRunStatus] = None """The status of the last run of the workflow.""" + validity_check_configurations: Optional[List[ValidityCheckConfiguration]] = None + def as_dict(self) -> dict: """Serializes the AnomalyDetectionConfig into a dictionary suitable for use as a JSON request body.""" body = {} + if self.custom_check_configurations: + body["custom_check_configurations"] = [v.as_dict() for v in self.custom_check_configurations] if 
self.excluded_table_full_names: body["excluded_table_full_names"] = [v for v in self.excluded_table_full_names] + if self.job_type is not None: + body["job_type"] = self.job_type.value if self.last_run_id is not None: body["last_run_id"] = self.last_run_id if self.latest_run_status is not None: body["latest_run_status"] = self.latest_run_status.value + if self.validity_check_configurations: + body["validity_check_configurations"] = [v.as_dict() for v in self.validity_check_configurations] return body def as_shallow_dict(self) -> dict: """Serializes the AnomalyDetectionConfig into a shallow dictionary of its immediate attributes.""" body = {} + if self.custom_check_configurations: + body["custom_check_configurations"] = self.custom_check_configurations if self.excluded_table_full_names: body["excluded_table_full_names"] = self.excluded_table_full_names + if self.job_type is not None: + body["job_type"] = self.job_type if self.last_run_id is not None: body["last_run_id"] = self.last_run_id if self.latest_run_status is not None: body["latest_run_status"] = self.latest_run_status + if self.validity_check_configurations: + body["validity_check_configurations"] = self.validity_check_configurations return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AnomalyDetectionConfig: """Deserializes the AnomalyDetectionConfig from a dictionary.""" return cls( + custom_check_configurations=_repeated_dict(d, "custom_check_configurations", CustomCheckConfiguration), excluded_table_full_names=d.get("excluded_table_full_names", None), + job_type=_enum(d, "job_type", AnomalyDetectionJobType), last_run_id=d.get("last_run_id", None), latest_run_status=_enum(d, "latest_run_status", AnomalyDetectionRunStatus), + validity_check_configurations=_repeated_dict( + d, "validity_check_configurations", ValidityCheckConfiguration + ), ) +class AnomalyDetectionJobType(Enum): + + ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN = "ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN" + 
ANOMALY_DETECTION_JOB_TYPE_NORMAL = "ANOMALY_DETECTION_JOB_TYPE_NORMAL" + + class AnomalyDetectionRunStatus(Enum): """Status of Anomaly Detection Job Run""" @@ -72,6 +102,147 @@ class AnomalyDetectionRunStatus(Enum): ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR = "ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR" +@dataclass +class ColumnMatcher: + column_names: Optional[List[str]] = None + """List of column names (in target tables) to match.""" + + variable_name: Optional[str] = None + """Variable name within a custom sql query that this matcher applies to.""" + + def as_dict(self) -> dict: + """Serializes the ColumnMatcher into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.column_names: + body["column_names"] = [v for v in self.column_names] + if self.variable_name is not None: + body["variable_name"] = self.variable_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ColumnMatcher into a shallow dictionary of its immediate attributes.""" + body = {} + if self.column_names: + body["column_names"] = self.column_names + if self.variable_name is not None: + body["variable_name"] = self.variable_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ColumnMatcher: + """Deserializes the ColumnMatcher from a dictionary.""" + return cls(column_names=d.get("column_names", None), variable_name=d.get("variable_name", None)) + + +@dataclass +class CustomCheckConfiguration: + scalar_check: Optional[CustomScalarCheck] = None + + def as_dict(self) -> dict: + """Serializes the CustomCheckConfiguration into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.scalar_check: + body["scalar_check"] = self.scalar_check.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CustomCheckConfiguration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.scalar_check: + body["scalar_check"] = self.scalar_check + 
return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CustomCheckConfiguration: + """Deserializes the CustomCheckConfiguration from a dictionary.""" + return cls(scalar_check=_from_dict(d, "scalar_check", CustomScalarCheck)) + + +@dataclass +class CustomCheckThresholds: + lower_bound: Optional[Threshold] = None + """Lower bound threshold""" + + upper_bound: Optional[Threshold] = None + """Upper bound threshold""" + + def as_dict(self) -> dict: + """Serializes the CustomCheckThresholds into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.lower_bound: + body["lower_bound"] = self.lower_bound.as_dict() + if self.upper_bound: + body["upper_bound"] = self.upper_bound.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CustomCheckThresholds into a shallow dictionary of its immediate attributes.""" + body = {} + if self.lower_bound: + body["lower_bound"] = self.lower_bound + if self.upper_bound: + body["upper_bound"] = self.upper_bound + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CustomCheckThresholds: + """Deserializes the CustomCheckThresholds from a dictionary.""" + return cls( + lower_bound=_from_dict(d, "lower_bound", Threshold), upper_bound=_from_dict(d, "upper_bound", Threshold) + ) + + +@dataclass +class CustomScalarCheck: + check_name: Optional[str] = None + """Name of the custom check""" + + column_matchers: Optional[List[ColumnMatcher]] = None + """Column matchers to determine which tables to apply this check to""" + + sql_query: Optional[str] = None + """Templated SQL query for this check""" + + thresholds: Optional[CustomCheckThresholds] = None + """Upper/lower thresholds for the output of the query""" + + def as_dict(self) -> dict: + """Serializes the CustomScalarCheck into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.check_name is not None: + body["check_name"] = self.check_name + if self.column_matchers: + 
body["column_matchers"] = [v.as_dict() for v in self.column_matchers] + if self.sql_query is not None: + body["sql_query"] = self.sql_query + if self.thresholds: + body["thresholds"] = self.thresholds.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CustomScalarCheck into a shallow dictionary of its immediate attributes.""" + body = {} + if self.check_name is not None: + body["check_name"] = self.check_name + if self.column_matchers: + body["column_matchers"] = self.column_matchers + if self.sql_query is not None: + body["sql_query"] = self.sql_query + if self.thresholds: + body["thresholds"] = self.thresholds + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CustomScalarCheck: + """Deserializes the CustomScalarCheck from a dictionary.""" + return cls( + check_name=d.get("check_name", None), + column_matchers=_repeated_dict(d, "column_matchers", ColumnMatcher), + sql_query=d.get("sql_query", None), + thresholds=_from_dict(d, "thresholds", CustomCheckThresholds), + ) + + @dataclass class ListQualityMonitorResponse: next_page_token: Optional[str] = None @@ -232,6 +403,44 @@ def from_dict(cls, d: Dict[str, Any]) -> RangeValidityCheck: ) +@dataclass +class Threshold: + bound_value: Optional[int] = None + """Bound value for this threshold. 
Meaningful only if threshold_type is MANUAL.""" + + threshold_type: Optional[ThresholdType] = None + + def as_dict(self) -> dict: + """Serializes the Threshold into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.bound_value is not None: + body["bound_value"] = self.bound_value + if self.threshold_type is not None: + body["threshold_type"] = self.threshold_type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Threshold into a shallow dictionary of its immediate attributes.""" + body = {} + if self.bound_value is not None: + body["bound_value"] = self.bound_value + if self.threshold_type is not None: + body["threshold_type"] = self.threshold_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Threshold: + """Deserializes the Threshold from a dictionary.""" + return cls(bound_value=d.get("bound_value", None), threshold_type=_enum(d, "threshold_type", ThresholdType)) + + +class ThresholdType(Enum): + + THRESHOLD_TYPE_AUTO = "THRESHOLD_TYPE_AUTO" + THRESHOLD_TYPE_MANUAL = "THRESHOLD_TYPE_MANUAL" + THRESHOLD_TYPE_UNBOUNDED = "THRESHOLD_TYPE_UNBOUNDED" + + @dataclass class UniquenessValidityCheck: column_names: Optional[List[str]] = None diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index e3202e87a..9bcd4227d 100644 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -439,6 +439,8 @@ class AmazonBedrockConfig: access keys, see `aws_access_key_id`, `aws_access_key_id_plaintext`, `aws_secret_access_key` and `aws_secret_access_key_plaintext`.""" + uc_service_credential_name: Optional[str] = None + def as_dict(self) -> dict: """Serializes the AmazonBedrockConfig into a dictionary suitable for use as a JSON request body.""" body = {} @@ -456,6 +458,8 @@ def as_dict(self) -> dict: body["bedrock_provider"] = self.bedrock_provider.value if self.instance_profile_arn is not None: body["instance_profile_arn"] = 
self.instance_profile_arn + if self.uc_service_credential_name is not None: + body["uc_service_credential_name"] = self.uc_service_credential_name return body def as_shallow_dict(self) -> dict: @@ -475,6 +479,8 @@ def as_shallow_dict(self) -> dict: body["bedrock_provider"] = self.bedrock_provider if self.instance_profile_arn is not None: body["instance_profile_arn"] = self.instance_profile_arn + if self.uc_service_credential_name is not None: + body["uc_service_credential_name"] = self.uc_service_credential_name return body @classmethod @@ -488,6 +494,7 @@ def from_dict(cls, d: Dict[str, Any]) -> AmazonBedrockConfig: aws_secret_access_key_plaintext=d.get("aws_secret_access_key_plaintext", None), bedrock_provider=_enum(d, "bedrock_provider", AmazonBedrockConfigBedrockProvider), instance_profile_arn=d.get("instance_profile_arn", None), + uc_service_credential_name=d.get("uc_service_credential_name", None), ) @@ -4426,6 +4433,7 @@ def http_request( headers: Optional[str] = None, json: Optional[str] = None, params: Optional[str] = None, + sub_domain: Optional[str] = None, ) -> HttpRequestResponse: """Make external services call using the credentials stored in UC Connection. @@ -4442,6 +4450,11 @@ def http_request( The JSON payload to send in the request body. :param params: str (optional) Query parameters for the request. + :param sub_domain: str (optional) + Optional subdomain to prepend to the connection URL's host. If provided, this will be added as a + prefix to the connection URL's host. For example, if the connection URL is + `https://api.example.com/v1` and `sub_domain` is `"custom"`, the resulting URL will be + `https://custom.api.example.com/v1`. 
:returns: :class:`HttpRequestResponse` """ @@ -4459,6 +4472,8 @@ def http_request( body["params"] = params if path is not None: body["path"] = path + if sub_domain is not None: + body["sub_domain"] = sub_domain headers = { "Accept": "text/plain", "Content-Type": "application/json", diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index 02cb59cad..697106a66 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -8,6 +8,7 @@ from typing import Any, Dict, Iterator, List, Optional from databricks.sdk.client_types import HostType +from databricks.sdk.common.types.fieldmask import FieldMask from databricks.sdk.service._internal import (_enum, _from_dict, _repeated_dict, _repeated_enum) @@ -665,6 +666,7 @@ class ComplianceStandard(Enum): """Compliance standard for SHIELD customers. See README.md for how instructions of how to add new standards.""" + ARC_AMPE = "ARC_AMPE" CANADA_PROTECTED_B = "CANADA_PROTECTED_B" CYBER_ESSENTIAL_PLUS = "CYBER_ESSENTIAL_PLUS" FEDRAMP_HIGH = "FEDRAMP_HIGH" @@ -849,6 +851,8 @@ class CreatePrivateEndpointRule: error_message: Optional[str] = None + gcp_endpoint: Optional[GcpEndpoint] = None + group_id: Optional[str] = None """Not used by customer-managed private endpoint services. 
@@ -874,6 +878,8 @@ def as_dict(self) -> dict: body["endpoint_service"] = self.endpoint_service if self.error_message is not None: body["error_message"] = self.error_message + if self.gcp_endpoint: + body["gcp_endpoint"] = self.gcp_endpoint.as_dict() if self.group_id is not None: body["group_id"] = self.group_id if self.resource_id is not None: @@ -891,6 +897,8 @@ def as_shallow_dict(self) -> dict: body["endpoint_service"] = self.endpoint_service if self.error_message is not None: body["error_message"] = self.error_message + if self.gcp_endpoint: + body["gcp_endpoint"] = self.gcp_endpoint if self.group_id is not None: body["group_id"] = self.group_id if self.resource_id is not None: @@ -906,6 +914,7 @@ def from_dict(cls, d: Dict[str, Any]) -> CreatePrivateEndpointRule: domain_names=d.get("domain_names", None), endpoint_service=d.get("endpoint_service", None), error_message=d.get("error_message", None), + gcp_endpoint=_from_dict(d, "gcp_endpoint", GcpEndpoint), group_id=d.get("group_id", None), resource_id=d.get("resource_id", None), resource_names=d.get("resource_names", None), @@ -2897,6 +2906,41 @@ def from_dict(cls, d: Dict[str, Any]) -> FetchIpAccessListResponse: return cls(ip_access_list=_from_dict(d, "ip_access_list", IpAccessListInfo)) +@dataclass +class GcpEndpoint: + psc_endpoint_uri: Optional[str] = None + """Output only. The URI of the created PSC endpoint.""" + + service_attachment: Optional[str] = None + """The full url of the target service attachment. 
Example: + projects/my-gcp-project/regions/us-east4/serviceAttachments/my-service-attachment""" + + def as_dict(self) -> dict: + """Serializes the GcpEndpoint into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.psc_endpoint_uri is not None: + body["psc_endpoint_uri"] = self.psc_endpoint_uri + if self.service_attachment is not None: + body["service_attachment"] = self.service_attachment + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GcpEndpoint into a shallow dictionary of its immediate attributes.""" + body = {} + if self.psc_endpoint_uri is not None: + body["psc_endpoint_uri"] = self.psc_endpoint_uri + if self.service_attachment is not None: + body["service_attachment"] = self.service_attachment + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GcpEndpoint: + """Deserializes the GcpEndpoint from a dictionary.""" + return cls( + psc_endpoint_uri=d.get("psc_endpoint_uri", None), service_attachment=d.get("service_attachment", None) + ) + + @dataclass class GenericWebhookConfig: password: Optional[str] = None @@ -3952,6 +3996,8 @@ class NccEgressDefaultRules: azure_service_endpoint_rule: Optional[NccAzureServiceEndpointRule] = None + gcp_project_id_rule: Optional[NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule] = None + def as_dict(self) -> dict: """Serializes the NccEgressDefaultRules into a dictionary suitable for use as a JSON request body.""" body = {} @@ -3959,6 +4005,8 @@ def as_dict(self) -> dict: body["aws_stable_ip_rule"] = self.aws_stable_ip_rule.as_dict() if self.azure_service_endpoint_rule: body["azure_service_endpoint_rule"] = self.azure_service_endpoint_rule.as_dict() + if self.gcp_project_id_rule: + body["gcp_project_id_rule"] = self.gcp_project_id_rule.as_dict() return body def as_shallow_dict(self) -> dict: @@ -3968,6 +4016,8 @@ def as_shallow_dict(self) -> dict: body["aws_stable_ip_rule"] = self.aws_stable_ip_rule if self.azure_service_endpoint_rule: 
body["azure_service_endpoint_rule"] = self.azure_service_endpoint_rule + if self.gcp_project_id_rule: + body["gcp_project_id_rule"] = self.gcp_project_id_rule return body @classmethod @@ -3976,6 +4026,9 @@ def from_dict(cls, d: Dict[str, Any]) -> NccEgressDefaultRules: return cls( aws_stable_ip_rule=_from_dict(d, "aws_stable_ip_rule", NccAwsStableIpRule), azure_service_endpoint_rule=_from_dict(d, "azure_service_endpoint_rule", NccAzureServiceEndpointRule), + gcp_project_id_rule=_from_dict( + d, "gcp_project_id_rule", NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule + ), ) @@ -4068,6 +4121,8 @@ class NccPrivateEndpointRule: error_message: Optional[str] = None + gcp_endpoint: Optional[GcpEndpoint] = None + group_id: Optional[str] = None """Not used by customer-managed private endpoint services. @@ -4120,6 +4175,8 @@ def as_dict(self) -> dict: body["endpoint_service"] = self.endpoint_service if self.error_message is not None: body["error_message"] = self.error_message + if self.gcp_endpoint: + body["gcp_endpoint"] = self.gcp_endpoint.as_dict() if self.group_id is not None: body["group_id"] = self.group_id if self.network_connectivity_config_id is not None: @@ -4159,6 +4216,8 @@ def as_shallow_dict(self) -> dict: body["endpoint_service"] = self.endpoint_service if self.error_message is not None: body["error_message"] = self.error_message + if self.gcp_endpoint: + body["gcp_endpoint"] = self.gcp_endpoint if self.group_id is not None: body["group_id"] = self.group_id if self.network_connectivity_config_id is not None: @@ -4189,6 +4248,7 @@ def from_dict(cls, d: Dict[str, Any]) -> NccPrivateEndpointRule: endpoint_name=d.get("endpoint_name", None), endpoint_service=d.get("endpoint_service", None), error_message=d.get("error_message", None), + gcp_endpoint=_from_dict(d, "gcp_endpoint", GcpEndpoint), group_id=d.get("group_id", None), network_connectivity_config_id=d.get("network_connectivity_config_id", None), resource_id=d.get("resource_id", None), @@ 
-4210,6 +4270,32 @@ class NccPrivateEndpointRulePrivateLinkConnectionState(Enum): REJECTED = "REJECTED" +@dataclass +class NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule: + project_ids: Optional[List[str]] = None + """A list of Databricks internal project IDs from where network access originates for serverless + DBSQL, This list is stable and will not change once the NCC object is created.""" + + def as_dict(self) -> dict: + """Serializes the NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.project_ids: + body["project_ids"] = [v for v in self.project_ids] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule into a shallow dictionary of its immediate attributes.""" + body = {} + if self.project_ids: + body["project_ids"] = self.project_ids + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule: + """Deserializes the NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule from a dictionary.""" + return cls(project_ids=d.get("project_ids", None)) + + @dataclass class NetworkConnectivityConfiguration: """Properties of the new network connectivity configuration.""" @@ -4531,6 +4617,12 @@ class PublicTokenInfo: expiry_time: Optional[int] = None """Server time (in epoch milliseconds) when the token will expire, or -1 if not applicable.""" + last_accessed_time: Optional[int] = None + """Server time (in epoch milliseconds) when the token was accessed most recently.""" + + scopes: Optional[List[str]] = None + """Scope of the token was created with, if applicable.""" + token_id: Optional[str] = None """The ID of this token.""" @@ -4543,6 +4635,10 @@ def as_dict(self) -> dict: body["creation_time"] = self.creation_time if self.expiry_time is not None: body["expiry_time"] = 
self.expiry_time + if self.last_accessed_time is not None: + body["last_accessed_time"] = self.last_accessed_time + if self.scopes: + body["scopes"] = [v for v in self.scopes] if self.token_id is not None: body["token_id"] = self.token_id return body @@ -4556,6 +4652,10 @@ def as_shallow_dict(self) -> dict: body["creation_time"] = self.creation_time if self.expiry_time is not None: body["expiry_time"] = self.expiry_time + if self.last_accessed_time is not None: + body["last_accessed_time"] = self.last_accessed_time + if self.scopes: + body["scopes"] = self.scopes if self.token_id is not None: body["token_id"] = self.token_id return body @@ -4567,6 +4667,8 @@ def from_dict(cls, d: Dict[str, Any]) -> PublicTokenInfo: comment=d.get("comment", None), creation_time=d.get("creation_time", None), expiry_time=d.get("expiry_time", None), + last_accessed_time=d.get("last_accessed_time", None), + scopes=d.get("scopes", None), token_id=d.get("token_id", None), ) @@ -4943,6 +5045,9 @@ class TokenInfo: owner_id: Optional[int] = None """User ID of the user that owns the token.""" + scopes: Optional[List[str]] = None + """Scope of the token was created with, if applicable.""" + token_id: Optional[str] = None """ID of the token.""" @@ -4966,6 +5071,8 @@ def as_dict(self) -> dict: body["last_used_day"] = self.last_used_day if self.owner_id is not None: body["owner_id"] = self.owner_id + if self.scopes: + body["scopes"] = [v for v in self.scopes] if self.token_id is not None: body["token_id"] = self.token_id if self.workspace_id is not None: @@ -4989,6 +5096,8 @@ def as_shallow_dict(self) -> dict: body["last_used_day"] = self.last_used_day if self.owner_id is not None: body["owner_id"] = self.owner_id + if self.scopes: + body["scopes"] = self.scopes if self.token_id is not None: body["token_id"] = self.token_id if self.workspace_id is not None: @@ -5006,6 +5115,7 @@ def from_dict(cls, d: Dict[str, Any]) -> TokenInfo: expiry_time=d.get("expiry_time", None), 
last_used_day=d.get("last_used_day", None), owner_id=d.get("owner_id", None), + scopes=d.get("scopes", None), token_id=d.get("token_id", None), workspace_id=d.get("workspace_id", None), ) @@ -5160,6 +5270,8 @@ class UpdatePrivateEndpointRule: error_message: Optional[str] = None + gcp_endpoint: Optional[GcpEndpoint] = None + resource_names: Optional[List[str]] = None """Only used by private endpoints towards AWS S3 service. @@ -5176,6 +5288,8 @@ def as_dict(self) -> dict: body["enabled"] = self.enabled if self.error_message is not None: body["error_message"] = self.error_message + if self.gcp_endpoint: + body["gcp_endpoint"] = self.gcp_endpoint.as_dict() if self.resource_names: body["resource_names"] = [v for v in self.resource_names] return body @@ -5189,6 +5303,8 @@ def as_shallow_dict(self) -> dict: body["enabled"] = self.enabled if self.error_message is not None: body["error_message"] = self.error_message + if self.gcp_endpoint: + body["gcp_endpoint"] = self.gcp_endpoint if self.resource_names: body["resource_names"] = self.resource_names return body @@ -5200,10 +5316,29 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdatePrivateEndpointRule: domain_names=d.get("domain_names", None), enabled=d.get("enabled", None), error_message=d.get("error_message", None), + gcp_endpoint=_from_dict(d, "gcp_endpoint", GcpEndpoint), resource_names=d.get("resource_names", None), ) +@dataclass +class UpdateTokenResponse: + def as_dict(self) -> dict: + """Serializes the UpdateTokenResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateTokenResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdateTokenResponse: + """Deserializes the UpdateTokenResponse from a dictionary.""" + return cls() + + WorkspaceConf = Dict[str, str] @@ -7887,12 +8022,13 @@ def 
list_network_connectivity_configurations( query["page_token"] = json["next_page_token"] def list_private_endpoint_rules( - self, network_connectivity_config_id: str, *, page_token: Optional[str] = None + self, network_connectivity_config_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[NccPrivateEndpointRule]: """Gets an array of private endpoint rules. :param network_connectivity_config_id: str Your Network Connectvity Configuration ID. + :param page_size: int (optional) :param page_token: str (optional) Pagination token to go to next page based on previous query. @@ -7900,6 +8036,8 @@ def list_private_endpoint_rules( """ query = {} + if page_size is not None: + query["page_size"] = page_size if page_token is not None: query["page_token"] = page_token headers = { @@ -8678,7 +8816,12 @@ def __init__(self, api_client): self._api = api_client def create_obo_token( - self, application_id: str, *, comment: Optional[str] = None, lifetime_seconds: Optional[int] = None + self, + application_id: str, + *, + comment: Optional[str] = None, + lifetime_seconds: Optional[int] = None, + scopes: Optional[List[str]] = None, ) -> CreateOboTokenResponse: """Creates a token on behalf of a service principal. @@ -8688,6 +8831,7 @@ def create_obo_token( Comment that describes the purpose of the token. :param lifetime_seconds: int (optional) The number of seconds before the token expires. 
+ :param scopes: List[str] (optional) :returns: :class:`CreateOboTokenResponse` """ @@ -8699,6 +8843,8 @@ def create_obo_token( body["comment"] = comment if lifetime_seconds is not None: body["lifetime_seconds"] = lifetime_seconds + if scopes is not None: + body["scopes"] = [v for v in scopes] headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -8873,7 +9019,13 @@ class TokensAPI: def __init__(self, api_client): self._api = api_client - def create(self, *, comment: Optional[str] = None, lifetime_seconds: Optional[int] = None) -> CreateTokenResponse: + def create( + self, + *, + comment: Optional[str] = None, + lifetime_seconds: Optional[int] = None, + scopes: Optional[List[str]] = None, + ) -> CreateTokenResponse: """Creates and returns a token for a user. If this call is made through token authentication, it creates a token with the same client ID as the authenticated token. If the user's token quota is exceeded, this call returns an error **QUOTA_EXCEEDED**. @@ -8884,6 +9036,8 @@ def create(self, *, comment: Optional[str] = None, lifetime_seconds: Optional[in The lifetime of the token, in seconds. If the lifetime is not specified, this token remains valid for 2 years. + :param scopes: List[str] (optional) + Optional scopes of the token. 
:returns: :class:`CreateTokenResponse` """ @@ -8893,6 +9047,8 @@ def create(self, *, comment: Optional[str] = None, lifetime_seconds: Optional[in body["comment"] = comment if lifetime_seconds is not None: body["lifetime_seconds"] = lifetime_seconds + if scopes is not None: + body["scopes"] = [v for v in scopes] headers = { "Accept": "application/json", "Content-Type": "application/json", } @@ -8949,6 +9105,48 @@ def list(self) -> Iterator[PublicTokenInfo]: parsed = ListPublicTokensResponse.from_dict(json).token_infos return parsed if parsed is not None else [] + def update(self, token_id: str, token: PublicTokenInfo, update_mask: FieldMask) -> UpdateTokenResponse: + """Updates the comment or scopes of a token. + + If a token with the specified ID is not valid, this call returns an error **RESOURCE_DOES_NOT_EXIST**. + + :param token_id: str + The SHA-256 hash of the token to be updated. + :param token: :class:`PublicTokenInfo` + :param update_mask: FieldMask + A list of field names under PublicTokenInfo. For example, in the request use {"update_mask": + "comment,scopes"} + + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future.
+ + :returns: :class:`UpdateTokenResponse` + """ + + body = {} + if token is not None: + body["token"] = token.as_dict() + if update_mask is not None: + body["update_mask"] = update_mask.ToJsonString() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("PATCH", f"/api/2.0/token/{token_id}", body=body, headers=headers) + return UpdateTokenResponse.from_dict(res) + class WorkspaceConfAPI: """This API allows updating known workspace settings for advanced users.""" diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index a0226af28..77f442a4f 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -1125,6 +1125,15 @@ class PermissionsChange: """The principal whose privileges we are changing. Only one of principal or principal_id should be specified, never both at the same time.""" + principal_id: Optional[int] = None + """An opaque internal ID that identifies the principal whose privileges should be removed. + + This field is intended for removing privileges associated with a deleted user. When set, only + the entries specified in the remove field are processed; any entries in the add field will be + rejected. 
+ + Only one of principal or principal_id should be specified, never both at the same time.""" + remove: Optional[List[str]] = None """The set of privileges to remove.""" @@ -1135,6 +1144,8 @@ def as_dict(self) -> dict: body["add"] = [v for v in self.add] if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.remove: body["remove"] = [v for v in self.remove] return body @@ -1146,6 +1157,8 @@ def as_shallow_dict(self) -> dict: body["add"] = self.add if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.remove: body["remove"] = self.remove return body @@ -1153,7 +1166,12 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> PermissionsChange: """Deserializes the PermissionsChange from a dictionary.""" - return cls(add=d.get("add", None), principal=d.get("principal", None), remove=d.get("remove", None)) + return cls( + add=d.get("add", None), + principal=d.get("principal", None), + principal_id=d.get("principal_id", None), + remove=d.get("remove", None), + ) class Privilege(Enum): @@ -1211,6 +1229,10 @@ class PrivilegeAssignment: """The principal (user email address or group name). For deleted principals, `principal` is empty while `principal_id` is populated.""" + principal_id: Optional[int] = None + """Unique identifier of the principal. 
For active principals, both `principal` and `principal_id` + are present.""" + privileges: Optional[List[Privilege]] = None """The privileges assigned to the principal.""" @@ -1219,6 +1241,8 @@ def as_dict(self) -> dict: body = {} if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.privileges: body["privileges"] = [v.value for v in self.privileges] return body @@ -1228,6 +1252,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.privileges: body["privileges"] = self.privileges return body @@ -1235,7 +1261,11 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> PrivilegeAssignment: """Deserializes the PrivilegeAssignment from a dictionary.""" - return cls(principal=d.get("principal", None), privileges=_repeated_enum(d, "privileges", Privilege)) + return cls( + principal=d.get("principal", None), + principal_id=d.get("principal_id", None), + privileges=_repeated_enum(d, "privileges", Privilege), + ) @dataclass @@ -1738,6 +1768,33 @@ def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelAlias: return cls(alias_name=d.get("alias_name", None), version_num=d.get("version_num", None)) +@dataclass +class ReplicationConfig: + """Configuration for share replication.""" + + enabled: Optional[bool] = None + """Whether replication is enabled for the share.""" + + def as_dict(self) -> dict: + """Serializes the ReplicationConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.enabled is not None: + body["enabled"] = self.enabled + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ReplicationConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.enabled is not None: + body["enabled"] = self.enabled + return 
body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ReplicationConfig: + """Deserializes the ReplicationConfig from a dictionary.""" + return cls(enabled=d.get("enabled", None)) + + @dataclass class RetrieveTokenResponse: bearer_token: Optional[str] = None @@ -1866,6 +1923,13 @@ class ShareInfo: owner: Optional[str] = None """Username of current owner of share.""" + replication_config: Optional[ReplicationConfig] = None + """Configuration for share replication.""" + + serverless_budget_policy_id: Optional[str] = None + """Serverless budget policy id (can only be created/updated when calling data-sharing service) + [Create,Update:IGN]""" + storage_location: Optional[str] = None """Storage Location URL (full path) for the share.""" @@ -1893,6 +1957,10 @@ def as_dict(self) -> dict: body["objects"] = [v.as_dict() for v in self.objects] if self.owner is not None: body["owner"] = self.owner + if self.replication_config: + body["replication_config"] = self.replication_config.as_dict() + if self.serverless_budget_policy_id is not None: + body["serverless_budget_policy_id"] = self.serverless_budget_policy_id if self.storage_location is not None: body["storage_location"] = self.storage_location if self.storage_root is not None: @@ -1918,6 +1986,10 @@ def as_shallow_dict(self) -> dict: body["objects"] = self.objects if self.owner is not None: body["owner"] = self.owner + if self.replication_config: + body["replication_config"] = self.replication_config + if self.serverless_budget_policy_id is not None: + body["serverless_budget_policy_id"] = self.serverless_budget_policy_id if self.storage_location is not None: body["storage_location"] = self.storage_location if self.storage_root is not None: @@ -1938,6 +2010,8 @@ def from_dict(cls, d: Dict[str, Any]) -> ShareInfo: name=d.get("name", None), objects=_repeated_dict(d, "objects", SharedDataObject), owner=d.get("owner", None), + replication_config=_from_dict(d, "replication_config", ReplicationConfig), + 
serverless_budget_policy_id=d.get("serverless_budget_policy_id", None), storage_location=d.get("storage_location", None), storage_root=d.get("storage_root", None), updated_at=d.get("updated_at", None), @@ -2236,12 +2310,19 @@ class SharedSecurableKind(Enum): @dataclass class Table: + access_modes: Optional[List[str]] = None + """The access modes supported for this table (e.g., "url", "dir"). Used for open sharing to + indicate how the table can be accessed.""" + comment: Optional[str] = None """The comment of the table.""" id: Optional[str] = None """The id of the table.""" + location: Optional[str] = None + """The cloud storage location of the table for open sharing.""" + materialization_namespace: Optional[str] = None """The catalog and schema of the materialized table""" @@ -2266,10 +2347,14 @@ class Table: def as_dict(self) -> dict: """Serializes the Table into a dictionary suitable for use as a JSON request body.""" body = {} + if self.access_modes: + body["accessModes"] = [v for v in self.access_modes] if self.comment is not None: body["comment"] = self.comment if self.id is not None: body["id"] = self.id + if self.location is not None: + body["location"] = self.location if self.materialization_namespace is not None: body["materialization_namespace"] = self.materialization_namespace if self.materialized_table_name is not None: @@ -2289,10 +2374,14 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the Table into a shallow dictionary of its immediate attributes.""" body = {} + if self.access_modes: + body["accessModes"] = self.access_modes if self.comment is not None: body["comment"] = self.comment if self.id is not None: body["id"] = self.id + if self.location is not None: + body["location"] = self.location if self.materialization_namespace is not None: body["materialization_namespace"] = self.materialization_namespace if self.materialized_table_name is not None: @@ -2313,8 +2402,10 @@ def as_shallow_dict(self) -> dict: def 
from_dict(cls, d: Dict[str, Any]) -> Table: """Deserializes the Table from a dictionary.""" return cls( + access_modes=d.get("accessModes", None), comment=d.get("comment", None), id=d.get("id", None), + location=d.get("location", None), materialization_namespace=d.get("materialization_namespace", None), materialized_table_name=d.get("materialized_table_name", None), name=d.get("name", None), @@ -2935,6 +3026,49 @@ def list( return query["page_token"] = json["next_page_token"] + def update( + self, recipient_name: str, name: str, policy: FederationPolicy, *, update_mask: Optional[str] = None + ) -> FederationPolicy: + """Updates an existing federation policy for an OIDC_RECIPIENT. The caller must be the owner of the + recipient. + + :param recipient_name: str + Name of the recipient. This is the name of the recipient for which the policy is being updated. + :param name: str + Name of the policy. This is the name of the current name of the policy. + :param policy: :class:`FederationPolicy` + :param update_mask: str (optional) + The field mask specifies which fields of the policy to update. To specify multiple fields in the + field mask, use comma as the separator (no space). The special value '*' indicates that all fields + should be updated (full replacement). If unspecified, all fields that are set in the policy provided + in the update request will overwrite the corresponding fields in the existing policy. Example value: + 'comment,oidc_policy.audiences'. 
+ + :returns: :class:`FederationPolicy` + """ + + body = policy.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do( + "PATCH", + f"/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies/{name}", + query=query, + body=body, + headers=headers, + ) + return FederationPolicy.from_dict(res) + class RecipientsAPI: """A recipient is an object you create using :method:recipients/create to represent an organization which you @@ -3269,7 +3403,15 @@ class SharesAPI: def __init__(self, api_client): self._api = api_client - def create(self, name: str, *, comment: Optional[str] = None, storage_root: Optional[str] = None) -> ShareInfo: + def create( + self, + name: str, + *, + comment: Optional[str] = None, + replication_config: Optional[ReplicationConfig] = None, + serverless_budget_policy_id: Optional[str] = None, + storage_root: Optional[str] = None, + ) -> ShareInfo: """Creates a new share for data objects. Data objects can be added after creation with **update**. The caller must be a metastore admin or have the **CREATE_SHARE** privilege on the metastore. @@ -3277,6 +3419,11 @@ def create(self, name: str, *, comment: Optional[str] = None, storage_root: Opti Name of the share. :param comment: str (optional) User-provided free-form text description. + :param replication_config: :class:`ReplicationConfig` (optional) + Configuration for share replication. + :param serverless_budget_policy_id: str (optional) + Serverless budget policy id (can only be created/updated when calling data-sharing service) + [Create,Update:IGN] :param storage_root: str (optional) Storage root URL for the share. 
@@ -3288,6 +3435,10 @@ def create(self, name: str, *, comment: Optional[str] = None, storage_root: Opti body["comment"] = comment if name is not None: body["name"] = name + if replication_config is not None: + body["replication_config"] = replication_config.as_dict() + if serverless_budget_policy_id is not None: + body["serverless_budget_policy_id"] = serverless_budget_policy_id if storage_root is not None: body["storage_root"] = storage_root headers = { @@ -3435,6 +3586,7 @@ def update( comment: Optional[str] = None, new_name: Optional[str] = None, owner: Optional[str] = None, + serverless_budget_policy_id: Optional[str] = None, storage_root: Optional[str] = None, updates: Optional[List[SharedDataObjectUpdate]] = None, ) -> ShareInfo: @@ -3462,6 +3614,9 @@ def update( New name for the share. :param owner: str (optional) Username of current owner of share. + :param serverless_budget_policy_id: str (optional) + Serverless budget policy id (can only be created/updated when calling data-sharing service) + [Create,Update:IGN] :param storage_root: str (optional) Storage root URL for the share. :param updates: List[:class:`SharedDataObjectUpdate`] (optional) @@ -3477,6 +3632,8 @@ def update( body["new_name"] = new_name if owner is not None: body["owner"] = owner + if serverless_budget_policy_id is not None: + body["serverless_budget_policy_id"] = serverless_budget_policy_id if storage_root is not None: body["storage_root"] = storage_root if updates is not None: diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index 3483eecb4..6ec872588 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -666,6 +666,9 @@ class AlertV2: custom_summary: Optional[str] = None """Custom summary for the alert. support mustache template.""" + effective_parent_path: Optional[str] = None + """The actual workspace path of the folder containing the alert. 
This is an output-only field.""" + effective_run_as: Optional[AlertV2RunAs] = None """The actual identity that will be used to execute the alert. This is an output-only field that shows the resolved run-as identity after applying permissions and defaults.""" @@ -710,6 +713,8 @@ def as_dict(self) -> dict: body["custom_summary"] = self.custom_summary if self.display_name is not None: body["display_name"] = self.display_name + if self.effective_parent_path is not None: + body["effective_parent_path"] = self.effective_parent_path if self.effective_run_as: body["effective_run_as"] = self.effective_run_as.as_dict() if self.evaluation: @@ -747,6 +752,8 @@ def as_shallow_dict(self) -> dict: body["custom_summary"] = self.custom_summary if self.display_name is not None: body["display_name"] = self.display_name + if self.effective_parent_path is not None: + body["effective_parent_path"] = self.effective_parent_path if self.effective_run_as: body["effective_run_as"] = self.effective_run_as if self.evaluation: @@ -781,6 +788,7 @@ def from_dict(cls, d: Dict[str, Any]) -> AlertV2: custom_description=d.get("custom_description", None), custom_summary=d.get("custom_summary", None), display_name=d.get("display_name", None), + effective_parent_path=d.get("effective_parent_path", None), effective_run_as=_from_dict(d, "effective_run_as", AlertV2RunAs), evaluation=_from_dict(d, "evaluation", AlertV2Evaluation), id=d.get("id", None), @@ -1683,6 +1691,7 @@ class CreateWarehouseRequestWarehouseType(Enum): CLASSIC = "CLASSIC" PRO = "PRO" + REYDEN = "REYDEN" TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED" @@ -1724,12 +1733,17 @@ class CronSchedule: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html for details.""" + effective_pause_status: Optional[SchedulePauseStatus] = None + """The actual pause status of the schedule. 
This is an output-only field.""" + pause_status: Optional[SchedulePauseStatus] = None """Indicate whether this schedule is paused or not.""" def as_dict(self) -> dict: """Serializes the CronSchedule into a dictionary suitable for use as a JSON request body.""" body = {} + if self.effective_pause_status is not None: + body["effective_pause_status"] = self.effective_pause_status.value if self.pause_status is not None: body["pause_status"] = self.pause_status.value if self.quartz_cron_schedule is not None: @@ -1741,6 +1755,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the CronSchedule into a shallow dictionary of its immediate attributes.""" body = {} + if self.effective_pause_status is not None: + body["effective_pause_status"] = self.effective_pause_status if self.pause_status is not None: body["pause_status"] = self.pause_status if self.quartz_cron_schedule is not None: @@ -1753,6 +1769,7 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> CronSchedule: """Deserializes the CronSchedule from a dictionary.""" return cls( + effective_pause_status=_enum(d, "effective_pause_status", SchedulePauseStatus), pause_status=_enum(d, "pause_status", SchedulePauseStatus), quartz_cron_schedule=d.get("quartz_cron_schedule", None), timezone_id=d.get("timezone_id", None), @@ -2309,6 +2326,7 @@ class EditWarehouseRequestWarehouseType(Enum): CLASSIC = "CLASSIC" PRO = "PRO" + REYDEN = "REYDEN" TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED" @@ -2653,6 +2671,7 @@ class EndpointInfoWarehouseType(Enum): CLASSIC = "CLASSIC" PRO = "PRO" + REYDEN = "REYDEN" TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED" @@ -3270,6 +3289,7 @@ class GetWarehouseResponseWarehouseType(Enum): CLASSIC = "CLASSIC" PRO = "PRO" + REYDEN = "REYDEN" TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED" @@ -5592,6 +5612,7 @@ class QueryStatementType(Enum): ALTER = "ALTER" ANALYZE = "ANALYZE" + CALL = "CALL" COPY = "COPY" CREATE = "CREATE" DELETE = "DELETE" @@ -6463,6 +6484,7 @@ class 
TerminationReasonCode(Enum): DOCKER_IMAGE_PULL_FAILURE = "DOCKER_IMAGE_PULL_FAILURE" DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION = "DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION" DOCKER_INVALID_OS_EXCEPTION = "DOCKER_INVALID_OS_EXCEPTION" + DRIVER_DNS_RESOLUTION_FAILURE = "DRIVER_DNS_RESOLUTION_FAILURE" DRIVER_EVICTION = "DRIVER_EVICTION" DRIVER_LAUNCH_TIMEOUT = "DRIVER_LAUNCH_TIMEOUT" DRIVER_NODE_UNREACHABLE = "DRIVER_NODE_UNREACHABLE" @@ -6543,6 +6565,8 @@ class TerminationReasonCode(Enum): NETWORK_CHECK_STORAGE_FAILURE_DUE_TO_MISCONFIG = "NETWORK_CHECK_STORAGE_FAILURE_DUE_TO_MISCONFIG" NETWORK_CONFIGURATION_FAILURE = "NETWORK_CONFIGURATION_FAILURE" NFS_MOUNT_FAILURE = "NFS_MOUNT_FAILURE" + NO_ACTIVATED_K8S = "NO_ACTIVATED_K8S" + NO_ACTIVATED_K8S_TESTING_TAG = "NO_ACTIVATED_K8S_TESTING_TAG" NO_MATCHED_K8S = "NO_MATCHED_K8S" NO_MATCHED_K8S_TESTING_TAG = "NO_MATCHED_K8S_TESTING_TAG" NPIP_TUNNEL_SETUP_FAILURE = "NPIP_TUNNEL_SETUP_FAILURE" @@ -6556,6 +6580,7 @@ class TerminationReasonCode(Enum): SECRET_CREATION_FAILURE = "SECRET_CREATION_FAILURE" SECRET_PERMISSION_DENIED = "SECRET_PERMISSION_DENIED" SECRET_RESOLUTION_ERROR = "SECRET_RESOLUTION_ERROR" + SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION = "SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION" SECURITY_DAEMON_REGISTRATION_EXCEPTION = "SECURITY_DAEMON_REGISTRATION_EXCEPTION" SELF_BOOTSTRAP_FAILURE = "SELF_BOOTSTRAP_FAILURE" SERVERLESS_LONG_RUNNING_TERMINATED = "SERVERLESS_LONG_RUNNING_TERMINATED" @@ -7340,6 +7365,7 @@ class WarehouseTypePairWarehouseType(Enum): CLASSIC = "CLASSIC" PRO = "PRO" + REYDEN = "REYDEN" TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED" diff --git a/databricks/sdk/service/supervisoragents.py b/databricks/sdk/service/supervisoragents.py new file mode 100755 index 000000000..d61025e27 --- /dev/null +++ b/databricks/sdk/service/supervisoragents.py @@ -0,0 +1,462 @@ +# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +from __future__ import annotations + +import logging +from dataclasses import dataclass +from typing import Any, Dict, Iterator, List, Optional + +from google.protobuf.timestamp_pb2 import Timestamp + +from databricks.sdk.client_types import HostType +from databricks.sdk.common.types.fieldmask import FieldMask +from databricks.sdk.service._internal import _repeated_dict, _timestamp + +_LOG = logging.getLogger("databricks.sdk") + + +# all definitions in this file are in alphabetical order + + +@dataclass +class ListToolsResponse: + next_page_token: Optional[str] = None + + tools: Optional[List[Tool]] = None + + def as_dict(self) -> dict: + """Serializes the ListToolsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.tools: + body["tools"] = [v.as_dict() for v in self.tools] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListToolsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.tools: + body["tools"] = self.tools + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListToolsResponse: + """Deserializes the ListToolsResponse from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), tools=_repeated_dict(d, "tools", Tool)) + + +@dataclass +class SupervisorAgent: + display_name: str + """The display name of the Supervisor Agent, unique at workspace level.""" + + description: str + + create_time: Optional[Timestamp] = None + """Create timestamp.""" + + creator: Optional[str] = None + """The creator of the Supervisor Agent.""" + + endpoint_name: Optional[str] = None + """The name of the supervisor agent endpoint.""" + + experiment_id: Optional[str] = None + """The MLflow experiment ID.""" + + id: Optional[str] = None + """The universally unique 
identifier (UUID) of the Supervisor Agent.""" + + instructions: Optional[str] = None + """Optional natural-language routing instructions for the supervisor agent.""" + + name: Optional[str] = None + """The resource name of the Supervisor Agent. Format: supervisor-agents/{supervisor_agent_id}""" + + tools: Optional[List[Tool]] = None + + def as_dict(self) -> dict: + """Serializes the SupervisorAgent into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.create_time is not None: + body["create_time"] = self.create_time.ToJsonString() + if self.creator is not None: + body["creator"] = self.creator + if self.description is not None: + body["description"] = self.description + if self.display_name is not None: + body["display_name"] = self.display_name + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.id is not None: + body["id"] = self.id + if self.instructions is not None: + body["instructions"] = self.instructions + if self.name is not None: + body["name"] = self.name + if self.tools: + body["tools"] = [v.as_dict() for v in self.tools] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SupervisorAgent into a shallow dictionary of its immediate attributes.""" + body = {} + if self.create_time is not None: + body["create_time"] = self.create_time + if self.creator is not None: + body["creator"] = self.creator + if self.description is not None: + body["description"] = self.description + if self.display_name is not None: + body["display_name"] = self.display_name + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.id is not None: + body["id"] = self.id + if self.instructions is not None: + body["instructions"] = self.instructions + if self.name is not None: + body["name"] = 
self.name + if self.tools: + body["tools"] = self.tools + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SupervisorAgent: + """Deserializes the SupervisorAgent from a dictionary.""" + return cls( + create_time=_timestamp(d, "create_time"), + creator=d.get("creator", None), + description=d.get("description", None), + display_name=d.get("display_name", None), + endpoint_name=d.get("endpoint_name", None), + experiment_id=d.get("experiment_id", None), + id=d.get("id", None), + instructions=d.get("instructions", None), + name=d.get("name", None), + tools=_repeated_dict(d, "tools", Tool), + ) + + +@dataclass +class SupervisorAgentListResponse: + next_page_token: Optional[str] = None + """A token that can be sent as `page_token` to retrieve the next page. If this field is omitted, + there are no subsequent pages.""" + + supervisor_agents: Optional[List[SupervisorAgent]] = None + + def as_dict(self) -> dict: + """Serializes the SupervisorAgentListResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.supervisor_agents: + body["supervisor_agents"] = [v.as_dict() for v in self.supervisor_agents] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SupervisorAgentListResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.supervisor_agents: + body["supervisor_agents"] = self.supervisor_agents + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SupervisorAgentListResponse: + """Deserializes the SupervisorAgentListResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + supervisor_agents=_repeated_dict(d, "supervisor_agents", SupervisorAgent), + ) + + +@dataclass +class Tool: + """Tool is a Sub-resource of SupervisorAgent""" + + 
type: str + + id: str + + endpoint_name: str + """The name of the serving endpoint.""" + + name: str + + connection_name: str + + description: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the Tool into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.connection_name is not None: + body["connection_name"] = self.connection_name + if self.description is not None: + body["description"] = self.description + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.id is not None: + body["id"] = self.id + if self.name is not None: + body["name"] = self.name + if self.type is not None: + body["type"] = self.type + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Tool into a shallow dictionary of its immediate attributes.""" + body = {} + if self.connection_name is not None: + body["connection_name"] = self.connection_name + if self.description is not None: + body["description"] = self.description + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.id is not None: + body["id"] = self.id + if self.name is not None: + body["name"] = self.name + if self.type is not None: + body["type"] = self.type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Tool: + """Deserializes the Tool from a dictionary.""" + return cls( + connection_name=d.get("connection_name", None), + description=d.get("description", None), + endpoint_name=d.get("endpoint_name", None), + id=d.get("id", None), + name=d.get("name", None), + type=d.get("type", None), + ) + + +class SupervisorAgentsAPI: + """Manage Supervisor Agents and related resources.""" + + def __init__(self, api_client): + self._api = api_client + + def create_supervisor_agent(self, supervisor_agent: SupervisorAgent) -> SupervisorAgent: + + body = supervisor_agent.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = 
self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("POST", "/api/2.1/supervisor-agents", body=body, headers=headers) + return SupervisorAgent.from_dict(res) + + def create_tool(self, parent: str, tool: Tool) -> Tool: + """Creates a Tool under a Supervisor Agent. + + :param parent: str + Parent resource where this tool will be created. Format: supervisor-agents/{supervisor_agent_id} + :param tool: :class:`Tool` + + :returns: :class:`Tool` + """ + + body = tool.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("POST", f"/api/2.1/{parent}/tools", body=body, headers=headers) + return Tool.from_dict(res) + + def delete_supervisor_agent(self, name: str): + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + self._api.do("DELETE", f"/api/2.1/{name}", headers=headers) + + def delete_tool(self, name: str): + """Deletes a Tool. + + :param name: str + The resource name of the Tool. 
Format: supervisor-agents/{supervisor_agent_id}/tools/{tool_id} + + + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + self._api.do("DELETE", f"/api/2.1/{name}", headers=headers) + + def get_supervisor_agent(self, name: str) -> SupervisorAgent: + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/2.1/{name}", headers=headers) + return SupervisorAgent.from_dict(res) + + def get_tool(self, name: str) -> Tool: + """Gets a Tool. + + :param name: str + The resource name of the Tool. Format: supervisor-agents/{supervisor_agent_id}/tools/{tool_id} + + :returns: :class:`Tool` + """ + + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("GET", f"/api/2.1/{name}", headers=headers) + return Tool.from_dict(res) + + def list_supervisor_agents( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[SupervisorAgent]: + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + while True: + json = self._api.do("GET", "/api/2.1/supervisor-agents", query=query, headers=headers) + if "supervisor_agents" in json: + for v in json["supervisor_agents"]: + yield SupervisorAgent.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] 
+ + def list_tools( + self, parent: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[Tool]: + """Lists Tools under a Supervisor Agent. + + :param parent: str + Parent resource to list from. Format: supervisor-agents/{supervisor_agent_id} + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`Tool` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + while True: + json = self._api.do("GET", f"/api/2.1/{parent}/tools", query=query, headers=headers) + if "tools" in json: + for v in json["tools"]: + yield Tool.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update_supervisor_agent( + self, name: str, supervisor_agent: SupervisorAgent, update_mask: FieldMask + ) -> SupervisorAgent: + + body = supervisor_agent.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask.ToJsonString() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("PATCH", f"/api/2.1/{name}", query=query, body=body, headers=headers) + return SupervisorAgent.from_dict(res) + + def update_tool(self, name: str, tool: Tool, update_mask: FieldMask) -> Tool: + """Updates a Tool. + + :param name: str + :param tool: :class:`Tool` + The Tool to update. + :param update_mask: FieldMask + Field mask for fields to be updated. 
+ + :returns: :class:`Tool` + """ + + body = tool.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask.ToJsonString() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do("PATCH", f"/api/2.1/{name}", query=query, body=body, headers=headers) + return Tool.from_dict(res) diff --git a/databricks/sdk/service/tags.py b/databricks/sdk/service/tags.py index b6a6684fe..b3ec0846c 100755 --- a/databricks/sdk/service/tags.py +++ b/databricks/sdk/service/tags.py @@ -7,7 +7,7 @@ from typing import Any, Dict, Iterator, List, Optional from databricks.sdk.client_types import HostType -from databricks.sdk.service._internal import _repeated_dict +from databricks.sdk.service._internal import _from_dict, _repeated_dict _LOG = logging.getLogger("databricks.sdk") @@ -15,6 +15,62 @@ # all definitions in this file are in alphabetical order +@dataclass +class ConflictResolutionPolicy: + """Policy that determines how to resolve conflicts when multiple upstream sources have different + tag values.""" + + default_value_override: Optional[DefaultValueOverridePolicy] = None + """Uses a specified default value to override when conflicts happen.""" + + def as_dict(self) -> dict: + """Serializes the ConflictResolutionPolicy into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.default_value_override: + body["default_value_override"] = self.default_value_override.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ConflictResolutionPolicy into a shallow dictionary of its immediate attributes.""" + body = {} + if self.default_value_override: + body["default_value_override"] = self.default_value_override + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ConflictResolutionPolicy: + 
"""Deserializes the ConflictResolutionPolicy from a dictionary.""" + return cls(default_value_override=_from_dict(d, "default_value_override", DefaultValueOverridePolicy)) + + +@dataclass +class DefaultValueOverridePolicy: + """Policy that specifies a default value to use when resolving tag conflicts during propagation.""" + + default_value: Optional[str] = None + """The tag value to apply when conflicts are detected. This value must be one of the allowed values + defined in the tag policy.""" + + def as_dict(self) -> dict: + """Serializes the DefaultValueOverridePolicy into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.default_value is not None: + body["default_value"] = self.default_value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DefaultValueOverridePolicy into a shallow dictionary of its immediate attributes.""" + body = {} + if self.default_value is not None: + body["default_value"] = self.default_value + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DefaultValueOverridePolicy: + """Deserializes the DefaultValueOverridePolicy from a dictionary.""" + return cls(default_value=d.get("default_value", None)) + + @dataclass class ListTagAssignmentsResponse: next_page_token: Optional[str] = None @@ -81,11 +137,49 @@ def from_dict(cls, d: Dict[str, Any]) -> ListTagPoliciesResponse: ) +@dataclass +class PropagationConfig: + """Configuration that controls how tags are automatically propagated through data lineage.""" + + conflict_resolution: Optional[ConflictResolutionPolicy] = None + """Policy that determines how to resolve conflicts when multiple upstream sources have different + tag values.""" + + enabled: Optional[bool] = None + """Determines whether this tag should automatically propagate through lineage.""" + + def as_dict(self) -> dict: + """Serializes the PropagationConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if 
self.conflict_resolution: + body["conflict_resolution"] = self.conflict_resolution.as_dict() + if self.enabled is not None: + body["enabled"] = self.enabled + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PropagationConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.conflict_resolution: + body["conflict_resolution"] = self.conflict_resolution + if self.enabled is not None: + body["enabled"] = self.enabled + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PropagationConfig: + """Deserializes the PropagationConfig from a dictionary.""" + return cls( + conflict_resolution=_from_dict(d, "conflict_resolution", ConflictResolutionPolicy), + enabled=d.get("enabled", None), + ) + + @dataclass class TagAssignment: entity_type: str """The type of entity to which the tag is assigned. Allowed values are apps, dashboards, - geniespaces""" + geniespaces, notebooks""" entity_id: str """The identifier of the entity to which the tag is assigned. 
For apps, the entity_id is the app @@ -145,6 +239,9 @@ class TagPolicy: id: Optional[str] = None + propagation_config: Optional[PropagationConfig] = None + """Configuration that controls how tags are automatically propagated through data lineage.""" + update_time: Optional[str] = None """Timestamp when the tag policy was last updated""" @@ -159,6 +256,8 @@ def as_dict(self) -> dict: body["description"] = self.description if self.id is not None: body["id"] = self.id + if self.propagation_config: + body["propagation_config"] = self.propagation_config.as_dict() if self.tag_key is not None: body["tag_key"] = self.tag_key if self.update_time is not None: @@ -176,6 +275,8 @@ def as_shallow_dict(self) -> dict: body["description"] = self.description if self.id is not None: body["id"] = self.id + if self.propagation_config: + body["propagation_config"] = self.propagation_config if self.tag_key is not None: body["tag_key"] = self.tag_key if self.update_time is not None: @@ -191,6 +292,7 @@ def from_dict(cls, d: Dict[str, Any]) -> TagPolicy: create_time=d.get("create_time", None), description=d.get("description", None), id=d.get("id", None), + propagation_config=_from_dict(d, "propagation_config", PropagationConfig), tag_key=d.get("tag_key", None), update_time=d.get("update_time", None), values=_repeated_dict(d, "values", Value), @@ -418,7 +520,8 @@ def delete_tag_assignment(self, entity_type: str, entity_id: str, tag_key: str): """Delete a tag assignment :param entity_type: str - The type of entity to which the tag is assigned. Allowed values are apps, dashboards, geniespaces + The type of entity to which the tag is assigned. Allowed values are apps, dashboards, geniespaces, + notebooks :param entity_id: str The identifier of the entity to which the tag is assigned. 
For apps, the entity_id is the app name :param tag_key: str @@ -443,7 +546,8 @@ def get_tag_assignment(self, entity_type: str, entity_id: str, tag_key: str) -> """Get a tag assignment :param entity_type: str - The type of entity to which the tag is assigned. Allowed values are apps, dashboards, geniespaces + The type of entity to which the tag is assigned. Allowed values are apps, dashboards, geniespaces, + notebooks :param entity_id: str The identifier of the entity to which the tag is assigned. For apps, the entity_id is the app name :param tag_key: str @@ -471,7 +575,8 @@ def list_tag_assignments( """List the tag assignments for an entity :param entity_type: str - The type of entity to which the tag is assigned. Allowed values are apps, dashboards, geniespaces + The type of entity to which the tag is assigned. Allowed values are apps, dashboards, geniespaces, + notebooks :param entity_id: str The identifier of the entity to which the tag is assigned. For apps, the entity_id is the app name :param page_size: int (optional) @@ -512,7 +617,8 @@ def update_tag_assignment( """Update a tag assignment :param entity_type: str - The type of entity to which the tag is assigned. Allowed values are apps, dashboards, geniespaces + The type of entity to which the tag is assigned. Allowed values are apps, dashboards, geniespaces, + notebooks :param entity_id: str The identifier of the entity to which the tag is assigned. 
For apps, the entity_id is the app name :param tag_key: str diff --git a/databricks/sdk/service/vectorsearch.py b/databricks/sdk/service/vectorsearch.py index a7a11add2..18c67d2f2 100755 --- a/databricks/sdk/service/vectorsearch.py +++ b/databricks/sdk/service/vectorsearch.py @@ -22,6 +22,51 @@ # all definitions in this file are in alphabetical order +@dataclass +class AdjustedThroughputRequest: + """Adjusted throughput request parameters""" + + concurrency: Optional[float] = None + """Adjusted concurrency (total CPU) for the endpoint""" + + maximum_concurrency_allowed: Optional[float] = None + """Adjusted maximum concurrency allowed for the endpoint""" + + minimal_concurrency_allowed: Optional[float] = None + """Adjusted minimum concurrency allowed for the endpoint""" + + def as_dict(self) -> dict: + """Serializes the AdjustedThroughputRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.concurrency is not None: + body["concurrency"] = self.concurrency + if self.maximum_concurrency_allowed is not None: + body["maximum_concurrency_allowed"] = self.maximum_concurrency_allowed + if self.minimal_concurrency_allowed is not None: + body["minimal_concurrency_allowed"] = self.minimal_concurrency_allowed + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AdjustedThroughputRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.concurrency is not None: + body["concurrency"] = self.concurrency + if self.maximum_concurrency_allowed is not None: + body["maximum_concurrency_allowed"] = self.maximum_concurrency_allowed + if self.minimal_concurrency_allowed is not None: + body["minimal_concurrency_allowed"] = self.minimal_concurrency_allowed + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AdjustedThroughputRequest: + """Deserializes the AdjustedThroughputRequest from a dictionary.""" + return cls( + concurrency=d.get("concurrency", None), + 
maximum_concurrency_allowed=d.get("maximum_concurrency_allowed", None), + minimal_concurrency_allowed=d.get("minimal_concurrency_allowed", None), + ) + + @dataclass class ColumnInfo: name: Optional[str] = None @@ -202,6 +247,11 @@ class DeltaSyncVectorIndexSpecRequest: columns from the source table are synced with the index. The primary key column and embedding source column or embedding vector column are always synced.""" + effective_budget_policy_id: Optional[str] = None + """The budget policy id applied to the vector search index""" + + effective_usage_policy_id: Optional[str] = None + embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None """The columns that contain the embedding source.""" @@ -226,6 +276,10 @@ def as_dict(self) -> dict: body = {} if self.columns_to_sync: body["columns_to_sync"] = [v for v in self.columns_to_sync] + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = [v.as_dict() for v in self.embedding_source_columns] if self.embedding_vector_columns: @@ -243,6 +297,10 @@ def as_shallow_dict(self) -> dict: body = {} if self.columns_to_sync: body["columns_to_sync"] = self.columns_to_sync + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = self.embedding_source_columns if self.embedding_vector_columns: @@ -260,6 +318,8 @@ def from_dict(cls, d: Dict[str, Any]) -> DeltaSyncVectorIndexSpecRequest: """Deserializes the DeltaSyncVectorIndexSpecRequest from a dictionary.""" return cls( columns_to_sync=d.get("columns_to_sync", None), + 
effective_budget_policy_id=d.get("effective_budget_policy_id", None), + effective_usage_policy_id=d.get("effective_usage_policy_id", None), embedding_source_columns=_repeated_dict(d, "embedding_source_columns", EmbeddingSourceColumn), embedding_vector_columns=_repeated_dict(d, "embedding_vector_columns", EmbeddingVectorColumn), embedding_writeback_table=d.get("embedding_writeback_table", None), @@ -270,6 +330,11 @@ def from_dict(cls, d: Dict[str, Any]) -> DeltaSyncVectorIndexSpecRequest: @dataclass class DeltaSyncVectorIndexSpecResponse: + effective_budget_policy_id: Optional[str] = None + """The budget policy id applied to the vector search index""" + + effective_usage_policy_id: Optional[str] = None + embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None """The columns that contain the embedding source.""" @@ -295,6 +360,10 @@ class DeltaSyncVectorIndexSpecResponse: def as_dict(self) -> dict: """Serializes the DeltaSyncVectorIndexSpecResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = [v.as_dict() for v in self.embedding_source_columns] if self.embedding_vector_columns: @@ -312,6 +381,10 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the DeltaSyncVectorIndexSpecResponse into a shallow dictionary of its immediate attributes.""" body = {} + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = self.embedding_source_columns if 
self.embedding_vector_columns: @@ -330,6 +403,8 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeltaSyncVectorIndexSpecResponse: """Deserializes the DeltaSyncVectorIndexSpecResponse from a dictionary.""" return cls( + effective_budget_policy_id=d.get("effective_budget_policy_id", None), + effective_usage_policy_id=d.get("effective_usage_policy_id", None), embedding_source_columns=_repeated_dict(d, "embedding_source_columns", EmbeddingSourceColumn), embedding_vector_columns=_repeated_dict(d, "embedding_vector_columns", EmbeddingVectorColumn), embedding_writeback_table=d.get("embedding_writeback_table", None), @@ -497,6 +572,9 @@ class EndpointInfo: scaling_info: Optional[EndpointScalingInfo] = None """Scaling information for the endpoint""" + throughput_info: Optional[EndpointThroughputInfo] = None + """Throughput information for the endpoint""" + def as_dict(self) -> dict: """Serializes the EndpointInfo into a dictionary suitable for use as a JSON request body.""" body = {} @@ -524,6 +602,8 @@ def as_dict(self) -> dict: body["num_indexes"] = self.num_indexes if self.scaling_info: body["scaling_info"] = self.scaling_info.as_dict() + if self.throughput_info: + body["throughput_info"] = self.throughput_info.as_dict() return body def as_shallow_dict(self) -> dict: @@ -553,6 +633,8 @@ def as_shallow_dict(self) -> dict: body["num_indexes"] = self.num_indexes if self.scaling_info: body["scaling_info"] = self.scaling_info + if self.throughput_info: + body["throughput_info"] = self.throughput_info return body @classmethod @@ -571,6 +653,7 @@ def from_dict(cls, d: Dict[str, Any]) -> EndpointInfo: name=d.get("name", None), num_indexes=d.get("num_indexes", None), scaling_info=_from_dict(d, "scaling_info", EndpointScalingInfo), + throughput_info=_from_dict(d, "throughput_info", EndpointThroughputInfo), ) @@ -651,6 +734,99 @@ class EndpointStatusState(Enum): YELLOW_STATE = "YELLOW_STATE" +@dataclass +class EndpointThroughputInfo: + """Throughput 
information for an endpoint""" + + change_request_message: Optional[str] = None + """Additional information about the throughput change request""" + + change_request_state: Optional[ThroughputChangeRequestState] = None + """The state of the most recent throughput change request""" + + current_concurrency: Optional[float] = None + """The current concurrency (total CPU) allocated to the endpoint""" + + current_concurrency_utilization_percentage: Optional[float] = None + """The current utilization of concurrency as a percentage (0-100)""" + + current_num_replicas: Optional[int] = None + """The current number of replicas allocated to the endpoint""" + + maximum_concurrency_allowed: Optional[float] = None + """The maximum concurrency allowed for this endpoint""" + + minimal_concurrency_allowed: Optional[float] = None + """The minimum concurrency allowed for this endpoint""" + + requested_concurrency: Optional[float] = None + """The requested concurrency (total CPU) for the endpoint""" + + requested_num_replicas: Optional[int] = None + """The requested number of replicas for the endpoint""" + + def as_dict(self) -> dict: + """Serializes the EndpointThroughputInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.change_request_message is not None: + body["change_request_message"] = self.change_request_message + if self.change_request_state is not None: + body["change_request_state"] = self.change_request_state.value + if self.current_concurrency is not None: + body["current_concurrency"] = self.current_concurrency + if self.current_concurrency_utilization_percentage is not None: + body["current_concurrency_utilization_percentage"] = self.current_concurrency_utilization_percentage + if self.current_num_replicas is not None: + body["current_num_replicas"] = self.current_num_replicas + if self.maximum_concurrency_allowed is not None: + body["maximum_concurrency_allowed"] = self.maximum_concurrency_allowed + if 
self.minimal_concurrency_allowed is not None: + body["minimal_concurrency_allowed"] = self.minimal_concurrency_allowed + if self.requested_concurrency is not None: + body["requested_concurrency"] = self.requested_concurrency + if self.requested_num_replicas is not None: + body["requested_num_replicas"] = self.requested_num_replicas + return body + + def as_shallow_dict(self) -> dict: + """Serializes the EndpointThroughputInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.change_request_message is not None: + body["change_request_message"] = self.change_request_message + if self.change_request_state is not None: + body["change_request_state"] = self.change_request_state + if self.current_concurrency is not None: + body["current_concurrency"] = self.current_concurrency + if self.current_concurrency_utilization_percentage is not None: + body["current_concurrency_utilization_percentage"] = self.current_concurrency_utilization_percentage + if self.current_num_replicas is not None: + body["current_num_replicas"] = self.current_num_replicas + if self.maximum_concurrency_allowed is not None: + body["maximum_concurrency_allowed"] = self.maximum_concurrency_allowed + if self.minimal_concurrency_allowed is not None: + body["minimal_concurrency_allowed"] = self.minimal_concurrency_allowed + if self.requested_concurrency is not None: + body["requested_concurrency"] = self.requested_concurrency + if self.requested_num_replicas is not None: + body["requested_num_replicas"] = self.requested_num_replicas + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> EndpointThroughputInfo: + """Deserializes the EndpointThroughputInfo from a dictionary.""" + return cls( + change_request_message=d.get("change_request_message", None), + change_request_state=_enum(d, "change_request_state", ThroughputChangeRequestState), + current_concurrency=d.get("current_concurrency", None), + 
current_concurrency_utilization_percentage=d.get("current_concurrency_utilization_percentage", None), + current_num_replicas=d.get("current_num_replicas", None), + maximum_concurrency_allowed=d.get("maximum_concurrency_allowed", None), + minimal_concurrency_allowed=d.get("minimal_concurrency_allowed", None), + requested_concurrency=d.get("requested_concurrency", None), + requested_num_replicas=d.get("requested_num_replicas", None), + ) + + class EndpointType(Enum): """Type of endpoint.""" @@ -1016,6 +1192,50 @@ def from_dict(cls, d: Dict[str, Any]) -> PatchEndpointBudgetPolicyResponse: return cls(effective_budget_policy_id=d.get("effective_budget_policy_id", None)) +@dataclass +class PatchEndpointThroughputResponse: + adjusted_request: Optional[AdjustedThroughputRequest] = None + """The adjusted request if the original request could not be fully fulfilled. This is only + populated when the request was adjusted.""" + + message: Optional[str] = None + """Message explaining the status or any adjustments made""" + + status: Optional[ThroughputPatchStatus] = None + """The status of the throughput change request""" + + def as_dict(self) -> dict: + """Serializes the PatchEndpointThroughputResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.adjusted_request: + body["adjusted_request"] = self.adjusted_request.as_dict() + if self.message is not None: + body["message"] = self.message + if self.status is not None: + body["status"] = self.status.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PatchEndpointThroughputResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.adjusted_request: + body["adjusted_request"] = self.adjusted_request + if self.message is not None: + body["message"] = self.message + if self.status is not None: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PatchEndpointThroughputResponse: + 
"""Deserializes the PatchEndpointThroughputResponse from a dictionary.""" + return cls( + adjusted_request=_from_dict(d, "adjusted_request", AdjustedThroughputRequest), + message=d.get("message", None), + status=_enum(d, "status", ThroughputPatchStatus), + ) + + class PipelineType(Enum): """Pipeline execution mode. - `TRIGGERED`: If the pipeline uses the triggered execution mode, the system stops processing after successfully refreshing the source table in the pipeline once, @@ -1316,6 +1536,25 @@ def from_dict(cls, d: Dict[str, Any]) -> SyncIndexResponse: return cls() +class ThroughputChangeRequestState(Enum): + """Throughput change request state""" + + CHANGE_ADJUSTED = "CHANGE_ADJUSTED" + CHANGE_FAILED = "CHANGE_FAILED" + CHANGE_IN_PROGRESS = "CHANGE_IN_PROGRESS" + CHANGE_REACHED_MAXIMUM = "CHANGE_REACHED_MAXIMUM" + CHANGE_REACHED_MINIMUM = "CHANGE_REACHED_MINIMUM" + CHANGE_SUCCESS = "CHANGE_SUCCESS" + + +class ThroughputPatchStatus(Enum): + """Response status for throughput change requests""" + + PATCH_ACCEPTED = "PATCH_ACCEPTED" + PATCH_FAILED = "PATCH_FAILED" + PATCH_REJECTED = "PATCH_REJECTED" + + @dataclass class UpdateEndpointCustomTagsResponse: custom_tags: Optional[List[CustomTag]] = None @@ -1348,6 +1587,41 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateEndpointCustomTagsResponse: return cls(custom_tags=_repeated_dict(d, "custom_tags", CustomTag), name=d.get("name", None)) +@dataclass +class UpdateVectorIndexUsagePolicyResponse: + effective_usage_policy_id: Optional[str] = None + """The effective usage policy id applied to the vector search index""" + + usage_policy_id: Optional[str] = None + """The updated usage policy id""" + + def as_dict(self) -> dict: + """Serializes the UpdateVectorIndexUsagePolicyResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id + if self.usage_policy_id is not None: + 
body["usage_policy_id"] = self.usage_policy_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateVectorIndexUsagePolicyResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdateVectorIndexUsagePolicyResponse: + """Deserializes the UpdateVectorIndexUsagePolicyResponse from a dictionary.""" + return cls( + effective_usage_policy_id=d.get("effective_usage_policy_id", None), + usage_policy_id=d.get("usage_policy_id", None), + ) + + @dataclass class UpsertDataResult: failed_primary_keys: Optional[List[str]] = None @@ -1663,6 +1937,8 @@ def create_endpoint( *, budget_policy_id: Optional[str] = None, min_qps: Optional[int] = None, + num_replicas: Optional[int] = None, + usage_policy_id: Optional[str] = None, ) -> Wait[EndpointInfo]: """Create a new endpoint. @@ -1675,6 +1951,10 @@ def create_endpoint( :param min_qps: int (optional) Min QPS for the endpoint. Mutually exclusive with num_replicas. The actual replica count is calculated at index creation/sync time based on this value. + :param num_replicas: int (optional) + Initial number of replicas for the endpoint. If not specified, defaults to 1. + :param usage_policy_id: str (optional) + The usage policy id to be applied once we've migrated to usage policies :returns: Long-running operation waiter for :class:`EndpointInfo`. 
@@ -1690,6 +1970,10 @@ def create_endpoint( body["min_qps"] = min_qps if name is not None: body["name"] = name + if num_replicas is not None: + body["num_replicas"] = num_replicas + if usage_policy_id is not None: + body["usage_policy_id"] = usage_policy_id headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -1713,10 +1997,17 @@ def create_endpoint_and_wait( *, budget_policy_id: Optional[str] = None, min_qps: Optional[int] = None, + num_replicas: Optional[int] = None, + usage_policy_id: Optional[str] = None, timeout=timedelta(minutes=20), ) -> EndpointInfo: return self.create_endpoint( - budget_policy_id=budget_policy_id, endpoint_type=endpoint_type, min_qps=min_qps, name=name + budget_policy_id=budget_policy_id, + endpoint_type=endpoint_type, + min_qps=min_qps, + name=name, + num_replicas=num_replicas, + usage_policy_id=usage_policy_id, ).result(timeout=timeout) def delete_endpoint(self, endpoint_name: str): @@ -1813,6 +2104,63 @@ def patch_endpoint(self, endpoint_name: str, *, min_qps: Optional[int] = None) - res = self._api.do("PATCH", f"/api/2.0/vector-search/endpoints/{endpoint_name}", body=body, headers=headers) return EndpointInfo.from_dict(res) + def patch_endpoint_throughput( + self, + endpoint_name: str, + *, + all_or_nothing: Optional[bool] = None, + concurrency: Optional[float] = None, + maximum_concurrency_allowed: Optional[float] = None, + minimal_concurrency_allowed: Optional[float] = None, + num_replicas: Optional[int] = None, + ) -> PatchEndpointThroughputResponse: + """Update the throughput (concurrency) of an endpoint + + :param endpoint_name: str + Name of the vector search endpoint + :param all_or_nothing: bool (optional) + If true, the request will fail if the requested concurrency or limits cannot be exactly met. If + false, the request will be adjusted to the closest possible value. + :param concurrency: float (optional) + Requested concurrency (total CPU) for the endpoint. 
If not specified, the current concurrency is + maintained. + :param maximum_concurrency_allowed: float (optional) + Maximum concurrency allowed for the endpoint. If not specified, the current maximum is maintained. + :param minimal_concurrency_allowed: float (optional) + Minimum concurrency allowed for the endpoint. If not specified, the current minimum is maintained. + :param num_replicas: int (optional) + Requested number of data copies for the endpoint (including primary). For example: num_replicas=2 + means 2 total copies of the data (1 primary + 1 replica). If not specified, the current replication + factor is maintained. Valid range: 1-6 (where 1 = no replication, 6 = 1 primary + 5 replicas). + + :returns: :class:`PatchEndpointThroughputResponse` + """ + + body = {} + if all_or_nothing is not None: + body["all_or_nothing"] = all_or_nothing + if concurrency is not None: + body["concurrency"] = concurrency + if maximum_concurrency_allowed is not None: + body["maximum_concurrency_allowed"] = maximum_concurrency_allowed + if minimal_concurrency_allowed is not None: + body["minimal_concurrency_allowed"] = minimal_concurrency_allowed + if num_replicas is not None: + body["num_replicas"] = num_replicas + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do( + "PATCH", f"/api/2.0/vector-search/endpoints/{endpoint_name}/throughput", body=body, headers=headers + ) + return PatchEndpointThroughputResponse.from_dict(res) + def retrieve_user_visible_metrics( self, name: str, @@ -2264,6 +2612,36 @@ def sync_index(self, index_name: str): self._api.do("POST", f"/api/2.0/vector-search/indexes/{index_name}/sync", headers=headers) + def update_index_budget_policy( + self, index_name: str, *, usage_policy_id: Optional[str] = None + ) -> UpdateVectorIndexUsagePolicyResponse: + """Update 
the budget policy of an index + + :param index_name: str + Name of the vector search index + :param usage_policy_id: str (optional) + The usage policy id to be applied + + :returns: :class:`UpdateVectorIndexUsagePolicyResponse` + """ + + body = {} + if usage_policy_id is not None: + body["usage_policy_id"] = usage_policy_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + cfg = self._api._cfg + if cfg.host_type == HostType.UNIFIED and cfg.workspace_id: + headers["X-Databricks-Org-Id"] = cfg.workspace_id + + res = self._api.do( + "PATCH", f"/api/2.0/vector-search/indexes/{index_name}/usage-policy", body=body, headers=headers + ) + return UpdateVectorIndexUsagePolicyResponse.from_dict(res) + def upsert_data_vector_index(self, index_name: str, inputs_json: str) -> UpsertDataVectorIndexResponse: """Handles the upserting of data into a specified vector index. diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py index 721ddd0b7..437901c23 100755 --- a/databricks/sdk/service/workspace.py +++ b/databricks/sdk/service/workspace.py @@ -398,6 +398,12 @@ class ExportFormat(Enum): SOURCE = "SOURCE" +class ExportOutputs(Enum): + + ALL = "ALL" + NONE = "NONE" + + @dataclass class ExportResponse: """The request field `direct_download` determines whether a JSON response or binary contents are @@ -2748,7 +2754,9 @@ def delete(self, path: str, *, recursive: Optional[bool] = None): self._api.do("POST", "/api/2.0/workspace/delete", body=body, headers=headers) - def export(self, path: str, *, format: Optional[ExportFormat] = None) -> ExportResponse: + def export( + self, path: str, *, format: Optional[ExportFormat] = None, outputs: Optional[ExportOutputs] = None + ) -> ExportResponse: """Exports an object or the contents of an entire directory. If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. 
@@ -2770,6 +2778,11 @@ def export(self, path: str, *, format: Optional[ExportFormat] = None) -> ExportR Directory exports will not include non-notebook entries. - `R_MARKDOWN`: The notebook is exported to R Markdown format. - `AUTO`: The object or directory is exported depending on the objects type. Directory exports will include notebooks and workspace files. + :param outputs: :class:`ExportOutputs` (optional) + This specifies which cell outputs should be included in the export (if the export format allows it). + If not specified, the behavior is determined by the format. For JUPYTER format, the default is to + include all outputs. This is a public endpoint, but only ALL or NONE is documented publically, + DATABRICKS is internal only :returns: :class:`ExportResponse` """ @@ -2777,6 +2790,8 @@ def export(self, path: str, *, format: Optional[ExportFormat] = None) -> ExportR query = {} if format is not None: query["format"] = format.value + if outputs is not None: + query["outputs"] = outputs.value if path is not None: query["path"] = path headers = { diff --git a/docs/account/billing/budget_policy.rst b/docs/account/billing/budget_policy.rst index 3c2cbd92e..2ee10bcea 100644 --- a/docs/account/billing/budget_policy.rst +++ b/docs/account/billing/budget_policy.rst @@ -62,7 +62,7 @@ :returns: Iterator over :class:`BudgetPolicy` - .. py:method:: update(policy_id: str, policy: BudgetPolicy [, limit_config: Optional[LimitConfig]]) -> BudgetPolicy + .. py:method:: update(policy_id: str, policy: BudgetPolicy [, limit_config: Optional[LimitConfig], update_mask: Optional[str]]) -> BudgetPolicy Updates a policy @@ -73,6 +73,8 @@ specified even if not changed. The `policy_id` is used to identify the policy to update. :param limit_config: :class:`LimitConfig` (optional) DEPRECATED. This is redundant field as LimitConfig is part of the BudgetPolicy + :param update_mask: str (optional) + Field mask specifying which fields to update. When not provided, all fields are updated. 
:returns: :class:`BudgetPolicy` \ No newline at end of file diff --git a/docs/account/billing/index.rst b/docs/account/billing/index.rst index b8b317616..08241184d 100644 --- a/docs/account/billing/index.rst +++ b/docs/account/billing/index.rst @@ -11,4 +11,5 @@ Configure different aspects of Databricks billing and usage. budget_policy budgets log_delivery - usage_dashboards \ No newline at end of file + usage_dashboards + usage_policy \ No newline at end of file diff --git a/docs/account/billing/usage_policy.rst b/docs/account/billing/usage_policy.rst new file mode 100644 index 000000000..95a65c111 --- /dev/null +++ b/docs/account/billing/usage_policy.rst @@ -0,0 +1,69 @@ +``a.usage_policy``: Usage Policy +================================ +.. currentmodule:: databricks.sdk.service.billing + +.. py:class:: UsagePolicyAPI + + A service serves REST API about Usage policies + + .. py:method:: create( [, policy: Optional[UsagePolicy], request_id: Optional[str]]) -> UsagePolicy + + Creates a new usage policy. + + :param policy: :class:`UsagePolicy` (optional) + The policy to create. `policy_id` needs to be empty as it will be generated + :param request_id: str (optional) + A unique identifier for this request. Restricted to 36 ASCII characters. + + :returns: :class:`UsagePolicy` + + + .. py:method:: delete(policy_id: str) + + Deletes a usage policy + + :param policy_id: str + The Id of the policy. + + + + + .. py:method:: get(policy_id: str) -> UsagePolicy + + Retrieves a usage policy by it's ID. + + :param policy_id: str + The Id of the policy. + + :returns: :class:`UsagePolicy` + + + .. py:method:: list( [, filter_by: Optional[Filter], page_size: Optional[int], page_token: Optional[str], sort_spec: Optional[SortSpec]]) -> Iterator[UsagePolicy] + + Lists all usage policies. Policies are returned in the alphabetically ascending order of their names. + + :param filter_by: :class:`Filter` (optional) + A filter to apply to the list of policies. 
+ :param page_size: int (optional) + The maximum number of usage policies to return. + :param page_token: str (optional) + A page token, received from a previous `ListUsagePolicies` call. + :param sort_spec: :class:`SortSpec` (optional) + The sort specification. + + :returns: Iterator over :class:`UsagePolicy` + + + .. py:method:: update(policy_id: str, policy: UsagePolicy [, limit_config: Optional[LimitConfig]]) -> UsagePolicy + + Updates a usage policy + + :param policy_id: str + The Id of the policy. This field is generated by Databricks and globally unique. + :param policy: :class:`UsagePolicy` + The policy to update. `creator_user_id` cannot be specified in the request. + :param limit_config: :class:`LimitConfig` (optional) + DEPRECATED. This is redundant field as LimitConfig is part of the UsagePolicy + + :returns: :class:`UsagePolicy` + \ No newline at end of file diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst index ca78b86df..527ae5b1f 100755 --- a/docs/account/iam/workspace_assignment.rst +++ b/docs/account/iam/workspace_assignment.rst @@ -30,7 +30,7 @@ :returns: :class:`WorkspacePermissions` - .. py:method:: list(workspace_id: int) -> Iterator[PermissionAssignment] + .. py:method:: list(workspace_id: int [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[PermissionAssignment] Usage: @@ -43,14 +43,18 @@ a = AccountClient() - workspace_id = os.environ["TEST_WORKSPACE_ID"] + workspace_id = os.environ["DUMMY_WORKSPACE_ID"] - all = a.workspace_assignment.list(list=workspace_id) + all = a.workspace_assignment.list(workspace_id=workspace_id) Get the permission assignments for the specified Databricks account and Databricks workspace. :param workspace_id: int The workspace ID for the account. + :param max_results: int (optional) + Maximum number of permission assignments to return. + :param page_token: str (optional) + Page token returned by previous call to retrieve the next page of results. 
:returns: Iterator over :class:`PermissionAssignment` diff --git a/docs/account/iamv2/iam_v2.rst b/docs/account/iamv2/iam_v2.rst index ea9c48287..e12110d77 100644 --- a/docs/account/iamv2/iam_v2.rst +++ b/docs/account/iamv2/iam_v2.rst @@ -6,6 +6,175 @@ These APIs are used to manage identities and the workspace access of these identities in . + .. py:method:: create_account_access_identity_rule(account_access_identity_rule: AccountAccessIdentityRule) -> AccountAccessIdentityRule + + Creates a new account access identity rule for a given account. This allows administrators to + explicitly allow or deny specific principals from accessing the account. + + :param account_access_identity_rule: :class:`AccountAccessIdentityRule` + Required. The rule to create. + + :returns: :class:`AccountAccessIdentityRule` + + + .. py:method:: create_group(group: Group) -> Group + + TODO: Write description later when this method is implemented + + :param group: :class:`Group` + Required. Group to be created in + + :returns: :class:`Group` + + + .. py:method:: create_group_membership(group_id: int, group_membership: GroupMembership) -> GroupMembership + + Creates a group membership (assigns a principal to a group). + + :param group_id: int + Required. Internal ID of the group in Databricks. + :param group_membership: :class:`GroupMembership` + Required. The group membership to create. + + :returns: :class:`GroupMembership` + + + .. py:method:: create_service_principal(service_principal: ServicePrincipal) -> ServicePrincipal + + TODO: Write description later when this method is implemented + + :param service_principal: :class:`ServicePrincipal` + Required. Service principal to be created in + + :returns: :class:`ServicePrincipal` + + + .. py:method:: create_user(user: User) -> User + + TODO: Write description later when this method is implemented + + :param user: :class:`User` + Required. User to be created in + + :returns: :class:`User` + + + .. 
py:method:: create_workspace_assignment_detail(workspace_id: int, workspace_assignment_detail: WorkspaceAssignmentDetail) -> WorkspaceAssignmentDetail + + Creates a workspace assignment detail for a principal. + + :param workspace_id: int + Required. The workspace ID for which the workspace assignment detail is being created. + :param workspace_assignment_detail: :class:`WorkspaceAssignmentDetail` + Required. Workspace assignment detail to be created in . + + :returns: :class:`WorkspaceAssignmentDetail` + + + .. py:method:: delete_account_access_identity_rule(external_id: str) + + Deletes an account access identity rule for a given principal. + + :param external_id: str + Required. The external ID of the principal whose rule should be deleted. + + + + + .. py:method:: delete_group(internal_id: int) + + TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + + + + + .. py:method:: delete_group_membership(group_id: int, principal_id: int) + + Deletes a group membership (unassigns a principal from a group). + + :param group_id: int + Required. Internal ID of the group in Databricks. + :param principal_id: int + Required. Internal ID of the principal to be unassigned from the group. + + + + + .. py:method:: delete_service_principal(internal_id: int) + + TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + + + + + .. py:method:: delete_user(internal_id: int) + + TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. + + + + + .. py:method:: delete_workspace_assignment_detail(workspace_id: int, principal_id: int) + + Deletes a workspace assignment detail for a principal. + + :param workspace_id: int + The workspace ID where the principal has access. + :param principal_id: int + Required. 
ID of the principal in Databricks to delete workspace assignment for. + + + + + .. py:method:: get_account_access_identity_rule(external_id: str) -> AccountAccessIdentityRule + + Gets an account access identity rule for a given principal. + + :param external_id: str + Required. The external ID of the principal whose rule should be retrieved. + + :returns: :class:`AccountAccessIdentityRule` + + + .. py:method:: get_group(internal_id: int) -> Group + + TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + + :returns: :class:`Group` + + + .. py:method:: get_service_principal(internal_id: int) -> ServicePrincipal + + TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + + :returns: :class:`ServicePrincipal` + + + .. py:method:: get_user(internal_id: int) -> User + + TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. + + :returns: :class:`User` + + .. py:method:: get_workspace_access_detail(workspace_id: int, principal_id: int [, view: Optional[WorkspaceAccessDetailView]]) -> WorkspaceAccessDetail Returns the access details for a principal in a workspace. Allows for checking access details for any @@ -25,6 +194,145 @@ :returns: :class:`WorkspaceAccessDetail` + .. py:method:: get_workspace_assignment_detail(workspace_id: int, principal_id: int) -> WorkspaceAssignmentDetail + + Returns the assignment details for a principal in a workspace. + + :param workspace_id: int + Required. The workspace ID for which the assignment details are being requested. + :param principal_id: int + Required. The internal ID of the principal (user/sp/group) for which the assignment details are + being requested. + + :returns: :class:`WorkspaceAssignmentDetail` + + + .. 
py:method:: list_account_access_identity_rules( [, filter: Optional[str], page_size: Optional[int], page_token: Optional[str]]) -> ListAccountAccessIdentityRulesResponse + + Lists all account access identity rules for a given account. These rules control which principals + (users, service principals, groups) from the customer's IdP are allowed or denied access to the + Databricks account. + + :param filter: str (optional) + Optional. Filter to apply to the list. Supports filtering by displayName. + :param page_size: int (optional) + Optional. The maximum number of rules to return. The service may return fewer than this value. + :param page_token: str (optional) + Optional. A page token, received from a previous call. Provide this to retrieve the subsequent page. + + :returns: :class:`ListAccountAccessIdentityRulesResponse` + + + .. py:method:: list_direct_group_members(group_id: int [, page_size: Optional[int], page_token: Optional[str]]) -> ListDirectGroupMembersResponse + + Lists provisioned direct members of a group with their membership source (internal or from identity + provider). + + :param group_id: int + Required. Internal ID of the group in Databricks whose direct members are being listed. + :param page_size: int (optional) + The maximum number of members to return. The service may return fewer than this value. If not + provided, defaults to 1000 (also the maximum allowed). + :param page_token: str (optional) + A page token, received from a previous ListDirectGroupMembers call. Provide this to retrieve the + subsequent page. + + :returns: :class:`ListDirectGroupMembersResponse` + + + .. py:method:: list_groups( [, filter: Optional[str], page_size: Optional[int], page_token: Optional[str]]) -> ListGroupsResponse + + TODO: Write description later when this method is implemented + + :param filter: str (optional) + Optional. Allows filtering groups by group name or external id. + :param page_size: int (optional) + The maximum number of groups to return. 
The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListGroups call. Provide this to retrieve the subsequent + page. + + :returns: :class:`ListGroupsResponse` + + + .. py:method:: list_service_principals( [, filter: Optional[str], page_size: Optional[int], page_token: Optional[str]]) -> ListServicePrincipalsResponse + + TODO: Write description later when this method is implemented + + :param filter: str (optional) + Optional. Allows filtering service principals by application id or external id. + :param page_size: int (optional) + The maximum number of service principals to return. The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListServicePrincipals call. Provide this to retrieve the + subsequent page. + + :returns: :class:`ListServicePrincipalsResponse` + + + .. py:method:: list_transitive_parent_groups(principal_id: int [, page_size: Optional[int], page_token: Optional[str]]) -> ListTransitiveParentGroupsResponse + + Lists all transitive parent groups of a principal. + + :param principal_id: int + Required. Internal ID of the principal in Databricks whose transitive parent groups are being + listed. + :param page_size: int (optional) + The maximum number of parent groups to return. The service may return fewer than this value. If not + provided, defaults to 1000 (also the maximum allowed). + :param page_token: str (optional) + A page token, received from a previous ListTransitiveParentGroups call. Provide this to retrieve the + subsequent page. + + :returns: :class:`ListTransitiveParentGroupsResponse` + + + .. py:method:: list_users( [, filter: Optional[str], page_size: Optional[int], page_token: Optional[str]]) -> ListUsersResponse + + TODO: Write description later when this method is implemented + + :param filter: str (optional) + Optional. Allows filtering users by username or external id. 
+ :param page_size: int (optional) + The maximum number of users to return. The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListUsers call. Provide this to retrieve the subsequent page. + + :returns: :class:`ListUsersResponse` + + + .. py:method:: list_workspace_access_details(workspace_id: int [, page_size: Optional[int], page_token: Optional[str]]) -> ListWorkspaceAccessDetailsResponse + + TODO: Write description later when this method is implemented + + :param workspace_id: int + The workspace ID for which the workspace access details are being fetched. + :param page_size: int (optional) + The maximum number of workspace access details to return. The service may return fewer than this + value. + :param page_token: str (optional) + A page token, received from a previous ListWorkspaceAccessDetails call. Provide this to retrieve the + subsequent page. + + :returns: :class:`ListWorkspaceAccessDetailsResponse` + + + .. py:method:: list_workspace_assignment_details(workspace_id: int [, page_size: Optional[int], page_token: Optional[str]]) -> ListWorkspaceAssignmentDetailsResponse + + Lists workspace assignment details for a workspace. + + :param workspace_id: int + Required. The workspace ID for which the workspace assignment details are being fetched. + :param page_size: int (optional) + The maximum number of workspace assignment details to return. The service may return fewer than this + value. + :param page_token: str (optional) + A page token, received from a previous ListWorkspaceAssignmentDetails call. Provide this to retrieve + the subsequent page. + + :returns: :class:`ListWorkspaceAssignmentDetailsResponse` + + .. py:method:: resolve_group(external_id: str) -> ResolveGroupResponse Resolves a group with the given external ID from the customer's IdP. If the group does not exist, it @@ -59,4 +367,62 @@ Required. The external ID of the user in the customer's IdP. 
:returns: :class:`ResolveUserResponse` + + + .. py:method:: update_group(internal_id: int, group: Group, update_mask: str) -> Group + + TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + :param group: :class:`Group` + Required. Group to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`Group` + + + .. py:method:: update_service_principal(internal_id: int, service_principal: ServicePrincipal, update_mask: str) -> ServicePrincipal + + TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + :param service_principal: :class:`ServicePrincipal` + Required. Service Principal to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`ServicePrincipal` + + + .. py:method:: update_user(internal_id: int, user: User, update_mask: str) -> User + + TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. + :param user: :class:`User` + Required. User to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`User` + + + .. py:method:: update_workspace_assignment_detail(workspace_id: int, principal_id: int, workspace_assignment_detail: WorkspaceAssignmentDetail, update_mask: FieldMask) -> WorkspaceAssignmentDetail + + Updates a workspace assignment detail for a principal. + + :param workspace_id: int + Required. The workspace ID for which the workspace assignment detail is being updated. + :param principal_id: int + Required. ID of the principal in Databricks. + :param workspace_assignment_detail: :class:`WorkspaceAssignmentDetail` + Required. Workspace assignment detail to be updated in . + :param update_mask: FieldMask + Required. The list of fields to update. 
+ + :returns: :class:`WorkspaceAssignmentDetail` \ No newline at end of file diff --git a/docs/account/provisioning/storage.rst b/docs/account/provisioning/storage.rst index b9f080e36..25ee5abaa 100644 --- a/docs/account/provisioning/storage.rst +++ b/docs/account/provisioning/storage.rst @@ -16,7 +16,6 @@ .. code-block:: - import os import time from databricks.sdk import AccountClient @@ -26,11 +25,8 @@ storage = a.storage.create( storage_configuration_name=f"sdk-{time.time_ns()}", - root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]), + root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"), ) - - # cleanup - a.storage.delete(storage_configuration_id=storage.storage_configuration_id) Creates a Databricks storage configuration for an account. diff --git a/docs/account/provisioning/workspaces.rst b/docs/account/provisioning/workspaces.rst index 6f68e5148..02834856f 100644 --- a/docs/account/provisioning/workspaces.rst +++ b/docs/account/provisioning/workspaces.rst @@ -11,7 +11,7 @@ These endpoints are available if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - .. py:method:: create( [, aws_region: Optional[str], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], compute_mode: Optional[CustomerFacingComputeMode], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], workspace_name: Optional[str]]) -> Wait[Workspace] + .. 
py:method:: create( [, aws_region: Optional[str], azure_workspace_info: Optional[AzureWorkspaceInfo], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], compute_mode: Optional[CustomerFacingComputeMode], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], workspace_name: Optional[str]]) -> Wait[Workspace] Usage: @@ -81,6 +81,7 @@ [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html :param aws_region: str (optional) + :param azure_workspace_info: :class:`AzureWorkspaceInfo` (optional) :param cloud: str (optional) DEPRECATED: This field is being ignored by the server and will be removed in the future. The cloud name. This field always has the value `gcp`. @@ -152,7 +153,7 @@ See :method:wait_get_workspace_running for more details. - .. 
py:method:: create_and_wait( [, aws_region: Optional[str], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], compute_mode: Optional[CustomerFacingComputeMode], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], workspace_name: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace + .. py:method:: create_and_wait( [, aws_region: Optional[str], azure_workspace_info: Optional[AzureWorkspaceInfo], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], compute_mode: Optional[CustomerFacingComputeMode], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], workspace_name: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace .. 
py:method:: delete(workspace_id: int) -> Workspace diff --git a/docs/account/settings/network_connectivity.rst b/docs/account/settings/network_connectivity.rst index 90c885c17..a2a2009c3 100644 --- a/docs/account/settings/network_connectivity.rst +++ b/docs/account/settings/network_connectivity.rst @@ -108,12 +108,13 @@ :returns: Iterator over :class:`NetworkConnectivityConfiguration` - .. py:method:: list_private_endpoint_rules(network_connectivity_config_id: str [, page_token: Optional[str]]) -> Iterator[NccPrivateEndpointRule] + .. py:method:: list_private_endpoint_rules(network_connectivity_config_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[NccPrivateEndpointRule] Gets an array of private endpoint rules. :param network_connectivity_config_id: str Your Network Connectvity Configuration ID. + :param page_size: int (optional) :param page_token: str (optional) Pagination token to go to next page based on previous query. diff --git a/docs/dbdataclasses/apps.rst b/docs/dbdataclasses/apps.rst index c35421999..0c9ad04e2 100755 --- a/docs/dbdataclasses/apps.rst +++ b/docs/dbdataclasses/apps.rst @@ -54,6 +54,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: AppManifestAppResourceAppSpec + :members: + :undoc-members: + +.. py:class:: AppManifestAppResourceAppSpecAppPermission + + .. py:attribute:: CAN_USE + :value: "CAN_USE" + .. autoclass:: AppManifestAppResourceExperimentSpec :members: :undoc-members: @@ -87,6 +96,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: IS_OWNER :value: "IS_OWNER" +.. autoclass:: AppManifestAppResourcePostgresSpec + :members: + :undoc-members: + +.. py:class:: AppManifestAppResourcePostgresSpecPostgresPermission + + .. py:attribute:: CAN_CONNECT_AND_CREATE + :value: "CAN_CONNECT_AND_CREATE" + .. 
autoclass:: AppManifestAppResourceSecretSpec :members: :undoc-members: @@ -150,6 +168,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: MANAGE :value: "MANAGE" + .. py:attribute:: MODIFY + :value: "MODIFY" + .. py:attribute:: READ_VOLUME :value: "READ_VOLUME" @@ -206,6 +227,11 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: AppResourceAppAppPermission + + .. py:attribute:: CAN_USE + :value: "CAN_USE" + .. autoclass:: AppResourceDatabase :members: :undoc-members: @@ -405,6 +431,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: LARGE :value: "LARGE" + .. py:attribute:: LIQUID + :value: "LIQUID" + .. py:attribute:: MEDIUM :value: "MEDIUM" diff --git a/docs/dbdataclasses/billing.rst b/docs/dbdataclasses/billing.rst index b58ec1528..ca3cee231 100755 --- a/docs/dbdataclasses/billing.rst +++ b/docs/dbdataclasses/billing.rst @@ -140,6 +140,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ListUsagePoliciesResponse + :members: + :undoc-members: + .. py:class:: LogDeliveryConfigStatus * Log Delivery Status @@ -216,6 +220,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: USAGE_DASHBOARD_TYPE_WORKSPACE :value: "USAGE_DASHBOARD_TYPE_WORKSPACE" +.. autoclass:: UsagePolicy + :members: + :undoc-members: + .. autoclass:: WrappedLogDeliveryConfiguration :members: :undoc-members: diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst index 705e89751..08946f9fd 100755 --- a/docs/dbdataclasses/catalog.rst +++ b/docs/dbdataclasses/catalog.rst @@ -257,6 +257,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: TABLE_TYPE :value: "TABLE_TYPE" + .. py:attribute:: TIME + :value: "TIME" + .. 
py:attribute:: TIMESTAMP :value: "TIMESTAMP" @@ -269,6 +272,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: VARIANT :value: "VARIANT" +.. autoclass:: ConditionalDisplay + :members: + :undoc-members: + .. autoclass:: ConnectionDependency :members: :undoc-members: @@ -305,6 +312,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: ORACLE :value: "ORACLE" + .. py:attribute:: PALANTIR + :value: "PALANTIR" + .. py:attribute:: POSTGRESQL :value: "POSTGRESQL" @@ -345,6 +355,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ConversionInfo + :members: + :undoc-members: + +.. py:class:: ConversionInfoState + + .. py:attribute:: COMPLETED + :value: "COMPLETED" + + .. py:attribute:: IN_PROGRESS + :value: "IN_PROGRESS" + .. autoclass:: CreateAccessRequest :members: :undoc-members: @@ -616,6 +638,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: INTERNAL_AND_EXTERNAL :value: "INTERNAL_AND_EXTERNAL" +.. autoclass:: DenyOptions + :members: + :undoc-members: + .. autoclass:: Dependency :members: :undoc-members: @@ -645,6 +671,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: DrReplicationInfo + :members: + :undoc-members: + +.. py:class:: DrReplicationStatus + + .. py:attribute:: DR_REPLICATION_STATUS_PRIMARY + :value: "DR_REPLICATION_STATUS_PRIMARY" + + .. py:attribute:: DR_REPLICATION_STATUS_SECONDARY + :value: "DR_REPLICATION_STATUS_SECONDARY" + .. autoclass:: EffectivePermissionsList :members: :undoc-members: @@ -692,6 +730,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: EnvironmentSettings + :members: + :undoc-members: + .. 
autoclass:: ExternalLineageExternalMetadata :members: :undoc-members: @@ -947,6 +989,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ListPrivilegeAssignmentsResponse + :members: + :undoc-members: + .. autoclass:: ListQuotasResponse :members: :undoc-members: @@ -1245,6 +1291,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: OPTION_SERVICE_CREDENTIAL :value: "OPTION_SERVICE_CREDENTIAL" + .. py:attribute:: OPTION_STORAGE_CREDENTIAL + :value: "OPTION_STORAGE_CREDENTIAL" + .. py:attribute:: OPTION_STRING :value: "OPTION_STRING" @@ -1280,6 +1329,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: POLICY_TYPE_COLUMN_MASK :value: "POLICY_TYPE_COLUMN_MASK" + .. py:attribute:: POLICY_TYPE_DENY + :value: "POLICY_TYPE_DENY" + .. py:attribute:: POLICY_TYPE_ROW_FILTER :value: "POLICY_TYPE_ROW_FILTER" @@ -1382,6 +1434,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: CREATE_VOLUME :value: "CREATE_VOLUME" + .. py:attribute:: DELETE + :value: "DELETE" + .. py:attribute:: EXECUTE :value: "EXECUTE" @@ -1391,12 +1446,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: EXTERNAL_USE_SCHEMA :value: "EXTERNAL_USE_SCHEMA" + .. py:attribute:: INSERT + :value: "INSERT" + .. py:attribute:: MANAGE :value: "MANAGE" .. py:attribute:: MANAGE_ALLOWLIST :value: "MANAGE_ALLOWLIST" + .. py:attribute:: MANAGE_GRANTS + :value: "MANAGE_GRANTS" + .. py:attribute:: MODIFY :value: "MODIFY" @@ -1421,6 +1482,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SET_SHARE_PERMISSION :value: "SET_SHARE_PERMISSION" + .. py:attribute:: UPDATE + :value: "UPDATE" + .. 
py:attribute:: USAGE :value: "USAGE" @@ -1445,6 +1509,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: USE_SHARE :value: "USE_SHARE" + .. py:attribute:: USE_VOLUME + :value: "USE_VOLUME" + + .. py:attribute:: VIEW_ADMIN_METADATA + :value: "VIEW_ADMIN_METADATA" + + .. py:attribute:: VIEW_METADATA + :value: "VIEW_METADATA" + + .. py:attribute:: VIEW_OBJECT + :value: "VIEW_OBJECT" + .. py:attribute:: WRITE_FILES :value: "WRITE_FILES" @@ -1627,6 +1703,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: TABLE_FOREIGN_ORACLE :value: "TABLE_FOREIGN_ORACLE" + .. py:attribute:: TABLE_FOREIGN_PALANTIR + :value: "TABLE_FOREIGN_PALANTIR" + .. py:attribute:: TABLE_FOREIGN_POSTGRESQL :value: "TABLE_FOREIGN_POSTGRESQL" @@ -1962,6 +2041,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: UcEncryptedToken + :members: + :undoc-members: + .. autoclass:: UnassignResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst index 3ff0371ca..26b39be0a 100755 --- a/docs/dbdataclasses/compute.rst +++ b/docs/dbdataclasses/compute.rst @@ -51,6 +51,16 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SPOT_WITH_FALLBACK_AZURE :value: "SPOT_WITH_FALLBACK_AZURE" +.. py:class:: BaseEnvironmentType + + If changed, also update estore/namespaces/defaultbaseenvironments/latest.proto + + .. py:attribute:: CPU + :value: "CPU" + + .. py:attribute:: GPU + :value: "GPU" + .. autoclass:: CancelResponse :members: :undoc-members: @@ -224,6 +234,13 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ConfidentialComputeType + + Confidential computing technology for GCP instances. 
Aligns with gcloud's --confidential-compute-type flag and the REST API's confidentialInstanceConfig.confidentialInstanceType field. See: https://cloud.google.com/confidential-computing/confidential-vm/docs/create-a-confidential-vm-instance + + .. py:attribute:: SEV_SNP + :value: "SEV_SNP" + .. py:class:: ContextStatus .. py:attribute:: ERROR @@ -317,6 +334,34 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: DefaultBaseEnvironment + :members: + :undoc-members: + +.. autoclass:: DefaultBaseEnvironmentCache + :members: + :undoc-members: + +.. py:class:: DefaultBaseEnvironmentCacheStatus + + .. py:attribute:: CREATED + :value: "CREATED" + + .. py:attribute:: EXPIRED + :value: "EXPIRED" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: INVALID + :value: "INVALID" + + .. py:attribute:: PENDING + :value: "PENDING" + + .. py:attribute:: REFRESHING + :value: "REFRESHING" + .. autoclass:: DeleteClusterResponse :members: :undoc-members: @@ -465,6 +510,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DECOMMISSION_STARTED :value: "DECOMMISSION_STARTED" + .. py:attribute:: DEFERRED_POLICY_ENFORCEMENT_FAILED + :value: "DEFERRED_POLICY_ENFORCEMENT_FAILED" + + .. py:attribute:: DEFERRED_POLICY_ENFORCEMENT_SCHEDULED + :value: "DEFERRED_POLICY_ENFORCEMENT_SCHEDULED" + .. py:attribute:: DID_NOT_EXPAND_DISK :value: "DID_NOT_EXPAND_DISK" @@ -860,6 +911,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DEFAULT :value: "DEFAULT" +.. autoclass:: ListDefaultBaseEnvironmentsResponse + :members: + :undoc-members: + .. autoclass:: ListGlobalInitScriptsResponse :members: :undoc-members: @@ -912,6 +967,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: MaterializedEnvironment + :members: + :undoc-members: + .. 
autoclass:: MavenLibrary :members: :undoc-members: @@ -956,6 +1015,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: RefreshDefaultBaseEnvironmentsResponse + :members: + :undoc-members: + .. autoclass:: RemoveResponse :members: :undoc-members: @@ -1255,6 +1318,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DOCKER_INVALID_OS_EXCEPTION :value: "DOCKER_INVALID_OS_EXCEPTION" + .. py:attribute:: DRIVER_DNS_RESOLUTION_FAILURE + :value: "DRIVER_DNS_RESOLUTION_FAILURE" + .. py:attribute:: DRIVER_EVICTION :value: "DRIVER_EVICTION" @@ -1483,6 +1549,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: NFS_MOUNT_FAILURE :value: "NFS_MOUNT_FAILURE" + .. py:attribute:: NO_ACTIVATED_K8S + :value: "NO_ACTIVATED_K8S" + + .. py:attribute:: NO_ACTIVATED_K8S_TESTING_TAG + :value: "NO_ACTIVATED_K8S_TESTING_TAG" + .. py:attribute:: NO_MATCHED_K8S :value: "NO_MATCHED_K8S" @@ -1522,6 +1594,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SECRET_RESOLUTION_ERROR :value: "SECRET_RESOLUTION_ERROR" + .. py:attribute:: SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION + :value: "SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION" + .. py:attribute:: SECURITY_DAEMON_REGISTRATION_EXCEPTION :value: "SECURITY_DAEMON_REGISTRATION_EXCEPTION" diff --git a/docs/dbdataclasses/dashboards.rst b/docs/dbdataclasses/dashboards.rst index 4b3139df3..25fa3b6e5 100755 --- a/docs/dbdataclasses/dashboards.rst +++ b/docs/dbdataclasses/dashboards.rst @@ -12,6 +12,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: CancelQueryExecutionResponse + :members: + :undoc-members: + +.. autoclass:: CancelQueryExecutionResponseStatus + :members: + :undoc-members: + .. 
autoclass:: CronSchedule :members: :undoc-members: @@ -25,6 +33,34 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DASHBOARD_VIEW_BASIC :value: "DASHBOARD_VIEW_BASIC" +.. autoclass:: Empty + :members: + :undoc-members: + +.. py:class:: EvaluationStatusType + + .. py:attribute:: DONE + :value: "DONE" + + .. py:attribute:: EVALUATION_CANCELLED + :value: "EVALUATION_CANCELLED" + + .. py:attribute:: EVALUATION_FAILED + :value: "EVALUATION_FAILED" + + .. py:attribute:: EVALUATION_TIMEOUT + :value: "EVALUATION_TIMEOUT" + + .. py:attribute:: NOT_STARTED + :value: "NOT_STARTED" + + .. py:attribute:: RUNNING + :value: "RUNNING" + +.. autoclass:: ExecuteQueryResponse + :members: + :undoc-members: + .. autoclass:: GenieAttachment :members: :undoc-members: @@ -37,6 +73,41 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: GenieEvalAssessment + + .. py:attribute:: BAD + :value: "BAD" + + .. py:attribute:: GOOD + :value: "GOOD" + + .. py:attribute:: NEEDS_REVIEW + :value: "NEEDS_REVIEW" + +.. autoclass:: GenieEvalResponse + :members: + :undoc-members: + +.. py:class:: GenieEvalResponseType + + .. py:attribute:: SQL + :value: "SQL" + + .. py:attribute:: TEXT + :value: "TEXT" + +.. autoclass:: GenieEvalResult + :members: + :undoc-members: + +.. autoclass:: GenieEvalResultDetails + :members: + :undoc-members: + +.. autoclass:: GenieEvalRunResponse + :members: + :undoc-members: + .. autoclass:: GenieFeedback :members: :undoc-members: @@ -66,6 +137,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: GenieListConversationCommentsResponse + :members: + :undoc-members: + .. autoclass:: GenieListConversationMessagesResponse :members: :undoc-members: @@ -74,6 +149,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. 
autoclass:: GenieListEvalResultsResponse + :members: + :undoc-members: + +.. autoclass:: GenieListEvalRunsResponse + :members: + :undoc-members: + +.. autoclass:: GenieListMessageCommentsResponse + :members: + :undoc-members: + .. autoclass:: GenieListSpacesResponse :members: :undoc-members: @@ -82,6 +169,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: GenieMessageComment + :members: + :undoc-members: + .. autoclass:: GenieQueryAttachment :members: :undoc-members: @@ -102,6 +193,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: GetPublishedDashboardEmbeddedResponse + :members: + :undoc-members: + .. autoclass:: GetPublishedDashboardTokenInfoResponse :members: :undoc-members: @@ -238,6 +333,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION :value: "INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION" + .. py:attribute:: INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION + :value: "INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION" + .. py:attribute:: INVALID_CHAT_COMPLETION_JSON_EXCEPTION :value: "INVALID_CHAT_COMPLETION_JSON_EXCEPTION" @@ -362,6 +460,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SUBMITTED :value: "SUBMITTED" +.. autoclass:: PendingStatus + :members: + :undoc-members: + +.. autoclass:: PollQueryStatusResponse + :members: + :undoc-members: + +.. autoclass:: PollQueryStatusResponseData + :members: + :undoc-members: + .. autoclass:: PublishedDashboard :members: :undoc-members: @@ -370,6 +480,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: QueryResponseStatus + :members: + :undoc-members: + +.. py:class:: ResponsePhase + + .. 
py:attribute:: RESPONSE_PHASE_THINKING + :value: "RESPONSE_PHASE_THINKING" + + .. py:attribute:: RESPONSE_PHASE_VERIFYING + :value: "RESPONSE_PHASE_VERIFYING" + .. autoclass:: Result :members: :undoc-members: @@ -386,6 +508,83 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: UNPAUSED :value: "UNPAUSED" +.. py:class:: ScoreReason + + .. py:attribute:: COLUMN_TYPE_DIFFERENCE + :value: "COLUMN_TYPE_DIFFERENCE" + + .. py:attribute:: EMPTY_GOOD_SQL + :value: "EMPTY_GOOD_SQL" + + .. py:attribute:: EMPTY_RESULT + :value: "EMPTY_RESULT" + + .. py:attribute:: LLM_JUDGE_FORMATTING_ERROR + :value: "LLM_JUDGE_FORMATTING_ERROR" + + .. py:attribute:: LLM_JUDGE_INCOMPLETE_OR_PARTIAL_OUTPUT + :value: "LLM_JUDGE_INCOMPLETE_OR_PARTIAL_OUTPUT" + + .. py:attribute:: LLM_JUDGE_INCORRECT_FUNCTION_USAGE + :value: "LLM_JUDGE_INCORRECT_FUNCTION_USAGE" + + .. py:attribute:: LLM_JUDGE_INCORRECT_METRIC_CALCULATION + :value: "LLM_JUDGE_INCORRECT_METRIC_CALCULATION" + + .. py:attribute:: LLM_JUDGE_INCORRECT_TABLE_OR_FIELD_USAGE + :value: "LLM_JUDGE_INCORRECT_TABLE_OR_FIELD_USAGE" + + .. py:attribute:: LLM_JUDGE_INSTRUCTION_COMPLIANCE_OR_MISSING_BUSINESS_LOGIC + :value: "LLM_JUDGE_INSTRUCTION_COMPLIANCE_OR_MISSING_BUSINESS_LOGIC" + + .. py:attribute:: LLM_JUDGE_MISINTERPRETATION_OF_USER_REQUEST + :value: "LLM_JUDGE_MISINTERPRETATION_OF_USER_REQUEST" + + .. py:attribute:: LLM_JUDGE_MISSING_JOIN + :value: "LLM_JUDGE_MISSING_JOIN" + + .. py:attribute:: LLM_JUDGE_MISSING_OR_INCORRECT_AGGREGATION + :value: "LLM_JUDGE_MISSING_OR_INCORRECT_AGGREGATION" + + .. py:attribute:: LLM_JUDGE_MISSING_OR_INCORRECT_FILTER + :value: "LLM_JUDGE_MISSING_OR_INCORRECT_FILTER" + + .. py:attribute:: LLM_JUDGE_MISSING_OR_INCORRECT_JOIN + :value: "LLM_JUDGE_MISSING_OR_INCORRECT_JOIN" + + .. py:attribute:: LLM_JUDGE_OTHER + :value: "LLM_JUDGE_OTHER" + + .. py:attribute:: LLM_JUDGE_SEMANTIC_ERROR + :value: "LLM_JUDGE_SEMANTIC_ERROR" + + .. 
py:attribute:: LLM_JUDGE_SYNTAX_ERROR + :value: "LLM_JUDGE_SYNTAX_ERROR" + + .. py:attribute:: LLM_JUDGE_WRONG_AGGREGATION + :value: "LLM_JUDGE_WRONG_AGGREGATION" + + .. py:attribute:: LLM_JUDGE_WRONG_COLUMNS + :value: "LLM_JUDGE_WRONG_COLUMNS" + + .. py:attribute:: LLM_JUDGE_WRONG_FILTER + :value: "LLM_JUDGE_WRONG_FILTER" + + .. py:attribute:: RESULT_EXTRA_COLUMNS + :value: "RESULT_EXTRA_COLUMNS" + + .. py:attribute:: RESULT_EXTRA_ROWS + :value: "RESULT_EXTRA_ROWS" + + .. py:attribute:: RESULT_MISSING_COLUMNS + :value: "RESULT_MISSING_COLUMNS" + + .. py:attribute:: RESULT_MISSING_ROWS + :value: "RESULT_MISSING_ROWS" + + .. py:attribute:: SINGLE_CELL_DIFFERENCE + :value: "SINGLE_CELL_DIFFERENCE" + .. autoclass:: Subscriber :members: :undoc-members: @@ -402,6 +601,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: SuccessStatus + :members: + :undoc-members: + .. autoclass:: TextAttachment :members: :undoc-members: @@ -420,3 +623,23 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. autoclass:: UnpublishDashboardResponse :members: :undoc-members: + +.. autoclass:: VerificationMetadata + :members: + :undoc-members: + +.. py:class:: VerificationSection + + Verification workflow section - indicates which stage of verification this attachment belongs to These sections are used for grouping and ordering attachments in the frontend UI + + .. py:attribute:: VERIFICATION_SECTION_FINAL_DECISION + :value: "VERIFICATION_SECTION_FINAL_DECISION" + + .. py:attribute:: VERIFICATION_SECTION_PROPOSED_IMPROVEMENT + :value: "VERIFICATION_SECTION_PROPOSED_IMPROVEMENT" + + .. py:attribute:: VERIFICATION_SECTION_SQL_EXAMPLES_VALIDATION + :value: "VERIFICATION_SECTION_SQL_EXAMPLES_VALIDATION" + + .. 
py:attribute:: VERIFICATION_SECTION_VERIFICATION_QUERIES + :value: "VERIFICATION_SECTION_VERIFICATION_QUERIES" diff --git a/docs/dbdataclasses/dataclassification.rst b/docs/dbdataclasses/dataclassification.rst new file mode 100644 index 000000000..c6291a9d9 --- /dev/null +++ b/docs/dbdataclasses/dataclassification.rst @@ -0,0 +1,40 @@ +Data Classify +============= + +These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.dataclassification`` module. + +.. py:currentmodule:: databricks.sdk.service.dataclassification +.. autoclass:: AutoTaggingConfig + :members: + :undoc-members: + +.. py:class:: AutoTaggingConfigAutoTaggingMode + + Auto-tagging mode. + + .. py:attribute:: AUTO_TAGGING_DISABLED + :value: "AUTO_TAGGING_DISABLED" + + .. py:attribute:: AUTO_TAGGING_ENABLED + :value: "AUTO_TAGGING_ENABLED" + +.. autoclass:: CatalogConfig + :members: + :undoc-members: + +.. autoclass:: CatalogConfigSchemaNames + :members: + :undoc-members: + +.. py:class:: HostType + + Enum representing the type of Databricks host. + + .. py:attribute:: ACCOUNTS + :value: "ACCOUNTS" + + .. py:attribute:: WORKSPACE + :value: "WORKSPACE" + + .. py:attribute:: UNIFIED + :value: "UNIFIED" diff --git a/docs/dbdataclasses/dataquality.rst b/docs/dbdataclasses/dataquality.rst index 7c572b4c3..ad6fa710b 100755 --- a/docs/dbdataclasses/dataquality.rst +++ b/docs/dbdataclasses/dataquality.rst @@ -42,6 +42,16 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: AnomalyDetectionJobType + + Anomaly Detection job type. + + .. py:attribute:: ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN + :value: "ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN" + + .. py:attribute:: ANOMALY_DETECTION_JOB_TYPE_NORMAL + :value: "ANOMALY_DETECTION_JOB_TYPE_NORMAL" + .. 
autoclass:: CancelRefreshResponse :members: :undoc-members: @@ -147,6 +157,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: PercentNullValidityCheck + :members: + :undoc-members: + +.. autoclass:: RangeValidityCheck + :members: + :undoc-members: + .. autoclass:: Refresh :members: :undoc-members: @@ -196,3 +214,11 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. autoclass:: TimeSeriesConfig :members: :undoc-members: + +.. autoclass:: UniquenessValidityCheck + :members: + :undoc-members: + +.. autoclass:: ValidityCheckConfiguration + :members: + :undoc-members: diff --git a/docs/dbdataclasses/environments.rst b/docs/dbdataclasses/environments.rst new file mode 100644 index 000000000..5d69f4d58 --- /dev/null +++ b/docs/dbdataclasses/environments.rst @@ -0,0 +1,325 @@ +Environments +============ + +These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.environments`` module. + +.. py:currentmodule:: databricks.sdk.service.environments +.. py:class:: BaseEnvironmentType + + If changed, also update estore/namespaces/defaultbaseenvironments/latest.proto + + .. py:attribute:: CPU + :value: "CPU" + + .. py:attribute:: GPU + :value: "GPU" + +.. autoclass:: DatabricksServiceExceptionWithDetailsProto + :members: + :undoc-members: + +.. autoclass:: DefaultWorkspaceBaseEnvironment + :members: + :undoc-members: + +.. py:class:: ErrorCode + + Error codes returned by Databricks APIs to indicate specific failure conditions. + + .. py:attribute:: ABORTED + :value: "ABORTED" + + .. py:attribute:: ALREADY_EXISTS + :value: "ALREADY_EXISTS" + + .. py:attribute:: BAD_REQUEST + :value: "BAD_REQUEST" + + .. py:attribute:: CANCELLED + :value: "CANCELLED" + + .. py:attribute:: CATALOG_ALREADY_EXISTS + :value: "CATALOG_ALREADY_EXISTS" + + ..
py:attribute:: CATALOG_DOES_NOT_EXIST + :value: "CATALOG_DOES_NOT_EXIST" + + .. py:attribute:: CATALOG_NOT_EMPTY + :value: "CATALOG_NOT_EMPTY" + + .. py:attribute:: COULD_NOT_ACQUIRE_LOCK + :value: "COULD_NOT_ACQUIRE_LOCK" + + .. py:attribute:: CUSTOMER_UNAUTHORIZED + :value: "CUSTOMER_UNAUTHORIZED" + + .. py:attribute:: DAC_ALREADY_EXISTS + :value: "DAC_ALREADY_EXISTS" + + .. py:attribute:: DAC_DOES_NOT_EXIST + :value: "DAC_DOES_NOT_EXIST" + + .. py:attribute:: DATA_LOSS + :value: "DATA_LOSS" + + .. py:attribute:: DEADLINE_EXCEEDED + :value: "DEADLINE_EXCEEDED" + + .. py:attribute:: DEPLOYMENT_TIMEOUT + :value: "DEPLOYMENT_TIMEOUT" + + .. py:attribute:: DIRECTORY_NOT_EMPTY + :value: "DIRECTORY_NOT_EMPTY" + + .. py:attribute:: DIRECTORY_PROTECTED + :value: "DIRECTORY_PROTECTED" + + .. py:attribute:: DRY_RUN_FAILED + :value: "DRY_RUN_FAILED" + + .. py:attribute:: ENDPOINT_NOT_FOUND + :value: "ENDPOINT_NOT_FOUND" + + .. py:attribute:: EXTERNAL_LOCATION_ALREADY_EXISTS + :value: "EXTERNAL_LOCATION_ALREADY_EXISTS" + + .. py:attribute:: EXTERNAL_LOCATION_DOES_NOT_EXIST + :value: "EXTERNAL_LOCATION_DOES_NOT_EXIST" + + .. py:attribute:: FEATURE_DISABLED + :value: "FEATURE_DISABLED" + + .. py:attribute:: GIT_CONFLICT + :value: "GIT_CONFLICT" + + .. py:attribute:: GIT_REMOTE_ERROR + :value: "GIT_REMOTE_ERROR" + + .. py:attribute:: GIT_SENSITIVE_TOKEN_DETECTED + :value: "GIT_SENSITIVE_TOKEN_DETECTED" + + .. py:attribute:: GIT_UNKNOWN_REF + :value: "GIT_UNKNOWN_REF" + + .. py:attribute:: GIT_URL_NOT_ON_ALLOW_LIST + :value: "GIT_URL_NOT_ON_ALLOW_LIST" + + .. py:attribute:: INSECURE_PARTNER_RESPONSE + :value: "INSECURE_PARTNER_RESPONSE" + + .. py:attribute:: INTERNAL_ERROR + :value: "INTERNAL_ERROR" + + .. py:attribute:: INVALID_PARAMETER_VALUE + :value: "INVALID_PARAMETER_VALUE" + + .. py:attribute:: INVALID_STATE + :value: "INVALID_STATE" + + .. py:attribute:: INVALID_STATE_TRANSITION + :value: "INVALID_STATE_TRANSITION" + + .. 
py:attribute:: IO_ERROR + :value: "IO_ERROR" + + .. py:attribute:: IPYNB_FILE_IN_REPO + :value: "IPYNB_FILE_IN_REPO" + + .. py:attribute:: MALFORMED_PARTNER_RESPONSE + :value: "MALFORMED_PARTNER_RESPONSE" + + .. py:attribute:: MALFORMED_REQUEST + :value: "MALFORMED_REQUEST" + + .. py:attribute:: MANAGED_RESOURCE_GROUP_DOES_NOT_EXIST + :value: "MANAGED_RESOURCE_GROUP_DOES_NOT_EXIST" + + .. py:attribute:: MAX_BLOCK_SIZE_EXCEEDED + :value: "MAX_BLOCK_SIZE_EXCEEDED" + + .. py:attribute:: MAX_CHILD_NODE_SIZE_EXCEEDED + :value: "MAX_CHILD_NODE_SIZE_EXCEEDED" + + .. py:attribute:: MAX_LIST_SIZE_EXCEEDED + :value: "MAX_LIST_SIZE_EXCEEDED" + + .. py:attribute:: MAX_NOTEBOOK_SIZE_EXCEEDED + :value: "MAX_NOTEBOOK_SIZE_EXCEEDED" + + .. py:attribute:: MAX_READ_SIZE_EXCEEDED + :value: "MAX_READ_SIZE_EXCEEDED" + + .. py:attribute:: METASTORE_ALREADY_EXISTS + :value: "METASTORE_ALREADY_EXISTS" + + .. py:attribute:: METASTORE_DOES_NOT_EXIST + :value: "METASTORE_DOES_NOT_EXIST" + + .. py:attribute:: METASTORE_NOT_EMPTY + :value: "METASTORE_NOT_EMPTY" + + .. py:attribute:: NOT_FOUND + :value: "NOT_FOUND" + + .. py:attribute:: NOT_IMPLEMENTED + :value: "NOT_IMPLEMENTED" + + .. py:attribute:: PARTIAL_DELETE + :value: "PARTIAL_DELETE" + + .. py:attribute:: PERMISSION_DENIED + :value: "PERMISSION_DENIED" + + .. py:attribute:: PERMISSION_NOT_PROPAGATED + :value: "PERMISSION_NOT_PROPAGATED" + + .. py:attribute:: PRINCIPAL_DOES_NOT_EXIST + :value: "PRINCIPAL_DOES_NOT_EXIST" + + .. py:attribute:: PROJECTS_OPERATION_TIMEOUT + :value: "PROJECTS_OPERATION_TIMEOUT" + + .. py:attribute:: PROVIDER_ALREADY_EXISTS + :value: "PROVIDER_ALREADY_EXISTS" + + .. py:attribute:: PROVIDER_DOES_NOT_EXIST + :value: "PROVIDER_DOES_NOT_EXIST" + + .. py:attribute:: PROVIDER_SHARE_NOT_ACCESSIBLE + :value: "PROVIDER_SHARE_NOT_ACCESSIBLE" + + .. py:attribute:: QUOTA_EXCEEDED + :value: "QUOTA_EXCEEDED" + + .. py:attribute:: RECIPIENT_ALREADY_EXISTS + :value: "RECIPIENT_ALREADY_EXISTS" + + .. 
py:attribute:: RECIPIENT_DOES_NOT_EXIST + :value: "RECIPIENT_DOES_NOT_EXIST" + + .. py:attribute:: REQUEST_LIMIT_EXCEEDED + :value: "REQUEST_LIMIT_EXCEEDED" + + .. py:attribute:: RESOURCE_ALREADY_EXISTS + :value: "RESOURCE_ALREADY_EXISTS" + + .. py:attribute:: RESOURCE_CONFLICT + :value: "RESOURCE_CONFLICT" + + .. py:attribute:: RESOURCE_DOES_NOT_EXIST + :value: "RESOURCE_DOES_NOT_EXIST" + + .. py:attribute:: RESOURCE_EXHAUSTED + :value: "RESOURCE_EXHAUSTED" + + .. py:attribute:: RESOURCE_LIMIT_EXCEEDED + :value: "RESOURCE_LIMIT_EXCEEDED" + + .. py:attribute:: SCHEMA_ALREADY_EXISTS + :value: "SCHEMA_ALREADY_EXISTS" + + .. py:attribute:: SCHEMA_DOES_NOT_EXIST + :value: "SCHEMA_DOES_NOT_EXIST" + + .. py:attribute:: SCHEMA_NOT_EMPTY + :value: "SCHEMA_NOT_EMPTY" + + .. py:attribute:: SEARCH_QUERY_TOO_LONG + :value: "SEARCH_QUERY_TOO_LONG" + + .. py:attribute:: SEARCH_QUERY_TOO_SHORT + :value: "SEARCH_QUERY_TOO_SHORT" + + .. py:attribute:: SERVICE_UNDER_MAINTENANCE + :value: "SERVICE_UNDER_MAINTENANCE" + + .. py:attribute:: SHARE_ALREADY_EXISTS + :value: "SHARE_ALREADY_EXISTS" + + .. py:attribute:: SHARE_DOES_NOT_EXIST + :value: "SHARE_DOES_NOT_EXIST" + + .. py:attribute:: STORAGE_CREDENTIAL_ALREADY_EXISTS + :value: "STORAGE_CREDENTIAL_ALREADY_EXISTS" + + .. py:attribute:: STORAGE_CREDENTIAL_DOES_NOT_EXIST + :value: "STORAGE_CREDENTIAL_DOES_NOT_EXIST" + + .. py:attribute:: TABLE_ALREADY_EXISTS + :value: "TABLE_ALREADY_EXISTS" + + .. py:attribute:: TABLE_DOES_NOT_EXIST + :value: "TABLE_DOES_NOT_EXIST" + + .. py:attribute:: TEMPORARILY_UNAVAILABLE + :value: "TEMPORARILY_UNAVAILABLE" + + .. py:attribute:: UNAUTHENTICATED + :value: "UNAUTHENTICATED" + + .. py:attribute:: UNAVAILABLE + :value: "UNAVAILABLE" + + .. py:attribute:: UNKNOWN + :value: "UNKNOWN" + + .. py:attribute:: UNPARSEABLE_HTTP_ERROR + :value: "UNPARSEABLE_HTTP_ERROR" + + .. py:attribute:: WORKSPACE_TEMPORARILY_UNAVAILABLE + :value: "WORKSPACE_TEMPORARILY_UNAVAILABLE" + +.. 
py:class:: HostType + + Enum representing the type of Databricks host. + + .. py:attribute:: ACCOUNTS + :value: "ACCOUNTS" + + .. py:attribute:: WORKSPACE + :value: "WORKSPACE" + + .. py:attribute:: UNIFIED + :value: "UNIFIED" + +.. autoclass:: ListWorkspaceBaseEnvironmentsResponse + :members: + :undoc-members: + +.. autoclass:: Operation + :members: + :undoc-members: + +.. autoclass:: WorkspaceBaseEnvironment + :members: + :undoc-members: + +.. autoclass:: WorkspaceBaseEnvironmentCache + :members: + :undoc-members: + +.. py:class:: WorkspaceBaseEnvironmentCacheStatus + + Status of the environment materialization. + + .. py:attribute:: CREATED + :value: "CREATED" + + .. py:attribute:: EXPIRED + :value: "EXPIRED" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: INVALID + :value: "INVALID" + + .. py:attribute:: PENDING + :value: "PENDING" + + .. py:attribute:: REFRESHING + :value: "REFRESHING" + +.. autoclass:: WorkspaceBaseEnvironmentOperationMetadata + :members: + :undoc-members: diff --git a/docs/dbdataclasses/iamv2.rst b/docs/dbdataclasses/iamv2.rst index 446848b05..2c95592cf 100755 --- a/docs/dbdataclasses/iamv2.rst +++ b/docs/dbdataclasses/iamv2.rst @@ -4,10 +4,39 @@ Identity and Access Management These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.iamv2`` module. .. py:currentmodule:: databricks.sdk.service.iamv2 +.. autoclass:: AccountAccessIdentityRule + :members: + :undoc-members: + +.. py:class:: AccountAccessRuleAction + + The action type for an account access identity rule (currently DENY only). + + .. py:attribute:: DENY + :value: "DENY" + +.. autoclass:: DirectGroupMember + :members: + :undoc-members: + .. autoclass:: Group :members: :undoc-members: +.. autoclass:: GroupMembership + :members: + :undoc-members: + +.. py:class:: GroupMembershipSource + + The source of the group membership (internal or from identity provider). + + .. 
py:attribute:: IDENTITY_PROVIDER + :value: "IDENTITY_PROVIDER" + + .. py:attribute:: INTERNAL + :value: "INTERNAL" + .. py:class:: HostType Enum representing the type of Databricks host. @@ -21,6 +50,38 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: UNIFIED :value: "UNIFIED" +.. autoclass:: ListAccountAccessIdentityRulesResponse + :members: + :undoc-members: + +.. autoclass:: ListDirectGroupMembersResponse + :members: + :undoc-members: + +.. autoclass:: ListGroupsResponse + :members: + :undoc-members: + +.. autoclass:: ListServicePrincipalsResponse + :members: + :undoc-members: + +.. autoclass:: ListTransitiveParentGroupsResponse + :members: + :undoc-members: + +.. autoclass:: ListUsersResponse + :members: + :undoc-members: + +.. autoclass:: ListWorkspaceAccessDetailsResponse + :members: + :undoc-members: + +.. autoclass:: ListWorkspaceAssignmentDetailsResponse + :members: + :undoc-members: + .. py:class:: PrincipalType The type of the principal (user/sp/group). @@ -60,6 +121,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: INACTIVE :value: "INACTIVE" +.. autoclass:: TransitiveParentGroup + :members: + :undoc-members: + .. autoclass:: User :members: :undoc-members: @@ -92,6 +157,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: FULL :value: "FULL" +.. autoclass:: WorkspaceAssignmentDetail + :members: + :undoc-members: + .. py:class:: WorkspacePermission The type of permission a principal has to a workspace (admin/user). 
diff --git a/docs/dbdataclasses/index.rst b/docs/dbdataclasses/index.rst index 3b2d17da3..0bf3b115e 100644 --- a/docs/dbdataclasses/index.rst +++ b/docs/dbdataclasses/index.rst @@ -13,11 +13,14 @@ Dataclasses compute dashboards database + dataclassification dataquality + environments files iam iamv2 jobs + knowledgeassistants marketplace ml networking @@ -31,6 +34,7 @@ Dataclasses settingsv2 sharing sql + supervisoragents tags vectorsearch workspace \ No newline at end of file diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst index 81b7f00f3..33ee4e2c7 100755 --- a/docs/dbdataclasses/jobs.rst +++ b/docs/dbdataclasses/jobs.rst @@ -4,6 +4,26 @@ Jobs These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.jobs`` module. .. py:currentmodule:: databricks.sdk.service.jobs +.. py:class:: AlertEvaluationState + + Same alert evaluation state as in redash-v2/api/proto/alertsv2/alerts.proto + + .. py:attribute:: ERROR + :value: "ERROR" + + .. py:attribute:: OK + :value: "OK" + + .. py:attribute:: TRIGGERED + :value: "TRIGGERED" + + .. py:attribute:: UNKNOWN + :value: "UNKNOWN" + +.. autoclass:: AlertTaskOutput + :members: + :undoc-members: + .. py:class:: AuthenticationMethod .. py:attribute:: OAUTH @@ -875,6 +895,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: UNKNOWN :value: "UNKNOWN" +.. autoclass:: SqlConditionConfiguration + :members: + :undoc-members: + +.. autoclass:: SqlConditionRunInfoDetails + :members: + :undoc-members: + +.. autoclass:: SqlConditionState + :members: + :undoc-members: + .. 
autoclass:: SqlDashboardOutput :members: :undoc-members: diff --git a/docs/dbdataclasses/knowledgeassistants.rst b/docs/dbdataclasses/knowledgeassistants.rst new file mode 100644 index 000000000..6e4cfb18e --- /dev/null +++ b/docs/dbdataclasses/knowledgeassistants.rst @@ -0,0 +1,68 @@ +Data Classify +============= + +These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.knowledgeassistants`` module. + +.. py:currentmodule:: databricks.sdk.service.knowledgeassistants +.. autoclass:: FileTableSpec + :members: + :undoc-members: + +.. autoclass:: FilesSpec + :members: + :undoc-members: + +.. py:class:: HostType + + Enum representing the type of Databricks host. + + .. py:attribute:: ACCOUNTS + :value: "ACCOUNTS" + + .. py:attribute:: WORKSPACE + :value: "WORKSPACE" + + .. py:attribute:: UNIFIED + :value: "UNIFIED" + +.. autoclass:: IndexSpec + :members: + :undoc-members: + +.. autoclass:: KnowledgeAssistant + :members: + :undoc-members: + +.. py:class:: KnowledgeAssistantState + + .. py:attribute:: ACTIVE + :value: "ACTIVE" + + .. py:attribute:: CREATING + :value: "CREATING" + + .. py:attribute:: FAILED + :value: "FAILED" + +.. autoclass:: KnowledgeSource + :members: + :undoc-members: + +.. py:class:: KnowledgeSourceState + + .. py:attribute:: FAILED_UPDATE + :value: "FAILED_UPDATE" + + .. py:attribute:: UPDATED + :value: "UPDATED" + + .. py:attribute:: UPDATING + :value: "UPDATING" + +.. autoclass:: ListKnowledgeAssistantsResponse + :members: + :undoc-members: + +.. autoclass:: ListKnowledgeSourcesResponse + :members: + :undoc-members: diff --git a/docs/dbdataclasses/marketplace.rst b/docs/dbdataclasses/marketplace.rst index 51875abea..c37bd1d06 100755 --- a/docs/dbdataclasses/marketplace.rst +++ b/docs/dbdataclasses/marketplace.rst @@ -86,6 +86,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: MEDIA :value: "MEDIA" + .. 
py:attribute:: OPEN_SOURCE + :value: "OPEN_SOURCE" + .. py:attribute:: OTHER :value: "OTHER" diff --git a/docs/dbdataclasses/ml.rst b/docs/dbdataclasses/ml.rst index a5e5b6f42..d21c13c40 100755 --- a/docs/dbdataclasses/ml.rst +++ b/docs/dbdataclasses/ml.rst @@ -389,6 +389,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: GetLoggedModelsRequestResponse + :members: + :undoc-members: + .. autoclass:: GetMetricHistoryResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst index 88dcc3c06..1264b713b 100755 --- a/docs/dbdataclasses/pipelines.rst +++ b/docs/dbdataclasses/pipelines.rst @@ -4,6 +4,10 @@ Delta Live Tables These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.pipelines`` module. .. py:currentmodule:: databricks.sdk.service.pipelines +.. autoclass:: ApplyEnvironmentRequestResponse + :members: + :undoc-members: + .. autoclass:: AutoFullRefreshPolicy :members: :undoc-members: @@ -23,6 +27,20 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ConnectorOptions + :members: + :undoc-members: + +.. py:class:: ConnectorType + + For certain database sources LakeFlow Connect offers both query based and cdc ingestion, ConnectorType can bse used to convey the type of ingestion. If connection_name is provided for database sources, we default to Query Based ingestion + + .. py:attribute:: CDC + :value: "CDC" + + .. py:attribute:: QUERY_BASED + :value: "QUERY_BASED" + .. autoclass:: CreatePipelineResponse :members: :undoc-members: @@ -35,6 +53,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: DataStagingOptions + :members: + :undoc-members: + .. py:class:: DayOfWeek Days of week in which the window is allowed to happen. 
If not specified all days of the week will be used. @@ -99,6 +121,59 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: FileFilter + :members: + :undoc-members: + +.. autoclass:: FileIngestionOptions + :members: + :undoc-members: + +.. py:class:: FileIngestionOptionsFileFormat + + .. py:attribute:: AVRO + :value: "AVRO" + + .. py:attribute:: BINARYFILE + :value: "BINARYFILE" + + .. py:attribute:: CSV + :value: "CSV" + + .. py:attribute:: EXCEL + :value: "EXCEL" + + .. py:attribute:: JSON + :value: "JSON" + + .. py:attribute:: ORC + :value: "ORC" + + .. py:attribute:: PARQUET + :value: "PARQUET" + + .. py:attribute:: XML + :value: "XML" + +.. py:class:: FileIngestionOptionsSchemaEvolutionMode + + Based on https://docs.databricks.com/aws/en/ingestion/cloud-object-storage/auto-loader/schema#how-does-auto-loader-schema-evolution-work + + .. py:attribute:: ADD_NEW_COLUMNS + :value: "ADD_NEW_COLUMNS" + + .. py:attribute:: ADD_NEW_COLUMNS_WITH_TYPE_WIDENING + :value: "ADD_NEW_COLUMNS_WITH_TYPE_WIDENING" + + .. py:attribute:: FAIL_ON_NEW_COLUMNS + :value: "FAIL_ON_NEW_COLUMNS" + + .. py:attribute:: NONE + :value: "NONE" + + .. py:attribute:: RESCUE + :value: "RESCUE" + .. autoclass:: FileLibrary :members: :undoc-members: @@ -129,6 +204,25 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: GoogleAdsOptions + :members: + :undoc-members: + +.. autoclass:: GoogleDriveOptions + :members: + :undoc-members: + +.. py:class:: GoogleDriveOptionsGoogleDriveEntityType + + .. py:attribute:: FILE + :value: "FILE" + + .. py:attribute:: FILE_METADATA + :value: "FILE_METADATA" + + .. py:attribute:: PERMISSION + :value: "PERMISSION" + .. py:class:: HostType Enum representing the type of Databricks host. @@ -154,6 +248,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. 
autoclass:: IngestionPipelineDefinitionConfluenceOptions + :members: + :undoc-members: + +.. autoclass:: IngestionPipelineDefinitionJiraOptions + :members: + :undoc-members: + .. autoclass:: IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig :members: :undoc-members: @@ -168,9 +270,24 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: IngestionSourceType + .. py:attribute:: ADOBE_CAMPAIGNS + :value: "ADOBE_CAMPAIGNS" + + .. py:attribute:: AKAMAI_WAF + :value: "AKAMAI_WAF" + .. py:attribute:: BIGQUERY :value: "BIGQUERY" + .. py:attribute:: BING_ADS + :value: "BING_ADS" + + .. py:attribute:: CONFLUENCE + :value: "CONFLUENCE" + + .. py:attribute:: CROWDSTRIKE_EVENT_STREAM + :value: "CROWDSTRIKE_EVENT_STREAM" + .. py:attribute:: DYNAMICS365 :value: "DYNAMICS365" @@ -180,39 +297,123 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: GA4_RAW_DATA :value: "GA4_RAW_DATA" + .. py:attribute:: GITHUB + :value: "GITHUB" + + .. py:attribute:: GOOGLE_ADS + :value: "GOOGLE_ADS" + + .. py:attribute:: GOOGLE_SEARCH_CONSOLE + :value: "GOOGLE_SEARCH_CONSOLE" + + .. py:attribute:: GUIDEWIRE + :value: "GUIDEWIRE" + + .. py:attribute:: HUBSPOT + :value: "HUBSPOT" + + .. py:attribute:: LINKEDIN_ADS + :value: "LINKEDIN_ADS" + + .. py:attribute:: M365_AUDIT_LOGS + :value: "M365_AUDIT_LOGS" + .. py:attribute:: MANAGED_POSTGRESQL :value: "MANAGED_POSTGRESQL" + .. py:attribute:: META_MARKETING + :value: "META_MARKETING" + + .. py:attribute:: MICROSOFT_TEAMS + :value: "MICROSOFT_TEAMS" + .. py:attribute:: MYSQL :value: "MYSQL" .. py:attribute:: NETSUITE :value: "NETSUITE" + .. py:attribute:: OKTA_SYSTEM_LOGS + :value: "OKTA_SYSTEM_LOGS" + + .. py:attribute:: ONE_PASSWORD_EVENT_LOGS + :value: "ONE_PASSWORD_EVENT_LOGS" + .. py:attribute:: ORACLE :value: "ORACLE" + .. py:attribute:: OUTLOOK + :value: "OUTLOOK" + + .. py:attribute:: PINTEREST_ADS + :value: "PINTEREST_ADS" + .. 
py:attribute:: POSTGRESQL :value: "POSTGRESQL" + .. py:attribute:: PROOFPOINT_SIEM + :value: "PROOFPOINT_SIEM" + + .. py:attribute:: REDDIT_ADS + :value: "REDDIT_ADS" + + .. py:attribute:: REDSHIFT + :value: "REDSHIFT" + .. py:attribute:: SALESFORCE :value: "SALESFORCE" + .. py:attribute:: SALESFORCE_MARKETING_CLOUD + :value: "SALESFORCE_MARKETING_CLOUD" + .. py:attribute:: SERVICENOW :value: "SERVICENOW" .. py:attribute:: SHAREPOINT :value: "SHAREPOINT" + .. py:attribute:: SLACK_AUDIT_LOGS + :value: "SLACK_AUDIT_LOGS" + + .. py:attribute:: SMARTSHEET + :value: "SMARTSHEET" + + .. py:attribute:: SQLDW + :value: "SQLDW" + .. py:attribute:: SQLSERVER :value: "SQLSERVER" .. py:attribute:: TERADATA :value: "TERADATA" + .. py:attribute:: TIKTOK_ADS + :value: "TIKTOK_ADS" + + .. py:attribute:: VEEVA + :value: "VEEVA" + + .. py:attribute:: VEEVA_VAULT + :value: "VEEVA_VAULT" + + .. py:attribute:: WIZ_AUDIT_LOGS + :value: "WIZ_AUDIT_LOGS" + + .. py:attribute:: WORKDAY_ACTIVITY_LOGGING + :value: "WORKDAY_ACTIVITY_LOGGING" + + .. py:attribute:: WORKDAY_HCM + :value: "WORKDAY_HCM" + .. py:attribute:: WORKDAY_RAAS :value: "WORKDAY_RAAS" + .. py:attribute:: X_ADS + :value: "X_ADS" + + .. py:attribute:: ZENDESK + :value: "ZENDESK" + .. autoclass:: ListPipelineEventsResponse :members: :undoc-members: @@ -258,6 +459,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: OutlookOptions + :members: + :undoc-members: + .. autoclass:: PathPattern :members: :undoc-members: @@ -415,6 +620,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: RestorePipelineRequestResponse + :members: + :undoc-members: + .. autoclass:: RewindDatasetSpec :members: :undoc-members: @@ -439,6 +648,24 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. 
autoclass:: SharepointOptions + :members: + :undoc-members: + +.. py:class:: SharepointOptionsSharepointEntityType + + .. py:attribute:: FILE + :value: "FILE" + + .. py:attribute:: FILE_METADATA + :value: "FILE_METADATA" + + .. py:attribute:: LIST + :value: "LIST" + + .. py:attribute:: PERMISSION + :value: "PERMISSION" + .. autoclass:: SourceCatalogConfig :members: :undoc-members: @@ -505,6 +732,48 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SCD_TYPE_2 :value: "SCD_TYPE_2" +.. autoclass:: TikTokAdsOptions + :members: + :undoc-members: + +.. py:class:: TikTokAdsOptionsTikTokDataLevel + + Data level for TikTok Ads report aggregation. + + .. py:attribute:: AUCTION_AD + :value: "AUCTION_AD" + + .. py:attribute:: AUCTION_ADGROUP + :value: "AUCTION_ADGROUP" + + .. py:attribute:: AUCTION_ADVERTISER + :value: "AUCTION_ADVERTISER" + + .. py:attribute:: AUCTION_CAMPAIGN + :value: "AUCTION_CAMPAIGN" + +.. py:class:: TikTokAdsOptionsTikTokReportType + + Report type for TikTok Ads API. + + .. py:attribute:: AUDIENCE + :value: "AUDIENCE" + + .. py:attribute:: BASIC + :value: "BASIC" + + .. py:attribute:: BUSINESS_CENTER + :value: "BUSINESS_CENTER" + + .. py:attribute:: DSA + :value: "DSA" + + .. py:attribute:: GMV_MAX + :value: "GMV_MAX" + + .. py:attribute:: PLAYABLE_AD + :value: "PLAYABLE_AD" + .. autoclass:: Truncation :members: :undoc-members: @@ -579,6 +848,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: WAITING_FOR_RESOURCES :value: "WAITING_FOR_RESOURCES" +.. py:class:: UpdateMode + + .. py:attribute:: CONTINUOUS + :value: "CONTINUOUS" + + .. py:attribute:: DEFAULT + :value: "DEFAULT" + .. 
autoclass:: UpdateStateInfo :members: :undoc-members: diff --git a/docs/dbdataclasses/postgres.rst b/docs/dbdataclasses/postgres.rst index 8713169d7..b6705eced 100755 --- a/docs/dbdataclasses/postgres.rst +++ b/docs/dbdataclasses/postgres.rst @@ -39,6 +39,36 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: RESETTING :value: "RESETTING" +.. autoclass:: Catalog + :members: + :undoc-members: + +.. autoclass:: ComputeInstance + :members: + :undoc-members: + +.. py:class:: ComputeInstanceComputeState + + .. py:attribute:: ACTIVE + :value: "ACTIVE" + + .. py:attribute:: IDLE + :value: "IDLE" + + .. py:attribute:: INIT + :value: "INIT" + +.. py:class:: ComputeInstanceComputeType + + .. py:attribute:: HOT_STANDBY + :value: "HOT_STANDBY" + + .. py:attribute:: READ_ONLY + :value: "READ_ONLY" + + .. py:attribute:: READ_WRITE + :value: "READ_WRITE" + .. autoclass:: Database :members: :undoc-members: @@ -63,6 +93,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: DeltaTableSyncInfo + :members: + :undoc-members: + +.. autoclass:: DisableForwardEtlResponse + :members: + :undoc-members: + .. autoclass:: Endpoint :members: :undoc-members: @@ -368,6 +406,30 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: WORKSPACE_TEMPORARILY_UNAVAILABLE :value: "WORKSPACE_TEMPORARILY_UNAVAILABLE" +.. autoclass:: ForwardEtlConfig + :members: + :undoc-members: + +.. autoclass:: ForwardEtlDatabase + :members: + :undoc-members: + +.. autoclass:: ForwardEtlMetadata + :members: + :undoc-members: + +.. autoclass:: ForwardEtlSchema + :members: + :undoc-members: + +.. autoclass:: ForwardEtlStatus + :members: + :undoc-members: + +.. autoclass:: ForwardEtlTableMapping + :members: + :undoc-members: + .. py:class:: HostType Enum representing the type of Databricks host. 
@@ -389,6 +451,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ListComputeInstancesResponse + :members: + :undoc-members: + .. autoclass:: ListDatabasesResponse :members: :undoc-members: @@ -405,6 +471,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: NewPipelineSpec + :members: + :undoc-members: + .. autoclass:: Operation :members: :undoc-members: @@ -433,6 +503,39 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ProvisioningInfoState + + .. py:attribute:: ACTIVE + :value: "ACTIVE" + + .. py:attribute:: DEGRADED + :value: "DEGRADED" + + .. py:attribute:: DELETING + :value: "DELETING" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: PROVISIONING + :value: "PROVISIONING" + + .. py:attribute:: UPDATING + :value: "UPDATING" + +.. py:class:: ProvisioningPhase + + Copied from database_table_statuses.proto to decouple SDK packages. + + .. py:attribute:: PROVISIONING_PHASE_INDEX_SCAN + :value: "PROVISIONING_PHASE_INDEX_SCAN" + + .. py:attribute:: PROVISIONING_PHASE_INDEX_SORT + :value: "PROVISIONING_PHASE_INDEX_SORT" + + .. py:attribute:: PROVISIONING_PHASE_MAIN + :value: "PROVISIONING_PHASE_MAIN" + .. autoclass:: RequestedClaims :members: :undoc-members: @@ -498,3 +601,93 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. autoclass:: RoleRoleStatus :members: :undoc-members: + +.. autoclass:: SyncedTable + :members: + :undoc-members: + +.. autoclass:: SyncedTableContinuousUpdateStatus + :members: + :undoc-members: + +.. autoclass:: SyncedTableFailedStatus + :members: + :undoc-members: + +.. autoclass:: SyncedTablePipelineProgress + :members: + :undoc-members: + +.. autoclass:: SyncedTablePosition + :members: + :undoc-members: + +.. 
autoclass:: SyncedTableProvisioningStatus + :members: + :undoc-members: + +.. autoclass:: SyncedTableSpec + :members: + :undoc-members: + +.. py:class:: SyncedTableSpecSyncedTableSchedulingPolicy + + Scheduling policy of the synced table's underlying pipeline. These should be kept in sync with the scheudling policy enums for SyncedDatabaseTables with DatabaseInstances + + .. py:attribute:: CONTINUOUS + :value: "CONTINUOUS" + + .. py:attribute:: SNAPSHOT + :value: "SNAPSHOT" + + .. py:attribute:: TRIGGERED + :value: "TRIGGERED" + +.. py:class:: SyncedTableState + + The state of a synced table. Copied from database_table_statuses.proto to decouple SDK packages. + + .. py:attribute:: SYNCED_TABLED_OFFLINE + :value: "SYNCED_TABLED_OFFLINE" + + .. py:attribute:: SYNCED_TABLE_OFFLINE_FAILED + :value: "SYNCED_TABLE_OFFLINE_FAILED" + + .. py:attribute:: SYNCED_TABLE_ONLINE + :value: "SYNCED_TABLE_ONLINE" + + .. py:attribute:: SYNCED_TABLE_ONLINE_CONTINUOUS_UPDATE + :value: "SYNCED_TABLE_ONLINE_CONTINUOUS_UPDATE" + + .. py:attribute:: SYNCED_TABLE_ONLINE_NO_PENDING_UPDATE + :value: "SYNCED_TABLE_ONLINE_NO_PENDING_UPDATE" + + .. py:attribute:: SYNCED_TABLE_ONLINE_PIPELINE_FAILED + :value: "SYNCED_TABLE_ONLINE_PIPELINE_FAILED" + + .. py:attribute:: SYNCED_TABLE_ONLINE_TRIGGERED_UPDATE + :value: "SYNCED_TABLE_ONLINE_TRIGGERED_UPDATE" + + .. py:attribute:: SYNCED_TABLE_ONLINE_UPDATING_PIPELINE_RESOURCES + :value: "SYNCED_TABLE_ONLINE_UPDATING_PIPELINE_RESOURCES" + + .. py:attribute:: SYNCED_TABLE_PROVISIONING + :value: "SYNCED_TABLE_PROVISIONING" + + .. py:attribute:: SYNCED_TABLE_PROVISIONING_INITIAL_SNAPSHOT + :value: "SYNCED_TABLE_PROVISIONING_INITIAL_SNAPSHOT" + + .. py:attribute:: SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES + :value: "SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES" + +.. autoclass:: SyncedTableStatus + :members: + :undoc-members: + +.. autoclass:: SyncedTableTriggeredUpdateStatus + :members: + :undoc-members: + +.. 
autoclass:: Table + :members: + :undoc-members: diff --git a/docs/dbdataclasses/qualitymonitorv2.rst b/docs/dbdataclasses/qualitymonitorv2.rst index 3bdd70a46..2b238ef9d 100755 --- a/docs/dbdataclasses/qualitymonitorv2.rst +++ b/docs/dbdataclasses/qualitymonitorv2.rst @@ -8,6 +8,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: AnomalyDetectionJobType + + .. py:attribute:: ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN + :value: "ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN" + + .. py:attribute:: ANOMALY_DETECTION_JOB_TYPE_NORMAL + :value: "ANOMALY_DETECTION_JOB_TYPE_NORMAL" + .. py:class:: AnomalyDetectionRunStatus Status of Anomaly Detection Job Run @@ -36,6 +44,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR :value: "ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR" +.. autoclass:: ColumnMatcher + :members: + :undoc-members: + +.. autoclass:: CustomCheckConfiguration + :members: + :undoc-members: + +.. autoclass:: CustomCheckThresholds + :members: + :undoc-members: + +.. autoclass:: CustomScalarCheck + :members: + :undoc-members: + .. py:class:: HostType Enum representing the type of Databricks host. @@ -65,6 +89,21 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: Threshold + :members: + :undoc-members: + +.. py:class:: ThresholdType + + .. py:attribute:: THRESHOLD_TYPE_AUTO + :value: "THRESHOLD_TYPE_AUTO" + + .. py:attribute:: THRESHOLD_TYPE_MANUAL + :value: "THRESHOLD_TYPE_MANUAL" + + .. py:attribute:: THRESHOLD_TYPE_UNBOUNDED + :value: "THRESHOLD_TYPE_UNBOUNDED" + .. 
autoclass:: UniquenessValidityCheck :members: :undoc-members: diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst index 5c655d651..dbdb0f294 100755 --- a/docs/dbdataclasses/settings.rst +++ b/docs/dbdataclasses/settings.rst @@ -125,6 +125,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo Compliance standard for SHIELD customers. See README.md for how instructions of how to add new standards. + .. py:attribute:: ARC_AMPE + :value: "ARC_AMPE" + .. py:attribute:: CANADA_PROTECTED_B :value: "CANADA_PROTECTED_B" @@ -514,6 +517,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: GcpEndpoint + :members: + :undoc-members: + .. autoclass:: GenericWebhookConfig :members: :undoc-members: @@ -687,6 +694,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: REJECTED :value: "REJECTED" +.. autoclass:: NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule + :members: + :undoc-members: + .. autoclass:: NetworkConnectivityConfiguration :members: :undoc-members: @@ -815,6 +826,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: UpdateTokenResponse + :members: + :undoc-members: + .. autoclass:: WorkspaceNetworkOption :members: :undoc-members: diff --git a/docs/dbdataclasses/sharing.rst b/docs/dbdataclasses/sharing.rst index c81f5f4f0..6c27dae1a 100755 --- a/docs/dbdataclasses/sharing.rst +++ b/docs/dbdataclasses/sharing.rst @@ -387,6 +387,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ReplicationConfig + :members: + :undoc-members: + .. 
autoclass:: RetrieveTokenResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst index 06eee9f88..5a75dd256 100755 --- a/docs/dbdataclasses/sql.rst +++ b/docs/dbdataclasses/sql.rst @@ -310,6 +310,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: PRO :value: "PRO" + .. py:attribute:: REYDEN + :value: "REYDEN" + .. py:attribute:: TYPE_UNSPECIFIED :value: "TYPE_UNSPECIFIED" @@ -455,6 +458,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: PRO :value: "PRO" + .. py:attribute:: REYDEN + :value: "REYDEN" + .. py:attribute:: TYPE_UNSPECIFIED :value: "TYPE_UNSPECIFIED" @@ -486,6 +492,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: PRO :value: "PRO" + .. py:attribute:: REYDEN + :value: "REYDEN" + .. py:attribute:: TYPE_UNSPECIFIED :value: "TYPE_UNSPECIFIED" @@ -554,6 +563,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: PRO :value: "PRO" + .. py:attribute:: REYDEN + :value: "REYDEN" + .. py:attribute:: TYPE_UNSPECIFIED :value: "TYPE_UNSPECIFIED" @@ -820,6 +832,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: ANALYZE :value: "ANALYZE" + .. py:attribute:: CALL + :value: "CALL" + .. py:attribute:: COPY :value: "COPY" @@ -1337,6 +1352,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DOCKER_INVALID_OS_EXCEPTION :value: "DOCKER_INVALID_OS_EXCEPTION" + .. py:attribute:: DRIVER_DNS_RESOLUTION_FAILURE + :value: "DRIVER_DNS_RESOLUTION_FAILURE" + .. py:attribute:: DRIVER_EVICTION :value: "DRIVER_EVICTION" @@ -1565,6 +1583,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: NFS_MOUNT_FAILURE :value: "NFS_MOUNT_FAILURE" + .. 
py:attribute:: NO_ACTIVATED_K8S + :value: "NO_ACTIVATED_K8S" + + .. py:attribute:: NO_ACTIVATED_K8S_TESTING_TAG + :value: "NO_ACTIVATED_K8S_TESTING_TAG" + .. py:attribute:: NO_MATCHED_K8S :value: "NO_MATCHED_K8S" @@ -1604,6 +1628,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SECRET_RESOLUTION_ERROR :value: "SECRET_RESOLUTION_ERROR" + .. py:attribute:: SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION + :value: "SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION" + .. py:attribute:: SECURITY_DAEMON_REGISTRATION_EXCEPTION :value: "SECURITY_DAEMON_REGISTRATION_EXCEPTION" @@ -1803,6 +1830,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: PRO :value: "PRO" + .. py:attribute:: REYDEN + :value: "REYDEN" + .. py:attribute:: TYPE_UNSPECIFIED :value: "TYPE_UNSPECIFIED" diff --git a/docs/dbdataclasses/supervisoragents.rst b/docs/dbdataclasses/supervisoragents.rst new file mode 100644 index 000000000..fabbb0b54 --- /dev/null +++ b/docs/dbdataclasses/supervisoragents.rst @@ -0,0 +1,34 @@ +Data Classify +============= + +These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.supervisoragents`` module. + +.. py:currentmodule:: databricks.sdk.service.supervisoragents +.. py:class:: HostType + + Enum representing the type of Databricks host. + + .. py:attribute:: ACCOUNTS + :value: "ACCOUNTS" + + .. py:attribute:: WORKSPACE + :value: "WORKSPACE" + + .. py:attribute:: UNIFIED + :value: "UNIFIED" + +.. autoclass:: ListToolsResponse + :members: + :undoc-members: + +.. autoclass:: SupervisorAgent + :members: + :undoc-members: + +.. autoclass:: SupervisorAgentListResponse + :members: + :undoc-members: + +.. 
autoclass:: Tool + :members: + :undoc-members: diff --git a/docs/dbdataclasses/tags.rst b/docs/dbdataclasses/tags.rst index 7799deb50..630cca71b 100755 --- a/docs/dbdataclasses/tags.rst +++ b/docs/dbdataclasses/tags.rst @@ -4,6 +4,14 @@ Tags These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.tags`` module. .. py:currentmodule:: databricks.sdk.service.tags +.. autoclass:: ConflictResolutionPolicy + :members: + :undoc-members: + +.. autoclass:: DefaultValueOverridePolicy + :members: + :undoc-members: + .. py:class:: HostType Enum representing the type of Databricks host. @@ -25,6 +33,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: PropagationConfig + :members: + :undoc-members: + .. autoclass:: TagAssignment :members: :undoc-members: diff --git a/docs/dbdataclasses/vectorsearch.rst b/docs/dbdataclasses/vectorsearch.rst index 083cf67f8..44903dc3b 100755 --- a/docs/dbdataclasses/vectorsearch.rst +++ b/docs/dbdataclasses/vectorsearch.rst @@ -4,6 +4,10 @@ Vector Search These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.vectorsearch`` module. .. py:currentmodule:: databricks.sdk.service.vectorsearch +.. autoclass:: AdjustedThroughputRequest + :members: + :undoc-members: + .. autoclass:: ColumnInfo :members: :undoc-members: @@ -93,6 +97,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: YELLOW_STATE :value: "YELLOW_STATE" +.. autoclass:: EndpointThroughputInfo + :members: + :undoc-members: + .. py:class:: EndpointType Type of endpoint. @@ -153,6 +161,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: PatchEndpointThroughputResponse + :members: + :undoc-members: + .. py:class:: PipelineType Pipeline execution mode. 
- `TRIGGERED`: If the pipeline uses the triggered execution mode, the system stops processing after successfully refreshing the source table in the pipeline once, ensuring the table is updated based on the data available when the update started. - `CONTINUOUS`: If the pipeline uses continuous execution, the pipeline processes new data as it arrives in the source table to keep vector index fresh. @@ -210,10 +222,49 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ThroughputChangeRequestState + + Throughput change request state + + .. py:attribute:: CHANGE_ADJUSTED + :value: "CHANGE_ADJUSTED" + + .. py:attribute:: CHANGE_FAILED + :value: "CHANGE_FAILED" + + .. py:attribute:: CHANGE_IN_PROGRESS + :value: "CHANGE_IN_PROGRESS" + + .. py:attribute:: CHANGE_REACHED_MAXIMUM + :value: "CHANGE_REACHED_MAXIMUM" + + .. py:attribute:: CHANGE_REACHED_MINIMUM + :value: "CHANGE_REACHED_MINIMUM" + + .. py:attribute:: CHANGE_SUCCESS + :value: "CHANGE_SUCCESS" + +.. py:class:: ThroughputPatchStatus + + Response status for throughput change requests + + .. py:attribute:: PATCH_ACCEPTED + :value: "PATCH_ACCEPTED" + + .. py:attribute:: PATCH_FAILED + :value: "PATCH_FAILED" + + .. py:attribute:: PATCH_REJECTED + :value: "PATCH_REJECTED" + .. autoclass:: UpdateEndpointCustomTagsResponse :members: :undoc-members: +.. autoclass:: UpdateVectorIndexUsagePolicyResponse + :members: + :undoc-members: + .. autoclass:: UpsertDataResult :members: :undoc-members: diff --git a/docs/dbdataclasses/workspace.rst b/docs/dbdataclasses/workspace.rst index af9beaa2a..930a052a4 100755 --- a/docs/dbdataclasses/workspace.rst +++ b/docs/dbdataclasses/workspace.rst @@ -78,6 +78,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SOURCE :value: "SOURCE" +.. py:class:: ExportOutputs + + .. py:attribute:: ALL + :value: "ALL" + + .. py:attribute:: NONE + :value: "NONE" + .. 
autoclass:: ExportResponse :members: :undoc-members: diff --git a/docs/gen-client-docs.py b/docs/gen-client-docs.py index 9907eced7..152bf21ae 100644 --- a/docs/gen-client-docs.py +++ b/docs/gen-client-docs.py @@ -286,6 +286,26 @@ class Generator: "Networking", "Manage network connectivity configurations for Databricks workspaces." ), + Package( + "dataclassification", + "Data Classify", + "Manage network connectivity configurations for Databricks workspaces." + ), + Package( + "environments", + "Data Classify", + "Manage network connectivity configurations for Databricks workspaces." + ), + Package( + "knowledgeassistants", + "Data Classify", + "Manage network connectivity configurations for Databricks workspaces." + ), + Package( + "supervisoragents", + "Data Classify", + "Manage network connectivity configurations for Databricks workspaces." + ), ] def __init__(self): diff --git a/docs/workspace/catalog/catalogs.rst b/docs/workspace/catalog/catalogs.rst index 258f994d3..d4e337aa1 100755 --- a/docs/workspace/catalog/catalogs.rst +++ b/docs/workspace/catalog/catalogs.rst @@ -11,7 +11,7 @@ the workspaces in a Databricks account. Users in different workspaces can share access to the same data, depending on privileges granted centrally in Unity Catalog. - .. py:method:: create(name: str [, comment: Optional[str], connection_name: Optional[str], options: Optional[Dict[str, str]], properties: Optional[Dict[str, str]], provider_name: Optional[str], share_name: Optional[str], storage_root: Optional[str]]) -> CatalogInfo + .. 
py:method:: create(name: str [, comment: Optional[str], connection_name: Optional[str], conversion_info: Optional[ConversionInfo], dr_replication_info: Optional[DrReplicationInfo], options: Optional[Dict[str, str]], properties: Optional[Dict[str, str]], provider_name: Optional[str], share_name: Optional[str], storage_root: Optional[str]]) -> CatalogInfo Usage: @@ -24,10 +24,10 @@ w = WorkspaceClient() - created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}") + created = w.catalogs.create(name=f"sdk-{time.time_ns()}") # cleanup - w.catalogs.delete(name=created_catalog.name, force=True) + w.catalogs.delete(name=created.name, force=True) Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the **CREATE_CATALOG** privilege. @@ -38,6 +38,10 @@ User-provided free-form text description. :param connection_name: str (optional) The name of the connection to an external data source. + :param conversion_info: :class:`ConversionInfo` (optional) + Status of conversion of FOREIGN catalog to UC Native catalog. + :param dr_replication_info: :class:`DrReplicationInfo` (optional) + Disaster Recovery replication state snapshot. :param options: Dict[str,str] (optional) A map of key-value properties attached to the securable. :param properties: Dict[str,str] (optional) @@ -145,7 +149,7 @@ :returns: Iterator over :class:`CatalogInfo` - .. py:method:: update(name: str [, comment: Optional[str], enable_predictive_optimization: Optional[EnablePredictiveOptimization], isolation_mode: Optional[CatalogIsolationMode], new_name: Optional[str], options: Optional[Dict[str, str]], owner: Optional[str], properties: Optional[Dict[str, str]]]) -> CatalogInfo + .. 
py:method:: update(name: str [, comment: Optional[str], conversion_info: Optional[ConversionInfo], dr_replication_info: Optional[DrReplicationInfo], enable_predictive_optimization: Optional[EnablePredictiveOptimization], isolation_mode: Optional[CatalogIsolationMode], new_name: Optional[str], options: Optional[Dict[str, str]], owner: Optional[str], properties: Optional[Dict[str, str]]]) -> CatalogInfo Usage: @@ -155,13 +159,12 @@ import time from databricks.sdk import WorkspaceClient - from databricks.sdk.service import catalog w = WorkspaceClient() created = w.catalogs.create(name=f"sdk-{time.time_ns()}") - _ = w.catalogs.update(name=created.name, isolation_mode=catalog.CatalogIsolationMode.ISOLATED) + _ = w.catalogs.update(name=created.name, comment="updated") # cleanup w.catalogs.delete(name=created.name, force=True) @@ -173,6 +176,10 @@ The name of the catalog. :param comment: str (optional) User-provided free-form text description. + :param conversion_info: :class:`ConversionInfo` (optional) + Status of conversion of FOREIGN catalog to UC Native catalog. + :param dr_replication_info: :class:`DrReplicationInfo` (optional) + Disaster Recovery replication state snapshot. :param enable_predictive_optimization: :class:`EnablePredictiveOptimization` (optional) Whether predictive optimization should be enabled for this object and objects under it. :param isolation_mode: :class:`CatalogIsolationMode` (optional) diff --git a/docs/workspace/catalog/connections.rst b/docs/workspace/catalog/connections.rst index acfeecd53..0674d749d 100644 --- a/docs/workspace/catalog/connections.rst +++ b/docs/workspace/catalog/connections.rst @@ -13,7 +13,7 @@ objects based on cloud storage. Users may create different types of connections with each connection having a unique set of configuration options to support credential management and other settings. - .. 
py:method:: create(name: str, connection_type: ConnectionType, options: Dict[str, str] [, comment: Optional[str], properties: Optional[Dict[str, str]], read_only: Optional[bool]]) -> ConnectionInfo + .. py:method:: create(name: str, connection_type: ConnectionType, options: Dict[str, str] [, comment: Optional[str], environment_settings: Optional[EnvironmentSettings], properties: Optional[Dict[str, str]], read_only: Optional[bool]]) -> ConnectionInfo Usage: @@ -54,6 +54,8 @@ A map of key-value properties attached to the securable. :param comment: str (optional) User-provided free-form text description. + :param environment_settings: :class:`EnvironmentSettings` (optional) + [Create,Update:OPT] Connection environment settings as EnvironmentSettings object. :param properties: Dict[str,str] (optional) A map of key-value properties attached to the securable. :param read_only: bool (optional) @@ -153,7 +155,7 @@ :returns: Iterator over :class:`ConnectionInfo` - .. py:method:: update(name: str, options: Dict[str, str] [, new_name: Optional[str], owner: Optional[str]]) -> ConnectionInfo + .. py:method:: update(name: str, options: Dict[str, str] [, environment_settings: Optional[EnvironmentSettings], new_name: Optional[str], owner: Optional[str]]) -> ConnectionInfo Usage: @@ -196,6 +198,8 @@ Name of the connection. :param options: Dict[str,str] A map of key-value properties attached to the securable. + :param environment_settings: :class:`EnvironmentSettings` (optional) + [Create,Update:OPT] Connection environment settings as EnvironmentSettings object. :param new_name: str (optional) New name for the connection. :param owner: str (optional) diff --git a/docs/workspace/catalog/entity_tag_assignments.rst b/docs/workspace/catalog/entity_tag_assignments.rst index f1111bffe..3e26773a1 100644 --- a/docs/workspace/catalog/entity_tag_assignments.rst +++ b/docs/workspace/catalog/entity_tag_assignments.rst @@ -51,7 +51,7 @@ - .. 
py:method:: get(entity_type: str, entity_name: str, tag_key: str) -> EntityTagAssignment + .. py:method:: get(entity_type: str, entity_name: str, tag_key: str [, include_inherited: Optional[bool]]) -> EntityTagAssignment Gets a tag assignment for an Unity Catalog entity by tag key. @@ -62,11 +62,13 @@ The fully qualified name of the entity to which the tag is assigned :param tag_key: str Required. The key of the tag + :param include_inherited: bool (optional) + Boolean which indicates whether this tag is inherited. :returns: :class:`EntityTagAssignment` - .. py:method:: list(entity_type: str, entity_name: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[EntityTagAssignment] + .. py:method:: list(entity_type: str, entity_name: str [, include_inherited: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[EntityTagAssignment] List tag assignments for an Unity Catalog entity @@ -79,6 +81,8 @@ columns, volumes. :param entity_name: str The fully qualified name of the entity to which the tag is assigned + :param include_inherited: bool (optional) + Boolean which indicates whether this tag is inherited. :param max_results: int (optional) Optional. Maximum number of tag assignments to return in a single page :param page_token: str (optional) diff --git a/docs/workspace/catalog/grants.rst b/docs/workspace/catalog/grants.rst index 69f2dd6c5..5ce76ef7a 100644 --- a/docs/workspace/catalog/grants.rst +++ b/docs/workspace/catalog/grants.rst @@ -14,7 +14,7 @@ within the catalog. Similarly, privileges granted on a schema are inherited by all current and future objects within that schema. - .. py:method:: get(securable_type: str, full_name: str [, max_results: Optional[int], page_token: Optional[str], principal: Optional[str]]) -> GetPermissionsResponse + .. 
py:method:: get(securable_type: str, full_name: str [, include_deleted_principals: Optional[bool], max_results: Optional[int], page_token: Optional[str], principal: Optional[str]]) -> GetPermissionsResponse Usage: @@ -73,6 +73,8 @@ Type of securable. :param full_name: str Full name of securable. + :param include_deleted_principals: bool (optional) + Optional. If true, also return privilege assignments whose principals have been deleted. :param max_results: int (optional) Specifies the maximum number of privileges to return (page length). Every PrivilegeAssignment present in a single page response is guaranteed to contain all the privileges granted on the @@ -171,6 +173,35 @@ :returns: :class:`EffectivePermissionsList` + .. py:method:: list(securable_type: str, full_name: str [, include_deleted_principals: Optional[bool], page_size: Optional[int], page_token: Optional[str], principal: Optional[str]]) -> Iterator[PrivilegeAssignment] + + Lists the privilege assignments for a securable. Does not include inherited privileges. Paginated + version of Get Permissions API. + + :param securable_type: str + Type of securable. + :param full_name: str + Full name of securable. + :param include_deleted_principals: bool (optional) + Optional. If true, also return privilege assignments whose principals have been deleted. + :param page_size: int (optional) + Specifies the maximum number of privileges to return (page length). Every PrivilegeAssignment + present in a single page response is guaranteed to contain all the privileges granted on the + requested Securable for the respective principal. + + If not set, page length is the server configured value. 
If set to - lesser than 0: invalid parameter + error - 0: page length is set to a server configured value - lesser than 150 but greater than 0: + invalid parameter error (this is to ensure that server is able to return at least one complete + PrivilegeAssignment in a single page response) - greater than (or equal to) 150: page length is the + minimum of this value and a server configured value + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. + :param principal: str (optional) + If provided, only the permissions for the specified principal (user or group) are returned. + + :returns: Iterator over :class:`PrivilegeAssignment` + + .. py:method:: update(securable_type: str, full_name: str [, changes: Optional[List[PermissionsChange]]]) -> UpdatePermissionsResponse diff --git a/docs/workspace/catalog/storage_credentials.rst b/docs/workspace/catalog/storage_credentials.rst index c174e87a3..92da2c568 100755 --- a/docs/workspace/catalog/storage_credentials.rst +++ b/docs/workspace/catalog/storage_credentials.rst @@ -30,14 +30,13 @@ w = WorkspaceClient() - storage_credential = w.storage_credentials.create( + created = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), - comment="created via SDK", ) # cleanup - w.storage_credentials.delete(name=storage_credential.name) + w.storage_credentials.delete(name=created.name) Creates a new storage credential. 
@@ -99,13 +98,13 @@ created = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) - by_name = w.storage_credentials.get(name=created.name) + by_name = w.storage_credentials.get(name=created.name) # cleanup - w.storage_credentials.delete(name=created.name) + w.storage_credentials.delete(name=created.name) Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the storage credential, or have some permission on the storage credential. @@ -124,11 +123,10 @@ .. code-block:: from databricks.sdk import WorkspaceClient - from databricks.sdk.service import catalog w = WorkspaceClient() - all = w.storage_credentials.list(catalog.ListStorageCredentialsRequest()) + all = w.storage_credentials.list() Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to only those storage credentials the caller has permission to access. If the caller is a metastore diff --git a/docs/workspace/compute/instance_pools.rst b/docs/workspace/compute/instance_pools.rst index 5992f2c4c..14c53941e 100755 --- a/docs/workspace/compute/instance_pools.rst +++ b/docs/workspace/compute/instance_pools.rst @@ -19,7 +19,7 @@ Databricks does not charge DBUs while instances are idle in the pool. Instance provider billing does apply. See pricing. - .. 
py:method:: create(instance_pool_name: str, node_type_id: str [, aws_attributes: Optional[InstancePoolAwsAttributes], azure_attributes: Optional[InstancePoolAzureAttributes], custom_tags: Optional[Dict[str, str]], disk_spec: Optional[DiskSpec], enable_elastic_disk: Optional[bool], gcp_attributes: Optional[InstancePoolGcpAttributes], idle_instance_autotermination_minutes: Optional[int], max_capacity: Optional[int], min_idle_instances: Optional[int], node_type_flexibility: Optional[NodeTypeFlexibility], preloaded_docker_images: Optional[List[DockerImage]], preloaded_spark_versions: Optional[List[str]], remote_disk_throughput: Optional[int], total_initial_remote_disk_size: Optional[int]]) -> CreateInstancePoolResponse + .. py:method:: create(instance_pool_name: str, node_type_id: str [, aws_attributes: Optional[InstancePoolAwsAttributes], azure_attributes: Optional[InstancePoolAzureAttributes], custom_tags: Optional[Dict[str, str]], disk_spec: Optional[DiskSpec], enable_auto_alternate_node_types: Optional[bool], enable_elastic_disk: Optional[bool], gcp_attributes: Optional[InstancePoolGcpAttributes], idle_instance_autotermination_minutes: Optional[int], max_capacity: Optional[int], min_idle_instances: Optional[int], node_type_flexibility: Optional[NodeTypeFlexibility], preloaded_docker_images: Optional[List[DockerImage]], preloaded_spark_versions: Optional[List[str]], remote_disk_throughput: Optional[int], total_initial_remote_disk_size: Optional[int]]) -> CreateInstancePoolResponse Usage: @@ -62,6 +62,11 @@ - Currently, Databricks allows at most 45 custom tags :param disk_spec: :class:`DiskSpec` (optional) Defines the specification of the disks that will be attached to all spark containers. + :param enable_auto_alternate_node_types: bool (optional) + For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids are + enabled. This field should not be true if node_type_flexibility is set. 
DEPRECATED: This field was + deprecated before entering PuPr and should no longer be used. TODO(CJ-71514): Remove this field + after sufficient time has passed for all clients to migrate. :param enable_elastic_disk: bool (optional) Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space. In AWS, this feature @@ -109,7 +114,7 @@ - .. py:method:: edit(instance_pool_id: str, instance_pool_name: str, node_type_id: str [, custom_tags: Optional[Dict[str, str]], idle_instance_autotermination_minutes: Optional[int], max_capacity: Optional[int], min_idle_instances: Optional[int], remote_disk_throughput: Optional[int], total_initial_remote_disk_size: Optional[int]]) + .. py:method:: edit(instance_pool_id: str, instance_pool_name: str, node_type_id: str [, custom_tags: Optional[Dict[str, str]], enable_auto_alternate_node_types: Optional[bool], idle_instance_autotermination_minutes: Optional[int], max_capacity: Optional[int], min_idle_instances: Optional[int], remote_disk_throughput: Optional[int], total_initial_remote_disk_size: Optional[int]]) Usage: @@ -152,6 +157,11 @@ EBS volumes) with these tags in addition to `default_tags`. Notes: - Currently, Databricks allows at most 45 custom tags + :param enable_auto_alternate_node_types: bool (optional) + For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids are + enabled. This field should not be true if node_type_flexibility is set. DEPRECATED: This field was + deprecated before entering PuPr and should no longer be used. TODO(CJ-71514): Remove this field + after sufficient time has passed for all clients to migrate. :param idle_instance_autotermination_minutes: int (optional) Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met. 
If not set, the extra pool instances diff --git a/docs/workspace/compute/libraries.rst b/docs/workspace/compute/libraries.rst index dfafea7aa..aaf85d4ce 100644 --- a/docs/workspace/compute/libraries.rst +++ b/docs/workspace/compute/libraries.rst @@ -41,6 +41,42 @@ :returns: Iterator over :class:`LibraryFullStatus` + .. py:method:: create_default_base_environment(default_base_environment: DefaultBaseEnvironment [, request_id: Optional[str], workspace_base_environment_id: Optional[str]]) -> DefaultBaseEnvironment + + Create a default base environment within workspaces to define the environment version and a list of + dependencies to be used in serverless notebooks and jobs. This process will asynchronously generate a + cache to optimize dependency resolution. + + :param default_base_environment: :class:`DefaultBaseEnvironment` + :param request_id: str (optional) + A unique identifier for this request. A random UUID is recommended. This request is only idempotent + if a `request_id` is provided. + :param workspace_base_environment_id: str (optional) + + :returns: :class:`DefaultBaseEnvironment` + + + .. py:method:: delete_default_base_environment(id: str) + + Delete the default base environment given an ID. The default base environment may be used by + downstream workloads. Please ensure that the deletion is intentional. + + :param id: str + + + + + .. py:method:: get_default_base_environment(id: str [, trace_id: Optional[str]]) -> DefaultBaseEnvironment + + Return the default base environment details for a given ID. + + :param id: str + :param trace_id: str (optional) + Deprecated: use ctx.requestId instead + + :returns: :class:`DefaultBaseEnvironment` + + .. py:method:: install(cluster_id: str, libraries: List[Library]) Add libraries to install on a cluster. The installation is asynchronous; it happens in the background @@ -54,6 +90,26 @@ + .. 
py:method:: list_default_base_environments( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[DefaultBaseEnvironment] + + List default base environments defined in the workspaces for the requested user. + + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`DefaultBaseEnvironment` + + + .. py:method:: refresh_default_base_environments(ids: List[str]) + + Refresh the cached default base environments for the given IDs. This process will asynchronously + regenerate the caches. The existing caches remains available until it expires. + + :param ids: List[str] + + + + .. py:method:: uninstall(cluster_id: str, libraries: List[Library]) Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster is @@ -65,4 +121,26 @@ The libraries to uninstall. + + + .. py:method:: update_default_base_environment(id: str, default_base_environment: DefaultBaseEnvironment) -> DefaultBaseEnvironment + + Update the default base environment for the given ID. This process will asynchronously regenerate the + cache. The existing cache remains available until it expires. + + :param id: str + :param default_base_environment: :class:`DefaultBaseEnvironment` + + :returns: :class:`DefaultBaseEnvironment` + + + .. py:method:: update_default_default_base_environment( [, base_environment_type: Optional[BaseEnvironmentType], id: Optional[str]]) -> DefaultBaseEnvironment + + Set the default base environment for the workspace. This marks the specified DBE as the workspace + default. + + :param base_environment_type: :class:`BaseEnvironmentType` (optional) + :param id: str (optional) + + :returns: :class:`DefaultBaseEnvironment` \ No newline at end of file diff --git a/docs/workspace/dashboards/genie.rst b/docs/workspace/dashboards/genie.rst index a7df902d3..36a2c858f 100755 --- a/docs/workspace/dashboards/genie.rst +++ b/docs/workspace/dashboards/genie.rst @@ -29,6 +29,22 @@ .. 
py:method:: create_message_and_wait(space_id: str, conversation_id: str, content: str, timeout: datetime.timedelta = 0:20:00) -> GenieMessage + .. py:method:: create_message_comment(space_id: str, conversation_id: str, message_id: str, content: str) -> GenieMessageComment + + Create a comment on a conversation message. + + :param space_id: str + The ID associated with the Genie space. + :param conversation_id: str + The ID associated with the conversation. + :param message_id: str + The ID associated with the message. + :param content: str + Comment text content. + + :returns: :class:`GenieMessageComment` + + .. py:method:: create_space(warehouse_id: str, serialized_space: str [, description: Optional[str], parent_path: Optional[str], title: Optional[str]]) -> GenieSpace Creates a Genie space from a serialized payload. @@ -143,6 +159,70 @@ :returns: :class:`GenieGenerateDownloadFullQueryResultResponse` + .. py:method:: genie_create_eval_run(space_id: str [, benchmark_question_ids: Optional[List[str]]]) -> GenieEvalRunResponse + + Create and run evaluations for multiple benchmark questions in a Genie space. + + :param space_id: str + The ID associated with the Genie space where the evaluations will be executed. + :param benchmark_question_ids: List[str] (optional) + List of benchmark question IDs to evaluate. These questions must exist in the specified Genie space. + If none are specified, then all benchmark questions are evaluated. + + :returns: :class:`GenieEvalRunResponse` + + + .. py:method:: genie_get_eval_result_details(space_id: str, eval_run_id: str, result_id: str) -> GenieEvalResultDetails + + Get details for evaluation results + + :param space_id: str + The ID associated with the Genie space where the evaluation run is located. + :param eval_run_id: str + The unique identifier for the evaluation run. + :param result_id: str + The unique identifier for the evaluation result. + + :returns: :class:`GenieEvalResultDetails` + + + .. 
py:method:: genie_get_eval_run(space_id: str, eval_run_id: str) -> GenieEvalRunResponse + + Get evaluation run details + + :param space_id: str + The ID associated with the Genie space where the evaluation run is located. + :param eval_run_id: str + + :returns: :class:`GenieEvalRunResponse` + + + .. py:method:: genie_list_eval_results(space_id: str, eval_run_id: str) -> GenieListEvalResultsResponse + + List evaluation results for a specific evaluation run + + :param space_id: str + The ID associated with the Genie space where the evaluation run is located. + :param eval_run_id: str + The unique identifier for the evaluation run. + + :returns: :class:`GenieListEvalResultsResponse` + + + .. py:method:: genie_list_eval_runs(space_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> GenieListEvalRunsResponse + + Lists all evaluation runs in a space + + :param space_id: str + The ID associated with the Genie space where the evaluation run is located. + :param page_size: int (optional) + Maximum number of messages to return per page + :param page_token: str (optional) + Token to get the next page of results + + :returns: :class:`GenieListEvalRunsResponse` + + .. py:method:: get_download_full_query_result(space_id: str, conversation_id: str, message_id: str, attachment_id: str, download_id: str, download_id_signature: str) -> GenieGetDownloadFullQueryResultResponse After [Generating a Full Query Result Download](:method:genie/generatedownloadfullqueryresult) and @@ -260,6 +340,22 @@ :returns: :class:`GenieSpace` + .. py:method:: list_conversation_comments(space_id: str, conversation_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> GenieListConversationCommentsResponse + + List all comments across all messages in a conversation. + + :param space_id: str + The ID associated with the Genie space. + :param conversation_id: str + The ID associated with the conversation. 
+ :param page_size: int (optional) + Maximum number of comments to return per page. + :param page_token: str (optional) + Pagination token for getting the next page of results. + + :returns: :class:`GenieListConversationCommentsResponse` + + .. py:method:: list_conversation_messages(space_id: str, conversation_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> GenieListConversationMessagesResponse List messages in a conversation @@ -293,6 +389,24 @@ :returns: :class:`GenieListConversationsResponse` + .. py:method:: list_message_comments(space_id: str, conversation_id: str, message_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> GenieListMessageCommentsResponse + + List comments on a specific conversation message. + + :param space_id: str + The ID associated with the Genie space. + :param conversation_id: str + The ID associated with the conversation. + :param message_id: str + The ID associated with the message. + :param page_size: int (optional) + Maximum number of comments to return per page. + :param page_token: str (optional) + Pagination token for getting the next page of results. + + :returns: :class:`GenieListMessageCommentsResponse` + + .. py:method:: list_spaces( [, page_size: Optional[int], page_token: Optional[str]]) -> GenieListSpacesResponse Get list of Genie Spaces. @@ -305,7 +419,7 @@ :returns: :class:`GenieListSpacesResponse` - .. py:method:: send_message_feedback(space_id: str, conversation_id: str, message_id: str, rating: GenieFeedbackRating) + .. py:method:: send_message_feedback(space_id: str, conversation_id: str, message_id: str, rating: GenieFeedbackRating [, comment: Optional[str]]) Send feedback for a message. @@ -317,6 +431,8 @@ The ID associated with the message to provide feedback for. :param rating: :class:`GenieFeedbackRating` The rating (POSITIVE, NEGATIVE, or NONE). + :param comment: str (optional) + Optional text feedback that will be stored as a comment. 
diff --git a/docs/workspace/dashboards/index.rst b/docs/workspace/dashboards/index.rst index 877891d25..940efa5dd 100644 --- a/docs/workspace/dashboards/index.rst +++ b/docs/workspace/dashboards/index.rst @@ -9,4 +9,5 @@ Manage Lakeview dashboards genie lakeview - lakeview_embedded \ No newline at end of file + lakeview_embedded + query_execution \ No newline at end of file diff --git a/docs/workspace/dashboards/lakeview_embedded.rst b/docs/workspace/dashboards/lakeview_embedded.rst index 80eb5bc48..f48bf63bf 100644 --- a/docs/workspace/dashboards/lakeview_embedded.rst +++ b/docs/workspace/dashboards/lakeview_embedded.rst @@ -6,6 +6,16 @@ Token-based Lakeview APIs for embedding dashboards in external applications. + .. py:method:: get_published_dashboard_embedded(dashboard_id: str) + + Get the current published dashboard within an embedded context. + + :param dashboard_id: str + UUID identifying the published dashboard. + + + + .. py:method:: get_published_dashboard_token_info(dashboard_id: str [, external_value: Optional[str], external_viewer_id: Optional[str]]) -> GetPublishedDashboardTokenInfoResponse Get a required authorization details and scopes of a published dashboard to mint an OAuth token. diff --git a/docs/workspace/dashboards/query_execution.rst b/docs/workspace/dashboards/query_execution.rst index f57d874ac..1f13c3f52 100644 --- a/docs/workspace/dashboards/query_execution.rst +++ b/docs/workspace/dashboards/query_execution.rst @@ -35,7 +35,8 @@ .. py:method:: poll_published_query_status(dashboard_name: str, dashboard_revision_id: str [, tokens: Optional[List[str]]]) -> PollQueryStatusResponse - Poll the results for the a query for a published, embedded dashboard. + Poll the results for a query for a published, embedded dashboard. Supports both GET and POST + methods. POST is recommended for polling many tokens to avoid URL length limitations. 
:param dashboard_name: str :param dashboard_revision_id: str diff --git a/docs/workspace/database/database.rst b/docs/workspace/database/database.rst index 9ce91817f..d0c8faa26 100755 --- a/docs/workspace/database/database.rst +++ b/docs/workspace/database/database.rst @@ -118,6 +118,17 @@ + .. py:method:: failover_database_instance(name: str [, failover_target_database_instance_name: Optional[str]]) -> DatabaseInstance + + Failover the primary node of a Database Instance to a secondary. + + :param name: str + Name of the instance to failover. + :param failover_target_database_instance_name: str (optional) + + :returns: :class:`DatabaseInstance` + + .. py:method:: find_database_instance_by_uid( [, uid: Optional[str]]) -> DatabaseInstance Find a Database Instance by uid. @@ -275,6 +286,19 @@ :returns: :class:`DatabaseInstance` + .. py:method:: update_database_instance_role(instance_name: str, name: str, database_instance_role: DatabaseInstanceRole [, database_instance_name: Optional[str]]) -> DatabaseInstanceRole + + Update a role for a Database Instance. + + :param instance_name: str + :param name: str + The name of the role. This is the unique identifier for the role in an instance. + :param database_instance_role: :class:`DatabaseInstanceRole` + :param database_instance_name: str (optional) + + :returns: :class:`DatabaseInstanceRole` + + .. py:method:: update_synced_database_table(name: str, synced_table: SyncedDatabaseTable, update_mask: str) -> SyncedDatabaseTable This API is currently unimplemented, but exposed for Terraform support. diff --git a/docs/workspace/dataclassification/data_classification.rst b/docs/workspace/dataclassification/data_classification.rst new file mode 100644 index 000000000..5b6909bf5 --- /dev/null +++ b/docs/workspace/dataclassification/data_classification.rst @@ -0,0 +1,71 @@ +``w.data_classification``: DataClassification.v1 +================================================ +.. 
currentmodule:: databricks.sdk.service.dataclassification + +.. py:class:: DataClassificationAPI + + Manage data classification for Unity Catalog catalogs. Data classification automatically identifies and + tags sensitive data (PII) in Unity Catalog tables. Each catalog can have at most one configuration + resource that controls scanning behavior and auto-tagging rules. + + .. py:method:: create_catalog_config(parent: str, catalog_config: CatalogConfig) -> CatalogConfig + + Create Data Classification configuration for a catalog. + + Creates a new config resource, which enables Data Classification for the specified catalog. The config + must not already exist. + + :param parent: str + Parent resource in the format: catalogs/{catalog_name} + :param catalog_config: :class:`CatalogConfig` + The configuration to create. + + :returns: :class:`CatalogConfig` + + + .. py:method:: delete_catalog_config(name: str) + + Delete Data Classification configuration for a catalog. + + Deletes the config resource, which disables Data Classification for the specified catalog. + + This operation is idempotent: deleting a non-existent config succeeds. However, if the catalog itself + doesn't exist, returns RESOURCE_DOES_NOT_EXIST. + + :param name: str + Resource name in the format: catalogs/{catalog_name}/config + + + + + .. py:method:: get_catalog_config(name: str) -> CatalogConfig + + Get the Data Classification configuration for a catalog. + + Returns the CatalogConfig resource for the specified catalog. If the config doesn't exist (Data + Classification not enabled), returns RESOURCE_DOES_NOT_EXIST error. + + :param name: str + Resource name in the format: catalogs/{catalog_name}/config + + :returns: :class:`CatalogConfig` + + + .. py:method:: update_catalog_config(name: str, catalog_config: CatalogConfig, update_mask: FieldMask) -> CatalogConfig + + Update the Data Classification configuration for a catalog. + + Updates fields specified in the update_mask. 
The config must already exist. Use field mask to perform + partial updates of the configuration. + + :param name: str + Resource name in the format: catalogs/{catalog_name}/config Set by server in responses. Required in + Update requests to identify the resource. + :param catalog_config: :class:`CatalogConfig` + The configuration to apply to the catalog. The name field in catalog_config identifies which + resource to update. + :param update_mask: FieldMask + Field mask specifying which fields to update. + + :returns: :class:`CatalogConfig` + \ No newline at end of file diff --git a/docs/workspace/dataclassification/index.rst b/docs/workspace/dataclassification/index.rst new file mode 100644 index 000000000..944faf757 --- /dev/null +++ b/docs/workspace/dataclassification/index.rst @@ -0,0 +1,10 @@ + +Data Classification +=================== + +Manage data classification configurations for Unity Catalog catalogs. + +.. toctree:: + :maxdepth: 1 + + data_classification \ No newline at end of file diff --git a/docs/workspace/environments/environments.rst b/docs/workspace/environments/environments.rst new file mode 100644 index 000000000..4866c1d7c --- /dev/null +++ b/docs/workspace/environments/environments.rst @@ -0,0 +1,134 @@ +``w.environments``: Environments.v1 +=================================== +.. currentmodule:: databricks.sdk.service.environments + +.. py:class:: EnvironmentsAPI + + APIs to manage environment resources. + + The Environments API provides management capabilities for different types of environments including + workspace-level base environments that define the environment version and dependencies to be used in + serverless notebooks and jobs. + + .. py:method:: create_workspace_base_environment(workspace_base_environment: WorkspaceBaseEnvironment [, request_id: Optional[str], workspace_base_environment_id: Optional[str]]) -> CreateWorkspaceBaseEnvironmentOperation + + Creates a new WorkspaceBaseEnvironment. This is a long-running operation. 
The operation will + asynchronously generate a materialized environment to optimize dependency resolution and is only + marked as done when the materialized environment has been successfully generated or has failed. + + :param workspace_base_environment: :class:`WorkspaceBaseEnvironment` + Required. The workspace base environment to create. + :param request_id: str (optional) + A unique identifier for this request. A random UUID is recommended. This request is only idempotent + if a request_id is provided. + :param workspace_base_environment_id: str (optional) + The ID to use for the workspace base environment, which will become the final component of the + resource name. This value should be 4-63 characters, and valid characters are /[a-z][0-9]-/. + + :returns: :class:`Operation` + + + .. py:method:: delete_workspace_base_environment(name: str) + + Deletes a WorkspaceBaseEnvironment. Deleting a base environment may impact linked notebooks and jobs. + This operation is irreversible and should be performed only when you are certain the environment is no + longer needed. + + :param name: str + Required. The resource name of the workspace base environment to delete. Format: + workspace-base-environments/{workspace_base_environment} + + + + + .. py:method:: get_default_workspace_base_environment(name: str) -> DefaultWorkspaceBaseEnvironment + + Gets the default WorkspaceBaseEnvironment configuration for the workspace. Returns the current default + base environment settings for both CPU and GPU compute. + + :param name: str + A static resource name of the default workspace base environment. Format: + default-workspace-base-environment + + :returns: :class:`DefaultWorkspaceBaseEnvironment` + + + .. py:method:: get_operation(name: str) -> Operation + + Gets the status of a long-running operation. Clients can use this method to poll the operation result. + + :param name: str + The name of the operation resource. + + :returns: :class:`Operation` + + + .. 
py:method:: get_workspace_base_environment(name: str) -> WorkspaceBaseEnvironment + + Retrieves a WorkspaceBaseEnvironment by its name. + + :param name: str + Required. The resource name of the workspace base environment to retrieve. Format: + workspace-base-environments/{workspace_base_environment} + + :returns: :class:`WorkspaceBaseEnvironment` + + + .. py:method:: list_workspace_base_environments( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[WorkspaceBaseEnvironment] + + Lists all WorkspaceBaseEnvironments in the workspace. + + :param page_size: int (optional) + The maximum number of environments to return per page. Default is 1000. + :param page_token: str (optional) + Page token for pagination. Received from a previous ListWorkspaceBaseEnvironments call. + + :returns: Iterator over :class:`WorkspaceBaseEnvironment` + + + .. py:method:: refresh_workspace_base_environment(name: str) -> RefreshWorkspaceBaseEnvironmentOperation + + Refreshes the materialized environment for a WorkspaceBaseEnvironment. This is a long-running + operation. The operation will asynchronously regenerate the materialized environment and is only + marked as done when the materialized environment has been successfully generated or has failed. The + existing materialized environment remains available until it expires. + + :param name: str + Required. The resource name of the workspace base environment to refresh. Format: + workspace-base-environments/{workspace_base_environment} + + :returns: :class:`Operation` + + + .. py:method:: update_default_workspace_base_environment(name: str, default_workspace_base_environment: DefaultWorkspaceBaseEnvironment, update_mask: FieldMask) -> DefaultWorkspaceBaseEnvironment + + Updates the default WorkspaceBaseEnvironment configuration for the workspace. Sets the specified base + environments as the workspace defaults for CPU and/or GPU compute. + + :param name: str + The resource name of this singleton resource. 
Format: default-workspace-base-environment + :param default_workspace_base_environment: :class:`DefaultWorkspaceBaseEnvironment` + Required. The default workspace base environment configuration to update. + :param update_mask: FieldMask + Field mask specifying which fields to update. To specify multiple fields in the field mask, use + comma as the separator (no space). The special value '*' indicates that all fields should be updated + (full replacement). Valid field paths: cpu_workspace_base_environment, + gpu_workspace_base_environment + + :returns: :class:`DefaultWorkspaceBaseEnvironment` + + + .. py:method:: update_workspace_base_environment(name: str, workspace_base_environment: WorkspaceBaseEnvironment) -> UpdateWorkspaceBaseEnvironmentOperation + + Updates an existing WorkspaceBaseEnvironment. This is a long-running operation. The operation will + asynchronously regenerate the materialized environment and is only marked as done when the + materialized environment has been successfully generated or has failed. The existing materialized + environment remains available until it expires. + + :param name: str + :param workspace_base_environment: :class:`WorkspaceBaseEnvironment` + Required. The workspace base environment with updated fields. The name field is used to identify the + environment to update. + + :returns: :class:`Operation` + \ No newline at end of file diff --git a/docs/workspace/environments/index.rst b/docs/workspace/environments/index.rst new file mode 100644 index 000000000..1466d6a3e --- /dev/null +++ b/docs/workspace/environments/index.rst @@ -0,0 +1,10 @@ + +Environments +============ + +Manage environment resources for Databricks workspaces. + +.. 
toctree:: + :maxdepth: 1 + + environments \ No newline at end of file diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst index 15524c53e..bf8411654 100644 --- a/docs/workspace/iam/permissions.rst +++ b/docs/workspace/iam/permissions.rst @@ -51,9 +51,9 @@ :param request_object_type: str The type of the request object. Can be one of the following: alerts, alertsv2, authorization, - clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie, - instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. + clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories, + experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, + repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. @@ -83,9 +83,9 @@ :param request_object_type: str The type of the request object. Can be one of the following: alerts, alertsv2, authorization, - clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie, - instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. + clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories, + experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, + repos, serving-endpoints, or warehouses. :param request_object_id: str :returns: :class:`GetPermissionLevelsResponse` @@ -131,9 +131,9 @@ :param request_object_type: str The type of the request object. Can be one of the following: alerts, alertsv2, authorization, - clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie, - instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. 
+ clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories, + experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, + repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) @@ -148,9 +148,9 @@ :param request_object_type: str The type of the request object. Can be one of the following: alerts, alertsv2, authorization, - clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie, - instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. + clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories, + experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, + repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) diff --git a/docs/workspace/iamv2/workspace_iam_v2.rst b/docs/workspace/iamv2/workspace_iam_v2.rst index 4bc594c67..86126cfbb 100644 --- a/docs/workspace/iamv2/workspace_iam_v2.rst +++ b/docs/workspace/iamv2/workspace_iam_v2.rst @@ -6,6 +6,120 @@ These APIs are used to manage identities and the workspace access of these identities in . + .. py:method:: create_group_membership_proxy(group_id: int, group_membership: GroupMembership) -> GroupMembership + + Creates a group membership (assigns a principal to a group). + + :param group_id: int + Required. Internal ID of the group in Databricks. + :param group_membership: :class:`GroupMembership` + Required. The group membership to create. + + :returns: :class:`GroupMembership` + + + .. 
py:method:: create_group_proxy(group: Group) -> Group + + TODO: Write description later when this method is implemented + + :param group: :class:`Group` + Required. Group to be created in + + :returns: :class:`Group` + + + .. py:method:: create_service_principal_proxy(service_principal: ServicePrincipal) -> ServicePrincipal + + TODO: Write description later when this method is implemented + + :param service_principal: :class:`ServicePrincipal` + Required. Service principal to be created in + + :returns: :class:`ServicePrincipal` + + + .. py:method:: create_user_proxy(user: User) -> User + + TODO: Write description later when this method is implemented + + :param user: :class:`User` + Required. User to be created in + + :returns: :class:`User` + + + .. py:method:: delete_group_membership_proxy(group_id: int, principal_id: int) + + Deletes a group membership (unassigns a principal from a group). + + :param group_id: int + Required. Internal ID of the group in Databricks. + :param principal_id: int + Required. Internal ID of the principal to be unassigned from the group. + + + + + .. py:method:: delete_group_proxy(internal_id: int) + + TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + + + + + .. py:method:: delete_service_principal_proxy(internal_id: int) + + TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + + + + + .. py:method:: delete_user_proxy(internal_id: int) + + TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. + + + + + .. py:method:: get_group_proxy(internal_id: int) -> Group + + TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + + :returns: :class:`Group` + + + .. 
py:method:: get_service_principal_proxy(internal_id: int) -> ServicePrincipal + + TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + + :returns: :class:`ServicePrincipal` + + + .. py:method:: get_user_proxy(internal_id: int) -> User + + TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. + + :returns: :class:`User` + + .. py:method:: get_workspace_access_detail_local(principal_id: int [, view: Optional[WorkspaceAccessDetailView]]) -> WorkspaceAccessDetail Returns the access details for a principal in the current workspace. Allows for checking access @@ -23,6 +137,98 @@ :returns: :class:`WorkspaceAccessDetail` + .. py:method:: list_direct_group_members_proxy(group_id: int [, page_size: Optional[int], page_token: Optional[str]]) -> ListDirectGroupMembersResponse + + Lists provisioned direct members of a group with their membership source (internal or from identity + provider). + + :param group_id: int + Required. Internal ID of the group in Databricks whose direct members are being listed. + :param page_size: int (optional) + The maximum number of members to return. The service may return fewer than this value. If not + provided, defaults to 1000 (also the maximum allowed). + :param page_token: str (optional) + A page token, received from a previous ListDirectGroupMembersProxy call. Provide this to retrieve + the subsequent page. + + :returns: :class:`ListDirectGroupMembersResponse` + + + .. py:method:: list_groups_proxy( [, filter: Optional[str], page_size: Optional[int], page_token: Optional[str]]) -> ListGroupsResponse + + TODO: Write description later when this method is implemented + + :param filter: str (optional) + Optional. Allows filtering groups by group name or external id. + :param page_size: int (optional) + The maximum number of groups to return. 
The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListGroups call. Provide this to retrieve the subsequent + page. + + :returns: :class:`ListGroupsResponse` + + + .. py:method:: list_service_principals_proxy( [, filter: Optional[str], page_size: Optional[int], page_token: Optional[str]]) -> ListServicePrincipalsResponse + + TODO: Write description later when this method is implemented + + :param filter: str (optional) + Optional. Allows filtering service principals by application id or external id. + :param page_size: int (optional) + The maximum number of SPs to return. The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListServicePrincipals call. Provide this to retrieve the + subsequent page. + + :returns: :class:`ListServicePrincipalsResponse` + + + .. py:method:: list_transitive_parent_groups_proxy(principal_id: int [, page_size: Optional[int], page_token: Optional[str]]) -> ListTransitiveParentGroupsResponse + + Lists all transitive parent groups of a principal. + + :param principal_id: int + Required. Internal ID of the principal in Databricks whose transitive parent groups are being + listed. + :param page_size: int (optional) + The maximum number of parent groups to return. The service may return fewer than this value. If not + provided, defaults to 1000 (also the maximum allowed). + :param page_token: str (optional) + A page token, received from a previous ListTransitiveParentGroups call. Provide this to retrieve the + subsequent page. + + :returns: :class:`ListTransitiveParentGroupsResponse` + + + .. py:method:: list_users_proxy( [, filter: Optional[str], page_size: Optional[int], page_token: Optional[str]]) -> ListUsersResponse + + TODO: Write description later when this method is implemented + + :param filter: str (optional) + Optional. Allows filtering users by username or external id. 
+ :param page_size: int (optional) + The maximum number of users to return. The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListUsers call. Provide this to retrieve the subsequent page. + + :returns: :class:`ListUsersResponse` + + + .. py:method:: list_workspace_access_details_local( [, page_size: Optional[int], page_token: Optional[str]]) -> ListWorkspaceAccessDetailsResponse + + TODO: Write description later when this method is implemented + + :param page_size: int (optional) + The maximum number of workspace access details to return. The service may return fewer than this + value. + :param page_token: str (optional) + A page token, received from a previous ListWorkspaceAccessDetails call. Provide this to retrieve the + subsequent page. + + :returns: :class:`ListWorkspaceAccessDetailsResponse` + + .. py:method:: resolve_group_proxy(external_id: str) -> ResolveGroupResponse Resolves a group with the given external ID from the customer's IdP. If the group does not exist, it @@ -57,4 +263,46 @@ Required. The external ID of the user in the customer's IdP. :returns: :class:`ResolveUserResponse` + + + .. py:method:: update_group_proxy(internal_id: int, group: Group, update_mask: str) -> Group + + TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + :param group: :class:`Group` + Required. Group to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`Group` + + + .. py:method:: update_service_principal_proxy(internal_id: int, service_principal: ServicePrincipal, update_mask: str) -> ServicePrincipal + + TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + :param service_principal: :class:`ServicePrincipal` + Required. 
Service principal to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`ServicePrincipal` + + + .. py:method:: update_user_proxy(internal_id: int, user: User, update_mask: str) -> User + + TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. + :param user: :class:`User` + Required. User to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`User` \ No newline at end of file diff --git a/docs/workspace/index.rst b/docs/workspace/index.rst index c9c9b27ed..3430d2c0e 100644 --- a/docs/workspace/index.rst +++ b/docs/workspace/index.rst @@ -14,11 +14,14 @@ These APIs are available from WorkspaceClient compute/index dashboards/index database/index + dataclassification/index dataquality/index + environments/index files/index iam/index iamv2/index jobs/index + knowledgeassistants/index marketplace/index ml/index oauth2/index @@ -30,6 +33,7 @@ These APIs are available from WorkspaceClient settingsv2/index sharing/index sql/index + supervisoragents/index tags/index vectorsearch/index workspace/index \ No newline at end of file diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst index ea5f34dc8..ed2d46299 100755 --- a/docs/workspace/jobs/jobs.rst +++ b/docs/workspace/jobs/jobs.rst @@ -126,7 +126,7 @@ .. py:method:: cancel_run_and_wait(run_id: int, timeout: datetime.timedelta = 0:20:00) -> Run - .. 
py:method:: create( [, access_control_list: Optional[List[JobAccessControlRequest]], budget_policy_id: Optional[str], continuous: Optional[Continuous], deployment: Optional[JobDeployment], description: Optional[str], edit_mode: Optional[JobEditMode], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], format: Optional[Format], git_source: Optional[GitSource], health: Optional[JobsHealthRules], job_clusters: Optional[List[JobCluster]], max_concurrent_runs: Optional[int], name: Optional[str], notification_settings: Optional[JobNotificationSettings], parameters: Optional[List[JobParameterDefinition]], performance_target: Optional[PerformanceTarget], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], schedule: Optional[CronSchedule], tags: Optional[Dict[str, str]], tasks: Optional[List[Task]], timeout_seconds: Optional[int], trigger: Optional[TriggerSettings], usage_policy_id: Optional[str], webhook_notifications: Optional[WebhookNotifications]]) -> CreateResponse + .. 
py:method:: create( [, access_control_list: Optional[List[JobAccessControlRequest]], budget_policy_id: Optional[str], continuous: Optional[Continuous], deployment: Optional[JobDeployment], description: Optional[str], edit_mode: Optional[JobEditMode], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], format: Optional[Format], git_source: Optional[GitSource], health: Optional[JobsHealthRules], job_clusters: Optional[List[JobCluster]], max_concurrent_runs: Optional[int], name: Optional[str], notification_settings: Optional[JobNotificationSettings], parameters: Optional[List[JobParameterDefinition]], parent_path: Optional[str], performance_target: Optional[PerformanceTarget], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], schedule: Optional[CronSchedule], tags: Optional[Dict[str, str]], tasks: Optional[List[Task]], timeout_seconds: Optional[int], trigger: Optional[TriggerSettings], usage_policy_id: Optional[str], webhook_notifications: Optional[WebhookNotifications]]) -> CreateResponse Usage: @@ -224,6 +224,9 @@ `email_notifications` and `webhook_notifications` for this job. :param parameters: List[:class:`JobParameterDefinition`] (optional) Job-level parameter definitions + :param parent_path: str (optional) + Path of the job parent folder in workspace file tree. If absent, the job doesn't have a workspace + object. :param performance_target: :class:`PerformanceTarget` (optional) The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run. 
The performance target does not apply to tasks that run on Serverless @@ -359,23 +362,21 @@ w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"] ) - created_job = w.jobs.create( - name=f"sdk-{time.time_ns()}", + run = w.jobs.submit( + run_name=f"sdk-{time.time_ns()}", tasks=[ - jobs.Task( - description="test", + jobs.SubmitTask( existing_cluster_id=cluster_id, notebook_task=jobs.NotebookTask(notebook_path=notebook_path), - task_key="test", - timeout_seconds=0, + task_key=f"sdk-{time.time_ns()}", ) ], - ) + ).result() - by_id = w.jobs.get(job_id=created_job.job_id) + output = w.jobs.get_run_output(run_id=run.tasks[0].run_id) # cleanup - w.jobs.delete(job_id=created_job.job_id) + w.jobs.delete_run(run_id=run.run_id) Get a single job. diff --git a/docs/workspace/knowledgeassistants/index.rst b/docs/workspace/knowledgeassistants/index.rst new file mode 100644 index 000000000..be0ff84f8 --- /dev/null +++ b/docs/workspace/knowledgeassistants/index.rst @@ -0,0 +1,10 @@ + +Knowledge Assistants +==================== + +Manage Knowledge Assistants and related resources. + +.. toctree:: + :maxdepth: 1 + + knowledge_assistants \ No newline at end of file diff --git a/docs/workspace/knowledgeassistants/knowledge_assistants.rst b/docs/workspace/knowledgeassistants/knowledge_assistants.rst new file mode 100644 index 000000000..292043797 --- /dev/null +++ b/docs/workspace/knowledgeassistants/knowledge_assistants.rst @@ -0,0 +1,143 @@ +``w.knowledge_assistants``: KnowledgeAssistants.v1 +================================================== +.. currentmodule:: databricks.sdk.service.knowledgeassistants +
+.. py:class:: KnowledgeAssistantsAPI + + Manage Knowledge Assistants and related resources. + + .. py:method:: create_knowledge_assistant(knowledge_assistant: KnowledgeAssistant) -> KnowledgeAssistant + + Creates a Knowledge Assistant. 
+ + :param knowledge_assistant: :class:`KnowledgeAssistant` + The Knowledge Assistant to create. + + :returns: :class:`KnowledgeAssistant` + + + .. py:method:: create_knowledge_source(parent: str, knowledge_source: KnowledgeSource) -> KnowledgeSource + + Creates a Knowledge Source under a Knowledge Assistant. + + :param parent: str + Parent resource where this source will be created. Format: + knowledge-assistants/{knowledge_assistant_id} + :param knowledge_source: :class:`KnowledgeSource` + + :returns: :class:`KnowledgeSource` + + + .. py:method:: delete_knowledge_assistant(name: str) + + Deletes a Knowledge Assistant. + + :param name: str + The resource name of the knowledge assistant to be deleted. Format: + knowledge-assistants/{knowledge_assistant_id} + + + + + .. py:method:: delete_knowledge_source(name: str) + + Deletes a Knowledge Source. + + :param name: str + The resource name of the Knowledge Source to delete. Format: + knowledge-assistants/{knowledge_assistant_id}/knowledge-sources/{knowledge_source_id} + + + + + .. py:method:: get_knowledge_assistant(name: str) -> KnowledgeAssistant + + Gets a Knowledge Assistant. + + :param name: str + The resource name of the knowledge assistant. Format: knowledge-assistants/{knowledge_assistant_id} + + :returns: :class:`KnowledgeAssistant` + + + .. py:method:: get_knowledge_source(name: str) -> KnowledgeSource + + Gets a Knowledge Source. + + :param name: str + The resource name of the Knowledge Source. Format: + knowledge-assistants/{knowledge_assistant_id}/knowledge-sources/{knowledge_source_id} + + :returns: :class:`KnowledgeSource` + + + .. py:method:: list_knowledge_assistants( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[KnowledgeAssistant] + + List Knowledge Assistants + + :param page_size: int (optional) + The maximum number of knowledge assistants to return. If unspecified, at most 100 knowledge + assistants will be returned. 
The maximum value is 100; values above 100 will be coerced to 100. + :param page_token: str (optional) + A page token, received from a previous `ListKnowledgeAssistants` call. Provide this to retrieve the + subsequent page. If unspecified, the first page will be returned. + + :returns: Iterator over :class:`KnowledgeAssistant` + + + .. py:method:: list_knowledge_sources(parent: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[KnowledgeSource] + + Lists Knowledge Sources under a Knowledge Assistant. + + :param parent: str + Parent resource to list from. Format: knowledge-assistants/{knowledge_assistant_id} + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`KnowledgeSource` + + + .. py:method:: sync_knowledge_sources(name: str) + + Sync all non-index Knowledge Sources for a Knowledge Assistant (index sources do not require sync) + + :param name: str + The resource name of the Knowledge Assistant. Format: knowledge-assistants/{knowledge_assistant_id} + + + + + .. py:method:: update_knowledge_assistant(name: str, knowledge_assistant: KnowledgeAssistant, update_mask: FieldMask) -> KnowledgeAssistant + + Updates a Knowledge Assistant. + + :param name: str + The resource name of the Knowledge Assistant. Format: knowledge-assistants/{knowledge_assistant_id} + :param knowledge_assistant: :class:`KnowledgeAssistant` + The Knowledge Assistant update payload. Only fields listed in update_mask are updated. REQUIRED + annotations on Knowledge Assistant fields describe create-time requirements and do not mean all + those fields are required for update. + :param update_mask: FieldMask + Comma-delimited list of fields to update on the Knowledge Assistant. Allowed values: `display_name`, + `description`, `instructions`. Examples: - `display_name` - `description,instructions` + + :returns: :class:`KnowledgeAssistant` + + + .. 
py:method:: update_knowledge_source(name: str, knowledge_source: KnowledgeSource, update_mask: FieldMask) -> KnowledgeSource + + Updates a Knowledge Source. + + :param name: str + The resource name of the Knowledge Source to update. Format: + knowledge-assistants/{knowledge_assistant_id}/knowledge-sources/{knowledge_source_id} + :param knowledge_source: :class:`KnowledgeSource` + The Knowledge Source update payload. Only fields listed in update_mask are updated. REQUIRED + annotations on Knowledge Source fields describe create-time requirements and do not mean all those + fields are required for update. + :param update_mask: FieldMask + Comma-delimited list of fields to update on the Knowledge Source. Allowed values: `display_name`, + `description`. Examples: - `display_name` - `display_name,description` + + :returns: :class:`KnowledgeSource` + \ No newline at end of file diff --git a/docs/workspace/ml/experiments.rst b/docs/workspace/ml/experiments.rst index 0a514b33c..abc64624b 100644 --- a/docs/workspace/ml/experiments.rst +++ b/docs/workspace/ml/experiments.rst @@ -274,6 +274,16 @@ :returns: :class:`GetLoggedModelResponse` + .. py:method:: get_logged_models( [, model_ids: Optional[List[str]]]) -> GetLoggedModelsRequestResponse + + Batch endpoint for getting logged models from a list of model IDs + + :param model_ids: List[str] (optional) + The IDs of the logged models to retrieve. Max threshold is 100. + + :returns: :class:`GetLoggedModelsRequestResponse` + + .. py:method:: get_permission_levels(experiment_id: str) -> GetExperimentPermissionLevelsResponse Gets the permission levels that a user can have on an object. 
diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst index 46c3a4565..4b9ee524e 100755 --- a/docs/workspace/ml/model_registry.rst +++ b/docs/workspace/ml/model_registry.rst @@ -91,8 +91,6 @@ w = WorkspaceClient() model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") - - created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") Creates a new registered model with the name specified in the request body. Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists. @@ -736,13 +734,14 @@ w = WorkspaceClient() - created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") + model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") - model = w.model_registry.get_model(name=created.registered_model.name) + created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") - w.model_registry.update_model( - name=model.registered_model_databricks.name, + w.model_registry.update_model_version( description=f"sdk-{time.time_ns()}", + name=created.model_version.name, + version=created.model_version.version, ) Updates a registered model. diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst index dda32ca30..aa529862d 100755 --- a/docs/workspace/pipelines/pipelines.rst +++ b/docs/workspace/pipelines/pipelines.rst @@ -16,6 +16,16 @@ step. You can also enforce data quality with Spark Declarative Pipelines expectations. Expectations allow you to define expected data quality and specify how to handle records that fail those expectations. + .. py:method:: apply_environment(pipeline_id: str) -> ApplyEnvironmentRequestResponse + + * Applies the current pipeline environment onto the pipeline compute. The environment applied can be + used by subsequent dev-mode updates. + + :param pipeline_id: str + + :returns: :class:`ApplyEnvironmentRequestResponse` + + .. 
py:method:: clone(pipeline_id: str [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clone_mode: Optional[CloneMode], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], environment: Optional[PipelinesEnvironment], event_log: Optional[EventLogSpec], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], root_path: Optional[str], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], tags: Optional[Dict[str, str]], target: Optional[str], trigger: Optional[PipelineTrigger], usage_policy_id: Optional[str]]) -> ClonePipelineResponse Creates a new pipeline using Unity Catalog from a pipeline using Hive Metastore. This method returns @@ -210,12 +220,17 @@ :returns: :class:`CreatePipelineResponse` - .. py:method:: delete(pipeline_id: str [, force: Optional[bool]]) + .. py:method:: delete(pipeline_id: str [, cascade: Optional[bool], delete_datasets: Optional[bool], force: Optional[bool]]) Deletes a pipeline. If the pipeline publishes to Unity Catalog, pipeline deletion will cascade to all pipeline tables. Please reach out to Databricks support for assistance to undo this action. :param pipeline_id: str + :param cascade: bool (optional) + If false, pipeline deletion will not cascade to its datasets (MVs, STs, Views). By default, this + parameter will be true and all tables will be deleted with the pipeline. + :param delete_datasets: bool (optional) + Deprecated: Use `cascade` instead. 
:param force: bool (optional) If true, deletion will proceed even if resource cleanup fails. By default, deletion will fail if resources cleanup is required but fails. @@ -418,6 +433,17 @@ :returns: :class:`ListUpdatesResponse` + .. py:method:: restore_pipeline(pipeline_id: str) -> RestorePipelineRequestResponse + + * Restores a pipeline that was previously deleted, if within the restoration window. All tables + deleted at pipeline deletion will be undropped as well. + + :param pipeline_id: str + The ID of the pipeline to restore + + :returns: :class:`RestorePipelineRequestResponse` + + .. py:method:: set_permissions(pipeline_id: str [, access_control_list: Optional[List[PipelineAccessControlRequest]]]) -> PipelinePermissions Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct diff --git a/docs/workspace/postgres/postgres.rst b/docs/workspace/postgres/postgres.rst index a7c1cf1b1..4b61bef94 100755 --- a/docs/workspace/postgres/postgres.rst +++ b/docs/workspace/postgres/postgres.rst @@ -34,6 +34,15 @@ :returns: :class:`Operation` + .. py:method:: create_catalog(catalog: Catalog) -> Catalog + + Register a Database in UC. + + :param catalog: :class:`Catalog` + + :returns: :class:`Catalog` + + .. py:method:: create_database(parent: str, database: Database [, database_id: Optional[str]]) -> CreateDatabaseOperation Create a Database. @@ -108,6 +117,24 @@ :returns: :class:`Operation` + .. py:method:: create_synced_table(synced_table: SyncedTable) -> SyncedTable + + Create a Synced Table. + + :param synced_table: :class:`SyncedTable` + + :returns: :class:`SyncedTable` + + + .. py:method:: create_table(table: Table) -> Table + + Create a Table (non-synced database table for Autoscaling v2 Lakebase projects). + + :param table: :class:`Table` + + :returns: :class:`Table` + + .. py:method:: delete_branch(name: str) -> DeleteBranchOperation Deletes the specified database branch. @@ -118,6 +145,15 @@ :returns: :class:`Operation` + .. 
py:method:: delete_catalog(name: str) + + Delete a Database Catalog. + + :param name: str + + + + .. py:method:: delete_database(name: str) -> DeleteDatabaseOperation Delete a Database. @@ -169,7 +205,45 @@ :returns: :class:`Operation` - .. py:method:: generate_database_credential(endpoint: str [, claims: Optional[List[RequestedClaims]]]) -> DatabaseCredential + .. py:method:: delete_synced_table(name: str) + + Delete a Synced Table. + + :param name: str + Full three-part (catalog, schema, table) name of the synced table. + + + + + .. py:method:: delete_table(name: str) + + Delete a Table (non-synced database table for Autoscaling v2 Lakebase projects). + + :param name: str + Full three-part (catalog, schema, table) name of the table. + + + + + .. py:method:: disable_forward_etl(parent: str [, pg_database_oid: Optional[int], pg_schema_oid: Optional[int], tenant_id: Optional[str], timeline_id: Optional[str]]) -> DisableForwardEtlResponse + + Disable Forward ETL for a branch. + + :param parent: str + The Branch to disable Forward ETL for. Format: projects/{project_id}/branches/{branch_id} + :param pg_database_oid: int (optional) + PostgreSQL database OID to disable. + :param pg_schema_oid: int (optional) + PostgreSQL schema OID to disable. + :param tenant_id: str (optional) + Tenant ID (dashless UUID format). + :param timeline_id: str (optional) + Timeline ID (dashless UUID format). + + :returns: :class:`DisableForwardEtlResponse` + + + .. py:method:: generate_database_credential(endpoint: str [, claims: Optional[List[RequestedClaims]], expire_time: Optional[Timestamp], group_name: Optional[str], ttl: Optional[Duration]]) -> DatabaseCredential Generate OAuth credentials for a Postgres database. @@ -178,6 +252,14 @@ projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id} :param claims: List[:class:`RequestedClaims`] (optional) The returned token will be scoped to UC tables with the specified permissions. 
+ :param expire_time: Timestamp (optional) + Timestamp in UTC of when this credential should expire. Expire time should be within 1 hour of the + current time. + :param group_name: str (optional) + Databricks workspace group name. When provided, credentials are generated with permissions scoped to + this group. + :param ttl: Duration (optional) + The requested time-to-live for the generated credential token. Maximum allowed duration is 1 hour. :returns: :class:`DatabaseCredential` @@ -192,6 +274,27 @@ :returns: :class:`Branch` + .. py:method:: get_catalog(name: str) -> Catalog + + Get a Database Catalog. + + :param name: str + + :returns: :class:`Catalog` + + + .. py:method:: get_compute_instance(name: str) -> ComputeInstance + + Lists the specific compute instance under an endpoint. Note: ComputeInstances are managed via the + parent Endpoint resource, and cannot be created, updated, or deleted directly. + + :param name: str + The full resource path of the compute instance to retrieve. Format: + projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id}/compute-instances/{compute_instance_id} + + :returns: :class:`ComputeInstance` + + .. py:method:: get_database(name: str) -> Database Get a Database. @@ -215,6 +318,34 @@ :returns: :class:`Endpoint` + .. py:method:: get_forward_etl_metadata(parent: str [, tenant_id: Optional[str], timeline_id: Optional[str]]) -> ForwardEtlMetadata + + Get Forward ETL metadata (database and schema OIDs). + + :param parent: str + The Branch to get metadata for. Format: projects/{project_id}/branches/{branch_id} + :param tenant_id: str (optional) + Tenant ID (dashless UUID format). + :param timeline_id: str (optional) + Timeline ID (dashless UUID format). + + :returns: :class:`ForwardEtlMetadata` + + + .. py:method:: get_forward_etl_status(parent: str [, tenant_id: Optional[str], timeline_id: Optional[str]]) -> ForwardEtlStatus + + Get Forward ETL configuration and status for a branch. 
+ + :param parent: str + The Branch to get Forward ETL status for. Format: projects/{project_id}/branches/{branch_id} + :param tenant_id: str (optional) + Tenant ID (dashless UUID format). + :param timeline_id: str (optional) + Timeline ID (dashless UUID format). + + :returns: :class:`ForwardEtlStatus` + + .. py:method:: get_operation(name: str) -> Operation Retrieves the status of a long-running operation. @@ -247,6 +378,26 @@ :returns: :class:`Role` + .. py:method:: get_synced_table(name: str) -> SyncedTable + + Get a Synced Table. + + :param name: str + Full three-part (catalog, schema, table) name of the synced table. + + :returns: :class:`SyncedTable` + + + .. py:method:: get_table(name: str) -> Table + + Get a Table (non-synced database table for Autoscaling v2 Lakebase projects). + + :param name: str + Full three-part (catalog, schema, table) name of the table. + + :returns: :class:`Table` + + .. py:method:: list_branches(parent: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Branch] Returns a paginated list of database branches in the project. @@ -261,6 +412,29 @@ :returns: Iterator over :class:`Branch` + .. py:method:: list_compute_instances(parent: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ComputeInstance] + + Lists all compute instances that have been created under the specified endpoint. Note: + ComputeInstances are managed via the parent Endpoint resource, and cannot be created, updated, or + deleted directly. + + :param parent: str + The parent, which owns the compute instances. + :param page_size: int (optional) + The maximum number of compute instances to return. The service may return fewer than this value. + + If unspecified, at most 50 compute instances will be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + :param page_token: str (optional) + A page token, received from a previous `ListInstances` call. Provide this to retrieve the subsequent + page. 
+ + When paginating, all other parameters provided to `ListInstances` must match the call that provided + the page token. + + :returns: Iterator over :class:`ComputeInstance` + + .. py:method:: list_databases(parent: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Database] List Databases. @@ -387,4 +561,23 @@ The list of fields to update. If unspecified, all fields will be updated when possible. :returns: :class:`Operation` + + + .. py:method:: update_role(name: str, role: Role, update_mask: FieldMask) -> UpdateRoleOperation + + Update a role for a branch. + + :param name: str + Output only. The full resource path of the role. Format: + projects/{project_id}/branches/{branch_id}/roles/{role_id} + :param role: :class:`Role` + The Postgres Role to update. + + The role's `name` field is used to identify the role to update. Format: + projects/{project_id}/branches/{branch_id}/roles/{role_id} + :param update_mask: FieldMask + The list of fields to update in Postgres Role. If unspecified, all fields will be updated when + possible. + + :returns: :class:`Operation` \ No newline at end of file diff --git a/docs/workspace/settings/token_management.rst b/docs/workspace/settings/token_management.rst index ee0e831b8..911852dd7 100644 --- a/docs/workspace/settings/token_management.rst +++ b/docs/workspace/settings/token_management.rst @@ -7,7 +7,7 @@ Enables administrators to get all tokens and delete tokens for other users. Admins can either get every token, get a specific token by ID, or get all tokens for a particular user. - .. py:method:: create_obo_token(application_id: str [, comment: Optional[str], lifetime_seconds: Optional[int]]) -> CreateOboTokenResponse + .. py:method:: create_obo_token(application_id: str [, comment: Optional[str], lifetime_seconds: Optional[int], scopes: Optional[List[str]]]) -> CreateOboTokenResponse Usage: @@ -42,6 +42,7 @@ Comment that describes the purpose of the token. 
:param lifetime_seconds: int (optional) The number of seconds before the token expires. + :param scopes: List[str] (optional) :returns: :class:`CreateOboTokenResponse` diff --git a/docs/workspace/settings/tokens.rst b/docs/workspace/settings/tokens.rst index 4904ba652..c820eb848 100755 --- a/docs/workspace/settings/tokens.rst +++ b/docs/workspace/settings/tokens.rst @@ -7,7 +7,7 @@ The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs. - .. py:method:: create( [, comment: Optional[str], lifetime_seconds: Optional[int]]) -> CreateTokenResponse + .. py:method:: create( [, comment: Optional[str], lifetime_seconds: Optional[int], scopes: Optional[List[str]]]) -> CreateTokenResponse Usage: @@ -35,6 +35,8 @@ The lifetime of the token, in seconds. If the lifetime is not specified, this token remains valid for 2 years. + :param scopes: List[str] (optional) + Optional scopes of the token. :returns: :class:`CreateTokenResponse` @@ -68,4 +70,30 @@ :returns: Iterator over :class:`PublicTokenInfo` + + + .. py:method:: update(token_id: str, token: PublicTokenInfo, update_mask: FieldMask) -> UpdateTokenResponse + + Updates the comment or scopes of a token. + + If a token with the specified ID is not valid, this call returns an error **RESOURCE_DOES_NOT_EXIST**. + + :param token_id: str + The SHA-256 hash of the token to be updated. + :param token: :class:`PublicTokenInfo` + :param update_mask: FieldMask + A list of field name under PublicTokenInfo, For example in request use {"update_mask": + "comment,scopes"} + + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. 
+ + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + + :returns: :class:`UpdateTokenResponse` \ No newline at end of file diff --git a/docs/workspace/sharing/recipient_federation_policies.rst b/docs/workspace/sharing/recipient_federation_policies.rst index 770f9b1ca..0cdcd8559 100644 --- a/docs/workspace/sharing/recipient_federation_policies.rst +++ b/docs/workspace/sharing/recipient_federation_policies.rst @@ -93,4 +93,24 @@ :param page_token: str (optional) :returns: Iterator over :class:`FederationPolicy` + + + .. py:method:: update(recipient_name: str, name: str, policy: FederationPolicy [, update_mask: Optional[str]]) -> FederationPolicy + + Updates an existing federation policy for an OIDC_RECIPIENT. The caller must be the owner of the + recipient. + + :param recipient_name: str + Name of the recipient. This is the name of the recipient for which the policy is being updated. + :param name: str + Name of the policy. This is the name of the current name of the policy. + :param policy: :class:`FederationPolicy` + :param update_mask: str (optional) + The field mask specifies which fields of the policy to update. To specify multiple fields in the + field mask, use comma as the separator (no space). The special value '*' indicates that all fields + should be updated (full replacement). If unspecified, all fields that are set in the policy provided + in the update request will overwrite the corresponding fields in the existing policy. Example value: + 'comment,oidc_policy.audiences'. 
+ + :returns: :class:`FederationPolicy` \ No newline at end of file diff --git a/docs/workspace/sharing/shares.rst b/docs/workspace/sharing/shares.rst index 80c5d11b6..4c7a9d03e 100644 --- a/docs/workspace/sharing/shares.rst +++ b/docs/workspace/sharing/shares.rst @@ -9,7 +9,7 @@ register data assets under their original name, qualified by their original schema, or provide alternate exposed names. - .. py:method:: create(name: str [, comment: Optional[str], storage_root: Optional[str]]) -> ShareInfo + .. py:method:: create(name: str [, comment: Optional[str], replication_config: Optional[ReplicationConfig], serverless_budget_policy_id: Optional[str], storage_root: Optional[str]]) -> ShareInfo Usage: @@ -34,6 +34,11 @@ Name of the share. :param comment: str (optional) User-provided free-form text description. + :param replication_config: :class:`ReplicationConfig` (optional) + Configuration for share replication. + :param serverless_budget_policy_id: str (optional) + Serverless budget policy id (can only be created/updated when calling data-sharing service) + [Create,Update:IGN] :param storage_root: str (optional) Storage root URL for the share. @@ -134,7 +139,7 @@ :returns: :class:`GetSharePermissionsResponse` - .. py:method:: update(name: str [, comment: Optional[str], new_name: Optional[str], owner: Optional[str], storage_root: Optional[str], updates: Optional[List[SharedDataObjectUpdate]]]) -> ShareInfo + .. py:method:: update(name: str [, comment: Optional[str], new_name: Optional[str], owner: Optional[str], serverless_budget_policy_id: Optional[str], storage_root: Optional[str], updates: Optional[List[SharedDataObjectUpdate]]]) -> ShareInfo Usage: @@ -210,6 +215,9 @@ New name for the share. :param owner: str (optional) Username of current owner of share. 
+      :param serverless_budget_policy_id: str (optional)
+        Serverless budget policy id (can only be created/updated when calling data-sharing service)
+        [Create,Update:IGN]
       :param storage_root: str (optional)
         Storage root URL for the share.
       :param updates: List[:class:`SharedDataObjectUpdate`] (optional)
diff --git a/docs/workspace/supervisoragents/index.rst b/docs/workspace/supervisoragents/index.rst
new file mode 100644
index 000000000..03fb310f3
--- /dev/null
+++ b/docs/workspace/supervisoragents/index.rst
@@ -0,0 +1,10 @@
+
+Supervisor Agents
+=================
+
+Manage Supervisor Agents and related resources.
+
+.. toctree::
+   :maxdepth: 1
+
+   supervisor_agents
\ No newline at end of file
diff --git a/docs/workspace/supervisoragents/supervisor_agents.rst b/docs/workspace/supervisoragents/supervisor_agents.rst
new file mode 100644
index 000000000..9c3824f6e
--- /dev/null
+++ b/docs/workspace/supervisoragents/supervisor_agents.rst
@@ -0,0 +1,78 @@
+``w.supervisor_agents``: SupervisorAgents.v1
+============================================
+.. currentmodule:: databricks.sdk.service.supervisoragents
+
+.. py:class:: SupervisorAgentsAPI
+
+    Manage Supervisor Agents and related resources.
+
+    .. py:method:: create_supervisor_agent(supervisor_agent: SupervisorAgent) -> SupervisorAgent
+
+
+    .. py:method:: create_tool(parent: str, tool: Tool) -> Tool
+
+        Creates a Tool under a Supervisor Agent.
+
+        :param parent: str
+          Parent resource where this tool will be created. Format: supervisor-agents/{supervisor_agent_id}
+        :param tool: :class:`Tool`
+
+        :returns: :class:`Tool`
+
+
+    .. py:method:: delete_supervisor_agent(name: str)
+
+
+    .. py:method:: delete_tool(name: str)
+
+        Deletes a Tool.
+
+        :param name: str
+          The resource name of the Tool. Format: supervisor-agents/{supervisor_agent_id}/tools/{tool_id}
+
+
+
+
+    .. py:method:: get_supervisor_agent(name: str) -> SupervisorAgent
+
+
+    .. py:method:: get_tool(name: str) -> Tool
+
+        Gets a Tool.
+ + :param name: str + The resource name of the Tool. Format: supervisor-agents/{supervisor_agent_id}/tools/{tool_id} + + :returns: :class:`Tool` + + + .. py:method:: list_supervisor_agents( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[SupervisorAgent] + + + .. py:method:: list_tools(parent: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Tool] + + Lists Tools under a Supervisor Agent. + + :param parent: str + Parent resource to list from. Format: supervisor-agents/{supervisor_agent_id} + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`Tool` + + + .. py:method:: update_supervisor_agent(name: str, supervisor_agent: SupervisorAgent, update_mask: FieldMask) -> SupervisorAgent + + + .. py:method:: update_tool(name: str, tool: Tool, update_mask: FieldMask) -> Tool + + Updates a Tool. + + :param name: str + :param tool: :class:`Tool` + The Tool to update. + :param update_mask: FieldMask + Field mask for fields to be updated. + + :returns: :class:`Tool` + \ No newline at end of file diff --git a/docs/workspace/tags/workspace_entity_tag_assignments.rst b/docs/workspace/tags/workspace_entity_tag_assignments.rst index 3c8d254cd..93e50b704 100755 --- a/docs/workspace/tags/workspace_entity_tag_assignments.rst +++ b/docs/workspace/tags/workspace_entity_tag_assignments.rst @@ -20,7 +20,8 @@ Delete a tag assignment :param entity_type: str - The type of entity to which the tag is assigned. Allowed values are apps, dashboards, geniespaces + The type of entity to which the tag is assigned. Allowed values are apps, dashboards, geniespaces, + notebooks :param entity_id: str The identifier of the entity to which the tag is assigned. For apps, the entity_id is the app name :param tag_key: str @@ -34,7 +35,8 @@ Get a tag assignment :param entity_type: str - The type of entity to which the tag is assigned. 
Allowed values are apps, dashboards, geniespaces + The type of entity to which the tag is assigned. Allowed values are apps, dashboards, geniespaces, + notebooks :param entity_id: str The identifier of the entity to which the tag is assigned. For apps, the entity_id is the app name :param tag_key: str @@ -48,7 +50,8 @@ List the tag assignments for an entity :param entity_type: str - The type of entity to which the tag is assigned. Allowed values are apps, dashboards, geniespaces + The type of entity to which the tag is assigned. Allowed values are apps, dashboards, geniespaces, + notebooks :param entity_id: str The identifier of the entity to which the tag is assigned. For apps, the entity_id is the app name :param page_size: int (optional) @@ -64,7 +67,8 @@ Update a tag assignment :param entity_type: str - The type of entity to which the tag is assigned. Allowed values are apps, dashboards, geniespaces + The type of entity to which the tag is assigned. Allowed values are apps, dashboards, geniespaces, + notebooks :param entity_id: str The identifier of the entity to which the tag is assigned. For apps, the entity_id is the app name :param tag_key: str diff --git a/docs/workspace/vectorsearch/vector_search_endpoints.rst b/docs/workspace/vectorsearch/vector_search_endpoints.rst index 6857dd0fa..3fb864803 100755 --- a/docs/workspace/vectorsearch/vector_search_endpoints.rst +++ b/docs/workspace/vectorsearch/vector_search_endpoints.rst @@ -6,7 +6,7 @@ **Endpoint**: Represents the compute resources to host vector search indexes. - .. py:method:: create_endpoint(name: str, endpoint_type: EndpointType [, budget_policy_id: Optional[str], min_qps: Optional[int]]) -> Wait[EndpointInfo] + .. py:method:: create_endpoint(name: str, endpoint_type: EndpointType [, budget_policy_id: Optional[str], min_qps: Optional[int], num_replicas: Optional[int], usage_policy_id: Optional[str]]) -> Wait[EndpointInfo] Create a new endpoint. 
@@ -19,13 +19,17 @@ :param min_qps: int (optional) Min QPS for the endpoint. Mutually exclusive with num_replicas. The actual replica count is calculated at index creation/sync time based on this value. + :param num_replicas: int (optional) + Initial number of replicas for the endpoint. If not specified, defaults to 1. + :param usage_policy_id: str (optional) + The usage policy id to be applied once we've migrated to usage policies :returns: Long-running operation waiter for :class:`EndpointInfo`. See :method:wait_get_endpoint_vector_search_endpoint_online for more details. - .. py:method:: create_endpoint_and_wait(name: str, endpoint_type: EndpointType [, budget_policy_id: Optional[str], min_qps: Optional[int], timeout: datetime.timedelta = 0:20:00]) -> EndpointInfo + .. py:method:: create_endpoint_and_wait(name: str, endpoint_type: EndpointType [, budget_policy_id: Optional[str], min_qps: Optional[int], num_replicas: Optional[int], usage_policy_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> EndpointInfo .. py:method:: delete_endpoint(endpoint_name: str) @@ -70,6 +74,30 @@ :returns: :class:`EndpointInfo` + .. py:method:: patch_endpoint_throughput(endpoint_name: str [, all_or_nothing: Optional[bool], concurrency: Optional[float], maximum_concurrency_allowed: Optional[float], minimal_concurrency_allowed: Optional[float], num_replicas: Optional[int]]) -> PatchEndpointThroughputResponse + + Update the throughput (concurrency) of an endpoint + + :param endpoint_name: str + Name of the vector search endpoint + :param all_or_nothing: bool (optional) + If true, the request will fail if the requested concurrency or limits cannot be exactly met. If + false, the request will be adjusted to the closest possible value. + :param concurrency: float (optional) + Requested concurrency (total CPU) for the endpoint. If not specified, the current concurrency is + maintained. 
+ :param maximum_concurrency_allowed: float (optional) + Maximum concurrency allowed for the endpoint. If not specified, the current maximum is maintained. + :param minimal_concurrency_allowed: float (optional) + Minimum concurrency allowed for the endpoint. If not specified, the current minimum is maintained. + :param num_replicas: int (optional) + Requested number of data copies for the endpoint (including primary). For example: num_replicas=2 + means 2 total copies of the data (1 primary + 1 replica). If not specified, the current replication + factor is maintained. Valid range: 1-6 (where 1 = no replication, 6 = 1 primary + 5 replicas). + + :returns: :class:`PatchEndpointThroughputResponse` + + .. py:method:: retrieve_user_visible_metrics(name: str [, end_time: Optional[str], granularity_in_seconds: Optional[int], metrics: Optional[List[Metric]], page_token: Optional[str], start_time: Optional[str]]) -> RetrieveUserVisibleMetricsResponse Retrieve user-visible metrics for an endpoint diff --git a/docs/workspace/vectorsearch/vector_search_indexes.rst b/docs/workspace/vectorsearch/vector_search_indexes.rst index a634d9dff..d33c8402c 100644 --- a/docs/workspace/vectorsearch/vector_search_indexes.rst +++ b/docs/workspace/vectorsearch/vector_search_indexes.rst @@ -158,6 +158,18 @@ + .. py:method:: update_index_budget_policy(index_name: str [, usage_policy_id: Optional[str]]) -> UpdateVectorIndexUsagePolicyResponse + + Update the budget policy of an index + + :param index_name: str + Name of the vector search index + :param usage_policy_id: str (optional) + The usage policy id to be applied + + :returns: :class:`UpdateVectorIndexUsagePolicyResponse` + + .. py:method:: upsert_data_vector_index(index_name: str, inputs_json: str) -> UpsertDataVectorIndexResponse Handles the upserting of data into a specified vector index. 
diff --git a/docs/workspace/workspace/workspace.rst b/docs/workspace/workspace/workspace.rst index 133c59e7e..be7f2f188 100755 --- a/docs/workspace/workspace/workspace.rst +++ b/docs/workspace/workspace/workspace.rst @@ -63,7 +63,7 @@ :return: file-like `io.BinaryIO` of the `path` contents. - .. py:method:: export(path: str [, format: Optional[ExportFormat]]) -> ExportResponse + .. py:method:: export(path: str [, format: Optional[ExportFormat], outputs: Optional[ExportOutputs]]) -> ExportResponse Usage: @@ -79,7 +79,7 @@ notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}" - export_response = w.workspace.export_(format=workspace.ExportFormat.SOURCE, path=notebook) + export_response = w.workspace.export(format=workspace.ExportFormat.SOURCE, path=notebook) Exports an object or the contents of an entire directory. @@ -102,6 +102,11 @@ Directory exports will not include non-notebook entries. - `R_MARKDOWN`: The notebook is exported to R Markdown format. - `AUTO`: The object or directory is exported depending on the objects type. Directory exports will include notebooks and workspace files. + :param outputs: :class:`ExportOutputs` (optional) + This specifies which cell outputs should be included in the export (if the export format allows it). + If not specified, the behavior is determined by the format. For JUPYTER format, the default is to + include all outputs. This is a public endpoint, but only ALL or NONE is documented publically, + DATABRICKS is internal only :returns: :class:`ExportResponse` @@ -185,9 +190,7 @@ language=workspace.Language.PYTHON, content=base64.b64encode( ( - """import time - time.sleep(10) - dbutils.notebook.exit('hello') + """print(1) """ ).encode() ).decode(), @@ -234,14 +237,16 @@ .. 
code-block:: + import os + import time + from databricks.sdk import WorkspaceClient w = WorkspaceClient() - names = [] - for i in w.workspace.list(f"/Users/{w.current_user.me().user_name}", recursive=True): - names.append(i.path) - assert len(names) > 0 + notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}" + + objects = w.workspace.list(path=os.path.dirname(notebook)) List workspace objects