diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 62d390339..1a48101bf 100755
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-9e9cd2a1a802f6df10f3a5ffe6aa97b588d5884a
\ No newline at end of file
+b2acebf0af39a39d83fdac76ae48d761001e7052
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index 4710c40d0..0222a7691 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -432,6 +432,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTem
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryVolumeCredentialRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryVolumeCredentialResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccessRequestDestinationsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreAssignmentRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreRequest.java linguist-generated=true
@@ -647,6 +649,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPa
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryVolumeCredentialsAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryVolumeCredentialsImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryVolumeCredentialsService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TriggeredUpdateStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccessRequestDestinationsRequest.java linguist-generated=true
@@ -687,6 +692,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationR
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultOperation.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultResult.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeOperation.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesImpl.java linguist-generated=true
@@ -816,6 +822,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExec
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ConfidentialComputeType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusResponse.java linguist-generated=true
@@ -1202,6 +1209,30 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Snapsho
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/TimeSeriesConfig.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateMonitorRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateRefreshRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/CreateFailoverGroupRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/CreateStableUrlRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DeleteFailoverGroupRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DeleteStableUrlRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DisasterRecoveryAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DisasterRecoveryImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DisasterRecoveryService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/FailoverFailoverGroupRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/FailoverFailoverGroupRequestFailoverType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/FailoverGroup.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/FailoverGroupState.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/GetFailoverGroupRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/GetStableUrlRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/ListFailoverGroupsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/ListFailoverGroupsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/ListStableUrlsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/ListStableUrlsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/LocationMapping.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/LocationMappingEntry.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/StableUrl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/UcCatalog.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/UcReplicationConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/UpdateFailoverGroupRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/WorkspaceSet.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/BaseEnvironmentType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/CreateWorkspaceBaseEnvironmentOperation.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/CreateWorkspaceBaseEnvironmentRequest.java linguist-generated=true
@@ -1628,27 +1659,44 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewsToExport.
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Webhook.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotifications.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WidgetErrorDetail.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateExampleRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateKnowledgeAssistantRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateKnowledgeSourceRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteExampleRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteKnowledgeAssistantRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteKnowledgeSourceRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/Example.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/FileTableSpec.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/FilesSpec.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetExampleRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantPermissionLevelsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantPermissionLevelsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantPermissionsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeSourceRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/IndexSpec.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistant.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantAccessControlRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantAccessControlResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermission.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermissionLevel.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermissions.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermissionsDescription.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermissionsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantState.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeSource.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeSourceState.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListExamplesRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListExamplesResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeAssistantsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeAssistantsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeSourcesRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeSourcesResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/SyncKnowledgeSourcesRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/UpdateExampleRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/UpdateKnowledgeAssistantRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/UpdateKnowledgeSourceRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingRequest.java linguist-generated=true
@@ -2180,6 +2228,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/AutoFullR
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CloneMode.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ClonePipelineRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ClonePipelineResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConfluenceConnectorOptions.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectionParameters.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectorOptions.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectorType.java linguist-generated=true
@@ -2209,6 +2258,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipeli
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponseHealth.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdateRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdateResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GoogleAdsConfig.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GoogleAdsOptions.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GoogleDriveOptions.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GoogleDriveOptionsGoogleDriveEntityType.java linguist-generated=true
@@ -2219,6 +2269,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Ingestion
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinitionWorkdayReportParameters.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/JiraConnectorOptions.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelinesRequest.java linguist-generated=true
@@ -2231,6 +2282,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/NotebookL
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Notifications.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OperationTimeWindow.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OutlookAttachmentMode.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OutlookBodyFormat.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OutlookOptions.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PathPattern.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlResponse.java linguist-generated=true
@@ -2268,6 +2322,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Sequencin
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SerializedException.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SharepointOptions.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SharepointOptionsSharepointEntityType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SmartsheetOptions.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SourceCatalogConfig.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SourceConfig.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StackFrame.java linguist-generated=true
@@ -2395,6 +2450,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTabl
databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTableSyncedTableSpec.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTableSyncedTableSpecSyncedTableSchedulingPolicy.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTableSyncedTableStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UndeleteProjectOperation.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UndeleteProjectRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateBranchOperation.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateBranchRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateDatabaseOperation.java linguist-generated=true
@@ -3356,7 +3413,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Widget.java lin
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetOptions.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetPosition.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/App.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Connection.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/CreateSupervisorAgentRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/CreateToolRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/DeleteSupervisorAgentRequest.java linguist-generated=true
@@ -3374,6 +3430,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Su
databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Tool.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UcConnection.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UcFunction.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UpdateSupervisorAgentRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UpdateToolRequest.java linguist-generated=true
diff --git a/.github/workflows/tagging.yml b/.github/workflows/tagging.yml
index e22f794e0..f8d421a08 100755
--- a/.github/workflows/tagging.yml
+++ b/.github/workflows/tagging.yml
@@ -4,7 +4,12 @@ name: tagging
on:
# Manual dispatch.
workflow_dispatch:
- # No inputs are required for the manual dispatch.
+ inputs:
+ packages:
+ description: 'Comma-separated list of packages to tag (e.g. "pkg1,pkg2"). Leave empty to tag all packages with pending releases.'
+ required: false
+ type: string
+ default: ''
# NOTE: Temporarily disable automated releases.
#
@@ -61,7 +66,13 @@ jobs:
env:
GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
GITHUB_REPOSITORY: ${{ github.repository }}
- run: uv run --locked tagging.py
+ PACKAGES: ${{ inputs.packages }}
+ run: |
+ if [ -n "$PACKAGES" ]; then
+ uv run --locked tagging.py --package "$PACKAGES"
+ else
+ uv run --locked tagging.py
+ fi
- name: Upload created tags artifact
if: always()
diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index 5dd8581f1..3af76fe7b 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -15,3 +15,24 @@
### Internal Changes
### API Changes
+* Add `com.databricks.sdk.service.disasterrecovery` package.
+* Add `workspaceClient.temporaryVolumeCredentials()` service.
+* Add `accountClient.disasterRecovery()` service.
+* Add `createExample()`, `deleteExample()`, `getExample()`, `getPermissionLevels()`, `getPermissions()`, `listExamples()`, `setPermissions()`, `updateExample()` and `updatePermissions()` methods for `workspaceClient.knowledgeAssistants()` service.
+* Add `undeleteProject()` method for `workspaceClient.postgres()` service.
+* Add `thumbnailUrl` field for `com.databricks.sdk.service.apps.App`.
+* Add `confidentialComputeType` field for `com.databricks.sdk.service.compute.GcpAttributes`.
+* Add `deltaTableName` field for `com.databricks.sdk.service.ml.BackfillSource`.
+* Add `confluenceOptions`, `jiraOptions`, `outlookOptions` and `smartsheetOptions` fields for `com.databricks.sdk.service.pipelines.ConnectorOptions`.
+* Add `googleAdsConfig` field for `com.databricks.sdk.service.pipelines.SourceConfig`.
+* Add `replaceExisting` field for `com.databricks.sdk.service.postgres.CreateBranchRequest`.
+* Add `replaceExisting` field for `com.databricks.sdk.service.postgres.CreateEndpointRequest`.
+* Add `purge` field for `com.databricks.sdk.service.postgres.DeleteProjectRequest`.
+* Add `showDeleted` field for `com.databricks.sdk.service.postgres.ListProjectsRequest`.
+* Add `deleteTime` and `purgeTime` fields for `com.databricks.sdk.service.postgres.Project`.
+* Add `ucConnection` field for `com.databricks.sdk.service.supervisoragents.Tool`.
+* Add `CONFLUENCE` enum value for `com.databricks.sdk.service.catalog.ConnectionType`.
+* Add `CONFLUENCE` enum value for `com.databricks.sdk.service.pipelines.IngestionSourceType`.
+* [Breaking] Change `description` field for `com.databricks.sdk.service.supervisoragents.SupervisorAgent` to no longer be required.
+* Change `description` field for `com.databricks.sdk.service.supervisoragents.SupervisorAgent` to no longer be required.
+* [Breaking] Remove `connection` field for `com.databricks.sdk.service.supervisoragents.Tool`.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
index 5c0e192a1..648348bd6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
@@ -23,6 +23,8 @@
import com.databricks.sdk.service.catalog.AccountMetastoresService;
import com.databricks.sdk.service.catalog.AccountStorageCredentialsAPI;
import com.databricks.sdk.service.catalog.AccountStorageCredentialsService;
+import com.databricks.sdk.service.disasterrecovery.DisasterRecoveryAPI;
+import com.databricks.sdk.service.disasterrecovery.DisasterRecoveryService;
import com.databricks.sdk.service.iam.AccountAccessControlAPI;
import com.databricks.sdk.service.iam.AccountAccessControlService;
import com.databricks.sdk.service.iam.AccountGroupsAPI;
@@ -96,6 +98,7 @@ public class AccountClient {
private BudgetsAPI budgetsAPI;
private CredentialsAPI credentialsAPI;
private CustomAppIntegrationAPI customAppIntegrationAPI;
+ private DisasterRecoveryAPI disasterRecoveryAPI;
private EncryptionKeysAPI encryptionKeysAPI;
private EndpointsAPI endpointsAPI;
private AccountFederationPolicyAPI federationPolicyAPI;
@@ -142,6 +145,7 @@ public AccountClient(DatabricksConfig config) {
budgetsAPI = new BudgetsAPI(apiClient);
credentialsAPI = new CredentialsAPI(apiClient);
customAppIntegrationAPI = new CustomAppIntegrationAPI(apiClient);
+ disasterRecoveryAPI = new DisasterRecoveryAPI(apiClient);
encryptionKeysAPI = new EncryptionKeysAPI(apiClient);
endpointsAPI = new EndpointsAPI(apiClient);
federationPolicyAPI = new AccountFederationPolicyAPI(apiClient);
@@ -230,6 +234,11 @@ public CustomAppIntegrationAPI customAppIntegration() {
return customAppIntegrationAPI;
}
+ /** Manage disaster recovery configurations and execute failover operations. */
+ public DisasterRecoveryAPI disasterRecovery() {
+ return disasterRecoveryAPI;
+ }
+
/**
* These APIs manage encryption key configurations for this workspace (optional). A key
* configuration encapsulates the AWS KMS key information and some information about how the key
@@ -759,6 +768,17 @@ public AccountClient withCustomAppIntegrationAPI(CustomAppIntegrationAPI customA
return this;
}
+ /** Replace the default DisasterRecoveryService with a custom implementation. */
+ public AccountClient withDisasterRecoveryImpl(DisasterRecoveryService disasterRecovery) {
+ return this.withDisasterRecoveryAPI(new DisasterRecoveryAPI(disasterRecovery));
+ }
+
+ /** Replace the default DisasterRecoveryAPI with a custom implementation. */
+ public AccountClient withDisasterRecoveryAPI(DisasterRecoveryAPI disasterRecovery) {
+ this.disasterRecoveryAPI = disasterRecovery;
+ return this;
+ }
+
/** Replace the default EncryptionKeysService with a custom implementation. */
public AccountClient withEncryptionKeysImpl(EncryptionKeysService encryptionKeys) {
return this.withEncryptionKeysAPI(new EncryptionKeysAPI(encryptionKeys));
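
For reference, a minimal sketch of overriding the new account-level service through the `withDisasterRecoveryImpl` hook added above, e.g. in tests. The Mockito dependency and the config values are assumptions for illustration only:

```java
import static org.mockito.Mockito.mock;

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.core.DatabricksConfig;
import com.databricks.sdk.service.disasterrecovery.DisasterRecoveryService;

public class DisasterRecoveryOverrideSketch {
  public static void main(String[] args) {
    // Stub the service so tests never reach the real account-level API.
    DisasterRecoveryService stub = mock(DisasterRecoveryService.class);

    AccountClient account =
        new AccountClient(
                new DatabricksConfig()
                    .setHost("https://accounts.cloud.databricks.com") // placeholder host
                    .setAccountId("00000000-0000-0000-0000-000000000000")) // placeholder account id
            .withDisasterRecoveryImpl(stub);

    // account.disasterRecovery() now delegates to the stubbed service.
    System.out.println(account.disasterRecovery());
  }
}
```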
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
index 8e3b34e3a..8ae58fa89 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
@@ -66,6 +66,8 @@
import com.databricks.sdk.service.catalog.TemporaryPathCredentialsService;
import com.databricks.sdk.service.catalog.TemporaryTableCredentialsAPI;
import com.databricks.sdk.service.catalog.TemporaryTableCredentialsService;
+import com.databricks.sdk.service.catalog.TemporaryVolumeCredentialsAPI;
+import com.databricks.sdk.service.catalog.TemporaryVolumeCredentialsService;
import com.databricks.sdk.service.catalog.VolumesAPI;
import com.databricks.sdk.service.catalog.VolumesService;
import com.databricks.sdk.service.catalog.WorkspaceBindingsAPI;
@@ -387,6 +389,7 @@ public class WorkspaceClient {
private TagPoliciesAPI tagPoliciesAPI;
private TemporaryPathCredentialsAPI temporaryPathCredentialsAPI;
private TemporaryTableCredentialsAPI temporaryTableCredentialsAPI;
+ private TemporaryVolumeCredentialsAPI temporaryVolumeCredentialsAPI;
private TokenManagementAPI tokenManagementAPI;
private TokensAPI tokensAPI;
private UsersV2API usersV2API;
@@ -526,6 +529,7 @@ public WorkspaceClient(DatabricksConfig config) {
tagPoliciesAPI = new TagPoliciesAPI(apiClient);
temporaryPathCredentialsAPI = new TemporaryPathCredentialsAPI(apiClient);
temporaryTableCredentialsAPI = new TemporaryTableCredentialsAPI(apiClient);
+ temporaryVolumeCredentialsAPI = new TemporaryVolumeCredentialsAPI(apiClient);
tokenManagementAPI = new TokenManagementAPI(apiClient);
tokensAPI = new TokensAPI(apiClient);
usersV2API = new UsersV2API(apiClient);
@@ -2019,6 +2023,26 @@ public TemporaryTableCredentialsAPI temporaryTableCredentials() {
return temporaryTableCredentialsAPI;
}
+ /**
+ * Temporary Volume Credentials refer to short-lived, downscoped credentials used to access cloud
+ * storage locations where volume data is stored in Databricks. These credentials are employed to
+ * provide secure and time-limited access to data in cloud environments such as AWS, Azure, and
+ * Google Cloud. Each cloud provider has its own type of credentials: AWS uses temporary session
+ * tokens via AWS Security Token Service (STS), Azure utilizes Shared Access Signatures (SAS) for
+ * its data storage services, and Google Cloud supports temporary credentials through OAuth 2.0.
+ *
+ * <p>Temporary volume credentials ensure that data access is limited in scope and duration,
+ * reducing the risk of unauthorized access or misuse. To use the temporary volume credentials
+ * API, a metastore admin needs to enable the external_access_enabled flag (off by default) at the
+ * metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema
+ * level by catalog owner. Note that EXTERNAL USE SCHEMA is a schema level permission that can
+ * only be granted by catalog owner explicitly and is not included in schema ownership or ALL
+ * PRIVILEGES on the schema for security reasons.
+ */
+ public TemporaryVolumeCredentialsAPI temporaryVolumeCredentials() {
+ return temporaryVolumeCredentialsAPI;
+ }
+
/**
* Enables administrators to get all tokens and delete tokens for other users. Admins can either
* get every token, get a specific token by ID, or get all tokens for a particular user.
@@ -3503,6 +3527,20 @@ public WorkspaceClient withTemporaryTableCredentialsAPI(
return this;
}
+ /** Replace the default TemporaryVolumeCredentialsService with a custom implementation. */
+ public WorkspaceClient withTemporaryVolumeCredentialsImpl(
+ TemporaryVolumeCredentialsService temporaryVolumeCredentials) {
+ return this.withTemporaryVolumeCredentialsAPI(
+ new TemporaryVolumeCredentialsAPI(temporaryVolumeCredentials));
+ }
+
+ /** Replace the default TemporaryVolumeCredentialsAPI with a custom implementation. */
+ public WorkspaceClient withTemporaryVolumeCredentialsAPI(
+ TemporaryVolumeCredentialsAPI temporaryVolumeCredentials) {
+ this.temporaryVolumeCredentialsAPI = temporaryVolumeCredentials;
+ return this;
+ }
+
/** Replace the default TokenManagementService with a custom implementation. */
public WorkspaceClient withTokenManagementImpl(TokenManagementService tokenManagement) {
return this.withTokenManagementAPI(new TokenManagementAPI(tokenManagement));
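
For context, a minimal usage sketch of the `temporaryVolumeCredentials()` service documented above. The `generateTemporaryVolumeCredentials` method name is assumed by analogy with the existing table-credentials API, the volume id is a placeholder, and the `READ_VOLUME`/`WRITE_VOLUME` operations come from the request's documentation:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.GenerateTemporaryVolumeCredentialRequest;
import com.databricks.sdk.service.catalog.GenerateTemporaryVolumeCredentialResponse;
import com.databricks.sdk.service.catalog.VolumeOperation;

public class TemporaryVolumeCredentialSketch {
  public static void main(String[] args) {
    // Resolves auth from the environment (DATABRICKS_HOST, DATABRICKS_TOKEN, ...).
    WorkspaceClient w = new WorkspaceClient();

    // Request short-lived, read-only credentials for a volume by its id.
    GenerateTemporaryVolumeCredentialResponse creds =
        w.temporaryVolumeCredentials()
            .generateTemporaryVolumeCredentials( // assumed method name, mirrors temporaryTableCredentials()
                new GenerateTemporaryVolumeCredentialRequest()
                    .setVolumeId("11111111-2222-3333-4444-555555555555") // placeholder volume id
                    .setOperation(VolumeOperation.READ_VOLUME));

    // The response carries cloud-specific credentials (AWS STS, Azure SAS, GCP OAuth).
    System.out.println(creds);
  }
}
```

Note that, per the Javadoc above, the metastore must have `external_access_enabled` turned on and the caller needs `EXTERNAL USE SCHEMA` on the parent schema.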
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/agentbricks/AgentBricksAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/agentbricks/AgentBricksAPI.java
index e281d8ddb..e34d55945 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/agentbricks/AgentBricksAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/agentbricks/AgentBricksAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.agentbricks;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** The Custom LLMs service manages state and powers the UI for the Custom LLM product. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java
index 4d3757f2f..4a8a546fc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java
@@ -121,6 +121,10 @@ public class App {
@JsonProperty("telemetry_export_destinations")
private Collection telemetryExportDestinations;
+ /** The URL of the thumbnail image for the app. */
+ @JsonProperty("thumbnail_url")
+ private String thumbnailUrl;
+
/** The update time of the app. Formatted timestamp in ISO 6801. */
@JsonProperty("update_time")
private String updateTime;
@@ -358,6 +362,15 @@ public Collection getTelemetryExportDestinations() {
return telemetryExportDestinations;
}
+ public App setThumbnailUrl(String thumbnailUrl) {
+ this.thumbnailUrl = thumbnailUrl;
+ return this;
+ }
+
+ public String getThumbnailUrl() {
+ return thumbnailUrl;
+ }
+
public App setUpdateTime(String updateTime) {
this.updateTime = updateTime;
return this;
@@ -432,6 +445,7 @@ public boolean equals(Object o) {
&& Objects.equals(servicePrincipalName, that.servicePrincipalName)
&& Objects.equals(space, that.space)
&& Objects.equals(telemetryExportDestinations, that.telemetryExportDestinations)
+ && Objects.equals(thumbnailUrl, that.thumbnailUrl)
&& Objects.equals(updateTime, that.updateTime)
&& Objects.equals(updater, that.updater)
&& Objects.equals(url, that.url)
@@ -466,6 +480,7 @@ public int hashCode() {
servicePrincipalName,
space,
telemetryExportDestinations,
+ thumbnailUrl,
updateTime,
updater,
url,
@@ -500,6 +515,7 @@ public String toString() {
.add("servicePrincipalName", servicePrincipalName)
.add("space", space)
.add("telemetryExportDestinations", telemetryExportDestinations)
+ .add("thumbnailUrl", thumbnailUrl)
.add("updateTime", updateTime)
.add("updater", updater)
.add("url", url)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java
index b07bc80ea..d4412af75 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java
@@ -3,6 +3,8 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
import com.databricks.sdk.support.Wait;
@@ -10,8 +12,6 @@
import java.util.Arrays;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Apps run directly on a customer's Databricks instance, integrate with their data, use and extend
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsSettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsSettingsAPI.java
index 9e24ef8fa..b9f117990 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsSettingsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsSettingsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.apps;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Apps Settings manage the settings for the Apps service on a customer's Databricks instance. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateSpaceOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateSpaceOperation.java
index 5ea9cf251..fced06887 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateSpaceOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateSpaceOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.apps;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running createSpace operation. Provides methods to wait for
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteSpaceOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteSpaceOperation.java
index 9666a9818..7095036f3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteSpaceOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteSpaceOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.apps;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running deleteSpace operation. Provides methods to wait for
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateSpaceOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateSpaceOperation.java
index cf00c10e9..9b05c60ae 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateSpaceOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateSpaceOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.apps;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running updateSpace operation. Provides methods to wait for
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageAPI.java
index 719b1c371..079ca2947 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.billing;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* This API allows you to download billable usage logs for the specified account and date range.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyAPI.java
index 87e82cfe7..3ef7ee9e6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.billing;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** A service serves REST API about Budget policies */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsAPI.java
index 72e444edb..f00b6a28d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.billing;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs manage budget configurations for this account. Budgets enable you to monitor usage
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java
index aa9a56bb8..fc70d6b6a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.billing;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs manage log delivery configurations for this account. The two supported log types for
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsAPI.java
index 72dca2cf8..ca830d2a1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.billing;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs manage usage dashboards for this account. Usage dashboards enable you to gain insights
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java
index 6e1cd7c9e..fdd574273 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** These APIs manage metastore assignments to a workspace. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java
index 52fd325f0..c0cccb1bd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs manage Unity Catalog metastores for an account. A metastore contains catalogs that can
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java
index 254dc846d..8c9f78d97 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** These APIs manage storage credentials for a particular metastore. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsAPI.java
index 45c1d3a15..91058a327 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the `allowlist`
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java
index c97815f48..2b0b14808 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* A catalog is the first layer of Unity Catalog’s three-level namespace. It’s used to organize your
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java
index 8136de14c..26403c513 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java
@@ -4,10 +4,11 @@
import com.databricks.sdk.support.Generated;
-/** Next Id: 77 */
+/** Next Id: 124 */
@Generated
public enum ConnectionType {
BIGQUERY,
+ CONFLUENCE,
DATABRICKS,
GA4_RAW_DATA,
GLUE,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java
index 33636ebfa..ebe4368a4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Connections allow for creating a connection to an external data source.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java
index 9235e354e..a76a01d11 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* A credential represents an authentication and authorization mechanism for accessing services on
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EntityTagAssignmentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EntityTagAssignmentsAPI.java
index 78471482a..af6be2968 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EntityTagAssignmentsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EntityTagAssignmentsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Tags are attributes that include keys and optional values that you can use to organize and
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageAPI.java
index f22736ada..e8578ce85 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* External Lineage APIs enable defining and managing lineage relationships between Databricks
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java
index f44fc3f02..625c0454d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* An external location is an object that combines a cloud storage path with a storage credential
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadataAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadataAPI.java
index 611c6bfe5..10cae669e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadataAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadataAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* External Metadata objects enable customers to register and manage metadata about external systems
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java
index 8a1a22cd7..da719042a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Functions implement User-Defined Functions (UDFs) in Unity Catalog.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryVolumeCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryVolumeCredentialRequest.java
new file mode 100755
index 000000000..6e8822378
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryVolumeCredentialRequest.java
@@ -0,0 +1,63 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Generate volume credentials RPC */
+@Generated
+public class GenerateTemporaryVolumeCredentialRequest {
+ /**
+ * The operation performed against the volume data, either READ_VOLUME or WRITE_VOLUME. If
+ * WRITE_VOLUME is specified, the credentials returned will have write permissions, otherwise, it
+ * will be read only.
+ */
+ @JsonProperty("operation")
+ private VolumeOperation operation;
+
+ /** Id of the volume to read or write. */
+ @JsonProperty("volume_id")
+ private String volumeId;
+
+ public GenerateTemporaryVolumeCredentialRequest setOperation(VolumeOperation operation) {
+ this.operation = operation;
+ return this;
+ }
+
+ public VolumeOperation getOperation() {
+ return operation;
+ }
+
+ public GenerateTemporaryVolumeCredentialRequest setVolumeId(String volumeId) {
+ this.volumeId = volumeId;
+ return this;
+ }
+
+ public String getVolumeId() {
+ return volumeId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenerateTemporaryVolumeCredentialRequest that = (GenerateTemporaryVolumeCredentialRequest) o;
+ return Objects.equals(operation, that.operation) && Objects.equals(volumeId, that.volumeId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(operation, volumeId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenerateTemporaryVolumeCredentialRequest.class)
+ .add("operation", operation)
+ .add("volumeId", volumeId)
+ .toString();
+ }
+}
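
As a usage sketch (not part of the generated patch), the request above is a plain builder-style DTO; the volume id below is a placeholder and the operation comes from the VolumeOperation enum added later in this diff:

import com.databricks.sdk.service.catalog.GenerateTemporaryVolumeCredentialRequest;
import com.databricks.sdk.service.catalog.VolumeOperation;

public class BuildVolumeCredentialRequest {
  public static void main(String[] args) {
    // READ_VOLUME yields read-only credentials; WRITE_VOLUME yields writable ones.
    GenerateTemporaryVolumeCredentialRequest request =
        new GenerateTemporaryVolumeCredentialRequest()
            .setOperation(VolumeOperation.READ_VOLUME)
            .setVolumeId("<volume-uuid>"); // placeholder id of the target volume
    System.out.println(request);
  }
}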
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryVolumeCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryVolumeCredentialResponse.java
new file mode 100755
index 000000000..4b35d5b16
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryVolumeCredentialResponse.java
@@ -0,0 +1,147 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class GenerateTemporaryVolumeCredentialResponse {
+ /** */
+ @JsonProperty("aws_temp_credentials")
+ private AwsCredentials awsTempCredentials;
+
+ /** */
+ @JsonProperty("azure_aad")
+ private AzureActiveDirectoryToken azureAad;
+
+ /** */
+ @JsonProperty("azure_user_delegation_sas")
+ private AzureUserDelegationSas azureUserDelegationSas;
+
+ /**
+ * Server time when the credential will expire, in epoch milliseconds. The API client is advised
+ * to cache the credential given this expiration time.
+ */
+ @JsonProperty("expiration_time")
+ private Long expirationTime;
+
+ /** */
+ @JsonProperty("gcp_oauth_token")
+ private GcpOauthToken gcpOauthToken;
+
+ /** */
+ @JsonProperty("r2_temp_credentials")
+ private R2Credentials r2TempCredentials;
+
+ /** The URL of the storage path accessible by the temporary credential. */
+ @JsonProperty("url")
+ private String url;
+
+ public GenerateTemporaryVolumeCredentialResponse setAwsTempCredentials(
+ AwsCredentials awsTempCredentials) {
+ this.awsTempCredentials = awsTempCredentials;
+ return this;
+ }
+
+ public AwsCredentials getAwsTempCredentials() {
+ return awsTempCredentials;
+ }
+
+ public GenerateTemporaryVolumeCredentialResponse setAzureAad(AzureActiveDirectoryToken azureAad) {
+ this.azureAad = azureAad;
+ return this;
+ }
+
+ public AzureActiveDirectoryToken getAzureAad() {
+ return azureAad;
+ }
+
+ public GenerateTemporaryVolumeCredentialResponse setAzureUserDelegationSas(
+ AzureUserDelegationSas azureUserDelegationSas) {
+ this.azureUserDelegationSas = azureUserDelegationSas;
+ return this;
+ }
+
+ public AzureUserDelegationSas getAzureUserDelegationSas() {
+ return azureUserDelegationSas;
+ }
+
+ public GenerateTemporaryVolumeCredentialResponse setExpirationTime(Long expirationTime) {
+ this.expirationTime = expirationTime;
+ return this;
+ }
+
+ public Long getExpirationTime() {
+ return expirationTime;
+ }
+
+ public GenerateTemporaryVolumeCredentialResponse setGcpOauthToken(GcpOauthToken gcpOauthToken) {
+ this.gcpOauthToken = gcpOauthToken;
+ return this;
+ }
+
+ public GcpOauthToken getGcpOauthToken() {
+ return gcpOauthToken;
+ }
+
+ public GenerateTemporaryVolumeCredentialResponse setR2TempCredentials(
+ R2Credentials r2TempCredentials) {
+ this.r2TempCredentials = r2TempCredentials;
+ return this;
+ }
+
+ public R2Credentials getR2TempCredentials() {
+ return r2TempCredentials;
+ }
+
+ public GenerateTemporaryVolumeCredentialResponse setUrl(String url) {
+ this.url = url;
+ return this;
+ }
+
+ public String getUrl() {
+ return url;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenerateTemporaryVolumeCredentialResponse that = (GenerateTemporaryVolumeCredentialResponse) o;
+ return Objects.equals(awsTempCredentials, that.awsTempCredentials)
+ && Objects.equals(azureAad, that.azureAad)
+ && Objects.equals(azureUserDelegationSas, that.azureUserDelegationSas)
+ && Objects.equals(expirationTime, that.expirationTime)
+ && Objects.equals(gcpOauthToken, that.gcpOauthToken)
+ && Objects.equals(r2TempCredentials, that.r2TempCredentials)
+ && Objects.equals(url, that.url);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ awsTempCredentials,
+ azureAad,
+ azureUserDelegationSas,
+ expirationTime,
+ gcpOauthToken,
+ r2TempCredentials,
+ url);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenerateTemporaryVolumeCredentialResponse.class)
+ .add("awsTempCredentials", awsTempCredentials)
+ .add("azureAad", azureAad)
+ .add("azureUserDelegationSas", azureUserDelegationSas)
+ .add("expirationTime", expirationTime)
+ .add("gcpOauthToken", gcpOauthToken)
+ .add("r2TempCredentials", r2TempCredentials)
+ .add("url", url)
+ .toString();
+ }
+}
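
A hedged sketch of consuming this response: which credential field is populated depends on the cloud backing the volume, so callers typically null-check the provider-specific fields and honor expiration_time:

import com.databricks.sdk.service.catalog.GenerateTemporaryVolumeCredentialResponse;

public class InspectVolumeCredentialResponse {
  static void describe(GenerateTemporaryVolumeCredentialResponse response) {
    // Cache the credential only until the server-provided expiration (epoch milliseconds).
    System.out.println("storage url: " + response.getUrl());
    System.out.println("expires at:  " + response.getExpirationTime());

    if (response.getAwsTempCredentials() != null) {
      System.out.println("got AWS STS session credentials");
    } else if (response.getAzureUserDelegationSas() != null || response.getAzureAad() != null) {
      System.out.println("got Azure SAS / AAD token");
    } else if (response.getGcpOauthToken() != null) {
      System.out.println("got GCP OAuth token");
    } else if (response.getR2TempCredentials() != null) {
      System.out.println("got Cloudflare R2 credentials");
    }
  }
}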
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsAPI.java
index 861e4ce6b..ccffa0aaf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* In Unity Catalog, data is secure by default. Initially, users have no access to data in a
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java
index a12fed217..18a7d68f1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* A metastore is the top-level container of objects in Unity Catalog. It stores data assets (tables
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsAPI.java
index 0f5470030..9708c466c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Databricks provides a hosted version of MLflow Model Registry in Unity Catalog. Models in Unity
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java
index 1562f1ba4..242bb518b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java
@@ -3,14 +3,14 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Wait;
import java.time.Duration;
import java.util.Arrays;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Online tables provide lower latency and higher QPS access to data from Delta tables. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PoliciesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PoliciesAPI.java
index 160ec398a..8a9a01e54 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PoliciesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PoliciesAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Attribute-Based Access Control (ABAC) provides high leverage governance for enforcing compliance
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsAPI.java
index 53975148b..2106eeba9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Deprecated: Please use the Data Quality Monitors API instead (REST:
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsAPI.java
index 7dea334a3..ae9172cf1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Databricks provides a hosted version of MLflow Model Registry in Unity Catalog. Models in Unity
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasAPI.java
index 513f7bda2..191e7d4ce 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Unity Catalog enforces resource quotas on all securable objects, which limits the number of
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RfaAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RfaAPI.java
index e4d4e14f6..7773bb76b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RfaAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RfaAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Request for Access enables users to request access for Unity Catalog securables.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
index 73a1763b9..c258266b0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace. A
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcAPI.java
index 6a951567b..cbaa2c120 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* A secret is a Unity Catalog securable object that stores sensitive credential data (such as
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
index bebea1485..bc50209ce 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
@@ -4,7 +4,7 @@
import com.databricks.sdk.support.Generated;
-/** Latest kind: ENDPOINT_LLM_PROVIDER = 317; Next id: 318 */
+/** Latest kind: CONNECTION_SLACK_ACCESS_AND_INTEGRATION_LOGS_OAUTH_U2M = 319; Next id: 320 */
@Generated
public enum SecurableKind {
TABLE_DB_STORAGE,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java
index ed1d215f2..7fb47bc89 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* A storage credential represents an authentication and authorization mechanism for accessing data
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java
index ff6a9ab0d..7fc499b01 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* A system schema is a schema that lives within the system catalog. A system schema may contain
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsAPI.java
index 708e9b24a..4a8a02a5f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Primary key and foreign key constraints encode relationships between fields in tables.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java
index 5db6a605a..abe09ac9b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* A table resides in the third layer of Unity Catalog’s three-level namespace. It contains rows of
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsAPI.java
index 933a88c22..e56a66132 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Temporary Path Credentials refer to short-lived, downscoped credentials used to access external
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java
index 5a53f265a..337b5f8ae 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryVolumeCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryVolumeCredentialsAPI.java
new file mode 100755
index 000000000..d91363f13
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryVolumeCredentialsAPI.java
@@ -0,0 +1,55 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Temporary Volume Credentials refer to short-lived, downscoped credentials used to access cloud
+ * storage locations where volume data is stored in Databricks. These credentials are employed to
+ * provide secure and time-limited access to data in cloud environments such as AWS, Azure, and
+ * Google Cloud. Each cloud provider has its own type of credentials: AWS uses temporary session
+ * tokens via AWS Security Token Service (STS), Azure utilizes Shared Access Signatures (SAS) for
+ * its data storage services, and Google Cloud supports temporary credentials through OAuth 2.0.
+ *
+ * Temporary volume credentials ensure that data access is limited in scope and duration,
+ * reducing the risk of unauthorized access or misuse. To use the temporary volume credentials API,
+ * a metastore admin needs to enable the external_access_enabled flag (off by default) at the
+ * metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema
+ * level by catalog owner. Note that EXTERNAL USE SCHEMA is a schema level permission that can only
+ * be granted by catalog owner explicitly and is not included in schema ownership or ALL PRIVILEGES
+ * on the schema for security reasons.
+ */
+@Generated
+public class TemporaryVolumeCredentialsAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(TemporaryVolumeCredentialsAPI.class);
+
+ private final TemporaryVolumeCredentialsService impl;
+
+ /** Regular-use constructor */
+ public TemporaryVolumeCredentialsAPI(ApiClient apiClient) {
+ impl = new TemporaryVolumeCredentialsImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public TemporaryVolumeCredentialsAPI(TemporaryVolumeCredentialsService mock) {
+ impl = mock;
+ }
+
+ /**
+ * Get a short-lived credential for directly accessing the volume data on cloud storage. The
+ * metastore must have **external_access_enabled** flag set to true (default false). The caller
+ * must have the **EXTERNAL_USE_SCHEMA** privilege on the parent schema and this privilege can
+ * only be granted by catalog owners.
+ */
+ public GenerateTemporaryVolumeCredentialResponse generateTemporaryVolumeCredentials(
+ GenerateTemporaryVolumeCredentialRequest request) {
+ return impl.generateTemporaryVolumeCredentials(request);
+ }
+
+ public TemporaryVolumeCredentialsService impl() {
+ return impl;
+ }
+}
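
Putting the pieces together, a minimal call sketch against this API class, assuming an already-configured ApiClient (the WorkspaceClient accessor for this service is not shown in this section of the diff):

import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.service.catalog.GenerateTemporaryVolumeCredentialRequest;
import com.databricks.sdk.service.catalog.GenerateTemporaryVolumeCredentialResponse;
import com.databricks.sdk.service.catalog.TemporaryVolumeCredentialsAPI;
import com.databricks.sdk.service.catalog.VolumeOperation;

public class GenerateVolumeCredentialExample {
  public static GenerateTemporaryVolumeCredentialResponse fetch(ApiClient apiClient, String volumeId) {
    // Requires external_access_enabled on the metastore and EXTERNAL USE SCHEMA on the parent schema.
    TemporaryVolumeCredentialsAPI volumeCredentials = new TemporaryVolumeCredentialsAPI(apiClient);
    return volumeCredentials.generateTemporaryVolumeCredentials(
        new GenerateTemporaryVolumeCredentialRequest()
            .setVolumeId(volumeId)
            .setOperation(VolumeOperation.READ_VOLUME));
  }
}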
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryVolumeCredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryVolumeCredentialsImpl.java
new file mode 100755
index 000000000..8a34df269
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryVolumeCredentialsImpl.java
@@ -0,0 +1,37 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of TemporaryVolumeCredentials */
+@Generated
+class TemporaryVolumeCredentialsImpl implements TemporaryVolumeCredentialsService {
+ private final ApiClient apiClient;
+
+ public TemporaryVolumeCredentialsImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public GenerateTemporaryVolumeCredentialResponse generateTemporaryVolumeCredentials(
+ GenerateTemporaryVolumeCredentialRequest request) {
+ String path = "/api/2.0/unity-catalog/temporary-volume-credentials";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, GenerateTemporaryVolumeCredentialResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryVolumeCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryVolumeCredentialsService.java
new file mode 100755
index 000000000..726abe937
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryVolumeCredentialsService.java
@@ -0,0 +1,36 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Temporary Volume Credentials refer to short-lived, downscoped credentials used to access cloud
+ * storage locations where volume data is stored in Databricks. These credentials are employed to
+ * provide secure and time-limited access to data in cloud environments such as AWS, Azure, and
+ * Google Cloud. Each cloud provider has its own type of credentials: AWS uses temporary session
+ * tokens via AWS Security Token Service (STS), Azure utilizes Shared Access Signatures (SAS) for
+ * its data storage services, and Google Cloud supports temporary credentials through OAuth 2.0.
+ *
+ * Temporary volume credentials ensure that data access is limited in scope and duration,
+ * reducing the risk of unauthorized access or misuse. To use the temporary volume credentials API,
+ * a metastore admin needs to enable the external_access_enabled flag (off by default) at the
+ * metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema
+ * level by catalog owner. Note that EXTERNAL USE SCHEMA is a schema level permission that can only
+ * be granted by catalog owner explicitly and is not included in schema ownership or ALL PRIVILEGES
+ * on the schema for security reasons.
+ *
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface TemporaryVolumeCredentialsService {
+ /**
+ * Get a short-lived credential for directly accessing the volume data on cloud storage. The
+ * metastore must have **external_access_enabled** flag set to true (default false). The caller
+ * must have the **EXTERNAL_USE_SCHEMA** privilege on the parent schema and this privilege can
+ * only be granted by catalog owners.
+ */
+ GenerateTemporaryVolumeCredentialResponse generateTemporaryVolumeCredentials(
+ GenerateTemporaryVolumeCredentialRequest generateTemporaryVolumeCredentialRequest);
+}
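
Because the interface above declares a single method, the mock constructor added to TemporaryVolumeCredentialsAPI can be exercised with a lambda in tests; a sketch with placeholder values:

import com.databricks.sdk.service.catalog.GenerateTemporaryVolumeCredentialRequest;
import com.databricks.sdk.service.catalog.GenerateTemporaryVolumeCredentialResponse;
import com.databricks.sdk.service.catalog.TemporaryVolumeCredentialsAPI;
import com.databricks.sdk.service.catalog.TemporaryVolumeCredentialsService;
import com.databricks.sdk.service.catalog.VolumeOperation;

public class TemporaryVolumeCredentialsMockExample {
  public static void main(String[] args) {
    // The interface has one method, so a lambda is enough for a stubbed service.
    TemporaryVolumeCredentialsService stub =
        request -> new GenerateTemporaryVolumeCredentialResponse()
            .setUrl("s3://example-bucket/volumes/example") // placeholder value
            .setExpirationTime(0L);
    TemporaryVolumeCredentialsAPI api = new TemporaryVolumeCredentialsAPI(stub);

    GenerateTemporaryVolumeCredentialResponse response =
        api.generateTemporaryVolumeCredentials(
            new GenerateTemporaryVolumeCredentialRequest()
                .setVolumeId("fake-id")
                .setOperation(VolumeOperation.WRITE_VOLUME));
    System.out.println(response.getUrl());
  }
}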
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeOperation.java
new file mode 100755
index 000000000..df34fc3cc
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeOperation.java
@@ -0,0 +1,11 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum VolumeOperation {
+ READ_VOLUME,
+ WRITE_VOLUME,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java
index 60e255259..8b4f922c4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java
index 41882d9c3..a90ac838d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* A securable in Databricks can be configured as __OPEN__ or __ISOLATED__. An __OPEN__ securable
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetRevisionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetRevisionsAPI.java
index 85979864f..971e60ca1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetRevisionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetRevisionsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.cleanrooms;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Clean Room Asset Revisions denote new versions of uploaded assets (e.g. notebooks) in the clean
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java
index 2d8a064ba..14f374a29 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.cleanrooms;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAutoApprovalRulesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAutoApprovalRulesAPI.java
index 0e1b3d0d0..6d418437e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAutoApprovalRulesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAutoApprovalRulesAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.cleanrooms;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Clean room auto-approval rules automatically create an approval on your behalf when an asset
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomTaskRunsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomTaskRunsAPI.java
index 38e341fcf..1a771b2e9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomTaskRunsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomTaskRunsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.cleanrooms;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Clean room task runs are the executions of notebooks in a clean room. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java
index 9cdea0fdb..077ad9b01 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java
@@ -3,6 +3,8 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
import com.databricks.sdk.support.Wait;
@@ -10,8 +12,6 @@
import java.util.Arrays;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java
index 3cda2663a..6bd533283 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.compute;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* You can use cluster policies to control users' ability to configure clusters based on a set of
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java
index 422a5844b..5c888b195 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java
@@ -3,6 +3,8 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
import com.databricks.sdk.support.Wait;
@@ -10,8 +12,6 @@
import java.util.Arrays;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Clusters API allows you to create, start, edit, list, terminate, and delete clusters.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java
index a8edff03f..cb3ff1831 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java
@@ -3,14 +3,14 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Wait;
import java.time.Duration;
import java.util.Arrays;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ConfidentialComputeType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ConfidentialComputeType.java
new file mode 100755
index 000000000..caf4cdb3c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ConfidentialComputeType.java
@@ -0,0 +1,17 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Confidential computing technology for GCP instances. Aligns with gcloud's
+ * --confidential-compute-type flag and the REST API's
+ * confidentialInstanceConfig.confidentialInstanceType field. See:
+ * https://cloud.google.com/confidential-computing/confidential-vm/docs/create-a-confidential-vm-instance
+ */
+@Generated
+public enum ConfidentialComputeType {
+ CONFIDENTIAL_COMPUTE_TYPE_NONE,
+ SEV_SNP,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcpAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcpAttributes.java
index 125767030..33acb0da6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcpAttributes.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcpAttributes.java
@@ -22,6 +22,13 @@ public class GcpAttributes {
@JsonProperty("boot_disk_size")
private Long bootDiskSize;
+ /**
+ * The confidential computing technology for this cluster's instances. Currently only SEV_SNP is
+ * supported, and only on N2D instance types. When not set, no confidential computing is applied.
+ */
+ @JsonProperty("confidential_compute_type")
+ private ConfidentialComputeType confidentialComputeType;
+
/**
* The first `first_on_demand` nodes of the cluster will be placed on on-demand instances. This
* value should be greater than 0, to make sure the cluster driver node is placed on an on-demand
@@ -89,6 +96,15 @@ public Long getBootDiskSize() {
return bootDiskSize;
}
+ public GcpAttributes setConfidentialComputeType(ConfidentialComputeType confidentialComputeType) {
+ this.confidentialComputeType = confidentialComputeType;
+ return this;
+ }
+
+ public ConfidentialComputeType getConfidentialComputeType() {
+ return confidentialComputeType;
+ }
+
public GcpAttributes setFirstOnDemand(Long firstOnDemand) {
this.firstOnDemand = firstOnDemand;
return this;
@@ -141,6 +157,7 @@ public boolean equals(Object o) {
GcpAttributes that = (GcpAttributes) o;
return Objects.equals(availability, that.availability)
&& Objects.equals(bootDiskSize, that.bootDiskSize)
+ && Objects.equals(confidentialComputeType, that.confidentialComputeType)
&& Objects.equals(firstOnDemand, that.firstOnDemand)
&& Objects.equals(googleServiceAccount, that.googleServiceAccount)
&& Objects.equals(localSsdCount, that.localSsdCount)
@@ -153,6 +170,7 @@ public int hashCode() {
return Objects.hash(
availability,
bootDiskSize,
+ confidentialComputeType,
firstOnDemand,
googleServiceAccount,
localSsdCount,
@@ -165,6 +183,7 @@ public String toString() {
return new ToStringer(GcpAttributes.class)
.add("availability", availability)
.add("bootDiskSize", bootDiskSize)
+ .add("confidentialComputeType", confidentialComputeType)
.add("firstOnDemand", firstOnDemand)
.add("googleServiceAccount", googleServiceAccount)
.add("localSsdCount", localSsdCount)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsAPI.java
index cdd92aaac..a0b8c0356 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.compute;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Global Init Scripts API enables Workspace administrators to configure global initialization
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java
index 22b364c77..38cdd03bf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.compute;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java
index fb14016a9..d4c6c28b3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.compute;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Instance Profiles API allows admins to add, list, and remove instance profiles that users can
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java
index bf8a99af3..b7e736ae8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.compute;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Libraries API allows you to install and uninstall libraries and get the status of libraries
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersAPI.java
index 7f3fa2556..c70aea53f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.compute;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The policy compliance APIs allow you to view and manage the policy compliance status of clusters
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamiliesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamiliesAPI.java
index a6c49c7fb..f0147ee76 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamiliesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamiliesAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.compute;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* View available policy families. A policy family contains a policy definition providing best
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
index 57b28467f..f672f229f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
@@ -3,14 +3,14 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Wait;
import java.time.Duration;
import java.util.Arrays;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Genie provides a no-code experience for business users, powered by AI/BI. Analysts set up spaces
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java
index 130199e71..4217638b9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.dashboards;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs provide specific management operations for Lakeview dashboards. Generic resource
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java
index ed46478a1..cff7fc8df 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.dashboards;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Token-based Lakeview APIs for embedding dashboards in external applications. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java
index 8c96210c2..b8fbfaaae 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java
@@ -3,6 +3,8 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
import com.databricks.sdk.support.Wait;
@@ -10,8 +12,6 @@
import java.util.Arrays;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Database Instances provide access to a database via REST API or direct SQL. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationAPI.java
index 2f1999110..57fdea246 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.dataclassification;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Manage data classification for Unity Catalog catalogs. Data classification automatically
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityAPI.java
index 48dfedd07..620731811 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.dataquality;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Manage the data quality of Unity Catalog objects (currently support `schema` and `table`) */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/CreateFailoverGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/CreateFailoverGroupRequest.java
new file mode 100755
index 000000000..1b6e5b77f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/CreateFailoverGroupRequest.java
@@ -0,0 +1,95 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateFailoverGroupRequest {
+ /** The failover group to create. */
+ @JsonProperty("failover_group")
+ private FailoverGroup failoverGroup;
+
+ /**
+ * Client-provided identifier for the failover group. Used to construct the resource name as
+ * {parent}/failover-groups/{failover_group_id}.
+ */
+ @JsonIgnore
+ @QueryParam("failover_group_id")
+ private String failoverGroupId;
+
+ /** The parent resource. Format: accounts/{account_id}. */
+ @JsonIgnore private String parent;
+
+ /** When true, validates the request without creating the failover group. */
+ @JsonIgnore
+ @QueryParam("validate_only")
+ private Boolean validateOnly;
+
+ public CreateFailoverGroupRequest setFailoverGroup(FailoverGroup failoverGroup) {
+ this.failoverGroup = failoverGroup;
+ return this;
+ }
+
+ public FailoverGroup getFailoverGroup() {
+ return failoverGroup;
+ }
+
+ public CreateFailoverGroupRequest setFailoverGroupId(String failoverGroupId) {
+ this.failoverGroupId = failoverGroupId;
+ return this;
+ }
+
+ public String getFailoverGroupId() {
+ return failoverGroupId;
+ }
+
+ public CreateFailoverGroupRequest setParent(String parent) {
+ this.parent = parent;
+ return this;
+ }
+
+ public String getParent() {
+ return parent;
+ }
+
+ public CreateFailoverGroupRequest setValidateOnly(Boolean validateOnly) {
+ this.validateOnly = validateOnly;
+ return this;
+ }
+
+ public Boolean getValidateOnly() {
+ return validateOnly;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateFailoverGroupRequest that = (CreateFailoverGroupRequest) o;
+ return Objects.equals(failoverGroup, that.failoverGroup)
+ && Objects.equals(failoverGroupId, that.failoverGroupId)
+ && Objects.equals(parent, that.parent)
+ && Objects.equals(validateOnly, that.validateOnly);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(failoverGroup, failoverGroupId, parent, validateOnly);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateFailoverGroupRequest.class)
+ .add("failoverGroup", failoverGroup)
+ .add("failoverGroupId", failoverGroupId)
+ .add("parent", parent)
+ .add("validateOnly", validateOnly)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/CreateStableUrlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/CreateStableUrlRequest.java
new file mode 100755
index 000000000..cbca4e15e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/CreateStableUrlRequest.java
@@ -0,0 +1,95 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateStableUrlRequest {
+ /** The parent resource. Format: accounts/{account_id}. */
+ @JsonIgnore private String parent;
+
+ /** The stable URL to create. */
+ @JsonProperty("stable_url")
+ private StableUrl stableUrl;
+
+ /**
+ * Client-provided identifier for the stable URL. Used to construct the resource name as
+ * {parent}/stable-urls/{stable_url_id}.
+ */
+ @JsonIgnore
+ @QueryParam("stable_url_id")
+ private String stableUrlId;
+
+ /** When true, validates the request without creating the stable URL. */
+ @JsonIgnore
+ @QueryParam("validate_only")
+ private Boolean validateOnly;
+
+ public CreateStableUrlRequest setParent(String parent) {
+ this.parent = parent;
+ return this;
+ }
+
+ public String getParent() {
+ return parent;
+ }
+
+ public CreateStableUrlRequest setStableUrl(StableUrl stableUrl) {
+ this.stableUrl = stableUrl;
+ return this;
+ }
+
+ public StableUrl getStableUrl() {
+ return stableUrl;
+ }
+
+ public CreateStableUrlRequest setStableUrlId(String stableUrlId) {
+ this.stableUrlId = stableUrlId;
+ return this;
+ }
+
+ public String getStableUrlId() {
+ return stableUrlId;
+ }
+
+ public CreateStableUrlRequest setValidateOnly(Boolean validateOnly) {
+ this.validateOnly = validateOnly;
+ return this;
+ }
+
+ public Boolean getValidateOnly() {
+ return validateOnly;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateStableUrlRequest that = (CreateStableUrlRequest) o;
+ return Objects.equals(parent, that.parent)
+ && Objects.equals(stableUrl, that.stableUrl)
+ && Objects.equals(stableUrlId, that.stableUrlId)
+ && Objects.equals(validateOnly, that.validateOnly);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(parent, stableUrl, stableUrlId, validateOnly);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateStableUrlRequest.class)
+ .add("parent", parent)
+ .add("stableUrl", stableUrl)
+ .add("stableUrlId", stableUrlId)
+ .add("validateOnly", validateOnly)
+ .toString();
+ }
+}
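As a quick orientation for reviewers, here is a minimal sketch of how this request type could be assembled and sent through the DisasterRecoveryAPI introduced later in this change. The account ID, stable URL ID, and workspace ID below are placeholders, and the ApiClient is assumed to be configured elsewhere.

  import com.databricks.sdk.core.ApiClient;
  import com.databricks.sdk.service.disasterrecovery.CreateStableUrlRequest;
  import com.databricks.sdk.service.disasterrecovery.DisasterRecoveryAPI;
  import com.databricks.sdk.service.disasterrecovery.StableUrl;

  public class CreateStableUrlSketch {
    // Sketch only: apiClient is assumed to be an already-configured ApiClient; every
    // identifier below is a placeholder, not a value taken from this change.
    static StableUrl createStableUrl(ApiClient apiClient, String accountId) {
      DisasterRecoveryAPI dr = new DisasterRecoveryAPI(apiClient);
      CreateStableUrlRequest request =
          new CreateStableUrlRequest()
              .setParent("accounts/" + accountId) // parent resource, per the field docs
              .setStableUrlId("my-stable-url") // client-provided identifier (placeholder)
              .setStableUrl(new StableUrl().setInitialWorkspaceId("1234567890"))
              .setValidateOnly(false);
      return dr.createStableUrl(request);
    }
  }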
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DeleteFailoverGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DeleteFailoverGroupRequest.java
new file mode 100755
index 000000000..b8e8325aa
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DeleteFailoverGroupRequest.java
@@ -0,0 +1,65 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteFailoverGroupRequest {
+ /**
+ * Opaque version string for optimistic locking. If provided, must match the current etag. If
+ * omitted, the delete proceeds without an etag check.
+ */
+ @JsonIgnore
+ @QueryParam("etag")
+ private String etag;
+
+ /**
+ * The fully qualified resource name of the failover group to delete. Format:
+ * accounts/{account_id}/failover-groups/{failover_group_id}.
+ */
+ @JsonIgnore private String name;
+
+ public DeleteFailoverGroupRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ public DeleteFailoverGroupRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteFailoverGroupRequest that = (DeleteFailoverGroupRequest) o;
+ return Objects.equals(etag, that.etag) && Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag, name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteFailoverGroupRequest.class)
+ .add("etag", etag)
+ .add("name", name)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DeleteStableUrlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DeleteStableUrlRequest.java
new file mode 100755
index 000000000..3263f17eb
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DeleteStableUrlRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteStableUrlRequest {
+ /**
+ * The fully qualified resource name. Format: accounts/{account_id}/stable-urls/{stable_url_id}.
+ */
+ @JsonIgnore private String name;
+
+ public DeleteStableUrlRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteStableUrlRequest that = (DeleteStableUrlRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteStableUrlRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DisasterRecoveryAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DisasterRecoveryAPI.java
new file mode 100755
index 000000000..0075e7b18
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DisasterRecoveryAPI.java
@@ -0,0 +1,124 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
+
+/** Manage disaster recovery configurations and execute failover operations. */
+@Generated
+public class DisasterRecoveryAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(DisasterRecoveryAPI.class);
+
+ private final DisasterRecoveryService impl;
+
+ /** Regular-use constructor */
+ public DisasterRecoveryAPI(ApiClient apiClient) {
+ impl = new DisasterRecoveryImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public DisasterRecoveryAPI(DisasterRecoveryService mock) {
+ impl = mock;
+ }
+
+ /** Create a new failover group. */
+ public FailoverGroup createFailoverGroup(CreateFailoverGroupRequest request) {
+ return impl.createFailoverGroup(request);
+ }
+
+ /** Create a new stable URL. */
+ public StableUrl createStableUrl(CreateStableUrlRequest request) {
+ return impl.createStableUrl(request);
+ }
+
+ public void deleteFailoverGroup(String name) {
+ deleteFailoverGroup(new DeleteFailoverGroupRequest().setName(name));
+ }
+
+ /** Delete a failover group. */
+ public void deleteFailoverGroup(DeleteFailoverGroupRequest request) {
+ impl.deleteFailoverGroup(request);
+ }
+
+ public void deleteStableUrl(String name) {
+ deleteStableUrl(new DeleteStableUrlRequest().setName(name));
+ }
+
+ /** Delete a stable URL. */
+ public void deleteStableUrl(DeleteStableUrlRequest request) {
+ impl.deleteStableUrl(request);
+ }
+
+ /** Initiate a failover to a new primary region. */
+ public FailoverGroup failoverFailoverGroup(FailoverFailoverGroupRequest request) {
+ return impl.failoverFailoverGroup(request);
+ }
+
+ public FailoverGroup getFailoverGroup(String name) {
+ return getFailoverGroup(new GetFailoverGroupRequest().setName(name));
+ }
+
+ /** Get a failover group. */
+ public FailoverGroup getFailoverGroup(GetFailoverGroupRequest request) {
+ return impl.getFailoverGroup(request);
+ }
+
+ public StableUrl getStableUrl(String name) {
+ return getStableUrl(new GetStableUrlRequest().setName(name));
+ }
+
+ /** Get a stable URL. */
+ public StableUrl getStableUrl(GetStableUrlRequest request) {
+ return impl.getStableUrl(request);
+ }
+
+ public Iterable<FailoverGroup> listFailoverGroups(String parent) {
+ return listFailoverGroups(new ListFailoverGroupsRequest().setParent(parent));
+ }
+
+ /** List failover groups. */
+ public Iterable<FailoverGroup> listFailoverGroups(ListFailoverGroupsRequest request) {
+ return new Paginator<>(
+ request,
+ impl::listFailoverGroups,
+ ListFailoverGroupsResponse::getFailoverGroups,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null || token.isEmpty()) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
+ public Iterable<StableUrl> listStableUrls(String parent) {
+ return listStableUrls(new ListStableUrlsRequest().setParent(parent));
+ }
+
+ /** List stable URLs for an account. */
+ public Iterable<StableUrl> listStableUrls(ListStableUrlsRequest request) {
+ return new Paginator<>(
+ request,
+ impl::listStableUrls,
+ ListStableUrlsResponse::getStableUrls,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null || token.isEmpty()) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
+ /** Update a failover group. */
+ public FailoverGroup updateFailoverGroup(UpdateFailoverGroupRequest request) {
+ return impl.updateFailoverGroup(request);
+ }
+
+ public DisasterRecoveryService impl() {
+ return impl;
+ }
+}
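A short usage sketch may help reviewers see how the pagination helpers above compose. The account ID and failover group ID are placeholders, and the ApiClient is assumed to be configured for the account-level endpoint.

  import com.databricks.sdk.core.ApiClient;
  import com.databricks.sdk.service.disasterrecovery.DisasterRecoveryAPI;
  import com.databricks.sdk.service.disasterrecovery.FailoverGroup;

  public class ListFailoverGroupsSketch {
    // Sketch only: the account and group identifiers below are placeholders.
    static void printFailoverGroups(ApiClient apiClient) {
      DisasterRecoveryAPI dr = new DisasterRecoveryAPI(apiClient);

      // The returned Paginator follows next_page_token across pages transparently.
      for (FailoverGroup group : dr.listFailoverGroups("accounts/123456")) {
        System.out.println(group.getName() + " -> " + group.getState());
      }

      // Point lookups take the fully qualified resource name.
      FailoverGroup one = dr.getFailoverGroup("accounts/123456/failover-groups/my-dr-group");
      System.out.println(one.getEffectivePrimaryRegion());
    }
  }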
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DisasterRecoveryImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DisasterRecoveryImpl.java
new file mode 100755
index 000000000..3ee207424
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DisasterRecoveryImpl.java
@@ -0,0 +1,164 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of DisasterRecovery */
+@Generated
+class DisasterRecoveryImpl implements DisasterRecoveryService {
+ private final ApiClient apiClient;
+
+ public DisasterRecoveryImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public FailoverGroup createFailoverGroup(CreateFailoverGroupRequest request) {
+ String path =
+ String.format("/api/disaster-recovery/v1/%s/failover-groups", request.getParent());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getFailoverGroup()));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, FailoverGroup.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public StableUrl createStableUrl(CreateStableUrlRequest request) {
+ String path = String.format("/api/disaster-recovery/v1/%s/stable-urls", request.getParent());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getStableUrl()));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, StableUrl.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void deleteFailoverGroup(DeleteFailoverGroupRequest request) {
+ String path = String.format("/api/disaster-recovery/v1/%s", request.getName());
+ try {
+ Request req = new Request("DELETE", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void deleteStableUrl(DeleteStableUrlRequest request) {
+ String path = String.format("/api/disaster-recovery/v1/%s", request.getName());
+ try {
+ Request req = new Request("DELETE", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public FailoverGroup failoverFailoverGroup(FailoverFailoverGroupRequest request) {
+ String path = String.format("/api/disaster-recovery/v1/%s/failover", request.getName());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, FailoverGroup.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public FailoverGroup getFailoverGroup(GetFailoverGroupRequest request) {
+ String path = String.format("/api/disaster-recovery/v1/%s", request.getName());
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, FailoverGroup.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public StableUrl getStableUrl(GetStableUrlRequest request) {
+ String path = String.format("/api/disaster-recovery/v1/%s", request.getName());
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, StableUrl.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ListFailoverGroupsResponse listFailoverGroups(ListFailoverGroupsRequest request) {
+ String path =
+ String.format("/api/disaster-recovery/v1/%s/failover-groups", request.getParent());
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListFailoverGroupsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ListStableUrlsResponse listStableUrls(ListStableUrlsRequest request) {
+ String path = String.format("/api/disaster-recovery/v1/%s/stable-urls", request.getParent());
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListStableUrlsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public FailoverGroup updateFailoverGroup(UpdateFailoverGroupRequest request) {
+ String path = String.format("/api/disaster-recovery/v1/%s", request.getName());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getFailoverGroup()));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, FailoverGroup.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DisasterRecoveryService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DisasterRecoveryService.java
new file mode 100755
index 000000000..2085eedee
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/DisasterRecoveryService.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Manage disaster recovery configurations and execute failover operations.
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface DisasterRecoveryService {
+ /** Create a new failover group. */
+ FailoverGroup createFailoverGroup(CreateFailoverGroupRequest createFailoverGroupRequest);
+
+ /** Create a new stable URL. */
+ StableUrl createStableUrl(CreateStableUrlRequest createStableUrlRequest);
+
+ /** Delete a failover group. */
+ void deleteFailoverGroup(DeleteFailoverGroupRequest deleteFailoverGroupRequest);
+
+ /** Delete a stable URL. */
+ void deleteStableUrl(DeleteStableUrlRequest deleteStableUrlRequest);
+
+ /** Initiate a failover to a new primary region. */
+ FailoverGroup failoverFailoverGroup(FailoverFailoverGroupRequest failoverFailoverGroupRequest);
+
+ /** Get a failover group. */
+ FailoverGroup getFailoverGroup(GetFailoverGroupRequest getFailoverGroupRequest);
+
+ /** Get a stable URL. */
+ StableUrl getStableUrl(GetStableUrlRequest getStableUrlRequest);
+
+ /** List failover groups. */
+ ListFailoverGroupsResponse listFailoverGroups(
+ ListFailoverGroupsRequest listFailoverGroupsRequest);
+
+ /** List stable URLs for an account. */
+ ListStableUrlsResponse listStableUrls(ListStableUrlsRequest listStableUrlsRequest);
+
+ /** Update a failover group. */
+ FailoverGroup updateFailoverGroup(UpdateFailoverGroupRequest updateFailoverGroupRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/FailoverFailoverGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/FailoverFailoverGroupRequest.java
new file mode 100755
index 000000000..e7b7f1c02
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/FailoverFailoverGroupRequest.java
@@ -0,0 +1,100 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Request to failover a failover group to a new primary region. */
+@Generated
+public class FailoverFailoverGroupRequest {
+ /**
+ * Opaque version string for optimistic locking. If provided, must match the current etag. If
+ * omitted, the failover proceeds regardless of current state.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
+ /** The type of failover to perform. */
+ @JsonProperty("failover_type")
+ private FailoverFailoverGroupRequestFailoverType failoverType;
+
+ /**
+ * The fully qualified resource name of the failover group to failover. Format:
+ * accounts/{account_id}/failover-groups/{failover_group_id}.
+ */
+ @JsonIgnore private String name;
+
+ /**
+ * The target primary region. Must be one of the derived regions and different from the current
+ * effective_primary_region. Serves as an idempotency check.
+ */
+ @JsonProperty("target_primary_region")
+ private String targetPrimaryRegion;
+
+ public FailoverFailoverGroupRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ public FailoverFailoverGroupRequest setFailoverType(
+ FailoverFailoverGroupRequestFailoverType failoverType) {
+ this.failoverType = failoverType;
+ return this;
+ }
+
+ public FailoverFailoverGroupRequestFailoverType getFailoverType() {
+ return failoverType;
+ }
+
+ public FailoverFailoverGroupRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public FailoverFailoverGroupRequest setTargetPrimaryRegion(String targetPrimaryRegion) {
+ this.targetPrimaryRegion = targetPrimaryRegion;
+ return this;
+ }
+
+ public String getTargetPrimaryRegion() {
+ return targetPrimaryRegion;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ FailoverFailoverGroupRequest that = (FailoverFailoverGroupRequest) o;
+ return Objects.equals(etag, that.etag)
+ && Objects.equals(failoverType, that.failoverType)
+ && Objects.equals(name, that.name)
+ && Objects.equals(targetPrimaryRegion, that.targetPrimaryRegion);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag, failoverType, name, targetPrimaryRegion);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(FailoverFailoverGroupRequest.class)
+ .add("etag", etag)
+ .add("failoverType", failoverType)
+ .add("name", name)
+ .add("targetPrimaryRegion", targetPrimaryRegion)
+ .toString();
+ }
+}
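For context, a hedged sketch of how a forced failover might be driven with this request type; reading the group first supplies the etag for the optimistic-locking check. The resource name and target region are placeholders.

  import com.databricks.sdk.service.disasterrecovery.DisasterRecoveryAPI;
  import com.databricks.sdk.service.disasterrecovery.FailoverFailoverGroupRequest;
  import com.databricks.sdk.service.disasterrecovery.FailoverFailoverGroupRequestFailoverType;
  import com.databricks.sdk.service.disasterrecovery.FailoverGroup;

  public class FailoverSketch {
    // Sketch only: dr is a DisasterRecoveryAPI constructed elsewhere; "us-west-2" is a
    // placeholder and must differ from the group's current effective_primary_region.
    static FailoverGroup forceFailover(DisasterRecoveryAPI dr, String name) {
      FailoverGroup current = dr.getFailoverGroup(name); // provides the current etag
      FailoverFailoverGroupRequest request =
          new FailoverFailoverGroupRequest()
              .setName(name)
              .setEtag(current.getEtag())
              .setFailoverType(FailoverFailoverGroupRequestFailoverType.FORCED)
              .setTargetPrimaryRegion("us-west-2");
      return dr.failoverFailoverGroup(request);
    }
  }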
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/FailoverFailoverGroupRequestFailoverType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/FailoverFailoverGroupRequestFailoverType.java
new file mode 100755
index 000000000..a3863ce82
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/FailoverFailoverGroupRequestFailoverType.java
@@ -0,0 +1,11 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+
+/** The type of failover to perform. */
+@Generated
+public enum FailoverFailoverGroupRequestFailoverType {
+ FORCED,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/FailoverGroup.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/FailoverGroup.java
new file mode 100755
index 000000000..6004930e6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/FailoverGroup.java
@@ -0,0 +1,223 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.protobuf.Timestamp;
+import java.util.Collection;
+import java.util.Objects;
+
+/**
+ * A failover group manages disaster recovery across workspace sets, coordinating UCDR and CPDR
+ * replication.
+ */
+@Generated
+public class FailoverGroup {
+ /** Time at which this failover group was created. */
+ @JsonProperty("create_time")
+ private Timestamp createTime;
+
+ /**
+ * Current effective primary region. Replication flows FROM workspaces in this region. Changes
+ * after a successful failover.
+ */
+ @JsonProperty("effective_primary_region")
+ private String effectivePrimaryRegion;
+
+ /**
+ * Opaque version string for optimistic locking. Server-generated, returned in responses. Must be
+ * provided on Update requests to prevent concurrent modifications.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
+ /**
+ * Initial primary region. Used only in Create requests to set the starting primary region. Not
+ * returned in responses.
+ */
+ @JsonProperty("initial_primary_region")
+ private String initialPrimaryRegion;
+
+ /**
+ * Fully qualified resource name in the format
+ * accounts/{account_id}/failover-groups/{failover_group_id}.
+ */
+ @JsonProperty("name")
+ private String name;
+
+ /** List of all regions participating in this failover group. */
+ @JsonProperty("regions")
+ private Collection<String> regions;
+
+ /** The latest point in time to which data has been replicated. */
+ @JsonProperty("replication_point")
+ private Timestamp replicationPoint;
+
+ /** Aggregate state of the failover group. */
+ @JsonProperty("state")
+ private FailoverGroupState state;
+
+ /** Unity Catalog replication configuration. */
+ @JsonProperty("unity_catalog_assets")
+ private UcReplicationConfig unityCatalogAssets;
+
+ /** Time at which this failover group was last modified. */
+ @JsonProperty("update_time")
+ private Timestamp updateTime;
+
+ /** Workspace sets, each containing workspaces that replicate to each other. */
+ @JsonProperty("workspace_sets")
+ private Collection<WorkspaceSet> workspaceSets;
+
+ public FailoverGroup setCreateTime(Timestamp createTime) {
+ this.createTime = createTime;
+ return this;
+ }
+
+ public Timestamp getCreateTime() {
+ return createTime;
+ }
+
+ public FailoverGroup setEffectivePrimaryRegion(String effectivePrimaryRegion) {
+ this.effectivePrimaryRegion = effectivePrimaryRegion;
+ return this;
+ }
+
+ public String getEffectivePrimaryRegion() {
+ return effectivePrimaryRegion;
+ }
+
+ public FailoverGroup setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ public FailoverGroup setInitialPrimaryRegion(String initialPrimaryRegion) {
+ this.initialPrimaryRegion = initialPrimaryRegion;
+ return this;
+ }
+
+ public String getInitialPrimaryRegion() {
+ return initialPrimaryRegion;
+ }
+
+ public FailoverGroup setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public FailoverGroup setRegions(Collection<String> regions) {
+ this.regions = regions;
+ return this;
+ }
+
+ public Collection<String> getRegions() {
+ return regions;
+ }
+
+ public FailoverGroup setReplicationPoint(Timestamp replicationPoint) {
+ this.replicationPoint = replicationPoint;
+ return this;
+ }
+
+ public Timestamp getReplicationPoint() {
+ return replicationPoint;
+ }
+
+ public FailoverGroup setState(FailoverGroupState state) {
+ this.state = state;
+ return this;
+ }
+
+ public FailoverGroupState getState() {
+ return state;
+ }
+
+ public FailoverGroup setUnityCatalogAssets(UcReplicationConfig unityCatalogAssets) {
+ this.unityCatalogAssets = unityCatalogAssets;
+ return this;
+ }
+
+ public UcReplicationConfig getUnityCatalogAssets() {
+ return unityCatalogAssets;
+ }
+
+ public FailoverGroup setUpdateTime(Timestamp updateTime) {
+ this.updateTime = updateTime;
+ return this;
+ }
+
+ public Timestamp getUpdateTime() {
+ return updateTime;
+ }
+
+ public FailoverGroup setWorkspaceSets(Collection<WorkspaceSet> workspaceSets) {
+ this.workspaceSets = workspaceSets;
+ return this;
+ }
+
+ public Collection<WorkspaceSet> getWorkspaceSets() {
+ return workspaceSets;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ FailoverGroup that = (FailoverGroup) o;
+ return Objects.equals(createTime, that.createTime)
+ && Objects.equals(effectivePrimaryRegion, that.effectivePrimaryRegion)
+ && Objects.equals(etag, that.etag)
+ && Objects.equals(initialPrimaryRegion, that.initialPrimaryRegion)
+ && Objects.equals(name, that.name)
+ && Objects.equals(regions, that.regions)
+ && Objects.equals(replicationPoint, that.replicationPoint)
+ && Objects.equals(state, that.state)
+ && Objects.equals(unityCatalogAssets, that.unityCatalogAssets)
+ && Objects.equals(updateTime, that.updateTime)
+ && Objects.equals(workspaceSets, that.workspaceSets);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ createTime,
+ effectivePrimaryRegion,
+ etag,
+ initialPrimaryRegion,
+ name,
+ regions,
+ replicationPoint,
+ state,
+ unityCatalogAssets,
+ updateTime,
+ workspaceSets);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(FailoverGroup.class)
+ .add("createTime", createTime)
+ .add("effectivePrimaryRegion", effectivePrimaryRegion)
+ .add("etag", etag)
+ .add("initialPrimaryRegion", initialPrimaryRegion)
+ .add("name", name)
+ .add("regions", regions)
+ .add("replicationPoint", replicationPoint)
+ .add("state", state)
+ .add("unityCatalogAssets", unityCatalogAssets)
+ .add("updateTime", updateTime)
+ .add("workspaceSets", workspaceSets)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/FailoverGroupState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/FailoverGroupState.java
new file mode 100755
index 000000000..27b434ee5
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/FailoverGroupState.java
@@ -0,0 +1,18 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+
+/** The aggregate state of a FailoverGroup. */
+@Generated
+public enum FailoverGroupState {
+ ACTIVE,
+ CREATING,
+ CREATION_FAILED,
+ DELETING,
+ DELETION_FAILED,
+ FAILING_OVER,
+ FAILOVER_FAILED,
+ INITIAL_REPLICATION,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/GetFailoverGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/GetFailoverGroupRequest.java
new file mode 100755
index 000000000..2e0344193
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/GetFailoverGroupRequest.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetFailoverGroupRequest {
+ /**
+ * The fully qualified resource name of the failover group. Format:
+ * accounts/{account_id}/failover-groups/{failover_group_id}.
+ */
+ @JsonIgnore private String name;
+
+ public GetFailoverGroupRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetFailoverGroupRequest that = (GetFailoverGroupRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetFailoverGroupRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/GetStableUrlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/GetStableUrlRequest.java
new file mode 100755
index 000000000..71aad7750
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/GetStableUrlRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetStableUrlRequest {
+ /**
+ * The fully qualified resource name. Format: accounts/{account_id}/stable-urls/{stable_url_id}.
+ */
+ @JsonIgnore private String name;
+
+ public GetStableUrlRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetStableUrlRequest that = (GetStableUrlRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetStableUrlRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/ListFailoverGroupsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/ListFailoverGroupsRequest.java
new file mode 100755
index 000000000..2b677222e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/ListFailoverGroupsRequest.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListFailoverGroupsRequest {
+ /** Maximum number of failover groups to return per page. Default: 50, maximum: 100. */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * Page token received from a previous ListFailoverGroups call. Provide this to retrieve the
+ * subsequent page.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ /** The parent resource. Format: accounts/{account_id}. */
+ @JsonIgnore private String parent;
+
+ public ListFailoverGroupsRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListFailoverGroupsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ public ListFailoverGroupsRequest setParent(String parent) {
+ this.parent = parent;
+ return this;
+ }
+
+ public String getParent() {
+ return parent;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListFailoverGroupsRequest that = (ListFailoverGroupsRequest) o;
+ return Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken)
+ && Objects.equals(parent, that.parent);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken, parent);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListFailoverGroupsRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .add("parent", parent)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/ListFailoverGroupsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/ListFailoverGroupsResponse.java
new file mode 100755
index 000000000..76ca542ef
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/ListFailoverGroupsResponse.java
@@ -0,0 +1,64 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Response for listing failover groups. */
+@Generated
+public class ListFailoverGroupsResponse {
+ /** The failover groups for this account. */
+ @JsonProperty("failover_groups")
+ private Collection<FailoverGroup> failoverGroups;
+
+ /**
+ * A token that can be sent as page_token to retrieve the next page. If omitted, there are no
+ * subsequent pages.
+ */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ public ListFailoverGroupsResponse setFailoverGroups(Collection<FailoverGroup> failoverGroups) {
+ this.failoverGroups = failoverGroups;
+ return this;
+ }
+
+ public Collection<FailoverGroup> getFailoverGroups() {
+ return failoverGroups;
+ }
+
+ public ListFailoverGroupsResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListFailoverGroupsResponse that = (ListFailoverGroupsResponse) o;
+ return Objects.equals(failoverGroups, that.failoverGroups)
+ && Objects.equals(nextPageToken, that.nextPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(failoverGroups, nextPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListFailoverGroupsResponse.class)
+ .add("failoverGroups", failoverGroups)
+ .add("nextPageToken", nextPageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/ListStableUrlsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/ListStableUrlsRequest.java
new file mode 100755
index 000000000..8310e9ef2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/ListStableUrlsRequest.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListStableUrlsRequest {
+ /** Maximum number of stable URLs to return per page. Default: 50, maximum: 100. */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * Page token received from a previous ListStableUrls call. Provide this to retrieve the
+ * subsequent page.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ /** The parent resource. Format: accounts/{account_id}. */
+ @JsonIgnore private String parent;
+
+ public ListStableUrlsRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListStableUrlsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ public ListStableUrlsRequest setParent(String parent) {
+ this.parent = parent;
+ return this;
+ }
+
+ public String getParent() {
+ return parent;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListStableUrlsRequest that = (ListStableUrlsRequest) o;
+ return Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken)
+ && Objects.equals(parent, that.parent);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken, parent);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListStableUrlsRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .add("parent", parent)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/ListStableUrlsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/ListStableUrlsResponse.java
new file mode 100755
index 000000000..e9519a5d6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/ListStableUrlsResponse.java
@@ -0,0 +1,64 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Response for listing stable URLs. */
+@Generated
+public class ListStableUrlsResponse {
+ /**
+ * A token that can be sent as page_token to retrieve the next page. If omitted, there are no
+ * subsequent pages.
+ */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ /** The stable URLs for this account. */
+ @JsonProperty("stable_urls")
+ private Collection<StableUrl> stableUrls;
+
+ public ListStableUrlsResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ public ListStableUrlsResponse setStableUrls(Collection<StableUrl> stableUrls) {
+ this.stableUrls = stableUrls;
+ return this;
+ }
+
+ public Collection<StableUrl> getStableUrls() {
+ return stableUrls;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListStableUrlsResponse that = (ListStableUrlsResponse) o;
+ return Objects.equals(nextPageToken, that.nextPageToken)
+ && Objects.equals(stableUrls, that.stableUrls);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(nextPageToken, stableUrls);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListStableUrlsResponse.class)
+ .add("nextPageToken", nextPageToken)
+ .add("stableUrls", stableUrls)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/LocationMapping.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/LocationMapping.java
new file mode 100755
index 000000000..03e2ff72d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/LocationMapping.java
@@ -0,0 +1,63 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/**
+ * A location mapping identified by a name, with URIs per region. The system derives replication
+ * direction from effective_primary_region.
+ */
+@Generated
+public class LocationMapping {
+ /** Resource name for this location. */
+ @JsonProperty("name")
+ private String name;
+
+ /** URI for each region. Each entry maps a region name to a storage URI. */
+ @JsonProperty("uri_by_region")
+ private Collection<LocationMappingEntry> uriByRegion;
+
+ public LocationMapping setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public LocationMapping setUriByRegion(Collection<LocationMappingEntry> uriByRegion) {
+ this.uriByRegion = uriByRegion;
+ return this;
+ }
+
+ public Collection<LocationMappingEntry> getUriByRegion() {
+ return uriByRegion;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ LocationMapping that = (LocationMapping) o;
+ return Objects.equals(name, that.name) && Objects.equals(uriByRegion, that.uriByRegion);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, uriByRegion);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(LocationMapping.class)
+ .add("name", name)
+ .add("uriByRegion", uriByRegion)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/LocationMappingEntry.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/LocationMappingEntry.java
new file mode 100755
index 000000000..c83e93499
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/LocationMappingEntry.java
@@ -0,0 +1,62 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * A single entry in a location mapping, mapping a region to a storage URI. Used instead of a map
+ * for proto2 compatibility.
+ */
+@Generated
+public class LocationMappingEntry {
+ /** The region name. */
+ @JsonProperty("region")
+ private String region;
+
+ /** The storage URI for this region. */
+ @JsonProperty("uri")
+ private String uri;
+
+ public LocationMappingEntry setRegion(String region) {
+ this.region = region;
+ return this;
+ }
+
+ public String getRegion() {
+ return region;
+ }
+
+ public LocationMappingEntry setUri(String uri) {
+ this.uri = uri;
+ return this;
+ }
+
+ public String getUri() {
+ return uri;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ LocationMappingEntry that = (LocationMappingEntry) o;
+ return Objects.equals(region, that.region) && Objects.equals(uri, that.uri);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(region, uri);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(LocationMappingEntry.class)
+ .add("region", region)
+ .add("uri", uri)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/StableUrl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/StableUrl.java
new file mode 100755
index 000000000..285b7db64
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/StableUrl.java
@@ -0,0 +1,86 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * A stable URL provides a failover-aware endpoint for accessing a workspace. Its lifecycle is
+ * independent of any failover group.
+ */
+@Generated
+public class StableUrl {
+ /**
+ * The workspace this stable URL is initially bound to. Used only in Create requests to associate
+ * the stable URL with a workspace. Not returned in responses. Mirrors
+ * FailoverGroup.initial_primary_region semantics.
+ */
+ @JsonProperty("initial_workspace_id")
+ private String initialWorkspaceId;
+
+ /** Fully qualified resource name. Format: accounts/{account_id}/stable-urls/{stable_url_id}. */
+ @JsonProperty("name")
+ private String name;
+
+ /**
+ * The stable URL endpoint. Generated by the backend on creation and immutable thereafter. For
+ * non-Private-Link workspaces this is `https:///?c=`. For Private-Link
+ * workspaces this is the per-connection hostname.
+ */
+ @JsonProperty("url")
+ private String url;
+
+ public StableUrl setInitialWorkspaceId(String initialWorkspaceId) {
+ this.initialWorkspaceId = initialWorkspaceId;
+ return this;
+ }
+
+ public String getInitialWorkspaceId() {
+ return initialWorkspaceId;
+ }
+
+ public StableUrl setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public StableUrl setUrl(String url) {
+ this.url = url;
+ return this;
+ }
+
+ public String getUrl() {
+ return url;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ StableUrl that = (StableUrl) o;
+ return Objects.equals(initialWorkspaceId, that.initialWorkspaceId)
+ && Objects.equals(name, that.name)
+ && Objects.equals(url, that.url);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(initialWorkspaceId, name, url);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(StableUrl.class)
+ .add("initialWorkspaceId", initialWorkspaceId)
+ .add("name", name)
+ .add("url", url)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/UcCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/UcCatalog.java
new file mode 100755
index 000000000..d81d172eb
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/UcCatalog.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** A Unity Catalog catalog to replicate. */
+@Generated
+public class UcCatalog {
+ /** The name of the UC catalog to replicate. */
+ @JsonProperty("name")
+ private String name;
+
+ public UcCatalog setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UcCatalog that = (UcCatalog) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UcCatalog.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/UcReplicationConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/UcReplicationConfig.java
new file mode 100755
index 000000000..8b61b86b2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/UcReplicationConfig.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Unity Catalog replication configuration (top-level, not per-set). */
+@Generated
+public class UcReplicationConfig {
+ /** UC catalogs to replicate. */
+ @JsonProperty("catalogs")
+ private Collection<UcCatalog> catalogs;
+
+ /**
+ * The workspace set whose workspaces will be used for data replication of all UC catalogs'
+ * underlying storage.
+ */
+ @JsonProperty("data_replication_workspace_set")
+ private String dataReplicationWorkspaceSet;
+
+ /** Location mappings - storage URI per region for each location. */
+ @JsonProperty("location_mappings")
+ private Collection<LocationMapping> locationMappings;
+
+ public UcReplicationConfig setCatalogs(Collection<UcCatalog> catalogs) {
+ this.catalogs = catalogs;
+ return this;
+ }
+
+ public Collection<UcCatalog> getCatalogs() {
+ return catalogs;
+ }
+
+ public UcReplicationConfig setDataReplicationWorkspaceSet(String dataReplicationWorkspaceSet) {
+ this.dataReplicationWorkspaceSet = dataReplicationWorkspaceSet;
+ return this;
+ }
+
+ public String getDataReplicationWorkspaceSet() {
+ return dataReplicationWorkspaceSet;
+ }
+
+ public UcReplicationConfig setLocationMappings(Collection<LocationMapping> locationMappings) {
+ this.locationMappings = locationMappings;
+ return this;
+ }
+
+ public Collection<LocationMapping> getLocationMappings() {
+ return locationMappings;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UcReplicationConfig that = (UcReplicationConfig) o;
+ return Objects.equals(catalogs, that.catalogs)
+ && Objects.equals(dataReplicationWorkspaceSet, that.dataReplicationWorkspaceSet)
+ && Objects.equals(locationMappings, that.locationMappings);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(catalogs, dataReplicationWorkspaceSet, locationMappings);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UcReplicationConfig.class)
+ .add("catalogs", catalogs)
+ .add("dataReplicationWorkspaceSet", dataReplicationWorkspaceSet)
+ .add("locationMappings", locationMappings)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/UpdateFailoverGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/UpdateFailoverGroupRequest.java
new file mode 100755
index 000000000..777ad584b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/UpdateFailoverGroupRequest.java
@@ -0,0 +1,83 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.protobuf.FieldMask;
+import java.util.Objects;
+
+@Generated
+public class UpdateFailoverGroupRequest {
+ /**
+ * The failover group with updated fields. The name field identifies the resource and is populated
+ * from the URL path.
+ */
+ @JsonProperty("failover_group")
+ private FailoverGroup failoverGroup;
+
+ /**
+ * Fully qualified resource name in the format
+ * accounts/{account_id}/failover-groups/{failover_group_id}.
+ */
+ @JsonIgnore private String name;
+
+ /** Comma-separated list of fields to update. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private FieldMask updateMask;
+
+ public UpdateFailoverGroupRequest setFailoverGroup(FailoverGroup failoverGroup) {
+ this.failoverGroup = failoverGroup;
+ return this;
+ }
+
+ public FailoverGroup getFailoverGroup() {
+ return failoverGroup;
+ }
+
+ public UpdateFailoverGroupRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public UpdateFailoverGroupRequest setUpdateMask(FieldMask updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public FieldMask getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateFailoverGroupRequest that = (UpdateFailoverGroupRequest) o;
+ return Objects.equals(failoverGroup, that.failoverGroup)
+ && Objects.equals(name, that.name)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(failoverGroup, name, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateFailoverGroupRequest.class)
+ .add("failoverGroup", failoverGroup)
+ .add("name", name)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
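A hedged sketch of a field-masked update follows. The "unity_catalog_assets" path string and the assumption that only masked fields are applied follow standard FieldMask conventions and are not confirmed by this change; all other identifiers are placeholders.

  import com.databricks.sdk.service.disasterrecovery.DisasterRecoveryAPI;
  import com.databricks.sdk.service.disasterrecovery.FailoverGroup;
  import com.databricks.sdk.service.disasterrecovery.UcCatalog;
  import com.databricks.sdk.service.disasterrecovery.UcReplicationConfig;
  import com.databricks.sdk.service.disasterrecovery.UpdateFailoverGroupRequest;
  import com.google.protobuf.FieldMask;
  import java.util.Arrays;

  public class UpdateFailoverGroupSketch {
    // Sketch only: the mask path is an assumption based on the JSON field name, and dr is
    // a DisasterRecoveryAPI constructed elsewhere.
    static FailoverGroup replaceCatalogs(DisasterRecoveryAPI dr, String name, String catalog) {
      FailoverGroup current = dr.getFailoverGroup(name); // re-sent below so the etag is preserved
      UcReplicationConfig ucConfig =
          new UcReplicationConfig().setCatalogs(Arrays.asList(new UcCatalog().setName(catalog)));
      UpdateFailoverGroupRequest request =
          new UpdateFailoverGroupRequest()
              .setName(name)
              .setFailoverGroup(current.setUnityCatalogAssets(ucConfig))
              .setUpdateMask(FieldMask.newBuilder().addPaths("unity_catalog_assets").build());
      return dr.updateFailoverGroup(request);
    }
  }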
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/WorkspaceSet.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/WorkspaceSet.java
new file mode 100755
index 000000000..80ff67d68
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/disasterrecovery/WorkspaceSet.java
@@ -0,0 +1,101 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.disasterrecovery;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** A set of workspaces that replicate to each other across regions. */
+@Generated
+public class WorkspaceSet {
+ /** Resource name for this workspace set. */
+ @JsonProperty("name")
+ private String name;
+
+ /**
+ * Whether to enable control plane DR (notebooks, jobs, clusters, etc.) for this set. Requires all
+ * workspaces in the set to be Mission Critical tier.
+ */
+ @JsonProperty("replicate_workspace_assets")
+ private Boolean replicateWorkspaceAssets;
+
+ /**
+ * Resource names of stable URLs associated with this workspace set. Format:
+ * accounts/{account_id}/stable-urls/{stable_url_id}. The referenced stable URLs must already
+ * exist (via CreateStableUrl).
+ */
+ @JsonProperty("stable_url_names")
+ private Collection<String> stableUrlNames;
+
+ /**
+ * Workspace IDs in this set. The system derives and validates regions. EA: exactly 2 workspaces
+ * (one per region).
+ */
+ @JsonProperty("workspace_ids")
+ private Collection<Long> workspaceIds;
+
+ public WorkspaceSet setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public WorkspaceSet setReplicateWorkspaceAssets(Boolean replicateWorkspaceAssets) {
+ this.replicateWorkspaceAssets = replicateWorkspaceAssets;
+ return this;
+ }
+
+ public Boolean getReplicateWorkspaceAssets() {
+ return replicateWorkspaceAssets;
+ }
+
+ public WorkspaceSet setStableUrlNames(Collection<String> stableUrlNames) {
+ this.stableUrlNames = stableUrlNames;
+ return this;
+ }
+
+ public Collection<String> getStableUrlNames() {
+ return stableUrlNames;
+ }
+
+ public WorkspaceSet setWorkspaceIds(Collection<Long> workspaceIds) {
+ this.workspaceIds = workspaceIds;
+ return this;
+ }
+
+ public Collection<Long> getWorkspaceIds() {
+ return workspaceIds;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ WorkspaceSet that = (WorkspaceSet) o;
+ return Objects.equals(name, that.name)
+ && Objects.equals(replicateWorkspaceAssets, that.replicateWorkspaceAssets)
+ && Objects.equals(stableUrlNames, that.stableUrlNames)
+ && Objects.equals(workspaceIds, that.workspaceIds);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, replicateWorkspaceAssets, stableUrlNames, workspaceIds);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(WorkspaceSet.class)
+ .add("name", name)
+ .add("replicateWorkspaceAssets", replicateWorkspaceAssets)
+ .add("stableUrlNames", stableUrlNames)
+ .add("workspaceIds", workspaceIds)
+ .toString();
+ }
+}
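A WorkspaceSet is built with the same fluent setters as the other generated models. The sketch below pairs two workspaces, opts into control-plane DR, and references a pre-created stable URL; the IDs and names are placeholders, and numeric (Long) workspace IDs are an assumption:

```java
import com.databricks.sdk.service.disasterrecovery.WorkspaceSet;
import java.util.Arrays;

public class WorkspaceSetSketch {
  public static void main(String[] args) {
    WorkspaceSet set =
        new WorkspaceSet()
            // Exactly two workspaces (one per region) during EA; Long IDs are assumed here.
            .setWorkspaceIds(Arrays.asList(1111111111111L, 2222222222222L))
            // Control-plane DR requires every workspace in the set to be Mission Critical tier.
            .setReplicateWorkspaceAssets(true)
            // The referenced stable URL must already exist (via CreateStableUrl).
            .setStableUrlNames(Arrays.asList("accounts/<account-id>/stable-urls/<stable-url-id>"));

    System.out.println(set);
  }
}
```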
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/CreateWorkspaceBaseEnvironmentOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/CreateWorkspaceBaseEnvironmentOperation.java
index 7a164ba04..a7791de3e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/CreateWorkspaceBaseEnvironmentOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/CreateWorkspaceBaseEnvironmentOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.environments;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running createWorkspaceBaseEnvironment operation. Provides
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/EnvironmentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/EnvironmentsAPI.java
index 14355e0b1..6f37f8f89 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/EnvironmentsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/EnvironmentsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.environments;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* APIs to manage environment resources.
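The import swaps in this and the surrounding files move the generated classes off org.slf4j and onto the SDK's own com.databricks.sdk.core.logging package; nothing else in these classes changes. A sketch of the declaration pattern they keep using, under the assumption that the new facade mirrors the slf4j factory and logger API:

```java
import com.databricks.sdk.core.logging.Logger;
import com.databricks.sdk.core.logging.LoggerFactory;

class LoggingFacadeSketch {
  // Same shape as before; only the imported Logger/LoggerFactory types changed.
  private static final Logger LOG = LoggerFactory.getLogger(LoggingFacadeSketch.class);

  void poll() {
    LOG.info("polling long-running operation"); // assumes slf4j-style level methods
  }
}
```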
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/RefreshWorkspaceBaseEnvironmentOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/RefreshWorkspaceBaseEnvironmentOperation.java
index 52034f985..976459a90 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/RefreshWorkspaceBaseEnvironmentOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/RefreshWorkspaceBaseEnvironmentOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.environments;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running refreshWorkspaceBaseEnvironment operation. Provides
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/UpdateWorkspaceBaseEnvironmentOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/UpdateWorkspaceBaseEnvironmentOperation.java
index 19125a238..7b91b7b79 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/UpdateWorkspaceBaseEnvironmentOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/UpdateWorkspaceBaseEnvironmentOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.environments;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running updateWorkspaceBaseEnvironment operation. Provides
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java
index 0b155cf88..3c5681fde 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.files;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* DBFS API makes it simple to interact with various data sources without having to include a users
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java
index 579389ac9..2d2a86657 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.files;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Files API is a standard HTTP API that allows you to read, write, list, and delete files and
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlAPI.java
index a48e6a2b9..f4e3d9c00 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.iam;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Rule based Access Control for Databricks Resources. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlAPI.java
index 9ccd99b11..60a4c96ba 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.iam;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs manage access rules on resources in an account. Currently, only grant rules are
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyAPI.java
index 009c675e7..5c9c83569 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.iam;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs manage access rules on resources in an account. Currently, only grant rules are
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsV2API.java
index 570dce648..741098068 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsV2API.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsV2API.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.iam;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Groups simplify identity management, making it easier to assign access to Databricks account,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsV2API.java
index 43c0da6ef..42173e14a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsV2API.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsV2API.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.iam;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersV2API.java
index e71c93daf..af29f791a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersV2API.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersV2API.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.iam;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* User identities recognized by Databricks and represented by email addresses.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CurrentUserAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CurrentUserAPI.java
index 257e60d9e..302fcff03 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CurrentUserAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CurrentUserAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.iam;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* This API allows retrieving information about currently authenticated user or service principal.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java
index f7f7dd752..4980b2e1f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java
@@ -15,8 +15,8 @@ public class GetPermissionLevelsRequest {
/**
* The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
* clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories,
- * experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries,
- * registered-models, repos, serving-endpoints, or warehouses.
+ * experiments, files, genie, instance-pools, jobs, knowledge-assistants, notebooks, pipelines,
+ * queries, registered-models, repos, serving-endpoints, or warehouses.
*/
@JsonIgnore private String requestObjectType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java
index b27841373..85383b5bc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java
@@ -15,8 +15,8 @@ public class GetPermissionRequest {
/**
* The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
* clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories,
- * experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries,
- * registered-models, repos, serving-endpoints, or warehouses.
+ * experiments, files, genie, instance-pools, jobs, knowledge-assistants, notebooks, pipelines,
+ * queries, registered-models, repos, serving-endpoints, or warehouses.
*/
@JsonIgnore private String requestObjectType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsV2API.java
index 84e109766..638be6c41 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsV2API.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsV2API.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.iam;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Groups simplify identity management, making it easier to assign access to Databricks workspace,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationAPI.java
index 5a5c04950..e9c57e934 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.iam;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* APIs for migrating acl permissions, used only by the ucx tool:
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java
index 1dae5c2a0..38defe3f1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.iam;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Permissions API are used to create read, write, edit, update and manage access for various users
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsV2API.java
index adbd38cc9..53bfcf54a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsV2API.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsV2API.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.iam;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java
index a30c83e0c..a7bb4584b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java
@@ -21,8 +21,8 @@ public class SetObjectPermissions {
/**
* The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
* clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories,
- * experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries,
- * registered-models, repos, serving-endpoints, or warehouses.
+ * experiments, files, genie, instance-pools, jobs, knowledge-assistants, notebooks, pipelines,
+ * queries, registered-models, repos, serving-endpoints, or warehouses.
*/
@JsonIgnore private String requestObjectType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java
index 2420a2d73..69897eafc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java
@@ -21,8 +21,8 @@ public class UpdateObjectPermissions {
/**
* The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
* clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories,
- * experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries,
- * registered-models, repos, serving-endpoints, or warehouses.
+ * experiments, files, genie, instance-pools, jobs, knowledge-assistants, notebooks, pipelines,
+ * queries, registered-models, repos, serving-endpoints, or warehouses.
*/
@JsonIgnore private String requestObjectType;
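These doc-string updates register "knowledge-assistants" as an accepted request_object_type on the generic permissions endpoints. A minimal sketch of targeting a knowledge assistant through that generic surface; the fluent setters follow the generated-model convention, and setRequestObjectId is assumed since only the type field appears in this excerpt:

```java
import com.databricks.sdk.service.iam.GetPermissionLevelsRequest;

public class KnowledgeAssistantPermissionLevelsSketch {
  public static void main(String[] args) {
    GetPermissionLevelsRequest request =
        new GetPermissionLevelsRequest()
            // Newly documented object type for the generic permissions API.
            .setRequestObjectType("knowledge-assistants")
            .setRequestObjectId("<knowledge-assistant-id>"); // assumed setter, placeholder ID

    System.out.println(request);
  }
}
```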
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersV2API.java
index 7ab1654a6..9c41a6766 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersV2API.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersV2API.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.iam;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* User identities recognized by Databricks and represented by email addresses.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentAPI.java
index 712df2b02..f5cabec7a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.iam;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Workspace Permission Assignment API allows you to manage workspace permissions for principals
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2API.java
index 431df6742..da98e7a49 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2API.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2API.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.iamv2;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs are used to manage identities and the workspace access of these identities in
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2API.java
index a9c53bcdf..8d146b4fe 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2API.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2API.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.iamv2;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs are used to manage identities and the workspace access of these identities in
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java
index 9d7349fe6..da097f96f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java
@@ -3,6 +3,8 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
import com.databricks.sdk.support.Wait;
@@ -10,8 +12,6 @@
import java.util.Arrays;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Jobs API allows you to create, edit, and delete jobs.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PolicyComplianceForJobsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PolicyComplianceForJobsAPI.java
index bc181efa3..9a4912efd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PolicyComplianceForJobsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PolicyComplianceForJobsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.jobs;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The compliance APIs allow you to view and manage the policy compliance status of jobs in your
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateExampleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateExampleRequest.java
new file mode 100755
index 000000000..ac5005c75
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateExampleRequest.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateExampleRequest {
+ /** The example to create under the parent Knowledge Assistant. */
+ @JsonProperty("example")
+ private Example example;
+
+ /**
+ * Parent resource where this example will be created. Format:
+ * knowledge-assistants/{knowledge_assistant_id}
+ */
+ @JsonIgnore private String parent;
+
+ public CreateExampleRequest setExample(Example example) {
+ this.example = example;
+ return this;
+ }
+
+ public Example getExample() {
+ return example;
+ }
+
+ public CreateExampleRequest setParent(String parent) {
+ this.parent = parent;
+ return this;
+ }
+
+ public String getParent() {
+ return parent;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateExampleRequest that = (CreateExampleRequest) o;
+ return Objects.equals(example, that.example) && Objects.equals(parent, that.parent);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(example, parent);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateExampleRequest.class)
+ .add("example", example)
+ .add("parent", parent)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteExampleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteExampleRequest.java
new file mode 100755
index 000000000..57cb6d5d3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteExampleRequest.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteExampleRequest {
+ /**
+ * The resource name of the example to delete. Format:
+ * knowledge-assistants/{knowledge_assistant_id}/examples/{example_id}
+ */
+ @JsonIgnore private String name;
+
+ public DeleteExampleRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteExampleRequest that = (DeleteExampleRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteExampleRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/Example.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/Example.java
new file mode 100755
index 000000000..0986220dc
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/Example.java
@@ -0,0 +1,125 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.protobuf.Timestamp;
+import java.util.Collection;
+import java.util.Objects;
+
+/**
+ * An example associated with a Knowledge Assistant. Contains a question and guidelines for how the
+ * assistant should respond.
+ */
+@Generated
+public class Example {
+ /** Timestamp when this example was created. */
+ @JsonProperty("create_time")
+ private Timestamp createTime;
+
+ /** The universally unique identifier (UUID) of the example. */
+ @JsonProperty("example_id")
+ private String exampleId;
+
+ /** Guidelines for answering the question. */
+ @JsonProperty("guidelines")
+ private Collection<String> guidelines;
+
+ /** Full resource name: knowledge-assistants/{knowledge_assistant_id}/examples/{example_id} */
+ @JsonProperty("name")
+ private String name;
+
+ /** The example question. */
+ @JsonProperty("question")
+ private String question;
+
+ /** Timestamp when this example was last updated. */
+ @JsonProperty("update_time")
+ private Timestamp updateTime;
+
+ public Example setCreateTime(Timestamp createTime) {
+ this.createTime = createTime;
+ return this;
+ }
+
+ public Timestamp getCreateTime() {
+ return createTime;
+ }
+
+ public Example setExampleId(String exampleId) {
+ this.exampleId = exampleId;
+ return this;
+ }
+
+ public String getExampleId() {
+ return exampleId;
+ }
+
+ public Example setGuidelines(Collection<String> guidelines) {
+ this.guidelines = guidelines;
+ return this;
+ }
+
+ public Collection<String> getGuidelines() {
+ return guidelines;
+ }
+
+ public Example setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public Example setQuestion(String question) {
+ this.question = question;
+ return this;
+ }
+
+ public String getQuestion() {
+ return question;
+ }
+
+ public Example setUpdateTime(Timestamp updateTime) {
+ this.updateTime = updateTime;
+ return this;
+ }
+
+ public Timestamp getUpdateTime() {
+ return updateTime;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ Example that = (Example) o;
+ return Objects.equals(createTime, that.createTime)
+ && Objects.equals(exampleId, that.exampleId)
+ && Objects.equals(guidelines, that.guidelines)
+ && Objects.equals(name, that.name)
+ && Objects.equals(question, that.question)
+ && Objects.equals(updateTime, that.updateTime);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(createTime, exampleId, guidelines, name, question, updateTime);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(Example.class)
+ .add("createTime", createTime)
+ .add("exampleId", exampleId)
+ .add("guidelines", guidelines)
+ .add("name", name)
+ .add("question", question)
+ .add("updateTime", updateTime)
+ .toString();
+ }
+}
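Example is the payload for the new createExample call added to KnowledgeAssistantsAPI later in this diff. A sketch of creating one, assuming a knowledgeAssistants() accessor on WorkspaceClient (not shown in this excerpt); the parent name, question, and guideline text are placeholders:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.knowledgeassistants.CreateExampleRequest;
import com.databricks.sdk.service.knowledgeassistants.Example;
import java.util.Arrays;

public class CreateExampleSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // default authentication

    Example created =
        w.knowledgeAssistants() // assumed accessor for the knowledgeassistants service
            .createExample(
                new CreateExampleRequest()
                    .setParent("knowledge-assistants/<knowledge-assistant-id>")
                    .setExample(
                        new Example()
                            .setQuestion("How do I rotate the ingestion credential?")
                            .setGuidelines(
                                Arrays.asList("Point the user to the credential rotation runbook."))));

    // name, example_id, create_time, and update_time are filled in by the service.
    System.out.println(created.getName());
  }
}
```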
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetExampleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetExampleRequest.java
new file mode 100755
index 000000000..a52c2dc3b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetExampleRequest.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetExampleRequest {
+ /**
+ * The resource name of the example. Format:
+ * knowledge-assistants/{knowledge_assistant_id}/examples/{example_id}
+ */
+ @JsonIgnore private String name;
+
+ public GetExampleRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetExampleRequest that = (GetExampleRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetExampleRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantPermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantPermissionLevelsRequest.java
new file mode 100755
index 000000000..0d394cab8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantPermissionLevelsRequest.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetKnowledgeAssistantPermissionLevelsRequest {
+ /** The knowledge assistant for which to get or manage permissions. */
+ @JsonIgnore private String knowledgeAssistantId;
+
+ public GetKnowledgeAssistantPermissionLevelsRequest setKnowledgeAssistantId(
+ String knowledgeAssistantId) {
+ this.knowledgeAssistantId = knowledgeAssistantId;
+ return this;
+ }
+
+ public String getKnowledgeAssistantId() {
+ return knowledgeAssistantId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetKnowledgeAssistantPermissionLevelsRequest that =
+ (GetKnowledgeAssistantPermissionLevelsRequest) o;
+ return Objects.equals(knowledgeAssistantId, that.knowledgeAssistantId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(knowledgeAssistantId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetKnowledgeAssistantPermissionLevelsRequest.class)
+ .add("knowledgeAssistantId", knowledgeAssistantId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantPermissionLevelsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantPermissionLevelsResponse.java
new file mode 100755
index 000000000..8b25581a6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantPermissionLevelsResponse.java
@@ -0,0 +1,47 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class GetKnowledgeAssistantPermissionLevelsResponse {
+ /** Specific permission levels */
+ @JsonProperty("permission_levels")
+ private Collection<KnowledgeAssistantPermissionsDescription> permissionLevels;
+
+ public GetKnowledgeAssistantPermissionLevelsResponse setPermissionLevels(
+ Collection<KnowledgeAssistantPermissionsDescription> permissionLevels) {
+ this.permissionLevels = permissionLevels;
+ return this;
+ }
+
+ public Collection<KnowledgeAssistantPermissionsDescription> getPermissionLevels() {
+ return permissionLevels;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetKnowledgeAssistantPermissionLevelsResponse that =
+ (GetKnowledgeAssistantPermissionLevelsResponse) o;
+ return Objects.equals(permissionLevels, that.permissionLevels);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(permissionLevels);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetKnowledgeAssistantPermissionLevelsResponse.class)
+ .add("permissionLevels", permissionLevels)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantPermissionsRequest.java
new file mode 100755
index 000000000..08f1a15c6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantPermissionsRequest.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetKnowledgeAssistantPermissionsRequest {
+ /** The knowledge assistant for which to get or manage permissions. */
+ @JsonIgnore private String knowledgeAssistantId;
+
+ public GetKnowledgeAssistantPermissionsRequest setKnowledgeAssistantId(
+ String knowledgeAssistantId) {
+ this.knowledgeAssistantId = knowledgeAssistantId;
+ return this;
+ }
+
+ public String getKnowledgeAssistantId() {
+ return knowledgeAssistantId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetKnowledgeAssistantPermissionsRequest that = (GetKnowledgeAssistantPermissionsRequest) o;
+ return Objects.equals(knowledgeAssistantId, that.knowledgeAssistantId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(knowledgeAssistantId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetKnowledgeAssistantPermissionsRequest.class)
+ .add("knowledgeAssistantId", knowledgeAssistantId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantAccessControlRequest.java
new file mode 100755
index 000000000..5b81ed0b7
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantAccessControlRequest.java
@@ -0,0 +1,91 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class KnowledgeAssistantAccessControlRequest {
+ /** name of the group */
+ @JsonProperty("group_name")
+ private String groupName;
+
+ /** */
+ @JsonProperty("permission_level")
+ private KnowledgeAssistantPermissionLevel permissionLevel;
+
+ /** application ID of a service principal */
+ @JsonProperty("service_principal_name")
+ private String servicePrincipalName;
+
+ /** name of the user */
+ @JsonProperty("user_name")
+ private String userName;
+
+ public KnowledgeAssistantAccessControlRequest setGroupName(String groupName) {
+ this.groupName = groupName;
+ return this;
+ }
+
+ public String getGroupName() {
+ return groupName;
+ }
+
+ public KnowledgeAssistantAccessControlRequest setPermissionLevel(
+ KnowledgeAssistantPermissionLevel permissionLevel) {
+ this.permissionLevel = permissionLevel;
+ return this;
+ }
+
+ public KnowledgeAssistantPermissionLevel getPermissionLevel() {
+ return permissionLevel;
+ }
+
+ public KnowledgeAssistantAccessControlRequest setServicePrincipalName(
+ String servicePrincipalName) {
+ this.servicePrincipalName = servicePrincipalName;
+ return this;
+ }
+
+ public String getServicePrincipalName() {
+ return servicePrincipalName;
+ }
+
+ public KnowledgeAssistantAccessControlRequest setUserName(String userName) {
+ this.userName = userName;
+ return this;
+ }
+
+ public String getUserName() {
+ return userName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ KnowledgeAssistantAccessControlRequest that = (KnowledgeAssistantAccessControlRequest) o;
+ return Objects.equals(groupName, that.groupName)
+ && Objects.equals(permissionLevel, that.permissionLevel)
+ && Objects.equals(servicePrincipalName, that.servicePrincipalName)
+ && Objects.equals(userName, that.userName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(groupName, permissionLevel, servicePrincipalName, userName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(KnowledgeAssistantAccessControlRequest.class)
+ .add("groupName", groupName)
+ .add("permissionLevel", permissionLevel)
+ .add("servicePrincipalName", servicePrincipalName)
+ .add("userName", userName)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantAccessControlResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantAccessControlResponse.java
new file mode 100755
index 000000000..abdc84898
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantAccessControlResponse.java
@@ -0,0 +1,107 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class KnowledgeAssistantAccessControlResponse {
+ /** All permissions. */
+ @JsonProperty("all_permissions")
+ private Collection<KnowledgeAssistantPermission> allPermissions;
+
+ /** Display name of the user or service principal. */
+ @JsonProperty("display_name")
+ private String displayName;
+
+ /** name of the group */
+ @JsonProperty("group_name")
+ private String groupName;
+
+ /** Name of the service principal. */
+ @JsonProperty("service_principal_name")
+ private String servicePrincipalName;
+
+ /** name of the user */
+ @JsonProperty("user_name")
+ private String userName;
+
+ public KnowledgeAssistantAccessControlResponse setAllPermissions(
+ Collection<KnowledgeAssistantPermission> allPermissions) {
+ this.allPermissions = allPermissions;
+ return this;
+ }
+
+ public Collection<KnowledgeAssistantPermission> getAllPermissions() {
+ return allPermissions;
+ }
+
+ public KnowledgeAssistantAccessControlResponse setDisplayName(String displayName) {
+ this.displayName = displayName;
+ return this;
+ }
+
+ public String getDisplayName() {
+ return displayName;
+ }
+
+ public KnowledgeAssistantAccessControlResponse setGroupName(String groupName) {
+ this.groupName = groupName;
+ return this;
+ }
+
+ public String getGroupName() {
+ return groupName;
+ }
+
+ public KnowledgeAssistantAccessControlResponse setServicePrincipalName(
+ String servicePrincipalName) {
+ this.servicePrincipalName = servicePrincipalName;
+ return this;
+ }
+
+ public String getServicePrincipalName() {
+ return servicePrincipalName;
+ }
+
+ public KnowledgeAssistantAccessControlResponse setUserName(String userName) {
+ this.userName = userName;
+ return this;
+ }
+
+ public String getUserName() {
+ return userName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ KnowledgeAssistantAccessControlResponse that = (KnowledgeAssistantAccessControlResponse) o;
+ return Objects.equals(allPermissions, that.allPermissions)
+ && Objects.equals(displayName, that.displayName)
+ && Objects.equals(groupName, that.groupName)
+ && Objects.equals(servicePrincipalName, that.servicePrincipalName)
+ && Objects.equals(userName, that.userName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(allPermissions, displayName, groupName, servicePrincipalName, userName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(KnowledgeAssistantAccessControlResponse.class)
+ .add("allPermissions", allPermissions)
+ .add("displayName", displayName)
+ .add("groupName", groupName)
+ .add("servicePrincipalName", servicePrincipalName)
+ .add("userName", userName)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermission.java
new file mode 100755
index 000000000..86ee5fc70
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermission.java
@@ -0,0 +1,77 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class KnowledgeAssistantPermission {
+ /** */
+ @JsonProperty("inherited")
+ private Boolean inherited;
+
+ /** */
+ @JsonProperty("inherited_from_object")
+ private Collection<String> inheritedFromObject;
+
+ /** */
+ @JsonProperty("permission_level")
+ private KnowledgeAssistantPermissionLevel permissionLevel;
+
+ public KnowledgeAssistantPermission setInherited(Boolean inherited) {
+ this.inherited = inherited;
+ return this;
+ }
+
+ public Boolean getInherited() {
+ return inherited;
+ }
+
+ public KnowledgeAssistantPermission setInheritedFromObject(
+ Collection<String> inheritedFromObject) {
+ this.inheritedFromObject = inheritedFromObject;
+ return this;
+ }
+
+ public Collection<String> getInheritedFromObject() {
+ return inheritedFromObject;
+ }
+
+ public KnowledgeAssistantPermission setPermissionLevel(
+ KnowledgeAssistantPermissionLevel permissionLevel) {
+ this.permissionLevel = permissionLevel;
+ return this;
+ }
+
+ public KnowledgeAssistantPermissionLevel getPermissionLevel() {
+ return permissionLevel;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ KnowledgeAssistantPermission that = (KnowledgeAssistantPermission) o;
+ return Objects.equals(inherited, that.inherited)
+ && Objects.equals(inheritedFromObject, that.inheritedFromObject)
+ && Objects.equals(permissionLevel, that.permissionLevel);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(inherited, inheritedFromObject, permissionLevel);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(KnowledgeAssistantPermission.class)
+ .add("inherited", inherited)
+ .add("inheritedFromObject", inheritedFromObject)
+ .add("permissionLevel", permissionLevel)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermissionLevel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermissionLevel.java
new file mode 100755
index 000000000..c1457683c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermissionLevel.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+
+/** Permission level */
+@Generated
+public enum KnowledgeAssistantPermissionLevel {
+ CAN_MANAGE,
+ CAN_QUERY,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermissions.java
new file mode 100755
index 000000000..d844f2af5
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermissions.java
@@ -0,0 +1,76 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class KnowledgeAssistantPermissions {
+ /** */
+ @JsonProperty("access_control_list")
+ private Collection<KnowledgeAssistantAccessControlResponse> accessControlList;
+
+ /** */
+ @JsonProperty("object_id")
+ private String objectId;
+
+ /** */
+ @JsonProperty("object_type")
+ private String objectType;
+
+ public KnowledgeAssistantPermissions setAccessControlList(
+ Collection<KnowledgeAssistantAccessControlResponse> accessControlList) {
+ this.accessControlList = accessControlList;
+ return this;
+ }
+
+ public Collection<KnowledgeAssistantAccessControlResponse> getAccessControlList() {
+ return accessControlList;
+ }
+
+ public KnowledgeAssistantPermissions setObjectId(String objectId) {
+ this.objectId = objectId;
+ return this;
+ }
+
+ public String getObjectId() {
+ return objectId;
+ }
+
+ public KnowledgeAssistantPermissions setObjectType(String objectType) {
+ this.objectType = objectType;
+ return this;
+ }
+
+ public String getObjectType() {
+ return objectType;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ KnowledgeAssistantPermissions that = (KnowledgeAssistantPermissions) o;
+ return Objects.equals(accessControlList, that.accessControlList)
+ && Objects.equals(objectId, that.objectId)
+ && Objects.equals(objectType, that.objectType);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(accessControlList, objectId, objectType);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(KnowledgeAssistantPermissions.class)
+ .add("accessControlList", accessControlList)
+ .add("objectId", objectId)
+ .add("objectType", objectType)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermissionsDescription.java
new file mode 100755
index 000000000..75acfcbbd
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermissionsDescription.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class KnowledgeAssistantPermissionsDescription {
+ /** */
+ @JsonProperty("description")
+ private String description;
+
+ /** */
+ @JsonProperty("permission_level")
+ private KnowledgeAssistantPermissionLevel permissionLevel;
+
+ public KnowledgeAssistantPermissionsDescription setDescription(String description) {
+ this.description = description;
+ return this;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public KnowledgeAssistantPermissionsDescription setPermissionLevel(
+ KnowledgeAssistantPermissionLevel permissionLevel) {
+ this.permissionLevel = permissionLevel;
+ return this;
+ }
+
+ public KnowledgeAssistantPermissionLevel getPermissionLevel() {
+ return permissionLevel;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ KnowledgeAssistantPermissionsDescription that = (KnowledgeAssistantPermissionsDescription) o;
+ return Objects.equals(description, that.description)
+ && Objects.equals(permissionLevel, that.permissionLevel);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(description, permissionLevel);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(KnowledgeAssistantPermissionsDescription.class)
+ .add("description", description)
+ .add("permissionLevel", permissionLevel)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermissionsRequest.java
new file mode 100755
index 000000000..c6caa9d91
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantPermissionsRequest.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class KnowledgeAssistantPermissionsRequest {
+ /** */
+ @JsonProperty("access_control_list")
+ private Collection accessControlList;
+
+ /** The knowledge assistant for which to get or manage permissions. */
+ @JsonIgnore private String knowledgeAssistantId;
+
+ public KnowledgeAssistantPermissionsRequest setAccessControlList(
+ Collection accessControlList) {
+ this.accessControlList = accessControlList;
+ return this;
+ }
+
+ public Collection getAccessControlList() {
+ return accessControlList;
+ }
+
+ public KnowledgeAssistantPermissionsRequest setKnowledgeAssistantId(String knowledgeAssistantId) {
+ this.knowledgeAssistantId = knowledgeAssistantId;
+ return this;
+ }
+
+ public String getKnowledgeAssistantId() {
+ return knowledgeAssistantId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ KnowledgeAssistantPermissionsRequest that = (KnowledgeAssistantPermissionsRequest) o;
+ return Objects.equals(accessControlList, that.accessControlList)
+ && Objects.equals(knowledgeAssistantId, that.knowledgeAssistantId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(accessControlList, knowledgeAssistantId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(KnowledgeAssistantPermissionsRequest.class)
+ .add("accessControlList", accessControlList)
+ .add("knowledgeAssistantId", knowledgeAssistantId)
+ .toString();
+ }
+}
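
For orientation, a minimal sketch of how this request class pairs with the setPermissions method added to KnowledgeAssistantsAPI immediately below. The w.knowledgeAssistants() accessor name is an assumption (it is not part of this hunk); passing an empty access_control_list illustrates the documented behaviour that all direct grants are removed while inherited permissions remain.

// Sketch only. Assumes WorkspaceClient exposes the generated API as
// w.knowledgeAssistants(); that accessor name is not part of this diff.
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.knowledgeassistants.KnowledgeAssistantPermissions;
import com.databricks.sdk.service.knowledgeassistants.KnowledgeAssistantPermissionsRequest;
import java.util.Collections;

public class ClearKnowledgeAssistantPermissions {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // An empty access_control_list removes all direct permissions; inherited
    // permissions from the root object are unaffected (see the setPermissions
    // javadoc below).
    KnowledgeAssistantPermissions result =
        w.knowledgeAssistants()
            .setPermissions(
                new KnowledgeAssistantPermissionsRequest()
                    .setKnowledgeAssistantId("1234")
                    .setAccessControlList(Collections.emptyList()));
    System.out.println(result);
  }
}
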
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsAPI.java
index 47c062573..7545ec5f7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.knowledgeassistants;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Manage Knowledge Assistants and related resources. */
@Generated
@@ -24,6 +24,11 @@ public KnowledgeAssistantsAPI(KnowledgeAssistantsService mock) {
impl = mock;
}
+ /** Creates an example for a Knowledge Assistant. */
+ public Example createExample(CreateExampleRequest request) {
+ return impl.createExample(request);
+ }
+
/** Creates a Knowledge Assistant. */
public KnowledgeAssistant createKnowledgeAssistant(CreateKnowledgeAssistantRequest request) {
return impl.createKnowledgeAssistant(request);
@@ -34,6 +39,15 @@ public KnowledgeSource createKnowledgeSource(CreateKnowledgeSourceRequest reques
return impl.createKnowledgeSource(request);
}
+ public void deleteExample(String name) {
+ deleteExample(new DeleteExampleRequest().setName(name));
+ }
+
+ /** Deletes an example from a Knowledge Assistant. */
+ public void deleteExample(DeleteExampleRequest request) {
+ impl.deleteExample(request);
+ }
+
public void deleteKnowledgeAssistant(String name) {
deleteKnowledgeAssistant(new DeleteKnowledgeAssistantRequest().setName(name));
}
@@ -52,6 +66,15 @@ public void deleteKnowledgeSource(DeleteKnowledgeSourceRequest request) {
impl.deleteKnowledgeSource(request);
}
+ public Example getExample(String name) {
+ return getExample(new GetExampleRequest().setName(name));
+ }
+
+ /** Gets an example from a Knowledge Assistant. */
+ public Example getExample(GetExampleRequest request) {
+ return impl.getExample(request);
+ }
+
public KnowledgeAssistant getKnowledgeAssistant(String name) {
return getKnowledgeAssistant(new GetKnowledgeAssistantRequest().setName(name));
}
@@ -70,6 +93,53 @@ public KnowledgeSource getKnowledgeSource(GetKnowledgeSourceRequest request) {
return impl.getKnowledgeSource(request);
}
+ public GetKnowledgeAssistantPermissionLevelsResponse getPermissionLevels(
+ String knowledgeAssistantId) {
+ return getPermissionLevels(
+ new GetKnowledgeAssistantPermissionLevelsRequest()
+ .setKnowledgeAssistantId(knowledgeAssistantId));
+ }
+
+ /** Gets the permission levels that a user can have on an object. */
+ public GetKnowledgeAssistantPermissionLevelsResponse getPermissionLevels(
+ GetKnowledgeAssistantPermissionLevelsRequest request) {
+ return impl.getPermissionLevels(request);
+ }
+
+ public KnowledgeAssistantPermissions getPermissions(String knowledgeAssistantId) {
+ return getPermissions(
+ new GetKnowledgeAssistantPermissionsRequest()
+ .setKnowledgeAssistantId(knowledgeAssistantId));
+ }
+
+ /**
+ * Gets the permissions of a knowledge assistant. Knowledge assistants can inherit permissions
+ * from their root object.
+ */
+ public KnowledgeAssistantPermissions getPermissions(
+ GetKnowledgeAssistantPermissionsRequest request) {
+ return impl.getPermissions(request);
+ }
+
+ public Iterable<Example> listExamples(String parent) {
+ return listExamples(new ListExamplesRequest().setParent(parent));
+ }
+
+ /** Lists examples under a Knowledge Assistant. */
+ public Iterable<Example> listExamples(ListExamplesRequest request) {
+ return new Paginator<>(
+ request,
+ impl::listExamples,
+ ListExamplesResponse::getExamples,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null || token.isEmpty()) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
/** List Knowledge Assistants */
public Iterable<KnowledgeAssistant> listKnowledgeAssistants(
ListKnowledgeAssistantsRequest request) {
@@ -105,6 +175,15 @@ public Iterable listKnowledgeSources(ListKnowledgeSourcesReques
});
}
+ /**
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+ * permissions if none are specified. Objects can inherit permissions from their root object.
+ */
+ public KnowledgeAssistantPermissions setPermissions(
+ KnowledgeAssistantPermissionsRequest request) {
+ return impl.setPermissions(request);
+ }
+
/**
* Sync all non-index Knowledge Sources for a Knowledge Assistant (index sources do not require
* sync)
@@ -113,6 +192,11 @@ public void syncKnowledgeSources(SyncKnowledgeSourcesRequest request) {
impl.syncKnowledgeSources(request);
}
+ /** Updates an example in a Knowledge Assistant. */
+ public Example updateExample(UpdateExampleRequest request) {
+ return impl.updateExample(request);
+ }
+
/** Updates a Knowledge Assistant. */
public KnowledgeAssistant updateKnowledgeAssistant(UpdateKnowledgeAssistantRequest request) {
return impl.updateKnowledgeAssistant(request);
@@ -123,6 +207,15 @@ public KnowledgeSource updateKnowledgeSource(UpdateKnowledgeSourceRequest reques
return impl.updateKnowledgeSource(request);
}
+ /**
+ * Updates the permissions on a knowledge assistant. Knowledge assistants can inherit permissions
+ * from their root object.
+ */
+ public KnowledgeAssistantPermissions updatePermissions(
+ KnowledgeAssistantPermissionsRequest request) {
+ return impl.updatePermissions(request);
+ }
+
public KnowledgeAssistantsService impl() {
return impl;
}
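
To make the new surface area concrete, a hedged usage sketch of the example-management methods follows. It assumes the same w.knowledgeAssistants() accessor as above, a default Example instance, and setParent()/setExample() builders on CreateExampleRequest, which are implied by the getParent()/getExample() calls in the Impl change below but are not themselves shown in this hunk; the example id in the delete call is illustrative.

// Sketch only; accessor and builder names outside this diff are assumptions.
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.knowledgeassistants.CreateExampleRequest;
import com.databricks.sdk.service.knowledgeassistants.Example;

public class KnowledgeAssistantExamplesDemo {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    String parent = "knowledge-assistants/1234";

    // Create an example under the assistant (POST /api/2.1/{parent}/examples).
    Example created =
        w.knowledgeAssistants()
            .createExample(
                new CreateExampleRequest().setParent(parent).setExample(new Example()));
    System.out.println(created);

    // Iterate all examples; the Paginator transparently follows next_page_token.
    for (Example e : w.knowledgeAssistants().listExamples(parent)) {
      System.out.println(e);
    }

    // Delete by resource name (DELETE /api/2.1/{name}); the id is illustrative.
    w.knowledgeAssistants().deleteExample(parent + "/examples/5678");
  }
}
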
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsImpl.java
index 14747a16d..c0129b700 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsImpl.java
@@ -16,6 +16,24 @@ public KnowledgeAssistantsImpl(ApiClient apiClient) {
this.apiClient = apiClient;
}
+ @Override
+ public Example createExample(CreateExampleRequest request) {
+ String path = String.format("/api/2.1/%s/examples", request.getParent());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getExample()));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, Example.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public KnowledgeAssistant createKnowledgeAssistant(CreateKnowledgeAssistantRequest request) {
String path = "/api/2.1/knowledge-assistants";
@@ -52,6 +70,23 @@ public KnowledgeSource createKnowledgeSource(CreateKnowledgeSourceRequest reques
}
}
+ @Override
+ public void deleteExample(DeleteExampleRequest request) {
+ String path = String.format("/api/2.1/%s", request.getName());
+ try {
+ Request req = new Request("DELETE", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public void deleteKnowledgeAssistant(DeleteKnowledgeAssistantRequest request) {
String path = String.format("/api/2.1/%s", request.getName());
@@ -86,6 +121,23 @@ public void deleteKnowledgeSource(DeleteKnowledgeSourceRequest request) {
}
}
+ @Override
+ public Example getExample(GetExampleRequest request) {
+ String path = String.format("/api/2.1/%s", request.getName());
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, Example.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public KnowledgeAssistant getKnowledgeAssistant(GetKnowledgeAssistantRequest request) {
String path = String.format("/api/2.1/%s", request.getName());
@@ -120,6 +172,64 @@ public KnowledgeSource getKnowledgeSource(GetKnowledgeSourceRequest request) {
}
}
+ @Override
+ public GetKnowledgeAssistantPermissionLevelsResponse getPermissionLevels(
+ GetKnowledgeAssistantPermissionLevelsRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/permissions/knowledge-assistants/%s/permissionLevels",
+ request.getKnowledgeAssistantId());
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, GetKnowledgeAssistantPermissionLevelsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public KnowledgeAssistantPermissions getPermissions(
+ GetKnowledgeAssistantPermissionsRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/permissions/knowledge-assistants/%s", request.getKnowledgeAssistantId());
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, KnowledgeAssistantPermissions.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ListExamplesResponse listExamples(ListExamplesRequest request) {
+ String path = String.format("/api/2.1/%s/examples", request.getParent());
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, ListExamplesResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public ListKnowledgeAssistantsResponse listKnowledgeAssistants(
ListKnowledgeAssistantsRequest request) {
@@ -155,6 +265,27 @@ public ListKnowledgeSourcesResponse listKnowledgeSources(ListKnowledgeSourcesReq
}
}
+ @Override
+ public KnowledgeAssistantPermissions setPermissions(
+ KnowledgeAssistantPermissionsRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/permissions/knowledge-assistants/%s", request.getKnowledgeAssistantId());
+ try {
+ Request req = new Request("PUT", path, apiClient.serialize(request));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, KnowledgeAssistantPermissions.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public void syncKnowledgeSources(SyncKnowledgeSourcesRequest request) {
String path = String.format("/api/2.1/%s/knowledge-sources:sync", request.getName());
@@ -173,6 +304,24 @@ public void syncKnowledgeSources(SyncKnowledgeSourcesRequest request) {
}
}
+ @Override
+ public Example updateExample(UpdateExampleRequest request) {
+ String path = String.format("/api/2.1/%s", request.getName());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getExample()));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, Example.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public KnowledgeAssistant updateKnowledgeAssistant(UpdateKnowledgeAssistantRequest request) {
String path = String.format("/api/2.1/%s", request.getName());
@@ -209,4 +358,25 @@ public KnowledgeSource updateKnowledgeSource(UpdateKnowledgeSourceRequest reques
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
+
+ @Override
+ public KnowledgeAssistantPermissions updatePermissions(
+ KnowledgeAssistantPermissionsRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/permissions/knowledge-assistants/%s", request.getKnowledgeAssistantId());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, KnowledgeAssistantPermissions.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsService.java
index cb70f6b10..b5f754f4d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsService.java
@@ -12,6 +12,9 @@
*/
@Generated
public interface KnowledgeAssistantsService {
+ /** Creates an example for a Knowledge Assistant. */
+ Example createExample(CreateExampleRequest createExampleRequest);
+
/** Creates a Knowledge Assistant. */
KnowledgeAssistant createKnowledgeAssistant(
CreateKnowledgeAssistantRequest createKnowledgeAssistantRequest);
@@ -19,12 +22,18 @@ KnowledgeAssistant createKnowledgeAssistant(
/** Creates a Knowledge Source under a Knowledge Assistant. */
KnowledgeSource createKnowledgeSource(CreateKnowledgeSourceRequest createKnowledgeSourceRequest);
+ /** Deletes an example from a Knowledge Assistant. */
+ void deleteExample(DeleteExampleRequest deleteExampleRequest);
+
/** Deletes a Knowledge Assistant. */
void deleteKnowledgeAssistant(DeleteKnowledgeAssistantRequest deleteKnowledgeAssistantRequest);
/** Deletes a Knowledge Source. */
void deleteKnowledgeSource(DeleteKnowledgeSourceRequest deleteKnowledgeSourceRequest);
+ /** Gets an example from a Knowledge Assistant. */
+ Example getExample(GetExampleRequest getExampleRequest);
+
/** Gets a Knowledge Assistant. */
KnowledgeAssistant getKnowledgeAssistant(
GetKnowledgeAssistantRequest getKnowledgeAssistantRequest);
@@ -32,6 +41,20 @@ KnowledgeAssistant getKnowledgeAssistant(
/** Gets a Knowledge Source. */
KnowledgeSource getKnowledgeSource(GetKnowledgeSourceRequest getKnowledgeSourceRequest);
+ /** Gets the permission levels that a user can have on an object. */
+ GetKnowledgeAssistantPermissionLevelsResponse getPermissionLevels(
+ GetKnowledgeAssistantPermissionLevelsRequest getKnowledgeAssistantPermissionLevelsRequest);
+
+ /**
+ * Gets the permissions of a knowledge assistant. Knowledge assistants can inherit permissions
+ * from their root object.
+ */
+ KnowledgeAssistantPermissions getPermissions(
+ GetKnowledgeAssistantPermissionsRequest getKnowledgeAssistantPermissionsRequest);
+
+ /** Lists examples under a Knowledge Assistant. */
+ ListExamplesResponse listExamples(ListExamplesRequest listExamplesRequest);
+
/** List Knowledge Assistants */
ListKnowledgeAssistantsResponse listKnowledgeAssistants(
ListKnowledgeAssistantsRequest listKnowledgeAssistantsRequest);
@@ -40,16 +63,33 @@ ListKnowledgeAssistantsResponse listKnowledgeAssistants(
ListKnowledgeSourcesResponse listKnowledgeSources(
ListKnowledgeSourcesRequest listKnowledgeSourcesRequest);
+ /**
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+ * permissions if none are specified. Objects can inherit permissions from their root object.
+ */
+ KnowledgeAssistantPermissions setPermissions(
+ KnowledgeAssistantPermissionsRequest knowledgeAssistantPermissionsRequest);
+
/**
* Sync all non-index Knowledge Sources for a Knowledge Assistant (index sources do not require
* sync)
*/
void syncKnowledgeSources(SyncKnowledgeSourcesRequest syncKnowledgeSourcesRequest);
+ /** Updates an example in a Knowledge Assistant. */
+ Example updateExample(UpdateExampleRequest updateExampleRequest);
+
/** Updates a Knowledge Assistant. */
KnowledgeAssistant updateKnowledgeAssistant(
UpdateKnowledgeAssistantRequest updateKnowledgeAssistantRequest);
/** Updates a Knowledge Source. */
KnowledgeSource updateKnowledgeSource(UpdateKnowledgeSourceRequest updateKnowledgeSourceRequest);
+
+ /**
+ * Updates the permissions on a knowledge assistant. Knowledge assistants can inherit permissions
+ * from their root object.
+ */
+ KnowledgeAssistantPermissions updatePermissions(
+ KnowledgeAssistantPermissionsRequest knowledgeAssistantPermissionsRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListExamplesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListExamplesRequest.java
new file mode 100755
index 000000000..14c52391f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListExamplesRequest.java
@@ -0,0 +1,82 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListExamplesRequest {
+ /**
+ * The maximum number of examples to return. If unspecified, at most 100 examples will be
+ * returned. The maximum value is 100; values above 100 will be coerced to 100.
+ */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * A page token, received from a previous `ListExamples` call. Provide this to retrieve the
+ * subsequent page. If unspecified, the first page will be returned.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ /** Parent resource to list from. Format: knowledge-assistants/{knowledge_assistant_id} */
+ @JsonIgnore private String parent;
+
+ public ListExamplesRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListExamplesRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ public ListExamplesRequest setParent(String parent) {
+ this.parent = parent;
+ return this;
+ }
+
+ public String getParent() {
+ return parent;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListExamplesRequest that = (ListExamplesRequest) o;
+ return Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken)
+ && Objects.equals(parent, that.parent);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken, parent);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListExamplesRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .add("parent", parent)
+ .toString();
+ }
+}
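
The page_size / page_token semantics above map onto the usual manual pagination loop. A sketch against the KnowledgeAssistantsService interface follows, using the ListExamplesResponse introduced next; the higher-level KnowledgeAssistantsAPI.listExamples wraps exactly this loop in a Paginator, so this is only for illustration.

// Sketch of manual paging; in normal use prefer KnowledgeAssistantsAPI.listExamples.
import com.databricks.sdk.service.knowledgeassistants.Example;
import com.databricks.sdk.service.knowledgeassistants.KnowledgeAssistantsService;
import com.databricks.sdk.service.knowledgeassistants.ListExamplesRequest;
import com.databricks.sdk.service.knowledgeassistants.ListExamplesResponse;

public class ManualExamplePaging {
  static void printAll(KnowledgeAssistantsService service, String parent) {
    ListExamplesRequest request =
        new ListExamplesRequest()
            .setParent(parent)
            .setPageSize(50L); // values above 100 are coerced to 100 by the service
    while (true) {
      ListExamplesResponse page = service.listExamples(request);
      if (page.getExamples() != null) {
        for (Example example : page.getExamples()) {
          System.out.println(example);
        }
      }
      String token = page.getNextPageToken();
      if (token == null || token.isEmpty()) {
        return; // last page reached
      }
      request.setPageToken(token);
    }
  }
}
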
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListExamplesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListExamplesResponse.java
new file mode 100755
index 000000000..0e6ad11a1
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListExamplesResponse.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** A list of Knowledge Assistant examples. */
+@Generated
+public class ListExamplesResponse {
+ /** */
+ @JsonProperty("examples")
+ private Collection<Example> examples;
+
+ /** */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ public ListExamplesResponse setExamples(Collection<Example> examples) {
+ this.examples = examples;
+ return this;
+ }
+
+ public Collection<Example> getExamples() {
+ return examples;
+ }
+
+ public ListExamplesResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListExamplesResponse that = (ListExamplesResponse) o;
+ return Objects.equals(examples, that.examples)
+ && Objects.equals(nextPageToken, that.nextPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(examples, nextPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListExamplesResponse.class)
+ .add("examples", examples)
+ .add("nextPageToken", nextPageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/UpdateExampleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/UpdateExampleRequest.java
new file mode 100755
index 000000000..c617cf20b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/UpdateExampleRequest.java
@@ -0,0 +1,83 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.protobuf.FieldMask;
+import java.util.Objects;
+
+@Generated
+public class UpdateExampleRequest {
+ /** */
+ @JsonProperty("example")
+ private Example example;
+
+ /**
+ * The resource name of the example to update. Format:
+ * knowledge-assistants/{knowledge_assistant_id}/examples/{example_id}
+ */
+ @JsonIgnore private String name;
+
+ /**
+ * Comma-delimited list of fields to update on the example. Allowed values: `question`,
+ * `guidelines`. Examples: - `question` - `question,guidelines`
+ */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private FieldMask updateMask;
+
+ public UpdateExampleRequest setExample(Example example) {
+ this.example = example;
+ return this;
+ }
+
+ public Example getExample() {
+ return example;
+ }
+
+ public UpdateExampleRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public UpdateExampleRequest setUpdateMask(FieldMask updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public FieldMask getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateExampleRequest that = (UpdateExampleRequest) o;
+ return Objects.equals(example, that.example)
+ && Objects.equals(name, that.name)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(example, name, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateExampleRequest.class)
+ .add("example", example)
+ .add("name", name)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
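
Because update_mask is a protobuf FieldMask carried as a query parameter, a partial update restricted to the question field might look like the sketch below. The setQuestion setter on Example is hypothetical (the Example class is not part of this hunk), the w.knowledgeAssistants() accessor is an assumption, and only fields named in the mask (question, guidelines) are applied.

// Sketch only. Example.setQuestion(...) would be the hypothetical field setter;
// here a default Example stands in for the payload.
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.knowledgeassistants.Example;
import com.databricks.sdk.service.knowledgeassistants.UpdateExampleRequest;
import com.google.protobuf.FieldMask;

public class PartialExampleUpdate {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    UpdateExampleRequest request =
        new UpdateExampleRequest()
            .setName("knowledge-assistants/1234/examples/5678")
            .setExample(new Example()) // hypothetically: .setQuestion("...")
            .setUpdateMask(FieldMask.newBuilder().addPaths("question").build());
    Example updated = w.knowledgeAssistants().updateExample(request);
    System.out.println(updated);
  }
}
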
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsAPI.java
index 73bc4b380..99475af00 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.marketplace;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Fulfillments are entities that allow consumers to preview installations. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsAPI.java
index 27191be52..b1ee91230 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.marketplace;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Installations are entities that allow consumers to interact with Databricks Marketplace listings.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsAPI.java
index 5f947cfd8..17400dae3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.marketplace;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Listings are the core entities in the Marketplace. They represent the products that are available
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsAPI.java
index aea55feec..041a1ba28 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.marketplace;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Personalization Requests allow customers to interact with the individualized Marketplace listing
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersAPI.java
index c7eb9791a..717185c15 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.marketplace;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Providers are the entities that publish listings to the Marketplace. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersAPI.java
index c98816519..4190317b6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.marketplace;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Marketplace exchanges filters curate which groups can access an exchange. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesAPI.java
index 981771fe9..bc199e7e4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.marketplace;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Marketplace exchanges allow providers to share their listings with a curated set of customers.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesAPI.java
index d47867b88..90a97a1ca 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.marketplace;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Marketplace offers a set of file APIs for various purposes such as preview notebooks and provider
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsAPI.java
index 958503843..deed60786 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.marketplace;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Listings are the core entities in the Marketplace. They represent the products that are available
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsAPI.java
index 987f97f1b..de6fbd8c9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.marketplace;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Personalization requests are an alternate to instantly available listings. Control the lifecycle
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsAPI.java
index af0b3d698..e12a08777 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.marketplace;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Manage templated analytics solution for providers. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersAPI.java
index 51106568f..db463ab07 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.marketplace;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Providers are entities that manage assets in Marketplace. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/BackfillSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/BackfillSource.java
index c30adeb8d..aedda6108 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/BackfillSource.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/BackfillSource.java
@@ -10,13 +10,29 @@
@Generated
public class BackfillSource {
/**
- * The Delta table source containing the historic data to backfill. Only the delta table name is
- * used for backfill, the entity columns and timeseries column are ignored as they are defined by
- * the associated KafkaSource.
+ * The full three-part name (catalog, schema, name) of the Delta table containing the historical
+ * data to backfill.
+ */
+ @JsonProperty("delta_table_name")
+ private String deltaTableName;
+
+ /**
+ * Deprecated: Use delta_table_name instead. Kept for backwards compatibility. The Delta table
+ * source containing the historical data to backfill. Only the delta table name is used for
+ * backfill, other fields are ignored.
*/
@JsonProperty("delta_table_source")
private DeltaTableSource deltaTableSource;
+ public BackfillSource setDeltaTableName(String deltaTableName) {
+ this.deltaTableName = deltaTableName;
+ return this;
+ }
+
+ public String getDeltaTableName() {
+ return deltaTableName;
+ }
+
public BackfillSource setDeltaTableSource(DeltaTableSource deltaTableSource) {
this.deltaTableSource = deltaTableSource;
return this;
@@ -31,17 +47,19 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
BackfillSource that = (BackfillSource) o;
- return Objects.equals(deltaTableSource, that.deltaTableSource);
+ return Objects.equals(deltaTableName, that.deltaTableName)
+ && Objects.equals(deltaTableSource, that.deltaTableSource);
}
@Override
public int hashCode() {
- return Objects.hash(deltaTableSource);
+ return Objects.hash(deltaTableName, deltaTableSource);
}
@Override
public String toString() {
return new ToStringer(BackfillSource.class)
+ .add("deltaTableName", deltaTableName)
.add("deltaTableSource", deltaTableSource)
.toString();
}
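
A short sketch of the intended migration: new callers set the three-part delta_table_name directly, while the deprecated delta_table_source wrapper remains accepted for backward compatibility (only its table name is used). The table name below is an illustrative placeholder.

// Sketch: placeholder table name; only setDeltaTableName comes from this diff.
import com.databricks.sdk.service.ml.BackfillSource;

public class BackfillSourceMigration {
  static BackfillSource preferred() {
    // New style: full three-part (catalog.schema.name) Delta table name.
    return new BackfillSource().setDeltaTableName("main.features.user_history");
  }
}
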
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java
index a3080894f..9390a1eed 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.ml;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringAPI.java
index 64b85bd5d..be3b64663 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.ml;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** [description] */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreAPI.java
index 62e440d3d..b287ef956 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.ml;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* A feature store is a centralized repository that enables data scientists to find and share
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingAPI.java
index 4dde4ae56..5f47125aa 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingAPI.java
@@ -3,14 +3,14 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Wait;
import java.time.Duration;
import java.util.Arrays;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** The Forecasting API allows you to create and get serverless forecasting experiments */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeaturesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeaturesAPI.java
index d08f31dfa..5cd8ff588 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeaturesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeaturesAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.ml;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Materialized Features are columns in tables and views that can be directly used as features to
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java
index 39375faf8..7b6874526 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.ml;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Note: This API reference documents APIs for the Workspace Model Registry. Databricks recommends
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SchemaConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SchemaConfig.java
index 6ce1dd5f8..a69cd5281 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SchemaConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SchemaConfig.java
@@ -9,7 +9,7 @@
@Generated
public class SchemaConfig {
- /** Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/) */
+ /** Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/). */
@JsonProperty("json_schema")
private String jsonSchema;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/networking/EndpointsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/networking/EndpointsAPI.java
index 25b926e50..d0ce9c4de 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/networking/EndpointsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/networking/EndpointsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.networking;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** These APIs manage endpoint configurations for this account. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyAPI.java
index 703bf0024..c58b62a87 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.oauth2;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs manage account federation policies.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationAPI.java
index be2ab484f..80df61c41 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.oauth2;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs enable administrators to manage custom OAuth app integrations, which is required for
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OAuthPublishedAppsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OAuthPublishedAppsAPI.java
index 2ff9cc815..ea7a3ca69 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OAuthPublishedAppsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OAuthPublishedAppsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.oauth2;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs enable administrators to view all the available published OAuth applications in
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationAPI.java
index 01b702baa..631b40b47 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.oauth2;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs enable administrators to manage published OAuth app integrations, which is required
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyAPI.java
index e3082b33f..2a5362945 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.oauth2;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs manage service principal federation policies.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsAPI.java
index 504ebad15..d7ad47258 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.oauth2;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs enable administrators to manage service principal secrets.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsProxyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsProxyAPI.java
index 2473e74e5..5085fd7c1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsProxyAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsProxyAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.oauth2;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs enable administrators to manage service principal secrets at the workspace level. To
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConfluenceConnectorOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConfluenceConnectorOptions.java
new file mode 100755
index 000000000..09c13173c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConfluenceConnectorOptions.java
@@ -0,0 +1,47 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Confluence specific options for ingestion */
+@Generated
+public class ConfluenceConnectorOptions {
+ /** (Optional) Spaces to filter Confluence data on */
+ @JsonProperty("include_confluence_spaces")
+ private Collection<String> includeConfluenceSpaces;
+
+ public ConfluenceConnectorOptions setIncludeConfluenceSpaces(
+ Collection<String> includeConfluenceSpaces) {
+ this.includeConfluenceSpaces = includeConfluenceSpaces;
+ return this;
+ }
+
+ public Collection<String> getIncludeConfluenceSpaces() {
+ return includeConfluenceSpaces;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ConfluenceConnectorOptions that = (ConfluenceConnectorOptions) o;
+ return Objects.equals(includeConfluenceSpaces, that.includeConfluenceSpaces);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(includeConfluenceSpaces);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ConfluenceConnectorOptions.class)
+ .add("includeConfluenceSpaces", includeConfluenceSpaces)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectorOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectorOptions.java
index b5ae9aef3..821257e62 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectorOptions.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectorOptions.java
@@ -10,6 +10,10 @@
/** Wrapper message for source-specific options to support multiple connector types */
@Generated
public class ConnectorOptions {
+ /** */
+ @JsonProperty("confluence_options")
+ private ConfluenceConnectorOptions confluenceOptions;
+
/** */
@JsonProperty("gdrive_options")
private GoogleDriveOptions gdriveOptions;
@@ -18,14 +22,35 @@ public class ConnectorOptions {
@JsonProperty("google_ads_options")
private GoogleAdsOptions googleAdsOptions;
+ /** */
+ @JsonProperty("jira_options")
+ private JiraConnectorOptions jiraOptions;
+
+ /** */
+ @JsonProperty("outlook_options")
+ private OutlookOptions outlookOptions;
+
/** */
@JsonProperty("sharepoint_options")
private SharepointOptions sharepointOptions;
+ /** */
+ @JsonProperty("smartsheet_options")
+ private SmartsheetOptions smartsheetOptions;
+
/** */
@JsonProperty("tiktok_ads_options")
private TikTokAdsOptions tiktokAdsOptions;
+ public ConnectorOptions setConfluenceOptions(ConfluenceConnectorOptions confluenceOptions) {
+ this.confluenceOptions = confluenceOptions;
+ return this;
+ }
+
+ public ConfluenceConnectorOptions getConfluenceOptions() {
+ return confluenceOptions;
+ }
+
public ConnectorOptions setGdriveOptions(GoogleDriveOptions gdriveOptions) {
this.gdriveOptions = gdriveOptions;
return this;
@@ -44,6 +69,24 @@ public GoogleAdsOptions getGoogleAdsOptions() {
return googleAdsOptions;
}
+ public ConnectorOptions setJiraOptions(JiraConnectorOptions jiraOptions) {
+ this.jiraOptions = jiraOptions;
+ return this;
+ }
+
+ public JiraConnectorOptions getJiraOptions() {
+ return jiraOptions;
+ }
+
+ public ConnectorOptions setOutlookOptions(OutlookOptions outlookOptions) {
+ this.outlookOptions = outlookOptions;
+ return this;
+ }
+
+ public OutlookOptions getOutlookOptions() {
+ return outlookOptions;
+ }
+
public ConnectorOptions setSharepointOptions(SharepointOptions sharepointOptions) {
this.sharepointOptions = sharepointOptions;
return this;
@@ -53,6 +96,15 @@ public SharepointOptions getSharepointOptions() {
return sharepointOptions;
}
+ public ConnectorOptions setSmartsheetOptions(SmartsheetOptions smartsheetOptions) {
+ this.smartsheetOptions = smartsheetOptions;
+ return this;
+ }
+
+ public SmartsheetOptions getSmartsheetOptions() {
+ return smartsheetOptions;
+ }
+
public ConnectorOptions setTiktokAdsOptions(TikTokAdsOptions tiktokAdsOptions) {
this.tiktokAdsOptions = tiktokAdsOptions;
return this;
@@ -67,23 +119,39 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ConnectorOptions that = (ConnectorOptions) o;
- return Objects.equals(gdriveOptions, that.gdriveOptions)
+ return Objects.equals(confluenceOptions, that.confluenceOptions)
+ && Objects.equals(gdriveOptions, that.gdriveOptions)
&& Objects.equals(googleAdsOptions, that.googleAdsOptions)
+ && Objects.equals(jiraOptions, that.jiraOptions)
+ && Objects.equals(outlookOptions, that.outlookOptions)
&& Objects.equals(sharepointOptions, that.sharepointOptions)
+ && Objects.equals(smartsheetOptions, that.smartsheetOptions)
&& Objects.equals(tiktokAdsOptions, that.tiktokAdsOptions);
}
@Override
public int hashCode() {
- return Objects.hash(gdriveOptions, googleAdsOptions, sharepointOptions, tiktokAdsOptions);
+ return Objects.hash(
+ confluenceOptions,
+ gdriveOptions,
+ googleAdsOptions,
+ jiraOptions,
+ outlookOptions,
+ sharepointOptions,
+ smartsheetOptions,
+ tiktokAdsOptions);
}
@Override
public String toString() {
return new ToStringer(ConnectorOptions.class)
+ .add("confluenceOptions", confluenceOptions)
.add("gdriveOptions", gdriveOptions)
.add("googleAdsOptions", googleAdsOptions)
+ .add("jiraOptions", jiraOptions)
+ .add("outlookOptions", outlookOptions)
.add("sharepointOptions", sharepointOptions)
+ .add("smartsheetOptions", smartsheetOptions)
.add("tiktokAdsOptions", tiktokAdsOptions)
.toString();
}
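
For orientation, a minimal usage sketch of the expanded wrapper follows. It only exercises setters that appear in this diff; the space and project names are placeholder values, and wiring the resulting ConnectorOptions into an ingestion pipeline definition is outside this change.

    import com.databricks.sdk.service.pipelines.ConfluenceConnectorOptions;
    import com.databricks.sdk.service.pipelines.ConnectorOptions;
    import com.databricks.sdk.service.pipelines.JiraConnectorOptions;
    import java.util.Arrays;

    // Source-specific options for a Confluence + Jira ingestion source.
    ConnectorOptions options =
        new ConnectorOptions()
            .setConfluenceOptions(
                new ConfluenceConnectorOptions()
                    .setIncludeConfluenceSpaces(Arrays.asList("ENG", "DOCS")))
            .setJiraOptions(
                new JiraConnectorOptions().setIncludeJiraSpaces(Arrays.asList("PLATFORM")));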
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GoogleAdsConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GoogleAdsConfig.java
new file mode 100755
index 000000000..0b3d3985f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GoogleAdsConfig.java
@@ -0,0 +1,50 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class GoogleAdsConfig {
+ /**
+ * (Required) Manager Account ID (also called MCC Account ID) used to list and access customer
+ * accounts under this manager account. This is required for fetching the list of customer
+ * accounts during source selection. If the same field is also set in the object-level
+ * GoogleAdsOptions (connector_options), the object-level value takes precedence over this
+ * top-level config.
+ */
+ @JsonProperty("manager_account_id")
+ private String managerAccountId;
+
+ public GoogleAdsConfig setManagerAccountId(String managerAccountId) {
+ this.managerAccountId = managerAccountId;
+ return this;
+ }
+
+ public String getManagerAccountId() {
+ return managerAccountId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GoogleAdsConfig that = (GoogleAdsConfig) o;
+ return Objects.equals(managerAccountId, that.managerAccountId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(managerAccountId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GoogleAdsConfig.class)
+ .add("managerAccountId", managerAccountId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java
index 410c7c90e..5a615f8e2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java
@@ -7,6 +7,7 @@
@Generated
public enum IngestionSourceType {
BIGQUERY,
+ CONFLUENCE,
DYNAMICS365,
FOREIGN_CATALOG,
GA4_RAW_DATA,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/JiraConnectorOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/JiraConnectorOptions.java
new file mode 100755
index 000000000..13ee4ac69
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/JiraConnectorOptions.java
@@ -0,0 +1,46 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Jira specific options for ingestion */
+@Generated
+public class JiraConnectorOptions {
+ /** (Optional) Projects to filter Jira data on */
+ @JsonProperty("include_jira_spaces")
+ private Collection<String> includeJiraSpaces;
+
+ public JiraConnectorOptions setIncludeJiraSpaces(Collection<String> includeJiraSpaces) {
+ this.includeJiraSpaces = includeJiraSpaces;
+ return this;
+ }
+
+ public Collection<String> getIncludeJiraSpaces() {
+ return includeJiraSpaces;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ JiraConnectorOptions that = (JiraConnectorOptions) o;
+ return Objects.equals(includeJiraSpaces, that.includeJiraSpaces);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(includeJiraSpaces);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(JiraConnectorOptions.class)
+ .add("includeJiraSpaces", includeJiraSpaces)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OutlookAttachmentMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OutlookAttachmentMode.java
new file mode 100755
index 000000000..c26f9382c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OutlookAttachmentMode.java
@@ -0,0 +1,14 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+
+/** Attachment behavior mode for Outlook ingestion */
+@Generated
+public enum OutlookAttachmentMode {
+ ALL,
+ INLINE_ONLY,
+ NONE,
+ NON_INLINE_ONLY,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OutlookBodyFormat.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OutlookBodyFormat.java
new file mode 100755
index 000000000..c5956b47d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OutlookBodyFormat.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+
+/** Body format for Outlook email content */
+@Generated
+public enum OutlookBodyFormat {
+ TEXT_HTML,
+ TEXT_PLAIN,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OutlookOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OutlookOptions.java
new file mode 100755
index 000000000..24d79235e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OutlookOptions.java
@@ -0,0 +1,216 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Outlook specific options for ingestion */
+@Generated
+public class OutlookOptions {
+ /** (Optional) Controls which attachments to ingest. If not specified, defaults to ALL. */
+ @JsonProperty("attachment_mode")
+ private OutlookAttachmentMode attachmentMode;
+
+ /**
+ * (Optional) Defines how the body_content column is populated. TEXT_HTML: Preserves full
+ * formatting, links, and styling. TEXT_PLAIN: Converts body to plain text. Recommended for AI/RAG
+ * pipelines to reduce token usage and noise.
+ */
+ @JsonProperty("body_format")
+ private OutlookBodyFormat bodyFormat;
+
+ /** Deprecated. Use include_folders instead. */
+ @JsonProperty("folder_filter")
+ private Collection<String> folderFilter;
+
+ /**
+ * (Optional) Filter mail folders to include in the sync. If not specified, all folders will be
+ * synced. Examples: Inbox, Sent Items, Custom_Folder Filter semantics: OR between different
+ * folders.
+ */
+ @JsonProperty("include_folders")
+ private Collection<String> includeFolders;
+
+ /**
+ * (Optional) List of mailboxes to sync (e.g. mailbox email addresses or identifiers). If not
+ * specified, all accessible mailboxes are ingested. Filter semantics: OR between different
+ * mailboxes.
+ */
+ @JsonProperty("include_mailboxes")
+ private Collection<String> includeMailboxes;
+
+ /**
+ * (Optional) Filter emails by sender address. Uses exact email match. Examples: user@vendor.com,
+ * alerts@system.io, noreply@company.com If not specified, emails from all senders will be synced.
+ * Filter semantics: OR between different senders.
+ */
+ @JsonProperty("include_senders")
+ private Collection<String> includeSenders;
+
+ /**
+ * (Optional) Filter emails by subject line. Values ending with "*" use prefix match (subject
+ * starts with the part before "*"); otherwise substring match (subject contains the value).
+ * Examples: "Invoice" (substring), "Re:*" (prefix), "Support Ticket", "URGENT*" If not specified,
+ * emails with all subjects will be synced. Filter semantics: OR between different subjects.
+ */
+ @JsonProperty("include_subjects")
+ private Collection<String> includeSubjects;
+
+ /** Deprecated. Use include_senders instead. */
+ @JsonProperty("sender_filter")
+ private Collection<String> senderFilter;
+
+ /**
+ * (Optional) Start date for the initial sync in YYYY-MM-DD format. Format: YYYY-MM-DD (e.g.,
+ * 2024-01-01) This determines the earliest date from which to sync historical data. If not
+ * specified, complete history is ingested.
+ */
+ @JsonProperty("start_date")
+ private String startDate;
+
+ /** Deprecated. Use include_subjects instead. */
+ @JsonProperty("subject_filter")
+ private Collection<String> subjectFilter;
+
+ public OutlookOptions setAttachmentMode(OutlookAttachmentMode attachmentMode) {
+ this.attachmentMode = attachmentMode;
+ return this;
+ }
+
+ public OutlookAttachmentMode getAttachmentMode() {
+ return attachmentMode;
+ }
+
+ public OutlookOptions setBodyFormat(OutlookBodyFormat bodyFormat) {
+ this.bodyFormat = bodyFormat;
+ return this;
+ }
+
+ public OutlookBodyFormat getBodyFormat() {
+ return bodyFormat;
+ }
+
+ public OutlookOptions setFolderFilter(Collection<String> folderFilter) {
+ this.folderFilter = folderFilter;
+ return this;
+ }
+
+ public Collection<String> getFolderFilter() {
+ return folderFilter;
+ }
+
+ public OutlookOptions setIncludeFolders(Collection<String> includeFolders) {
+ this.includeFolders = includeFolders;
+ return this;
+ }
+
+ public Collection<String> getIncludeFolders() {
+ return includeFolders;
+ }
+
+ public OutlookOptions setIncludeMailboxes(Collection<String> includeMailboxes) {
+ this.includeMailboxes = includeMailboxes;
+ return this;
+ }
+
+ public Collection<String> getIncludeMailboxes() {
+ return includeMailboxes;
+ }
+
+ public OutlookOptions setIncludeSenders(Collection<String> includeSenders) {
+ this.includeSenders = includeSenders;
+ return this;
+ }
+
+ public Collection<String> getIncludeSenders() {
+ return includeSenders;
+ }
+
+ public OutlookOptions setIncludeSubjects(Collection<String> includeSubjects) {
+ this.includeSubjects = includeSubjects;
+ return this;
+ }
+
+ public Collection<String> getIncludeSubjects() {
+ return includeSubjects;
+ }
+
+ public OutlookOptions setSenderFilter(Collection<String> senderFilter) {
+ this.senderFilter = senderFilter;
+ return this;
+ }
+
+ public Collection<String> getSenderFilter() {
+ return senderFilter;
+ }
+
+ public OutlookOptions setStartDate(String startDate) {
+ this.startDate = startDate;
+ return this;
+ }
+
+ public String getStartDate() {
+ return startDate;
+ }
+
+ public OutlookOptions setSubjectFilter(Collection<String> subjectFilter) {
+ this.subjectFilter = subjectFilter;
+ return this;
+ }
+
+ public Collection<String> getSubjectFilter() {
+ return subjectFilter;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ OutlookOptions that = (OutlookOptions) o;
+ return Objects.equals(attachmentMode, that.attachmentMode)
+ && Objects.equals(bodyFormat, that.bodyFormat)
+ && Objects.equals(folderFilter, that.folderFilter)
+ && Objects.equals(includeFolders, that.includeFolders)
+ && Objects.equals(includeMailboxes, that.includeMailboxes)
+ && Objects.equals(includeSenders, that.includeSenders)
+ && Objects.equals(includeSubjects, that.includeSubjects)
+ && Objects.equals(senderFilter, that.senderFilter)
+ && Objects.equals(startDate, that.startDate)
+ && Objects.equals(subjectFilter, that.subjectFilter);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ attachmentMode,
+ bodyFormat,
+ folderFilter,
+ includeFolders,
+ includeMailboxes,
+ includeSenders,
+ includeSubjects,
+ senderFilter,
+ startDate,
+ subjectFilter);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(OutlookOptions.class)
+ .add("attachmentMode", attachmentMode)
+ .add("bodyFormat", bodyFormat)
+ .add("folderFilter", folderFilter)
+ .add("includeFolders", includeFolders)
+ .add("includeMailboxes", includeMailboxes)
+ .add("includeSenders", includeSenders)
+ .add("includeSubjects", includeSubjects)
+ .add("senderFilter", senderFilter)
+ .add("startDate", startDate)
+ .add("subjectFilter", subjectFilter)
+ .toString();
+ }
+}
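
A short, hedged sketch of how these Outlook options compose; the folder names and start date reuse the placeholder examples from the field docs above.

    import com.databricks.sdk.service.pipelines.OutlookAttachmentMode;
    import com.databricks.sdk.service.pipelines.OutlookBodyFormat;
    import com.databricks.sdk.service.pipelines.OutlookOptions;
    import java.util.Arrays;

    // Plain-text bodies, no attachments, two folders only, history starting at 2024-01-01.
    OutlookOptions outlook =
        new OutlookOptions()
            .setBodyFormat(OutlookBodyFormat.TEXT_PLAIN)
            .setAttachmentMode(OutlookAttachmentMode.NONE)
            .setIncludeFolders(Arrays.asList("Inbox", "Sent Items"))
            .setStartDate("2024-01-01");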
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java
index bff0d6640..fcd0623fe 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java
@@ -3,6 +3,8 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
import com.databricks.sdk.support.Wait;
@@ -10,8 +12,6 @@
import java.util.Arrays;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Lakeflow Spark Declarative Pipelines API allows you to create, edit, delete, start, and view
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SmartsheetOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SmartsheetOptions.java
new file mode 100755
index 000000000..a565396bf
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SmartsheetOptions.java
@@ -0,0 +1,48 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Smartsheet specific options for ingestion */
+@Generated
+public class SmartsheetOptions {
+ /**
+ * (Optional) When true, maps each column to its Smartsheet-declared type (Text/Number/Date/
+ * Checkbox/etc.). Cells that do not conform to the declared type are set to NULL. When false, all
+ * columns land as STRING. Use false for sheets with irregular data or columns that frequently
+ * violate their own declared type. If not specified, defaults to true.
+ */
+ @JsonProperty("enforce_schema")
+ private Boolean enforceSchema;
+
+ public SmartsheetOptions setEnforceSchema(Boolean enforceSchema) {
+ this.enforceSchema = enforceSchema;
+ return this;
+ }
+
+ public Boolean getEnforceSchema() {
+ return enforceSchema;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ SmartsheetOptions that = (SmartsheetOptions) o;
+ return Objects.equals(enforceSchema, that.enforceSchema);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(enforceSchema);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(SmartsheetOptions.class).add("enforceSchema", enforceSchema).toString();
+ }
+}
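
A one-line sketch of the enforce_schema trade-off described above (the sheet in question is hypothetical):

    // Land every column as STRING for a sheet whose cells often violate their declared types.
    SmartsheetOptions smartsheet = new SmartsheetOptions().setEnforceSchema(false);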
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SourceConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SourceConfig.java
index de63dc578..8eacbde36 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SourceConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SourceConfig.java
@@ -13,6 +13,10 @@ public class SourceConfig {
@JsonProperty("catalog")
private SourceCatalogConfig catalog;
+ /** */
+ @JsonProperty("google_ads_config")
+ private GoogleAdsConfig googleAdsConfig;
+
public SourceConfig setCatalog(SourceCatalogConfig catalog) {
this.catalog = catalog;
return this;
@@ -22,21 +26,34 @@ public SourceCatalogConfig getCatalog() {
return catalog;
}
+ public SourceConfig setGoogleAdsConfig(GoogleAdsConfig googleAdsConfig) {
+ this.googleAdsConfig = googleAdsConfig;
+ return this;
+ }
+
+ public GoogleAdsConfig getGoogleAdsConfig() {
+ return googleAdsConfig;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
SourceConfig that = (SourceConfig) o;
- return Objects.equals(catalog, that.catalog);
+ return Objects.equals(catalog, that.catalog)
+ && Objects.equals(googleAdsConfig, that.googleAdsConfig);
}
@Override
public int hashCode() {
- return Objects.hash(catalog);
+ return Objects.hash(catalog, googleAdsConfig);
}
@Override
public String toString() {
- return new ToStringer(SourceConfig.class).add("catalog", catalog).toString();
+ return new ToStringer(SourceConfig.class)
+ .add("catalog", catalog)
+ .add("googleAdsConfig", googleAdsConfig)
+ .toString();
}
}
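
A minimal sketch of the new pipeline-level Google Ads configuration; the manager account ID is a placeholder, and the precedence rule (an object-level GoogleAdsOptions value wins over this one) comes from the field doc above.

    import com.databricks.sdk.service.pipelines.GoogleAdsConfig;
    import com.databricks.sdk.service.pipelines.SourceConfig;

    // Manager (MCC) account used to list customer accounts during source selection.
    SourceConfig source =
        new SourceConfig()
            .setGoogleAdsConfig(new GoogleAdsConfig().setManagerAccountId("1234567890"));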
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchSpec.java
index 73a875e84..f571ba0ee 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchSpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchSpec.java
@@ -11,7 +11,10 @@
@Generated
public class BranchSpec {
- /** Absolute expiration timestamp. When set, the branch will expire at this time. */
+ /**
+ * Absolute expiration timestamp. When set, the branch will expire at this time. Mutually
+ * exclusive with `ttl` and `no_expiry`. When updating, use `spec.expiration` in the update_mask.
+ */
@JsonProperty("expire_time")
private Timestamp expireTime;
@@ -24,7 +27,8 @@ public class BranchSpec {
/**
* Explicitly disable expiration. When set to true, the branch will not expire. If set to false,
- * the request is invalid; provide either ttl or expire_time instead.
+ * the request is invalid; provide either ttl or expire_time instead. Mutually exclusive with
+ * `expire_time` and `ttl`. When updating, use `spec.expiration` in the update_mask.
*/
@JsonProperty("no_expiry")
private Boolean noExpiry;
@@ -45,7 +49,11 @@ public class BranchSpec {
@JsonProperty("source_branch_time")
private Timestamp sourceBranchTime;
- /** Relative time-to-live duration. When set, the branch will expire at creation_time + ttl. */
+ /**
+ * Relative time-to-live duration. When set, the branch will expire at creation_time + ttl.
+ * Mutually exclusive with `expire_time` and `no_expiry`. When updating, use `spec.expiration` in
+ * the update_mask.
+ */
@JsonProperty("ttl")
private Duration ttl;
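
To illustrate the exclusivity rule documented above, a minimal sketch assuming the standard generated setters on BranchSpec (only the field declarations appear in this hunk): exactly one of expire_time, ttl, or no_expiry may be provided, and updates target them through `spec.expiration` in the update_mask.

    // A branch that never expires; also setting ttl or expire_time would make the request invalid.
    BranchSpec branchSpec = new BranchSpec().setNoExpiry(true);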
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateBranchOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateBranchOperation.java
index e29208573..bce38ec3f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateBranchOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateBranchOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running createBranch operation. Provides methods to wait for
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateBranchRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateBranchRequest.java
index b66b8771d..db6ae00a8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateBranchRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateBranchRequest.java
@@ -28,6 +28,11 @@ public class CreateBranchRequest {
/** The Project where this Branch will be created. Format: projects/{project_id} */
@JsonIgnore private String parent;
+ /** If true, update the branch if it already exists instead of returning an error. */
+ @JsonIgnore
+ @QueryParam("replace_existing")
+ private Boolean replaceExisting;
+
public CreateBranchRequest setBranch(Branch branch) {
this.branch = branch;
return this;
@@ -55,6 +60,15 @@ public String getParent() {
return parent;
}
+ public CreateBranchRequest setReplaceExisting(Boolean replaceExisting) {
+ this.replaceExisting = replaceExisting;
+ return this;
+ }
+
+ public Boolean getReplaceExisting() {
+ return replaceExisting;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -62,12 +76,13 @@ public boolean equals(Object o) {
CreateBranchRequest that = (CreateBranchRequest) o;
return Objects.equals(branch, that.branch)
&& Objects.equals(branchId, that.branchId)
- && Objects.equals(parent, that.parent);
+ && Objects.equals(parent, that.parent)
+ && Objects.equals(replaceExisting, that.replaceExisting);
}
@Override
public int hashCode() {
- return Objects.hash(branch, branchId, parent);
+ return Objects.hash(branch, branchId, parent, replaceExisting);
}
@Override
@@ -76,6 +91,7 @@ public String toString() {
.add("branch", branch)
.add("branchId", branchId)
.add("parent", parent)
+ .add("replaceExisting", replaceExisting)
.toString();
}
}
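
A hedged sketch of the new replace_existing flag; the parent and branch ID values are placeholders, and setParent and setBranchId are assumed to follow the same generated setter pattern as setBranch shown above. The same flag is added to CreateEndpointRequest below.

    // Create the branch, or update it in place if a branch with this ID already exists.
    CreateBranchRequest request =
        new CreateBranchRequest()
            .setParent("projects/my-project")
            .setBranchId("dev")
            .setBranch(new Branch())
            .setReplaceExisting(true);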
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateCatalogOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateCatalogOperation.java
index ddbe2780d..f1dc2916a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateCatalogOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateCatalogOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running createCatalog operation. Provides methods to wait for
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateDatabaseOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateDatabaseOperation.java
index 47292df1d..90d5ea957 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateDatabaseOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateDatabaseOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running createDatabase operation. Provides methods to wait
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateEndpointOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateEndpointOperation.java
index 977aa855f..68ca431c8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateEndpointOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateEndpointOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running createEndpoint operation. Provides methods to wait
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateEndpointRequest.java
index 83de8a724..93fd4f3c6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateEndpointRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateEndpointRequest.java
@@ -31,6 +31,11 @@ public class CreateEndpointRequest {
*/
@JsonIgnore private String parent;
+ /** If true, update the endpoint if it already exists instead of returning an error. */
+ @JsonIgnore
+ @QueryParam("replace_existing")
+ private Boolean replaceExisting;
+
public CreateEndpointRequest setEndpoint(Endpoint endpoint) {
this.endpoint = endpoint;
return this;
@@ -58,6 +63,15 @@ public String getParent() {
return parent;
}
+ public CreateEndpointRequest setReplaceExisting(Boolean replaceExisting) {
+ this.replaceExisting = replaceExisting;
+ return this;
+ }
+
+ public Boolean getReplaceExisting() {
+ return replaceExisting;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -65,12 +79,13 @@ public boolean equals(Object o) {
CreateEndpointRequest that = (CreateEndpointRequest) o;
return Objects.equals(endpoint, that.endpoint)
&& Objects.equals(endpointId, that.endpointId)
- && Objects.equals(parent, that.parent);
+ && Objects.equals(parent, that.parent)
+ && Objects.equals(replaceExisting, that.replaceExisting);
}
@Override
public int hashCode() {
- return Objects.hash(endpoint, endpointId, parent);
+ return Objects.hash(endpoint, endpointId, parent, replaceExisting);
}
@Override
@@ -79,6 +94,7 @@ public String toString() {
.add("endpoint", endpoint)
.add("endpointId", endpointId)
.add("parent", parent)
+ .add("replaceExisting", replaceExisting)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateProjectOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateProjectOperation.java
index 9292706fe..e457b3ca9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateProjectOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateProjectOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running createProject operation. Provides methods to wait for
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateRoleOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateRoleOperation.java
index fb229fe50..a89bcfc1e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateRoleOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateRoleOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running createRole operation. Provides methods to wait for
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateSyncedTableOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateSyncedTableOperation.java
index 657c538f0..d8b1eec17 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateSyncedTableOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateSyncedTableOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running createSyncedTable operation. Provides methods to wait
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteBranchOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteBranchOperation.java
index 070558e24..9f6dd967c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteBranchOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteBranchOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running deleteBranch operation. Provides methods to wait for
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteCatalogOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteCatalogOperation.java
index 460b87d67..78a5e07ed 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteCatalogOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteCatalogOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running deleteCatalog operation. Provides methods to wait for
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteDatabaseOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteDatabaseOperation.java
index 52806dd52..3ce4faa85 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteDatabaseOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteDatabaseOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running deleteDatabase operation. Provides methods to wait
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteEndpointOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteEndpointOperation.java
index dc868d52a..66c5aeb57 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteEndpointOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteEndpointOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running deleteEndpoint operation. Provides methods to wait
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteProjectOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteProjectOperation.java
index 289d193fa..85a5bae25 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteProjectOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteProjectOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running deleteProject operation. Provides methods to wait for
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteProjectRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteProjectRequest.java
index 524d9eb87..071cf43bd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteProjectRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteProjectRequest.java
@@ -3,6 +3,7 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
@@ -12,6 +13,14 @@ public class DeleteProjectRequest {
/** The full resource path of the project to delete. Format: projects/{project_id} */
@JsonIgnore private String name;
+ /**
+ * If true, permanently deletes the project (hard delete). If false or unset, performs a soft
+ * delete.
+ */
+ @JsonIgnore
+ @QueryParam("purge")
+ private Boolean purge;
+
public DeleteProjectRequest setName(String name) {
this.name = name;
return this;
@@ -21,21 +30,33 @@ public String getName() {
return name;
}
+ public DeleteProjectRequest setPurge(Boolean purge) {
+ this.purge = purge;
+ return this;
+ }
+
+ public Boolean getPurge() {
+ return purge;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DeleteProjectRequest that = (DeleteProjectRequest) o;
- return Objects.equals(name, that.name);
+ return Objects.equals(name, that.name) && Objects.equals(purge, that.purge);
}
@Override
public int hashCode() {
- return Objects.hash(name);
+ return Objects.hash(name, purge);
}
@Override
public String toString() {
- return new ToStringer(DeleteProjectRequest.class).add("name", name).toString();
+ return new ToStringer(DeleteProjectRequest.class)
+ .add("name", name)
+ .add("purge", purge)
+ .toString();
}
}
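
A short sketch contrasting the default soft delete with the new purge flag (the project name is a placeholder):

    // Soft delete (default): the project can later be undeleted.
    DeleteProjectRequest softDelete = new DeleteProjectRequest().setName("projects/my-project");

    // Hard delete: permanently removes the project.
    DeleteProjectRequest hardDelete =
        new DeleteProjectRequest().setName("projects/my-project").setPurge(true);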
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteRoleOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteRoleOperation.java
index 368527abd..0da203990 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteRoleOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteRoleOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running deleteRole operation. Provides methods to wait for
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteSyncedTableOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteSyncedTableOperation.java
index 7d0bf28d0..f078311d2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteSyncedTableOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteSyncedTableOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running deleteSyncedTable operation. Provides methods to wait
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java
index cdc271030..259f344bc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java
@@ -10,7 +10,10 @@
@Generated
public class EndpointSpec {
- /** The maximum number of Compute Units. Minimum value is 0.5. */
+ /**
+ * The maximum number of Compute Units. The maximum value is 64. The difference between the
+ * minimum and maximum Compute Units (max - min) must not exceed 16.
+ */
@JsonProperty("autoscaling_limit_max_cu")
private Double autoscalingLimitMaxCu;
@@ -40,7 +43,8 @@ public class EndpointSpec {
/**
* When set to true, explicitly disables automatic suspension (never suspend). Should be set to
- * true when provided.
+ * true when provided. Mutually exclusive with `suspend_timeout_duration`. When updating, use
+ * `spec.suspension` in the update_mask.
*/
@JsonProperty("no_suspension")
private Boolean noSuspension;
@@ -51,7 +55,8 @@ public class EndpointSpec {
/**
* Duration of inactivity after which the compute endpoint is automatically suspended. If
- * specified should be between 60s and 604800s (1 minute to 1 week).
+ * specified should be between 60s and 604800s (1 minute to 1 week). Mutually exclusive with
+ * `no_suspension`. When updating, use `spec.suspension` in the update_mask.
*/
@JsonProperty("suspend_timeout_duration")
private Duration suspendTimeoutDuration;
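
A worked example of the documented autoscaling constraint, assuming the usual generated setters (the min-CU setter is not part of this hunk): min 2 / max 16 is valid because 16 - 2 = 14 is within the 16-CU spread limit, while min 2 / max 32 would be rejected because 32 - 2 = 30 exceeds it.

    // Valid spread: 16 - 2 = 14 CU, and the maximum stays under the 64-CU ceiling.
    EndpointSpec endpointSpec =
        new EndpointSpec().setAutoscalingLimitMinCu(2.0).setAutoscalingLimitMaxCu(16.0);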
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java
index 34743c1e2..ea7cea139 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java
@@ -10,7 +10,10 @@
@Generated
public class EndpointStatus {
- /** The maximum number of Compute Units. */
+ /**
+ * The maximum number of Compute Units. The maximum value is 64. The difference between the
+ * minimum and maximum Compute Units (max - min) must not exceed 16.
+ */
@JsonProperty("autoscaling_limit_max_cu")
private Double autoscalingLimitMaxCu;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetSyncedTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetSyncedTableRequest.java
index 7bbefa4a3..1f2611e9d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetSyncedTableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetSyncedTableRequest.java
@@ -10,8 +10,8 @@
@Generated
public class GetSyncedTableRequest {
/**
- * Format: "synced_tables/{catalog}.{schema}.{table}", where (catalog, schema, table) are the
- * entity names in the Unity Catalog.
+ * The full resource name of the synced table. Format: "synced_tables/{catalog}.{schema}.{table}",
+ * where (catalog, schema, table) are the entity names in the Unity Catalog.
*/
@JsonIgnore private String name;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/InitialEndpointSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/InitialEndpointSpec.java
index f2196ad2a..e33fa9e5f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/InitialEndpointSpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/InitialEndpointSpec.java
@@ -7,9 +7,10 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Configuration for the initial Read/Write endpoint created during project creation. */
@Generated
public class InitialEndpointSpec {
- /** Settings for HA configuration of the endpoint */
+ /** Settings for HA configuration of the endpoint. */
@JsonProperty("group")
private EndpointGroupSpec group;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListProjectsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListProjectsRequest.java
index 7d9296f7e..5639ef257 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListProjectsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListProjectsRequest.java
@@ -20,6 +20,15 @@ public class ListProjectsRequest {
@QueryParam("page_token")
private String pageToken;
+ /**
+ * Whether to include soft-deleted projects in the response. When true, soft-deleted projects are
+ * included alongside active projects. Hard-deleted and already-purged projects are never
+ * returned.
+ */
+ @JsonIgnore
+ @QueryParam("show_deleted")
+ private Boolean showDeleted;
+
public ListProjectsRequest setPageSize(Long pageSize) {
this.pageSize = pageSize;
return this;
@@ -38,17 +47,28 @@ public String getPageToken() {
return pageToken;
}
+ public ListProjectsRequest setShowDeleted(Boolean showDeleted) {
+ this.showDeleted = showDeleted;
+ return this;
+ }
+
+ public Boolean getShowDeleted() {
+ return showDeleted;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ListProjectsRequest that = (ListProjectsRequest) o;
- return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken);
+ return Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken)
+ && Objects.equals(showDeleted, that.showDeleted);
}
@Override
public int hashCode() {
- return Objects.hash(pageSize, pageToken);
+ return Objects.hash(pageSize, pageToken, showDeleted);
}
@Override
@@ -56,6 +76,7 @@ public String toString() {
return new ToStringer(ListProjectsRequest.class)
.add("pageSize", pageSize)
.add("pageToken", pageToken)
+ .add("showDeleted", showDeleted)
.toString();
}
}
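
A one-line sketch of the new show_deleted flag (the page size is arbitrary):

    // Return soft-deleted projects alongside active ones; purged projects are never returned.
    ListProjectsRequest listRequest =
        new ListProjectsRequest().setPageSize(50L).setShowDeleted(true);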
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresAPI.java
index 4733a881c..abe5c04ec 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Use the Postgres API to create and manage Lakebase Autoscaling Postgres infrastructure, including
@@ -333,6 +333,12 @@ public Iterable listRoles(ListRolesRequest request) {
});
}
+ /** Undeletes a soft-deleted project. */
+ public UndeleteProjectOperation undeleteProject(UndeleteProjectRequest request) {
+ Operation operation = impl.undeleteProject(request);
+ return new UndeleteProjectOperation(impl, operation);
+ }
+
/**
* Updates the specified database branch. You can set this branch as the project's default branch,
* or protect/unprotect it.
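
A hedged sketch of restoring a soft-deleted project with the new call; here `w` stands for an already-configured WorkspaceClient, and the postgres() accessor, setName, and the project name are assumptions not shown in this hunk.

    // Undelete a soft-deleted project; the call returns a long-running operation wrapper.
    UndeleteProjectOperation operation =
        w.postgres().undeleteProject(new UndeleteProjectRequest().setName("projects/my-project"));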
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresImpl.java
index 26e5230d5..b6981a62d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresImpl.java
@@ -500,6 +500,24 @@ public ListRolesResponse listRoles(ListRolesRequest request) {
}
}
+ @Override
+ public Operation undeleteProject(UndeleteProjectRequest request) {
+ String path = String.format("/api/2.0/postgres/%s/undelete", request.getName());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, Operation.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public Operation updateBranch(UpdateBranchRequest request) {
String path = String.format("/api/2.0/postgres/%s", request.getName());
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresService.java
index 7c3889ab1..4c767cb90 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresService.java
@@ -124,6 +124,9 @@ DatabaseCredential generateDatabaseCredential(
/** Returns a paginated list of Postgres roles in the branch. */
ListRolesResponse listRoles(ListRolesRequest listRolesRequest);
+ /** Undeletes a soft-deleted project. */
+ Operation undeleteProject(UndeleteProjectRequest undeleteProjectRequest);
+
/**
* Updates the specified database branch. You can set this branch as the project's default branch,
* or protect/unprotect it.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Project.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Project.java
index 2f9137f8d..0ccd3a0e2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Project.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Project.java
@@ -15,7 +15,14 @@ public class Project {
private Timestamp createTime;
/**
- * Configuration settings for the initial Read/Write endpoint created inside the default branch
+ * A timestamp indicating when the project was soft-deleted. Empty if the project is not deleted,
+ * otherwise set to a timestamp in the past.
+ */
+ @JsonProperty("delete_time")
+ private Timestamp deleteTime;
+
+ /**
+ * Configuration settings for the initial Read/Write endpoint created inside the initial branch
* for a newly created project. If omitted, the initial endpoint created will have default
* settings, without high availability configured. This field does not apply to any endpoints
* created after project creation. Use spec.default_endpoint_settings to configure default
@@ -28,6 +35,13 @@ public class Project {
@JsonProperty("name")
private String name;
+ /**
+ * A timestamp indicating when the project is scheduled for permanent deletion. Empty if the
+ * project is not deleted, otherwise set to a timestamp in the future.
+ */
+ @JsonProperty("purge_time")
+ private Timestamp purgeTime;
+
/**
* The spec contains the project configuration, including display_name, pg_version (Postgres
* version), history_retention_duration, and default_endpoint_settings.
@@ -56,6 +70,15 @@ public Timestamp getCreateTime() {
return createTime;
}
+ public Project setDeleteTime(Timestamp deleteTime) {
+ this.deleteTime = deleteTime;
+ return this;
+ }
+
+ public Timestamp getDeleteTime() {
+ return deleteTime;
+ }
+
public Project setInitialEndpointSpec(InitialEndpointSpec initialEndpointSpec) {
this.initialEndpointSpec = initialEndpointSpec;
return this;
@@ -74,6 +97,15 @@ public String getName() {
return name;
}
+ public Project setPurgeTime(Timestamp purgeTime) {
+ this.purgeTime = purgeTime;
+ return this;
+ }
+
+ public Timestamp getPurgeTime() {
+ return purgeTime;
+ }
+
public Project setSpec(ProjectSpec spec) {
this.spec = spec;
return this;
@@ -116,8 +148,10 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
Project that = (Project) o;
return Objects.equals(createTime, that.createTime)
+ && Objects.equals(deleteTime, that.deleteTime)
&& Objects.equals(initialEndpointSpec, that.initialEndpointSpec)
&& Objects.equals(name, that.name)
+ && Objects.equals(purgeTime, that.purgeTime)
&& Objects.equals(spec, that.spec)
&& Objects.equals(status, that.status)
&& Objects.equals(uid, that.uid)
@@ -126,15 +160,26 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(createTime, initialEndpointSpec, name, spec, status, uid, updateTime);
+ return Objects.hash(
+ createTime,
+ deleteTime,
+ initialEndpointSpec,
+ name,
+ purgeTime,
+ spec,
+ status,
+ uid,
+ updateTime);
}
@Override
public String toString() {
return new ToStringer(Project.class)
.add("createTime", createTime)
+ .add("deleteTime", deleteTime)
.add("initialEndpointSpec", initialEndpointSpec)
.add("name", name)
+ .add("purgeTime", purgeTime)
.add("spec", spec)
.add("status", status)
.add("uid", uid)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectDefaultEndpointSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectDefaultEndpointSettings.java
index 3718efe3b..d7d1f5f0d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectDefaultEndpointSettings.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectDefaultEndpointSettings.java
@@ -22,7 +22,8 @@ public class ProjectDefaultEndpointSettings {
/**
* When set to true, explicitly disables automatic suspension (never suspend). Should be set to
- * true when provided.
+ * true when provided. Mutually exclusive with `suspend_timeout_duration`. When updating, use
+ * `spec.project_default_settings.suspension` in the update_mask.
*/
@JsonProperty("no_suspension")
private Boolean noSuspension;
@@ -33,7 +34,9 @@ public class ProjectDefaultEndpointSettings {
/**
* Duration of inactivity after which the compute endpoint is automatically suspended. If
- * specified should be between 60s and 604800s (1 minute to 1 week).
+ * specified should be between 60s and 604800s (1 minute to 1 week). Mutually exclusive with
+ * `no_suspension`. When updating, use `spec.project_default_settings.suspension` in the
+ * update_mask.
*/
@JsonProperty("suspend_timeout_duration")
private Duration suspendTimeoutDuration;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectSpec.java
index dbda32f08..daa2de4cb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectSpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectSpec.java
@@ -51,7 +51,7 @@ public class ProjectSpec {
/**
* The number of seconds to retain the shared history for point in time recovery for all branches
- * in this project. Value should be between 172800s (2 days) and 2592000s (30 days).
+ * in this project. Value should be between 172800s (2 days) and 3024000s (35 days).
*/
@JsonProperty("history_retention_duration")
private Duration historyRetentionDuration;
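The retention ceiling moves from 30 to 35 days. A small sketch of a spec at the new maximum, under the same assumptions as above (generated setter name, `java.time.Duration`):

```java
import com.databricks.sdk.service.postgres.ProjectSpec;
import java.time.Duration; // assumed Duration type of history_retention_duration

public final class HistoryRetentionExample {
  // The retention window must lie within 172800s (2 days) and 3024000s (35 days).
  static ProjectSpec maxRetention() {
    return new ProjectSpec().setHistoryRetentionDuration(Duration.ofDays(35));
  }
}
```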
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProvisioningPhase.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProvisioningPhase.java
index 520039dac..c0d946a69 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProvisioningPhase.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProvisioningPhase.java
@@ -4,7 +4,7 @@
import com.databricks.sdk.support.Generated;
-/** Copied from database_table_statuses.proto to decouple SDK packages. */
+/** The current phase of the data synchronization pipeline. */
@Generated
public enum ProvisioningPhase {
PROVISIONING_PHASE_INDEX_SCAN,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RequestedResource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RequestedResource.java
index ffc6ce7e2..a4c6696bd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RequestedResource.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RequestedResource.java
@@ -9,7 +9,7 @@
@Generated
public class RequestedResource {
- /** */
+ /** The full Unity Catalog table name. */
@JsonProperty("table_name")
private String tableName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RoleRoleSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RoleRoleSpec.java
index e8af2c9da..983155859 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RoleRoleSpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RoleRoleSpec.java
@@ -15,7 +15,18 @@ public class RoleRoleSpec {
private RoleAttributes attributes;
/**
- * If auth_method is left unspecified, a meaningful authentication method is derived from the
+ * Controls how the Postgres role authenticates when a client opens a database connection.
+ * Supported values:
+ *
+ * * LAKEBASE_OAUTH_V1: the role authenticates by presenting a Databricks OAuth access token
+ * derived from the backing managed identity (the Databricks user, service principal, or group
+ * named by the role's `postgres_role`). No static password exists for roles using this method.
+ * * PG_PASSWORD_SCRAM_SHA_256: the role authenticates with a Postgres password verified
+ * server-side using the SCRAM-SHA-256 mechanism. Lakebase generates a password for the role.
+ * * NO_LOGIN: the role cannot open a Postgres session at all. Useful for roles that exist only to
+ * own objects or to aggregate privileges that are then granted to other roles that can log in.
+ *
+ *
+ * If auth_method is left unspecified, a meaningful authentication method is derived from the
* identity_type: * For the managed identities, OAUTH is used. * For the regular postgres roles,
* authentication based on postgres passwords is used.
*
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTableState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTableState.java
index d90b09b4d..6aeb49ae6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTableState.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTableState.java
@@ -4,9 +4,7 @@
import com.databricks.sdk.support.Generated;
-/**
- * The state of a synced table. Copied from database_table_statuses.proto to decouple SDK packages.
- */
+/** The state of a synced table. */
@Generated
public enum SyncedTableState {
SYNCED_TABLE_OFFLINE,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UndeleteProjectOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UndeleteProjectOperation.java
new file mode 100755
index 000000000..c3f24cfae
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UndeleteProjectOperation.java
@@ -0,0 +1,161 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.postgres;
+
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
+import com.databricks.sdk.core.utils.SerDeUtils;
+import com.databricks.sdk.service.common.lro.LroOptions;
+import com.databricks.sdk.support.Generated;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import java.time.Duration;
+import java.util.Optional;
+import java.util.concurrent.TimeoutException;
+
+/**
+ * Wrapper for interacting with a long-running undeleteProject operation. Provides methods to wait
+ * for completion, check status, and access metadata.
+ */
+@Generated
+public class UndeleteProjectOperation {
+ private static final Logger LOG = LoggerFactory.getLogger(UndeleteProjectOperation.class);
+
+ private final PostgresService impl;
+ private Operation operation;
+ private final ObjectMapper objectMapper;
+
+ public UndeleteProjectOperation(PostgresService impl, Operation operation) {
+ this.impl = impl;
+ this.operation = operation;
+ this.objectMapper = SerDeUtils.createMapper();
+ }
+
+ /**
+ * Wait for the operation to complete. Waits indefinitely if no timeout is specified.
+ *
+ * @throws TimeoutException if the operation doesn't complete within the timeout
+ * @throws DatabricksException if the operation fails
+ */
+ public void waitForCompletion() throws TimeoutException {
+ waitForCompletion(Optional.empty());
+ }
+
+ /**
+ * Wait for the operation to complete.
+ *
+ * @param options the options for configuring the wait behavior, can be empty for defaults
+ * @throws TimeoutException if the operation doesn't complete within the timeout
+ * @throws DatabricksException if the operation fails
+ */
+ public void waitForCompletion(Optional<LroOptions> options) throws TimeoutException {
+ Optional<Duration> timeout = options.flatMap(LroOptions::getTimeout);
+ long deadline =
+ timeout.isPresent()
+ ? System.currentTimeMillis() + timeout.get().toMillis()
+ : Long.MAX_VALUE;
+ String statusMessage = "polling operation...";
+ int attempt = 1;
+
+ while (System.currentTimeMillis() < deadline) {
+ // Refresh the operation state
+ refreshOperation();
+
+ if (operation.getDone() != null && operation.getDone()) {
+ // Operation completed, check for success or failure
+ if (operation.getError() != null) {
+ String errorMsg = "unknown error";
+ if (operation.getError().getMessage() != null
+ && !operation.getError().getMessage().isEmpty()) {
+ errorMsg = operation.getError().getMessage();
+ }
+
+ if (operation.getError().getErrorCode() != null) {
+ errorMsg = String.format("[%s] %s", operation.getError().getErrorCode(), errorMsg);
+ }
+
+ throw new DatabricksException("Operation failed: " + errorMsg);
+ }
+
+ // Operation completed successfully, unmarshal response
+ if (operation.getResponse() == null) {
+ throw new DatabricksException("Operation completed but no response available");
+ }
+
+ try {
+ JsonNode responseJson = objectMapper.valueToTree(operation.getResponse());
+ objectMapper.treeToValue(responseJson, Void.class);
+ return;
+ } catch (JsonProcessingException e) {
+ throw new DatabricksException("Failed to unmarshal response: " + e.getMessage(), e);
+ }
+ }
+
+ // Operation still in progress, wait before polling again
+ String prefix = String.format("operation=%s", operation.getName());
+ int sleep = Math.min(attempt, 10); // sleep 10s max per attempt
+ LOG.info("{}: operation in progress (sleeping ~{}s)", prefix, sleep);
+
+ try {
+ Thread.sleep((long) (sleep * 1000L + Math.random() * 1000));
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new DatabricksException("Current thread was interrupted", e);
+ }
+ attempt++;
+ }
+
+ String timeoutMessage =
+ timeout.isPresent()
+ ? String.format("Operation timed out after %s: %s", timeout.get(), statusMessage)
+ : String.format("Operation timed out: %s", statusMessage);
+ throw new TimeoutException(timeoutMessage);
+ }
+
+ /**
+ * Get the operation name.
+ *
+ * @return the operation name
+ */
+ public String getName() {
+ return operation.getName();
+ }
+
+ /**
+ * Get the operation metadata.
+ *
+ * @return the operation metadata, or null if not available
+ * @throws DatabricksException if the metadata cannot be deserialized
+ */
+ public ProjectOperationMetadata getMetadata() {
+ if (operation.getMetadata() == null) {
+ return null;
+ }
+
+ try {
+ JsonNode metadataJson = objectMapper.valueToTree(operation.getMetadata());
+ return objectMapper.treeToValue(metadataJson, ProjectOperationMetadata.class);
+ } catch (JsonProcessingException e) {
+ throw new DatabricksException("Failed to unmarshal operation metadata: " + e.getMessage(), e);
+ }
+ }
+
+ /**
+ * Check if the operation is done. This method refreshes the operation state before checking.
+ *
+ * @return true if the operation is complete, false otherwise
+ * @throws DatabricksException if the status check fails
+ */
+ public boolean isDone() {
+ refreshOperation();
+ return operation.getDone() != null && operation.getDone();
+ }
+
+ /** Refresh the operation state by polling the server. */
+ private void refreshOperation() {
+ operation = impl.getOperation(new GetOperationRequest().setName(operation.getName()));
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UndeleteProjectRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UndeleteProjectRequest.java
new file mode 100755
index 000000000..533f01b47
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UndeleteProjectRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.postgres;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Request to restore a soft-deleted project within its retention period. */
+@Generated
+public class UndeleteProjectRequest {
+ /** The full resource path of the project to undelete. Format: projects/{project_id} */
+ @JsonIgnore private String name;
+
+ public UndeleteProjectRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UndeleteProjectRequest that = (UndeleteProjectRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UndeleteProjectRequest.class).add("name", name).toString();
+ }
+}
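A hedged end-to-end sketch of restoring a soft-deleted project with the new request and operation wrapper. The `w.postgres()` accessor and `undeleteProject(...)` method are assumptions about the service surface (they are not part of this diff); the request setter and the wrapper methods are confirmed above:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.postgres.UndeleteProjectOperation;
import com.databricks.sdk.service.postgres.UndeleteProjectRequest;
import java.util.concurrent.TimeoutException;

public final class UndeleteProjectExample {
  public static void main(String[] args) throws TimeoutException {
    WorkspaceClient w = new WorkspaceClient();

    // Hypothetical accessor/method names: the API class returning this wrapper is not in the diff.
    UndeleteProjectOperation op =
        w.postgres()
            .undeleteProject(new UndeleteProjectRequest().setName("projects/my-project-id"));

    // Wait indefinitely; the Optional<LroOptions> overload shown above can bound this with a timeout.
    op.waitForCompletion();
    System.out.println("Project restored via operation " + op.getName());
  }
}
```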
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateBranchOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateBranchOperation.java
index 0b6a8a788..f01fcf534 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateBranchOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateBranchOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running updateBranch operation. Provides methods to wait for
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateDatabaseOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateDatabaseOperation.java
index d228c88bd..9fbf891ac 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateDatabaseOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateDatabaseOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running updateDatabase operation. Provides methods to wait
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateEndpointOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateEndpointOperation.java
index 2f4882f31..a016351b3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateEndpointOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateEndpointOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running updateEndpoint operation. Provides methods to wait
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateProjectOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateProjectOperation.java
index f9742e5af..fb4b0bd5f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateProjectOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateProjectOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running updateProject operation. Provides methods to wait for
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateRoleOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateRoleOperation.java
index 57f9b2d25..6a73616e2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateRoleOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateRoleOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.postgres;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running updateRole operation. Provides methods to wait for
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsAPI.java
index 035ea5659..0b6e31ec6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.provisioning;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs manage credential configurations for this workspace. Databricks needs access to a
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysAPI.java
index a1fa6e14c..b6155841c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.provisioning;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs manage encryption key configurations for this workspace (optional). A key
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksAPI.java
index 628802833..3a7080ac9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.provisioning;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs manage network configurations for customer-managed VPCs (optional). Its ID is used
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessAPI.java
index 644d702be..d60ac2a61 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.provisioning;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** These APIs manage private access settings for this account. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageAPI.java
index b8a3cb710..a13f177a5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.provisioning;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs manage storage configurations for this workspace. A root storage S3 bucket in your
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsAPI.java
index e9568104b..c16d515c4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.provisioning;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** These APIs manage VPC endpoint configurations for this account. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesAPI.java
index 479e91661..e17d86d39 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesAPI.java
@@ -3,14 +3,14 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Wait;
import java.time.Duration;
import java.util.Arrays;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs manage workspaces for this account. A Databricks workspace is an environment for
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2API.java
index 2fc90b8f1..29f476ab8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2API.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2API.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.qualitymonitorv2;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Deprecated: Please use the Data Quality Monitoring API instead (REST:
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigInput.java
index e34add98e..345426d1f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigInput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigInput.java
@@ -7,6 +7,10 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/**
+ * Deprecated: legacy inference table configuration. Please use AI Gateway inference tables instead.
+ * See https://docs.databricks.com/aws/en/ai-gateway/inference-tables.
+ */
@Generated
public class AutoCaptureConfigInput {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigOutput.java
index cb1665074..4d6d370c3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigOutput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigOutput.java
@@ -7,6 +7,10 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/**
+ * Deprecated: legacy inference table configuration. Please use AI Gateway inference tables instead.
+ * See https://docs.databricks.com/aws/en/ai-gateway/inference-tables.
+ */
@Generated
public class AutoCaptureConfigOutput {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigInput.java
index 5e0034ef9..b96c89905 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigInput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigInput.java
@@ -12,10 +12,9 @@
@Generated
public class EndpointCoreConfigInput {
/**
- * Configuration for Inference Tables which automatically logs requests and responses to Unity
- * Catalog. Note: this field is deprecated for creating new provisioned throughput endpoints, or
- * updating existing provisioned throughput endpoints that never have inference table configured;
- * in these cases please use AI Gateway to manage inference tables.
+ * Configuration for legacy Inference Tables which automatically log requests and responses to
+ * Unity Catalog. Deprecated: please use AI Gateway inference tables instead. See
+ * https://docs.databricks.com/aws/en/ai-gateway/inference-tables.
*/
@JsonProperty("auto_capture_config")
private AutoCaptureConfigInput autoCaptureConfig;
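The updated comments steer new endpoints toward AI Gateway inference tables instead of `auto_capture_config`. A hedged sketch of the replacement configuration, assuming the serving package's `AiGatewayConfig` and `AiGatewayInferenceTableConfig` classes and their generated setters (they are not part of this diff):

```java
import com.databricks.sdk.service.serving.AiGatewayConfig;
import com.databricks.sdk.service.serving.AiGatewayInferenceTableConfig;

public final class InferenceTableConfigExample {
  // Prefer AI Gateway inference tables over the deprecated auto_capture_config.
  static AiGatewayConfig gatewayWithInferenceTable() {
    return new AiGatewayConfig()
        .setInferenceTableConfig(
            new AiGatewayInferenceTableConfig()
                .setEnabled(true)
                .setCatalogName("main")
                .setSchemaName("serving_logs")
                .setTableNamePrefix("my_endpoint"));
  }
}
```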
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigOutput.java
index 253eaba34..2471d25a0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigOutput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigOutput.java
@@ -11,10 +11,9 @@
@Generated
public class EndpointCoreConfigOutput {
/**
- * Configuration for Inference Tables which automatically logs requests and responses to Unity
- * Catalog. Note: this field is deprecated for creating new provisioned throughput endpoints, or
- * updating existing provisioned throughput endpoints that never have inference table configured;
- * in these cases please use AI Gateway to manage inference tables.
+ * Configuration for legacy Inference Tables which automatically log requests and responses to
+ * Unity Catalog. Deprecated: please use AI Gateway inference tables instead. See
+ * https://docs.databricks.com/aws/en/ai-gateway/inference-tables.
*/
@JsonProperty("auto_capture_config")
private AutoCaptureConfigOutput autoCaptureConfig;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointPendingConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointPendingConfig.java
index b25e58be5..55924321e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointPendingConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointPendingConfig.java
@@ -11,10 +11,9 @@
@Generated
public class EndpointPendingConfig {
/**
- * Configuration for Inference Tables which automatically logs requests and responses to Unity
- * Catalog. Note: this field is deprecated for creating new provisioned throughput endpoints, or
- * updating existing provisioned throughput endpoints that never have inference table configured;
- * in these cases please use AI Gateway to manage inference tables.
+ * Configuration for legacy Inference Tables which automatically log requests and responses to
+ * Unity Catalog. Deprecated: please use AI Gateway inference tables instead. See
+ * https://docs.databricks.com/aws/en/ai-gateway/inference-tables.
*/
@JsonProperty("auto_capture_config")
private AutoCaptureConfigOutput autoCaptureConfig;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java
index 509c6bbf1..a57e9b817 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java
@@ -3,6 +3,8 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
import com.databricks.sdk.support.Wait;
@@ -10,8 +12,6 @@
import java.util.Arrays;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Serving Endpoints API allows you to create, update, and delete model serving endpoints.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java
index 922fc5622..7f05fb715 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java
@@ -3,9 +3,9 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksConfig;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsAPI.java
index dfdac9208..089ee1fbe 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Accounts IP Access List API enables account admins to configure IP access lists for access to
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java
index b343a30ed..189debea3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Accounts Settings API allows users to manage settings at the account level. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java
index c411e179d..46e3d2d66 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsAPI.java
index db2c36a2f..f81d69cf6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateAPI.java
index 3a0c8e4e3..4faba7703 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Controls whether automatic cluster update is enabled for the current workspace. By default, it is
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfile.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfile.java
index 53a333c2b..0dd69fa62 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfile.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfile.java
@@ -8,14 +8,17 @@
import java.util.Collection;
import java.util.Objects;
-/** SHIELD feature: CSP */
+/**
+ * SHIELD feature: CSP. Compliance Security Profile (CSP) enables enhanced compliance controls on the
+ * workspace.
+ */
@Generated
public class ComplianceSecurityProfile {
- /** Set by customers when they request Compliance Security Profile (CSP) */
+ /** Compliance standards selected by the customer for this Compliance Security Profile. */
@JsonProperty("compliance_standards")
private Collection<ComplianceStandard> complianceStandards;
- /** */
+ /** Whether Compliance Security Profile (CSP) is enabled on the workspace. */
@JsonProperty("is_enabled")
private Boolean isEnabled;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileAPI.java
index 4ccc44313..9e6b1047a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Controls whether to enable the compliance security profile for the current workspace. Enabling it
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CredentialsManagerAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CredentialsManagerAPI.java
index 7298744dc..1a501cd99 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CredentialsManagerAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CredentialsManagerAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Credentials manager interacts with Identity Providers to perform token exchanges using
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountAPI.java
index f9efde9a4..953e5ea65 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The compliance security profile settings at the account level control whether to enable it for
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptionsAPI.java
index 611506df7..627aaf779 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptionsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Controls whether schedules or workload tasks for refreshing AI/BI Dashboards in the workspace can
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceAPI.java
index 2da78074b..8de527697 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The default namespace setting API allows users to configure the default namespace for a
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdAPI.java
index 100bdec20..92fbcb3ff 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Warehouse to be selected by default for users in this workspace. Covers SQL workloads only and
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessAPI.java
index 0d3a6b628..a93417e40 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* 'Disabling legacy access' has the following impacts:
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsAPI.java
index 203d743b2..5fa81a9d9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Disabling legacy DBFS has the following implications:
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesAPI.java
index 951676b1e..2f1dbd569 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Disable legacy features for new Databricks workspaces.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookAPI.java
index 48756f92b..b0b7c6653 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Controls whether users can export notebooks and files from the Workspace UI. By default, this
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsAPI.java
index 4c8ef476c..57d322680 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Controls the enforcement of IP access lists for accessing the account console. Allowing you to
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardAPI.java
index c6edb1c13..db106afe5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Controls whether users can copy tabular data to the clipboard via the UI. By default, this
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingAPI.java
index 770b08cc9..6de51c3c1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Controls whether users can download notebook results. By default, this setting is enabled. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoring.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoring.java
index 316b5d6c2..c2e29aaba 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoring.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoring.java
@@ -7,10 +7,13 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** SHIELD feature: ESM */
+/**
+ * SHIELD feature: ESM. Enhanced Security Monitoring (ESM) enables additional security monitoring on
+ * the workspace.
+ */
@Generated
public class EnhancedSecurityMonitoring {
- /** */
+ /** Whether Enhanced Security Monitoring (ESM) is enabled on the workspace. */
@JsonProperty("is_enabled")
private Boolean isEnabled;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringAPI.java
index 966d6967e..ea11ac579 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Controls whether enhanced security monitoring is enabled for the current workspace. If the
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountAPI.java
index c9a9785ec..c7efe0439 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The enhanced security monitoring setting at the account level controls whether to enable the
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsAPI.java
index 1c1f97e16..c012a0328 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* IP Access List enables admins to configure IP access lists.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountAPI.java
index 4bfa46b7f..efdaa330d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Determines if partner powered models are enabled or not for a specific account */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceAPI.java
index f74066558..7e6d2449b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Determines if the account-level partner-powered setting value is enforced upon the
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceAPI.java
index 80361d434..48b038a27 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Determines if partner powered models are enabled or not for a specific workspace */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java
index 502cbc59f..d8c648128 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs provide configurations for the network connectivity of your workspaces for serverless
@@ -93,7 +93,7 @@ public NccPrivateEndpointRule deletePrivateEndpointRule(
/**
* Initiates deleting a private endpoint rule. If the connection state is PENDING or EXPIRED, the
* private endpoint is immediately deleted. Otherwise, the private endpoint is deactivated and
- * will be deleted after seven days of deactivation. When a private endpoint is deactivated, the
+ * will be deleted after one day of deactivation. When a private endpoint is deactivated, the
* `deactivated` field is set to `true` and the private endpoint is not available to your
* serverless compute resources.
*/
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java
index 802d79ae4..30e491376 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java
@@ -59,7 +59,7 @@ void deleteNetworkConnectivityConfiguration(
/**
* Initiates deleting a private endpoint rule. If the connection state is PENDING or EXPIRED, the
* private endpoint is immediately deleted. Otherwise, the private endpoint is deactivated and
- * will be deleted after seven days of deactivation. When a private endpoint is deactivated, the
+ * will be deleted after one day of deactivation. When a private endpoint is deactivated, the
* `deactivated` field is set to `true` and the private endpoint is not available to your
* serverless compute resources.
*/
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesAPI.java
index 76dd21057..c733b9761 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs manage network policies for this account. Network policies control which network
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsAPI.java
index 01145fa9e..a19d242cb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The notification destinations API lets you programmatically manage a workspace's notification
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeAPI.java
index 38ed548bc..95e3b34d4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Personal Compute enablement setting lets you control which users can use the Personal Compute
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsAPI.java
index bb5d209e3..b07a3d841 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Restrict Workspace Admins setting lets you control the capabilities of workspace admins. With
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
index 6e7933a7c..c6eecaa82 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Workspace Settings API allows users to manage settings at the workspace level. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SqlResultsDownloadAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SqlResultsDownloadAPI.java
index 4bce27824..3dbab3a82 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SqlResultsDownloadAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SqlResultsDownloadAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Controls whether users within the workspace are allowed to download results from the SQL Editor
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java
index 6f756c514..d1ccc37aa 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Enables administrators to get all tokens and delete tokens for other users. Admins can either get
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java
index d15d8820b..0cec5fee9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Token API allows you to create, list, and revoke tokens that can be used to authenticate and
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfAPI.java
index 67b5e9534..9e570afeb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import java.util.Map;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** This API allows updating known workspace settings for advanced users. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkConfigurationAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkConfigurationAPI.java
index 20f1db212..8a10e0757 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkConfigurationAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkConfigurationAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.settings;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These APIs allow configuration of network settings for Databricks workspaces by selecting which
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2API.java
index 32010ad91..75a78f4dd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2API.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2API.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.settingsv2;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** APIs to manage account level settings */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2API.java
index 33ef3dfb5..511c84287 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2API.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2API.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.settingsv2;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** APIs to manage workspace level settings */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersAPI.java
index 919108102..cc1972490 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.sharing;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* A data provider is an object representing the organization in the real world who shares the data.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientActivationAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientActivationAPI.java
index 23d6470c3..a98401235 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientActivationAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientActivationAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.sharing;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Recipient Activation API is only applicable in the open sharing model where the recipient
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesAPI.java
index 9f93d1a2d..0f588f8fb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.sharing;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Recipient Federation Policies APIs are only applicable in the open sharing model where the
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsAPI.java
index 07fe0f094..461bcbce9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.sharing;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* A recipient is an object you create using :method:recipients/create to represent an organization
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesAPI.java
index c19d3c7e8..d28c6c73d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.sharing;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* A share is a container instantiated with :method:shares/create. Once created you can iteratively
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsAPI.java
index 18fbf10d8..c9c2383d0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.sql;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsLegacyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsLegacyAPI.java
index 8011c2583..294ecd0c0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsLegacyAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsLegacyAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.sql;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java
index 8a0c9fdbe..512e3bc41 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.sql;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** New version of SQL Alerts */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardWidgetsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardWidgetsAPI.java
index 2e34723b2..511b4c5b6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardWidgetsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardWidgetsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.sql;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* This is an evolving API that facilitates the addition and removal of widgets from existing
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardsAPI.java
index 8d449a27c..379e235eb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.sql;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* In general, there is little need to modify dashboards using the API. However, it can be useful to
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DataSourcesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DataSourcesAPI.java
index c30a8670b..a397532ee 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DataSourcesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DataSourcesAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.sql;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* This API is provided to assist you in making new query objects. When creating a query object, you
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DbsqlPermissionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DbsqlPermissionsAPI.java
index e6c1ad93c..844f03b9e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DbsqlPermissionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DbsqlPermissionsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.sql;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The SQL Permissions API is similar to the endpoints of the :method:permissions/set. However, this
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesAPI.java
index 30b569fe0..ddecdebb5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.sql;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The queries API can be used to perform CRUD operations on queries. A query is a Databricks SQL
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesLegacyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesLegacyAPI.java
index 02900bf13..1f457a1e3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesLegacyAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesLegacyAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.sql;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* These endpoints are used for CRUD operations on query definitions. Query definitions include the
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryHistoryAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryHistoryAPI.java
index c66cbde20..a69df4b35 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryHistoryAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryHistoryAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.sql;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* A service responsible for storing and retrieving the list of queries run against SQL endpoints
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsAPI.java
index f99e89a7e..5149f0253 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.sql;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* This is an evolving API that facilitates the addition and removal of visualizations from existing
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsLegacyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsLegacyAPI.java
index 08f278bb6..c0bdbaa7d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsLegacyAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsLegacyAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.sql;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* This is an evolving API that facilitates the addition and removal of visualizations from existing
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RedashConfigAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RedashConfigAPI.java
index 7a27bd439..86a380114 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RedashConfigAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RedashConfigAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.sql;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Redash V2 service for workspace configurations (internal) */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java
index c47eed4aa..64b6cc588 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.sql;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Databricks SQL Statement Execution API can be used to execute SQL statements on a SQL
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java
index e642391b8..84ade8bd8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java
@@ -3,6 +3,8 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
import com.databricks.sdk.support.Wait;
@@ -10,8 +12,6 @@
import java.util.Arrays;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* A SQL warehouse is a compute resource that lets you run SQL commands on data objects within
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsAPI.java
index f5a9dc48e..57d673c0c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.supervisoragents;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Manage Supervisor Agents and related resources. */
@Generated
@@ -31,7 +31,8 @@ public SupervisorAgent createSupervisorAgent(CreateSupervisorAgentRequest reques
/**
* Creates a Tool under a Supervisor Agent. Specify one of "genie_space", "knowledge_assistant",
- * "uc_function", "connection", "app", "volume", "lakeview_dashboard" in the request body.
+ * "uc_function", "uc_connection", "app", "volume", "lakeview_dashboard", "uc_table",
+ * "vector_search_index" in the request body.
*/
public Tool createTool(CreateToolRequest request) {
return impl.createTool(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsService.java
index 2445a0bb1..0e84ee179 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsService.java
@@ -17,7 +17,8 @@ public interface SupervisorAgentsService {
/**
* Creates a Tool under a Supervisor Agent. Specify one of "genie_space", "knowledge_assistant",
- * "uc_function", "connection", "app", "volume", "lakeview_dashboard" in the request body.
+ * "uc_function", "uc_connection", "app", "volume", "lakeview_dashboard", "uc_table",
+ * "vector_search_index" in the request body.
*/
Tool createTool(CreateToolRequest createToolRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Tool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Tool.java
index 7fede2aac..8c1d00862 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Tool.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Tool.java
@@ -13,10 +13,6 @@ public class Tool {
@JsonProperty("app")
private App app;
- /** */
- @JsonProperty("connection")
- private Connection connection;
-
/** Description of what this tool does (user-facing). */
@JsonProperty("description")
private String description;
@@ -42,12 +38,17 @@ public class Tool {
private String toolId;
/**
- * Tool type. Must be one of: "genie_space", "knowledge_assistant", "uc_function", "connection",
- * "app", "volume", "lakeview_dashboard", "serving_endpoint".
+ * Tool type. Must be one of: "genie_space", "knowledge_assistant", "uc_function",
+ * "uc_connection", "app", "volume", "lakeview_dashboard", "serving_endpoint", "uc_table",
+ * "vector_search_index".
*/
@JsonProperty("tool_type")
private String toolType;
+ /** */
+ @JsonProperty("uc_connection")
+ private UcConnection ucConnection;
+
/** */
@JsonProperty("uc_function")
private UcFunction ucFunction;
@@ -65,15 +66,6 @@ public App getApp() {
return app;
}
- public Tool setConnection(Connection connection) {
- this.connection = connection;
- return this;
- }
-
- public Connection getConnection() {
- return connection;
- }
-
public Tool setDescription(String description) {
this.description = description;
return this;
@@ -137,6 +129,15 @@ public String getToolType() {
return toolType;
}
+ public Tool setUcConnection(UcConnection ucConnection) {
+ this.ucConnection = ucConnection;
+ return this;
+ }
+
+ public UcConnection getUcConnection() {
+ return ucConnection;
+ }
+
public Tool setUcFunction(UcFunction ucFunction) {
this.ucFunction = ucFunction;
return this;
@@ -161,7 +162,6 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
Tool that = (Tool) o;
return Objects.equals(app, that.app)
- && Objects.equals(connection, that.connection)
&& Objects.equals(description, that.description)
&& Objects.equals(genieSpace, that.genieSpace)
&& Objects.equals(id, that.id)
@@ -169,6 +169,7 @@ public boolean equals(Object o) {
&& Objects.equals(name, that.name)
&& Objects.equals(toolId, that.toolId)
&& Objects.equals(toolType, that.toolType)
+ && Objects.equals(ucConnection, that.ucConnection)
&& Objects.equals(ucFunction, that.ucFunction)
&& Objects.equals(volume, that.volume);
}
@@ -177,7 +178,6 @@ public boolean equals(Object o) {
public int hashCode() {
return Objects.hash(
app,
- connection,
description,
genieSpace,
id,
@@ -185,6 +185,7 @@ public int hashCode() {
name,
toolId,
toolType,
+ ucConnection,
ucFunction,
volume);
}
@@ -193,7 +194,6 @@ public int hashCode() {
public String toString() {
return new ToStringer(Tool.class)
.add("app", app)
- .add("connection", connection)
.add("description", description)
.add("genieSpace", genieSpace)
.add("id", id)
@@ -201,6 +201,7 @@ public String toString() {
.add("name", name)
.add("toolId", toolId)
.add("toolType", toolType)
+ .add("ucConnection", ucConnection)
.add("ucFunction", ucFunction)
.add("volume", volume)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Connection.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UcConnection.java
similarity index 74%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Connection.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UcConnection.java
index 1093f2d99..2c284fcea 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Connection.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UcConnection.java
@@ -7,14 +7,14 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Databricks connection. Supported connection: external mcp server. */
+/** Databricks UC connection. Supported connection: external mcp server. */
@Generated
-public class Connection {
+public class UcConnection {
/** */
@JsonProperty("name")
private String name;
- public Connection setName(String name) {
+ public UcConnection setName(String name) {
this.name = name;
return this;
}
@@ -27,7 +27,7 @@ public String getName() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- Connection that = (Connection) o;
+ UcConnection that = (UcConnection) o;
return Objects.equals(name, that.name);
}
@@ -38,6 +38,6 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(Connection.class).add("name", name).toString();
+ return new ToStringer(UcConnection.class).add("name", name).toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPoliciesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPoliciesAPI.java
index 850e5a07a..7760a0f3e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPoliciesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPoliciesAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.tags;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Tag Policy API allows you to manage policies for governed tags in Databricks. For Terraform
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/WorkspaceEntityTagAssignmentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/WorkspaceEntityTagAssignmentsAPI.java
index 8b25b68ee..52dd4cf54 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/WorkspaceEntityTagAssignmentsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/WorkspaceEntityTagAssignmentsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.tags;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Manage tag assignments on workspace-scoped objects. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java
index c8abf37c4..d5ebbaf18 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java
@@ -18,8 +18,9 @@ public class CreateEndpoint {
private EndpointType endpointType;
/**
- * Min QPS for the endpoint. Mutually exclusive with num_replicas. The actual replica count is
- * calculated at index creation/sync time based on this value.
+ * Deprecated: use target_qps. Min QPS for the endpoint. Mutually exclusive with num_replicas.
+ * Kept at PUBLIC_BETA with deprecated = true so generated SDK surfaces keep the field with a
+ * deprecation marker; hiding completely is a follow-up PR.
*/
@JsonProperty("min_qps")
private Long minQps;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointScalingInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointScalingInfo.java
index 0015175f0..680d34582 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointScalingInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointScalingInfo.java
@@ -9,7 +9,12 @@
@Generated
public class EndpointScalingInfo {
- /** The minimum QPS target requested for the endpoint. */
+ /**
+ * Deprecated: use requested_target_qps. Kept at PUBLIC_BETA with deprecated = true so generated
+ * SDK surfaces (Go, Java, TypeScript, Terraform) keep exposing the field with a deprecation
+ * marker rather than losing it on next regeneration. Hiding completely (visibility =
+ * PUBLIC_UNDOCUMENTED) is a follow-up PR once downstream consumers have migrated.
+ */
@JsonProperty("requested_min_qps")
private Long requestedMinQps;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointRequest.java
index 252eaa921..5dad905f0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointRequest.java
@@ -14,8 +14,9 @@ public class PatchEndpointRequest {
@JsonIgnore private String endpointName;
/**
- * Min QPS for the endpoint. Positive integer sets QPS target; -1 resets to default scaling
- * behavior.
+ * Deprecated: use target_qps. Min QPS for the endpoint. Positive integer sets QPS target; -1
+ * resets to default scaling behavior. Kept at PUBLIC_BETA with deprecated = true so generated SDK
+ * surfaces keep the field with a deprecation marker; hiding completely is a follow-up PR.
*/
@JsonProperty("min_qps")
private Long minQps;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsAPI.java
index b8a04a706..d6951bc69 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsAPI.java
@@ -3,6 +3,8 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
import com.databricks.sdk.support.Wait;
@@ -10,8 +12,6 @@
import java.util.Arrays;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** **Endpoint**: Represents the compute resources to host vector search indexes. */
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesAPI.java
index 5333cb149..cde607010 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.vectorsearch;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* **Index**: An efficient representation of your embedding vectors that supports real-time and
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsAPI.java
index 7272f6051..4b4dece25 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.workspace;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Registers personal access token for Databricks to do operations on behalf of the user.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectInfo.java
index 77643ed12..d955fa42d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectInfo.java
@@ -16,7 +16,10 @@ public class ObjectInfo {
@JsonProperty("created_at")
private Long createdAt;
- /** The language of the object. This value is set only if the object type is ``NOTEBOOK``. */
+ /**
+ * The language of the object. This value is set only if the object type is ``NOTEBOOK``. For
+ * Jupyter (.ipynb) notebooks, this is always ``PYTHON``.
+ */
@JsonProperty("language")
private Language language;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java
index 00927adfa..d3b32b12c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.workspace;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Repos API allows users to manage their git repos. Users can use the API to access all repos
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsAPI.java
index 514e11827..7fdee8987 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.workspace;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Secrets API allows you to manage secrets, secret scopes, and access permissions.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java
index 911c92fce..67791f47e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java
@@ -2,10 +2,10 @@
package com.databricks.sdk.service.workspace;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The Workspace API allows you to list, import, export, and delete workspace objects such as
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2API.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2API.java
index 8a7345c5d..beae0c6dc 100755
--- a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2API.java
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2API.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.httpcallv2;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Lorem Ipsum */
@Generated
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/IdempotencyTestingAPI.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/IdempotencyTestingAPI.java
index 9f4dc8dc8..0d3012e60 100755
--- a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/IdempotencyTestingAPI.java
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/IdempotencyTestingAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.idempotencytesting;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Test service for Idempotency of Operations */
@Generated
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2API.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2API.java
index d84b553cb..cace45ac7 100755
--- a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2API.java
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2API.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.jsonmarshallv2;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Lorem Ipsum */
@Generated
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceOperation.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceOperation.java
index 179d17afb..314f27b35 100755
--- a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceOperation.java
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.lrotesting;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running createTestResource operation. Provides methods to
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DeleteTestResourceOperation.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DeleteTestResourceOperation.java
index 722858c22..7cf4d4fb7 100755
--- a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DeleteTestResourceOperation.java
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DeleteTestResourceOperation.java
@@ -2,6 +2,8 @@
package com.databricks.sdk.service.lrotesting;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
@@ -11,8 +13,6 @@
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Wrapper for interacting with a long-running deleteTestResource operation. Provides methods to
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingAPI.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingAPI.java
index 91b92e67d..e71c92e9c 100755
--- a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingAPI.java
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingAPI.java
@@ -2,9 +2,9 @@
package com.databricks.sdk.service.lrotesting;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.logging.Logger;
+import com.databricks.sdk.core.logging.LoggerFactory;
import com.databricks.sdk.support.Generated;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** Test service for Long Running Operations */
@Generated
diff --git a/tagging.py b/tagging.py
index 79f2894c6..c19028923 100644
--- a/tagging.py
+++ b/tagging.py
@@ -7,7 +7,7 @@
import re
import argparse
from typing import Optional, List, Callable
-from dataclasses import dataclass
+from dataclasses import dataclass, replace
import subprocess
import time
import json
@@ -29,6 +29,106 @@
"""
+@dataclass(frozen=True)
+class Version:
+ """
+ A semver 2.0.0-compliant version (https://semver.org).
+
+ Mirrors the API of the `semver` PyPI package so this implementation can be
+ swapped for that library if it is ever added to the wheelhouse. Supports
+ parsing, stringification, and the two bumps we need: minor (for stable
+ releases) and prerelease (for release trains).
+ """
+
+ # Permissive pattern for locating a semver version string inside larger
+ # text (e.g. a changelog header). Callers use it in f-strings; strict
+ # validation happens via Version.parse.
+ PATTERN = r"\d+\.\d+\.\d+(?:-[0-9A-Za-z.-]+)?(?:\+[0-9A-Za-z.-]+)?"
+
+ # Strict anchored regex per https://semver.org. Rejects leading zeros in
+ # numeric identifiers and invalid pre-release/build identifier charsets.
+ _PARSE_REGEX = re.compile(
+ r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)"
+ r"(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)"
+ r"(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?"
+ r"(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"
+ )
+
+ major: int
+ minor: int
+ patch: int
+ prerelease: str = ""
+ build: str = ""
+
+ @classmethod
+ def parse(cls, text: str) -> "Version":
+ """Parse a semver string, raising ValueError on malformed input."""
+ match = cls._PARSE_REGEX.match(text)
+ if not match:
+ raise ValueError(f"Invalid semver version: {text!r}")
+ major, minor, patch, prerelease, build = match.groups()
+ return cls(
+ major=int(major),
+ minor=int(minor),
+ patch=int(patch),
+ prerelease=prerelease or "",
+ build=build or "",
+ )
+
+ def __str__(self) -> str:
+ result = f"{self.major}.{self.minor}.{self.patch}"
+ if self.prerelease:
+ result += f"-{self.prerelease}"
+ if self.build:
+ result += f"+{self.build}"
+ return result
+
+ def bump_minor(self) -> "Version":
+ """
+ Bump the minor version and reset patch.
+
+ Per semver item 9, a pre-release version has lower precedence than
+ the same MAJOR.MINOR.PATCH, so bumping to a new minor drops any
+ pre-release and build metadata.
+ """
+ return Version(major=self.major, minor=self.minor + 1, patch=0)
+
+ def bump_prerelease(self) -> "Version":
+ """
+ Increment the rightmost numeric identifier in the pre-release.
+
+ Matches the npm `prerelease` bump semantics:
+ 0.0.0-alpha.1 -> 0.0.0-alpha.2
+ 0.0.0-alpha -> 0.0.0-alpha.1
+ 0.0.0-rc.1.2 -> 0.0.0-rc.1.3
+
+ Raises ValueError if the version has no pre-release to bump.
+ Build metadata is dropped since it does not affect precedence.
+ """
+ if not self.prerelease:
+ raise ValueError(f"Cannot bump prerelease of {self}: no pre-release component")
+ parts = self.prerelease.split(".")
+ for i in range(len(parts) - 1, -1, -1):
+ if parts[i].isdigit():
+ parts[i] = str(int(parts[i]) + 1)
+ return replace(self, prerelease=".".join(parts), build="")
+ # No numeric identifier exists; append ".1" to start a counter.
+ return replace(self, prerelease=f"{self.prerelease}.1", build="")
+
+ def next_release_version(self) -> "Version":
+ """
+ Default next version for the changelog after this one is released.
+
+ If on a pre-release track, stay on it by bumping the pre-release
+ identifier (npm convention). Otherwise, bump the minor version,
+ the script's historical default for stable releases. Teams can
+ override the default in the release PR.
+ """
+ if self.prerelease:
+ return self.bump_prerelease()
+ return self.bump_minor()
+
+
# GitHub does not support signing commits for GitHub Apps directly.
# This class replaces usages for git commands such as "git add", "git commit", and "git push".
@dataclass
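For illustration, a quick sketch of how the Version helper added above behaves (plain Python, assuming the class is in scope; the input versions are made up and chosen to mirror the docstring examples):

    stable = Version.parse("0.38.0")
    print(stable.next_release_version())    # 0.39.0  -- minor bump, patch reset

    train = Version.parse("0.39.0-beta.1+build.5")
    print(train.bump_prerelease())          # 0.39.0-beta.2  -- build metadata dropped
    print(train.bump_minor())               # 0.40.0  -- pre-release and build dropped

    Version.parse("01.2.3")                 # raises ValueError (leading zero is not valid semver)
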
@@ -170,11 +270,11 @@ def update_version_references(tag_info: TagInfo) -> None:
print("`version` not found in .codegen.json. Nothing to update.")
return
- # Update the versions
+ # Update the versions.
for filename, pattern in version.items():
loc = os.path.join(os.getcwd(), tag_info.package.path, filename)
- previous_version = re.sub(r"\$VERSION", r"\\d+\\.\\d+\\.\\d+", pattern)
- new_version = re.sub(r"\$VERSION", tag_info.version, pattern)
+ previous_version = pattern.replace("$VERSION", Version.PATTERN)
+ new_version = pattern.replace("$VERSION", tag_info.version)
with open(loc, "r") as file:
content = file.read()
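To see what the switch from re.sub to str.replace buys here, a small sketch with a hypothetical .codegen.json pattern (the real per-package patterns are not shown in this diff, and the final re.sub over the file content is assumed from the surrounding function; `re` and `Version` are already in scope in tagging.py):

    pattern = 'version = "$VERSION"'                                   # hypothetical .codegen.json entry
    previous_version = pattern.replace("$VERSION", Version.PATTERN)    # regex that matches any semver, incl. pre-releases
    new_version = pattern.replace("$VERSION", "0.39.0")                # literal text for the new release
    content = 'version = "0.38.0"'
    print(re.sub(previous_version, new_version, content))              # version = "0.39.0"
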
@@ -197,23 +297,21 @@ def clean_next_changelog(package_path: str) -> None:
with open(file_path, "r") as file:
content = file.read()
- # Remove content between ### sections
+ # Remove content between ### sections.
cleaned_content = re.sub(r"(### [^\n]+\n)(?:.*?\n?)*?(?=###|$)", r"\1", content)
- # Ensure there is exactly one empty line before each section
+ # Ensure there is exactly one empty line before each section.
cleaned_content = re.sub(r"(\n*)(###[^\n]+)", r"\n\n\2", cleaned_content)
- # Find the version number
- version_match = re.search(r"Release v(\d+)\.(\d+)\.(\d+)", cleaned_content)
+ # Find the version number and compute the default next release version.
+ # Teams can adjust the version in the PR if the default is not desired.
+ # For stable versions, bump minor (historical default since minor releases
+ # are more common than patch or major). For pre-release versions, stay on
+ # the same track by bumping the pre-release identifier (npm convention).
+ version_match = re.search(rf"Release v({Version.PATTERN})", cleaned_content)
if not version_match:
raise Exception("Version not found in the changelog")
- major, minor, patch = map(int, version_match.groups())
- # Prepare next release version.
- # When doing a PR, teams can adjust the version.
- # By default, we increase a minor version, since minor versions releases
- # are more common than patch or major version releases.
- minor += 1
- patch = 0
- new_version = f"Release v{major}.{minor}.{patch}"
- cleaned_content = cleaned_content.replace(version_match.group(0), new_version)
+ current = Version.parse(version_match.group(1))
+ new_header = f"Release v{current.next_release_version()}"
+ cleaned_content = cleaned_content.replace(version_match.group(0), new_header)
# Update file with cleaned content
gh.add_file(file_path, cleaned_content)
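To make the two cleanup passes and the header bump above concrete, a hedged before/after sketch; the NEXT_CHANGELOG content is invented for illustration:

    import re

    content = (
        "# NEXT CHANGELOG\n\n"
        "## Release v0.57.0\n\n"
        "### New Features and Improvements\n"
        "* Example feature entry.\n\n"
        "### Bug Fixes\n"
    )

    # Drop the bullet lines under every "### ..." heading, keeping the headings themselves.
    cleaned = re.sub(r"(### [^\n]+\n)(?:.*?\n?)*?(?=###|$)", r"\1", content)
    # Normalize to exactly one blank line before each heading.
    cleaned = re.sub(r"(\n*)(###[^\n]+)", r"\n\n\2", cleaned)
    # The release header is then rewritten to the default next version,
    # e.g. "Release v0.57.0" -> "Release v0.58.0" (minor bump for a stable version).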
@@ -229,20 +327,26 @@ def get_previous_tag_info(package: Package) -> Optional[TagInfo]:
with open(changelog_path, "r") as f:
changelog = f.read()
- # Extract the latest release section using regex
- match = re.search(r"## (\[Release\] )?Release v[\d\.]+.*?(?=\n## (\[Release\] )?Release v|\Z)", changelog, re.S)
+ # Extract the latest release section using regex.
+ match = re.search(
+ rf"## (\[Release\] )?Release v{Version.PATTERN}.*?(?=\n## (\[Release\] )?Release v|\Z)",
+ changelog,
+ re.S,
+ )
# E.g., for new packages.
if not match:
return None
latest_release = match.group(0)
- version_match = re.search(r"## (\[Release\] )?Release v(\d+\.\d+\.\d+)", latest_release)
+ version_match = re.search(rf"## (\[Release\] )?Release v({Version.PATTERN})", latest_release)
if not version_match:
raise Exception("Version not found in the changelog")
- return TagInfo(package=package, version=version_match.group(2), content=latest_release)
+ # Validate the extracted string is spec-compliant; fail loudly otherwise.
+ version = str(Version.parse(version_match.group(2)))
+ return TagInfo(package=package, version=version, content=latest_release)
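A hedged sketch of what the section-extraction regex above pulls out of CHANGELOG.md; the changelog text is invented and a plain semver regex stands in for Version.PATTERN:

    import re

    SEMVER = r"\d+\.\d+\.\d+(?:-[0-9A-Za-z.-]+)?(?:\+[0-9A-Za-z.-]+)?"  # stand-in for Version.PATTERN
    changelog = (
        "# Version changelog\n\n"
        "## Release v0.57.0 (2025-01-15)\n\n"
        "### Bug Fixes\n* Example fix entry.\n\n"
        "## Release v0.56.0 (2025-01-01)\n\n"
        "### New Features and Improvements\n* Example feature entry.\n"
    )

    match = re.search(
        rf"## (\[Release\] )?Release v{SEMVER}.*?(?=\n## (\[Release\] )?Release v|\Z)",
        changelog,
        re.S,
    )
    # match.group(0) is the v0.57.0 block only; the lookahead stops just before the v0.56.0 header.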
def get_next_tag_info(package: Package) -> Optional[TagInfo]:
@@ -267,12 +371,14 @@ def get_next_tag_info(package: Package) -> Optional[TagInfo]:
print("All sections are empty. No changes will be made to the changelog.")
return None
- version_match = re.search(r"## Release v(\d+\.\d+\.\d+)", next_changelog)
+ version_match = re.search(rf"## Release v({Version.PATTERN})", next_changelog)
if not version_match:
raise Exception("Version not found in the changelog")
- return TagInfo(package=package, version=version_match.group(1), content=next_changelog)
+ # Validate the extracted string is spec-compliant; fail loudly otherwise.
+ version = str(Version.parse(version_match.group(1)))
+ return TagInfo(package=package, version=version, content=next_changelog)
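Because both lookups funnel the extracted string through Version.parse, a malformed header is rejected before any tag is created. A minimal sketch, assuming Version.parse raises on strings that are not full semver (the exact exception comes from the parse implementation earlier in this diff):

    from tagging import Version

    print(Version.parse("0.58.0-rc.1"))  # round-trips to "0.58.0-rc.1"
    Version.parse("v0.58")               # expected to raise: not a full MAJOR.MINOR.PATCH version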
def write_changelog(tag_info: TagInfo) -> None:
@@ -283,10 +389,12 @@ def write_changelog(tag_info: TagInfo) -> None:
with open(changelog_path, "r") as f:
changelog = f.read()
- # Add current date to the release header
+ # Add current date to the release header.
current_date = datetime.now(tz=timezone.utc).strftime("%Y-%m-%d")
content_with_date = re.sub(
- r"## Release v(\d+\.\d+\.\d+)", rf"## Release v\1 ({current_date})", tag_info.content.strip()
+ rf"## Release v({Version.PATTERN})",
+ rf"## Release v\1 ({current_date})",
+ tag_info.content.strip(),
)
updated_changelog = re.sub(r"(# Version changelog\n\n)", f"\\1{content_with_date}\n\n\n", changelog)
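A small hedged illustration of the date-stamping substitution above; the header is an example, and a plain semver regex stands in for Version.PATTERN:

    import re
    from datetime import datetime, timezone

    SEMVER = r"\d+\.\d+\.\d+(?:-[0-9A-Za-z.-]+)?(?:\+[0-9A-Za-z.-]+)?"  # stand-in for Version.PATTERN
    current_date = datetime.now(tz=timezone.utc).strftime("%Y-%m-%d")
    header = "## Release v0.58.0-rc.1"
    print(re.sub(rf"## Release v({SEMVER})", rf"## Release v\1 ({current_date})", header))
    # e.g. "## Release v0.58.0-rc.1 (2025-06-30)"; the old \d+\.\d+\.\d+ pattern would have
    # inserted the date before the "-rc.1" suffix instead.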
@@ -519,15 +627,26 @@ def pull_last_release_commit() -> None:
reset_repository(commit_hash)
-def get_package_from_args() -> Optional[str]:
+def get_packages_from_args() -> List[str]:
"""
- Retrieves an optional package
- python3 ./tagging.py --package
+ Retrieves the list of packages to tag.
+
+ python3 ./tagging.py --package <name>            # single package
+ python3 ./tagging.py --package <name1>,<name2>   # multiple packages
+
+ Returns an empty list when --package is omitted, which means all packages
+ with pending releases will be tagged.
"""
parser = argparse.ArgumentParser(description="Update changelogs and tag the release.")
- parser.add_argument("--package", "-p", type=str, help="Tag a single package")
+ parser.add_argument(
+ "--package",
+ "-p",
+ type=str,
+ default="",
+ help="Comma-separated list of packages to tag. Leave empty to tag all packages with pending releases.",
+ )
args = parser.parse_args()
- return args.package
+ return [name.strip() for name in args.package.split(",") if name.strip()]
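A hedged sketch of how the comma-separated flag value above is normalized; the package names are placeholders, not actual packages in this repo:

    value = "pkg-a, pkg-b,"  # e.g. from --package "pkg-a, pkg-b,"
    print([name.strip() for name in value.split(",") if name.strip()])  # -> ['pkg-a', 'pkg-b']

    # Omitting --package leaves the default "", which splits to [""] and filters down to [],
    # i.e. "tag all packages with pending releases".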
def init_github():
@@ -553,15 +672,15 @@ def process():
If any tags are pending from an earlier process, it will skip updating the CHANGELOG.md files and only apply the tags.
"""
- package_name = get_package_from_args()
+ package_names = get_packages_from_args()
pending_tags = find_pending_tags()
# pending_tags is non-empty only when the tagging process previously failed or was interrupted.
# We must complete the interrupted tagging process before starting a new one to avoid inconsistent states and missing changelog entries.
- # Therefore, we don't support specifying the package until the previously started process has been successfully completed.
- if pending_tags and package_name:
+ # Therefore, we don't support specifying packages until the previously started process has been successfully completed.
+ if pending_tags and package_names:
pending_packages = [tag.package.name for tag in pending_tags]
- raise Exception(f"Cannot release package {package_name}. Pending release for {pending_packages}")
+ raise Exception(f"Cannot release packages {package_names}. Pending release for {pending_packages}")
if pending_tags:
print("Found pending tags from previous executions, entering recovery mode.")
@@ -570,9 +689,9 @@ def process():
return
packages = find_packages()
- # If a package is specified as an argument, only process that package
- if package_name:
- packages = [package for package in packages if package.name == package_name]
+ # If packages are specified as an argument, only process those packages.
+ if package_names:
+ packages = [package for package in packages if package.name in package_names]
pending_tags = retry_function(func=lambda: update_changelogs(packages), cleanup=reset_repository)
push_tags(pending_tags)
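Finally, a hedged sketch of the selection semantics in process(): a non-empty name list restricts tagging to those packages, while an empty list keeps everything find_packages() returned. The Pkg dataclass and names below are stand-ins for illustration:

    from dataclasses import dataclass
    from typing import List

    @dataclass
    class Pkg:  # stand-in for the script's Package dataclass
        name: str

    def select(packages: List[Pkg], package_names: List[str]) -> List[Pkg]:
        if package_names:
            return [package for package in packages if package.name in package_names]
        return packages  # empty selection -> all packages with pending releases

    all_pkgs = [Pkg("pkg-a"), Pkg("pkg-b"), Pkg("pkg-c")]
    print([p.name for p in select(all_pkgs, ["pkg-b"])])  # -> ['pkg-b']
    print([p.name for p in select(all_pkgs, [])])         # -> ['pkg-a', 'pkg-b', 'pkg-c']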