diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 12c62ec0b..b81ff727b 100755 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -86481d2fa23e3fb65128ea34b045fe585f7643f1 \ No newline at end of file +177320c0c607eca68ec49e2329537eb0bd13a4b3 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 2510002e8..27648a69a 100755 --- a/.gitattributes +++ b/.gitattributes @@ -1115,6 +1115,17 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTabl databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseCatalogRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateSyncedDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/AutoTaggingConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/AutoTaggingConfigAutoTaggingMode.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/CatalogConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/CatalogConfigSchemaNames.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/CreateCatalogConfigRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DeleteCatalogConfigRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/GetCatalogConfigRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/UpdateCatalogConfigRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AggregationGranularity.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshRequest.java linguist-generated=true @@ -1548,6 +1559,29 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewsToExport. databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Webhook.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotifications.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WidgetErrorDetail.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateKnowledgeAssistantRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateKnowledgeSourceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteKnowledgeAssistantRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteKnowledgeSourceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/FileTableSpec.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/FilesSpec.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeSourceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/IndexSpec.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistant.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeSource.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeSourceState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeAssistantsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeAssistantsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeSourcesRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeSourcesResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/SyncKnowledgeSourcesRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/UpdateKnowledgeAssistantRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/UpdateKnowledgeSourceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md old mode 100644 new mode 100755 index f881abc99..db03328f4 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -13,3 +13,6 @@ ### Internal Changes ### API Changes +* Add `com.databricks.sdk.service.dataclassification` and `com.databricks.sdk.service.knowledgeassistants` packages. +* Add `workspaceClient.dataClassification()` service. +* Add `workspaceClient.knowledgeAssistants()` service. \ No newline at end of file diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java index f20ce2df3..28a9ee5d7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java @@ -353,15 +353,15 @@ public AccountIpAccessListsAPI ipAccessLists() { /** * These APIs manage log delivery configurations for this account. The two supported log types for - * this API are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This - * feature works with all account ID types. + * this API are _billable usage logs_ (AWS only) and _audit logs_ (AWS and GCP). This feature is + * in Public Preview. This feature works with all account ID types. * *
Log delivery works with all account types. However, if your account is on the E2 version of * the platform or on a select custom plan that allows multiple workspaces per account, you can * optionally configure different storage destinations for each workspace. Log delivery status is * also provided to know the latest status of log delivery attempts. * - *
The high-level flow of billable usage delivery: + *
The high-level flow of billable usage delivery (AWS only): * *
1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy. * Using Databricks APIs, call the Account API to create a [storage configuration @@ -381,35 +381,34 @@ public AccountIpAccessListsAPI ipAccessLists() { * solely delivers logs related to the specified workspaces. You can create multiple types of * delivery configurations per account. * - *
For billable usage delivery: * For more information about billable usage logs, see [Billable
- * usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery location is
- * ` For billable usage delivery (AWS only): * For more information about billable usage logs,
+ * see [Billable usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery
+ * location is ` For audit log delivery: * For more information about about audit log delivery, see [Audit
- * log delivery], which includes information about the used JSON schema. * The delivery location
- * is
+ * For audit log delivery (AWS and GCP): * For more information about about audit log delivery,
+ * see Audit log delivery [AWS] or [GCP], which includes information about the used JSON schema. *
+ * The delivery location is
* ` [Audit log delivery]:
- * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Billable
- * usage log delivery]:
+ * [AWS]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
+ * [Billable usage log delivery]:
* https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
- * [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html
- * [create a new AWS S3 bucket]:
- * https://docs.databricks.com/administration-guide/account-api/aws-storage.html
+ * [GCP]: https://docs.databricks.com/gcp/en/admin/account-settings/audit-logs [Usage page]:
+ * https://docs.databricks.com/administration-guide/account-settings/usage.html [create a new AWS
+ * S3 bucket]: https://docs.databricks.com/administration-guide/account-api/aws-storage.html
*/
public LogDeliveryAPI logDelivery() {
return logDeliveryAPI;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
index 5b71b785e..fae5f8e85 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
@@ -103,6 +103,8 @@
import com.databricks.sdk.service.dashboards.LakeviewService;
import com.databricks.sdk.service.database.DatabaseAPI;
import com.databricks.sdk.service.database.DatabaseService;
+import com.databricks.sdk.service.dataclassification.DataClassificationAPI;
+import com.databricks.sdk.service.dataclassification.DataClassificationService;
import com.databricks.sdk.service.dataquality.DataQualityAPI;
import com.databricks.sdk.service.dataquality.DataQualityService;
import com.databricks.sdk.service.files.DbfsService;
@@ -136,6 +138,8 @@
import com.databricks.sdk.service.jobs.JobsService;
import com.databricks.sdk.service.jobs.PolicyComplianceForJobsAPI;
import com.databricks.sdk.service.jobs.PolicyComplianceForJobsService;
+import com.databricks.sdk.service.knowledgeassistants.KnowledgeAssistantsAPI;
+import com.databricks.sdk.service.knowledgeassistants.KnowledgeAssistantsService;
import com.databricks.sdk.service.marketplace.ConsumerFulfillmentsAPI;
import com.databricks.sdk.service.marketplace.ConsumerFulfillmentsService;
import com.databricks.sdk.service.marketplace.ConsumerInstallationsAPI;
@@ -292,6 +296,7 @@ public class WorkspaceClient {
private CurrentUserAPI currentUserAPI;
private DashboardWidgetsAPI dashboardWidgetsAPI;
private DashboardsAPI dashboardsAPI;
+ private DataClassificationAPI dataClassificationAPI;
private DataQualityAPI dataQualityAPI;
private DataSourcesAPI dataSourcesAPI;
private DatabaseAPI databaseAPI;
@@ -316,6 +321,7 @@ public class WorkspaceClient {
private InstanceProfilesAPI instanceProfilesAPI;
private IpAccessListsAPI ipAccessListsAPI;
private JobsAPI jobsAPI;
+ private KnowledgeAssistantsAPI knowledgeAssistantsAPI;
private LakeviewAPI lakeviewAPI;
private LakeviewEmbeddedAPI lakeviewEmbeddedAPI;
private LibrariesAPI librariesAPI;
@@ -425,6 +431,7 @@ public WorkspaceClient(DatabricksConfig config) {
currentUserAPI = new CurrentUserAPI(apiClient);
dashboardWidgetsAPI = new DashboardWidgetsAPI(apiClient);
dashboardsAPI = new DashboardsAPI(apiClient);
+ dataClassificationAPI = new DataClassificationAPI(apiClient);
dataQualityAPI = new DataQualityAPI(apiClient);
dataSourcesAPI = new DataSourcesAPI(apiClient);
databaseAPI = new DatabaseAPI(apiClient);
@@ -449,6 +456,7 @@ public WorkspaceClient(DatabricksConfig config) {
instanceProfilesAPI = new InstanceProfilesAPI(apiClient);
ipAccessListsAPI = new IpAccessListsAPI(apiClient);
jobsAPI = new JobsAPI(apiClient);
+ knowledgeAssistantsAPI = new KnowledgeAssistantsAPI(apiClient);
lakeviewAPI = new LakeviewAPI(apiClient);
lakeviewEmbeddedAPI = new LakeviewEmbeddedAPI(apiClient);
librariesAPI = new LibrariesAPI(apiClient);
@@ -818,6 +826,15 @@ public DashboardsAPI dashboards() {
return dashboardsAPI;
}
+ /**
+ * Manage data classification for Unity Catalog catalogs. Data classification automatically
+ * identifies and tags sensitive data (PII) in Unity Catalog tables. Each catalog can have at most
+ * one configuration resource that controls scanning behavior and auto-tagging rules.
+ */
+ public DataClassificationAPI dataClassification() {
+ return dataClassificationAPI;
+ }
+
/** Manage the data quality of Unity Catalog objects (currently support `schema` and `table`) */
public DataQualityAPI dataQuality() {
return dataQualityAPI;
@@ -1146,6 +1163,11 @@ public JobsAPI jobs() {
return jobsAPI;
}
+ /** Manage Knowledge Assistants and related resources. */
+ public KnowledgeAssistantsAPI knowledgeAssistants() {
+ return knowledgeAssistantsAPI;
+ }
+
/**
* These APIs provide specific management operations for Lakeview dashboards. Generic resource
* management can be done with Workspace API (import, export, get-status, list, delete).
@@ -2463,6 +2485,17 @@ public WorkspaceClient withDashboardsAPI(DashboardsAPI dashboards) {
return this;
}
+ /** Replace the default DataClassificationService with a custom implementation. */
+ public WorkspaceClient withDataClassificationImpl(DataClassificationService dataClassification) {
+ return this.withDataClassificationAPI(new DataClassificationAPI(dataClassification));
+ }
+
+ /** Replace the default DataClassificationAPI with a custom implementation. */
+ public WorkspaceClient withDataClassificationAPI(DataClassificationAPI dataClassification) {
+ this.dataClassificationAPI = dataClassification;
+ return this;
+ }
+
/** Replace the default DataQualityService with a custom implementation. */
public WorkspaceClient withDataQualityImpl(DataQualityService dataQuality) {
return this.withDataQualityAPI(new DataQualityAPI(dataQuality));
@@ -2728,6 +2761,18 @@ public WorkspaceClient withJobsAPI(JobsAPI jobs) {
return this;
}
+ /** Replace the default KnowledgeAssistantsService with a custom implementation. */
+ public WorkspaceClient withKnowledgeAssistantsImpl(
+ KnowledgeAssistantsService knowledgeAssistants) {
+ return this.withKnowledgeAssistantsAPI(new KnowledgeAssistantsAPI(knowledgeAssistants));
+ }
+
+ /** Replace the default KnowledgeAssistantsAPI with a custom implementation. */
+ public WorkspaceClient withKnowledgeAssistantsAPI(KnowledgeAssistantsAPI knowledgeAssistants) {
+ this.knowledgeAssistantsAPI = knowledgeAssistants;
+ return this;
+ }
+
/** Replace the default LakeviewService with a custom implementation. */
public WorkspaceClient withLakeviewImpl(LakeviewService lakeview) {
return this.withLakeviewAPI(new LakeviewAPI(lakeview));
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java
index 92853f21b..aa9a56bb8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java
@@ -9,15 +9,15 @@
/**
* These APIs manage log delivery configurations for this account. The two supported log types for
- * this API are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This
- * feature works with all account ID types.
+ * this API are _billable usage logs_ (AWS only) and _audit logs_ (AWS and GCP). This feature is in
+ * Public Preview. This feature works with all account ID types.
*
* Log delivery works with all account types. However, if your account is on the E2 version of
* the platform or on a select custom plan that allows multiple workspaces per account, you can
* optionally configure different storage destinations for each workspace. Log delivery status is
* also provided to know the latest status of log delivery attempts.
*
- * The high-level flow of billable usage delivery:
+ * The high-level flow of billable usage delivery (AWS only):
*
* 1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy.
* Using Databricks APIs, call the Account API to create a [storage configuration
@@ -37,9 +37,9 @@
* logs related to the specified workspaces. You can create multiple types of delivery
* configurations per account.
*
- * For billable usage delivery: * For more information about billable usage logs, see [Billable
- * usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery location is
- * ` For billable usage delivery (AWS only): * For more information about billable usage logs, see
+ * [Billable usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery location
+ * is ` For audit log delivery: * For more information about about audit log delivery, see [Audit log
- * delivery], which includes information about the used JSON schema. * The delivery location is
+ * For audit log delivery (AWS and GCP): * For more information about about audit log delivery,
+ * see Audit log delivery [AWS] or [GCP], which includes information about the used JSON schema. *
+ * The delivery location is
* ` [Audit log delivery]:
- * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Billable usage
- * log delivery]:
+ * [AWS]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
+ * [Billable usage log delivery]:
* https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
- * [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html
- * [create a new AWS S3 bucket]:
- * https://docs.databricks.com/administration-guide/account-api/aws-storage.html
+ * [GCP]: https://docs.databricks.com/gcp/en/admin/account-settings/audit-logs [Usage page]:
+ * https://docs.databricks.com/administration-guide/account-settings/usage.html [create a new AWS S3
+ * bucket]: https://docs.databricks.com/administration-guide/account-api/aws-storage.html
*/
@Generated
public class LogDeliveryAPI {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java
index 9c4796014..1470c7ad2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java
@@ -5,15 +5,15 @@
/**
* These APIs manage log delivery configurations for this account. The two supported log types for
- * this API are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This
- * feature works with all account ID types.
+ * this API are _billable usage logs_ (AWS only) and _audit logs_ (AWS and GCP). This feature is in
+ * Public Preview. This feature works with all account ID types.
*
* Log delivery works with all account types. However, if your account is on the E2 version of
* the platform or on a select custom plan that allows multiple workspaces per account, you can
* optionally configure different storage destinations for each workspace. Log delivery status is
* also provided to know the latest status of log delivery attempts.
*
- * The high-level flow of billable usage delivery:
+ * The high-level flow of billable usage delivery (AWS only):
*
* 1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy.
* Using Databricks APIs, call the Account API to create a [storage configuration
@@ -33,9 +33,9 @@
* logs related to the specified workspaces. You can create multiple types of delivery
* configurations per account.
*
- * For billable usage delivery: * For more information about billable usage logs, see [Billable
- * usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery location is
- * ` For billable usage delivery (AWS only): * For more information about billable usage logs, see
+ * [Billable usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery location
+ * is ` For audit log delivery: * For more information about about audit log delivery, see [Audit log
- * delivery], which includes information about the used JSON schema. * The delivery location is
+ * For audit log delivery (AWS and GCP): * For more information about about audit log delivery,
+ * see Audit log delivery [AWS] or [GCP], which includes information about the used JSON schema. *
+ * The delivery location is
* ` [Audit log delivery]:
- * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Billable usage
- * log delivery]:
+ * [AWS]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
+ * [Billable usage log delivery]:
* https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
- * [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html
- * [create a new AWS S3 bucket]:
- * https://docs.databricks.com/administration-guide/account-api/aws-storage.html
+ * [GCP]: https://docs.databricks.com/gcp/en/admin/account-settings/audit-logs [Usage page]:
+ * https://docs.databricks.com/administration-guide/account-settings/usage.html [create a new AWS S3
+ * bucket]: https://docs.databricks.com/administration-guide/account-api/aws-storage.html
*
* This is the high-level interface, that contains generated methods.
*
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
index 9cd70ad33..f0bb25743 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
@@ -4,7 +4,7 @@
import com.databricks.sdk.support.Generated;
-/** Latest kind: EXTERNAL_LOCATION_ONELAKE_MANAGED = 299; Next id: 300 */
+/** Latest kind: CONNECTION_OUTLOOK_OAUTH_M2M = 300; Next id: 301 */
@Generated
public enum SecurableKind {
TABLE_DB_STORAGE,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/AutoTaggingConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/AutoTaggingConfig.java
new file mode 100755
index 000000000..f6f795f4d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/AutoTaggingConfig.java
@@ -0,0 +1,63 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataclassification;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * Auto-tagging configuration for a classification tag. When enabled, detected columns are
+ * automatically tagged with Unity Catalog tags.
+ */
+@Generated
+public class AutoTaggingConfig {
+ /** Whether auto-tagging is enabled or disabled for this classification tag. */
+ @JsonProperty("auto_tagging_mode")
+ private AutoTaggingConfigAutoTaggingMode autoTaggingMode;
+
+ /** The Classification Tag (e.g., "class.name", "class.location") */
+ @JsonProperty("classification_tag")
+ private String classificationTag;
+
+ public AutoTaggingConfig setAutoTaggingMode(AutoTaggingConfigAutoTaggingMode autoTaggingMode) {
+ this.autoTaggingMode = autoTaggingMode;
+ return this;
+ }
+
+ public AutoTaggingConfigAutoTaggingMode getAutoTaggingMode() {
+ return autoTaggingMode;
+ }
+
+ public AutoTaggingConfig setClassificationTag(String classificationTag) {
+ this.classificationTag = classificationTag;
+ return this;
+ }
+
+ public String getClassificationTag() {
+ return classificationTag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AutoTaggingConfig that = (AutoTaggingConfig) o;
+ return Objects.equals(autoTaggingMode, that.autoTaggingMode)
+ && Objects.equals(classificationTag, that.classificationTag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(autoTaggingMode, classificationTag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AutoTaggingConfig.class)
+ .add("autoTaggingMode", autoTaggingMode)
+ .add("classificationTag", classificationTag)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/AutoTaggingConfigAutoTaggingMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/AutoTaggingConfigAutoTaggingMode.java
new file mode 100755
index 000000000..e20ba94fd
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/AutoTaggingConfigAutoTaggingMode.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataclassification;
+
+import com.databricks.sdk.support.Generated;
+
+/** Auto-tagging mode. */
+@Generated
+public enum AutoTaggingConfigAutoTaggingMode {
+ AUTO_TAGGING_DISABLED,
+ AUTO_TAGGING_ENABLED,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/CatalogConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/CatalogConfig.java
new file mode 100755
index 000000000..d36081ec8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/CatalogConfig.java
@@ -0,0 +1,87 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataclassification;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/**
+ * Data Classification configuration for a Unity Catalog catalog. This message follows the "At Most
+ * One Resource" pattern: at most one CatalogConfig exists per catalog. - Full CRUD operations are
+ * supported: Create enables Data Classification, Delete disables it - It has no unique identifier
+ * of its own and uses its parent catalog's identifier (catalog_name)
+ */
+@Generated
+public class CatalogConfig {
+ /**
+ * List of auto-tagging configurations for this catalog. Empty list means no auto-tagging is
+ * enabled.
+ */
+ @JsonProperty("auto_tag_configs")
+ private Collection Creates a new config resource, which enables Data Classification for the specified catalog.
+ * - The config must not already exist for the catalog.
+ */
+ public CatalogConfig createCatalogConfig(CreateCatalogConfigRequest request) {
+ return impl.createCatalogConfig(request);
+ }
+
+ public void deleteCatalogConfig(String name) {
+ deleteCatalogConfig(new DeleteCatalogConfigRequest().setName(name));
+ }
+
+ /** Delete Data Classification configuration for a catalog. */
+ public void deleteCatalogConfig(DeleteCatalogConfigRequest request) {
+ impl.deleteCatalogConfig(request);
+ }
+
+ public CatalogConfig getCatalogConfig(String name) {
+ return getCatalogConfig(new GetCatalogConfigRequest().setName(name));
+ }
+
+ /** Get the Data Classification configuration for a catalog. */
+ public CatalogConfig getCatalogConfig(GetCatalogConfigRequest request) {
+ return impl.getCatalogConfig(request);
+ }
+
+ /**
+ * Update the Data Classification configuration for a catalog. - The config must already exist for
+ * the catalog. - Updates fields specified in the update_mask. Use update_mask field to perform
+ * partial updates of the configuration.
+ */
+ public CatalogConfig updateCatalogConfig(UpdateCatalogConfigRequest request) {
+ return impl.updateCatalogConfig(request);
+ }
+
+ public DataClassificationService impl() {
+ return impl;
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationImpl.java
new file mode 100755
index 000000000..7de258392
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationImpl.java
@@ -0,0 +1,88 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dataclassification;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of DataClassification */
+@Generated
+class DataClassificationImpl implements DataClassificationService {
+ private final ApiClient apiClient;
+
+ public DataClassificationImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public CatalogConfig createCatalogConfig(CreateCatalogConfigRequest request) {
+ String path = String.format("/api/data-classification/v1/%s/config", request.getParent());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getCatalogConfig()));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, CatalogConfig.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void deleteCatalogConfig(DeleteCatalogConfigRequest request) {
+ String path = String.format("/api/data-classification/v1/%s", request.getName());
+ try {
+ Request req = new Request("DELETE", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public CatalogConfig getCatalogConfig(GetCatalogConfigRequest request) {
+ String path = String.format("/api/data-classification/v1/%s", request.getName());
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, CatalogConfig.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public CatalogConfig updateCatalogConfig(UpdateCatalogConfigRequest request) {
+ String path = String.format("/api/data-classification/v1/%s", request.getName());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getCatalogConfig()));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, CatalogConfig.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationService.java
new file mode 100755
index 000000000..3b8773790
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationService.java
@@ -0,0 +1,37 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dataclassification;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Manage data classification for Unity Catalog catalogs. Data classification automatically
+ * identifies and tags sensitive data (PII) in Unity Catalog tables. Each catalog can have at most
+ * one configuration resource that controls scanning behavior and auto-tagging rules.
+ *
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface DataClassificationService {
+ /**
+ * Create Data Classification configuration for a catalog.
+ *
+ * Creates a new config resource, which enables Data Classification for the specified catalog.
+ * - The config must not already exist for the catalog.
+ */
+ CatalogConfig createCatalogConfig(CreateCatalogConfigRequest createCatalogConfigRequest);
+
+ /** Delete Data Classification configuration for a catalog. */
+ void deleteCatalogConfig(DeleteCatalogConfigRequest deleteCatalogConfigRequest);
+
+ /** Get the Data Classification configuration for a catalog. */
+ CatalogConfig getCatalogConfig(GetCatalogConfigRequest getCatalogConfigRequest);
+
+ /**
+ * Update the Data Classification configuration for a catalog. - The config must already exist for
+ * the catalog. - Updates fields specified in the update_mask. Use update_mask field to perform
+ * partial updates of the configuration.
+ */
+ CatalogConfig updateCatalogConfig(UpdateCatalogConfigRequest updateCatalogConfigRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DeleteCatalogConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DeleteCatalogConfigRequest.java
new file mode 100755
index 000000000..944b05988
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DeleteCatalogConfigRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataclassification;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteCatalogConfigRequest {
+ /** Resource name in the format: catalogs/{catalog_name}/config */
+ @JsonIgnore private String name;
+
+ public DeleteCatalogConfigRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteCatalogConfigRequest that = (DeleteCatalogConfigRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteCatalogConfigRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/GetCatalogConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/GetCatalogConfigRequest.java
new file mode 100755
index 000000000..f227185cd
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/GetCatalogConfigRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataclassification;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetCatalogConfigRequest {
+ /** Resource name in the format: catalogs/{catalog_name}/config */
+ @JsonIgnore private String name;
+
+ public GetCatalogConfigRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetCatalogConfigRequest that = (GetCatalogConfigRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetCatalogConfigRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/UpdateCatalogConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/UpdateCatalogConfigRequest.java
new file mode 100755
index 000000000..943125a92
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/UpdateCatalogConfigRequest.java
@@ -0,0 +1,80 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataclassification;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.protobuf.FieldMask;
+import java.util.Objects;
+
+@Generated
+public class UpdateCatalogConfigRequest {
+ /**
+ * The configuration to apply to the catalog. The name field in catalog_config identifies which
+ * resource to update.
+ */
+ @JsonProperty("catalog_config")
+ private CatalogConfig catalogConfig;
+
+ /** Resource name in the format: catalogs/{catalog_name}/config. */
+ @JsonIgnore private String name;
+
+ /** Field mask specifying which fields to update. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private FieldMask updateMask;
+
+ public UpdateCatalogConfigRequest setCatalogConfig(CatalogConfig catalogConfig) {
+ this.catalogConfig = catalogConfig;
+ return this;
+ }
+
+ public CatalogConfig getCatalogConfig() {
+ return catalogConfig;
+ }
+
+ public UpdateCatalogConfigRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public UpdateCatalogConfigRequest setUpdateMask(FieldMask updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public FieldMask getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateCatalogConfigRequest that = (UpdateCatalogConfigRequest) o;
+ return Objects.equals(catalogConfig, that.catalogConfig)
+ && Objects.equals(name, that.name)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(catalogConfig, name, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateCatalogConfigRequest.class)
+ .add("catalogConfig", catalogConfig)
+ .add("name", name)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java
index 8ae58f6a5..f7f7dd752 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java
@@ -14,9 +14,9 @@ public class GetPermissionLevelsRequest {
/**
* The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
- * clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files,
- * genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos,
- * serving-endpoints, or warehouses.
+ * clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories,
+ * experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries,
+ * registered-models, repos, serving-endpoints, or warehouses.
*/
@JsonIgnore private String requestObjectType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java
index 801a423e4..b27841373 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java
@@ -14,9 +14,9 @@ public class GetPermissionRequest {
/**
* The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
- * clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files,
- * genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos,
- * serving-endpoints, or warehouses.
+ * clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories,
+ * experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries,
+ * registered-models, repos, serving-endpoints, or warehouses.
*/
@JsonIgnore private String requestObjectType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java
index 25ba32997..a30c83e0c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java
@@ -20,9 +20,9 @@ public class SetObjectPermissions {
/**
* The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
- * clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files,
- * genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos,
- * serving-endpoints, or warehouses.
+ * clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories,
+ * experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries,
+ * registered-models, repos, serving-endpoints, or warehouses.
*/
@JsonIgnore private String requestObjectType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java
index b7ea0195f..2420a2d73 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java
@@ -20,9 +20,9 @@ public class UpdateObjectPermissions {
/**
* The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
- * clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files,
- * genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos,
- * serving-endpoints, or warehouses.
+ * clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories,
+ * experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries,
+ * registered-models, repos, serving-endpoints, or warehouses.
*/
@JsonIgnore private String requestObjectType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateKnowledgeAssistantRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateKnowledgeAssistantRequest.java
new file mode 100755
index 000000000..401986af0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateKnowledgeAssistantRequest.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateKnowledgeAssistantRequest {
+ /** The Knowledge Assistant to create. */
+ @JsonProperty("knowledge_assistant")
+ private KnowledgeAssistant knowledgeAssistant;
+
+ public CreateKnowledgeAssistantRequest setKnowledgeAssistant(
+ KnowledgeAssistant knowledgeAssistant) {
+ this.knowledgeAssistant = knowledgeAssistant;
+ return this;
+ }
+
+ public KnowledgeAssistant getKnowledgeAssistant() {
+ return knowledgeAssistant;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateKnowledgeAssistantRequest that = (CreateKnowledgeAssistantRequest) o;
+ return Objects.equals(knowledgeAssistant, that.knowledgeAssistant);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(knowledgeAssistant);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateKnowledgeAssistantRequest.class)
+ .add("knowledgeAssistant", knowledgeAssistant)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateKnowledgeSourceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateKnowledgeSourceRequest.java
new file mode 100755
index 000000000..da7e77346
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateKnowledgeSourceRequest.java
@@ -0,0 +1,62 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateKnowledgeSourceRequest {
+ /** */
+ @JsonProperty("knowledge_source")
+ private KnowledgeSource knowledgeSource;
+
+ /**
+ * Parent resource where this source will be created. Format:
+ * knowledge-assistants/{knowledge_assistant_id}
+ */
+ @JsonIgnore private String parent;
+
+ public CreateKnowledgeSourceRequest setKnowledgeSource(KnowledgeSource knowledgeSource) {
+ this.knowledgeSource = knowledgeSource;
+ return this;
+ }
+
+ public KnowledgeSource getKnowledgeSource() {
+ return knowledgeSource;
+ }
+
+ public CreateKnowledgeSourceRequest setParent(String parent) {
+ this.parent = parent;
+ return this;
+ }
+
+ public String getParent() {
+ return parent;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateKnowledgeSourceRequest that = (CreateKnowledgeSourceRequest) o;
+ return Objects.equals(knowledgeSource, that.knowledgeSource)
+ && Objects.equals(parent, that.parent);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(knowledgeSource, parent);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateKnowledgeSourceRequest.class)
+ .add("knowledgeSource", knowledgeSource)
+ .add("parent", parent)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteKnowledgeAssistantRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteKnowledgeAssistantRequest.java
new file mode 100755
index 000000000..60d4638a0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteKnowledgeAssistantRequest.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteKnowledgeAssistantRequest {
+ /**
+ * The resource name of the knowledge assistant to be deleted. Format:
+ * knowledge-assistants/{knowledge_assistant_id}
+ */
+ @JsonIgnore private String name;
+
+ public DeleteKnowledgeAssistantRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteKnowledgeAssistantRequest that = (DeleteKnowledgeAssistantRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteKnowledgeAssistantRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteKnowledgeSourceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteKnowledgeSourceRequest.java
new file mode 100755
index 000000000..746581a50
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteKnowledgeSourceRequest.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteKnowledgeSourceRequest {
+ /**
+ * The resource name of the Knowledge Source to delete. Format:
+ * knowledge-assistants/{knowledge_assistant_id}/knowledge-sources/{knowledge_source_id}
+ */
+ @JsonIgnore private String name;
+
+ public DeleteKnowledgeSourceRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteKnowledgeSourceRequest that = (DeleteKnowledgeSourceRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteKnowledgeSourceRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/FileTableSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/FileTableSpec.java
new file mode 100755
index 000000000..18f65a692
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/FileTableSpec.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** FileTableSpec specifies a file table source configuration. */
+@Generated
+public class FileTableSpec {
+ /** The name of the column containing BINARY file content to be indexed. */
+ @JsonProperty("file_col")
+ private String fileCol;
+
+ /** Full UC name of the table, in the format of {CATALOG}.{SCHEMA}.{TABLE_NAME}. */
+ @JsonProperty("table_name")
+ private String tableName;
+
+ public FileTableSpec setFileCol(String fileCol) {
+ this.fileCol = fileCol;
+ return this;
+ }
+
+ public String getFileCol() {
+ return fileCol;
+ }
+
+ public FileTableSpec setTableName(String tableName) {
+ this.tableName = tableName;
+ return this;
+ }
+
+ public String getTableName() {
+ return tableName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ FileTableSpec that = (FileTableSpec) o;
+ return Objects.equals(fileCol, that.fileCol) && Objects.equals(tableName, that.tableName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(fileCol, tableName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(FileTableSpec.class)
+ .add("fileCol", fileCol)
+ .add("tableName", tableName)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/FilesSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/FilesSpec.java
new file mode 100755
index 000000000..14de72a5c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/FilesSpec.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** FilesSpec specifies a files source configuration. */
+@Generated
+public class FilesSpec {
+ /** A UC volume path that includes a list of files. */
+ @JsonProperty("path")
+ private String path;
+
+ public FilesSpec setPath(String path) {
+ this.path = path;
+ return this;
+ }
+
+ public String getPath() {
+ return path;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ FilesSpec that = (FilesSpec) o;
+ return Objects.equals(path, that.path);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(path);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(FilesSpec.class).add("path", path).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantRequest.java
new file mode 100755
index 000000000..e722347bf
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantRequest.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetKnowledgeAssistantRequest {
+ /**
+ * The resource name of the knowledge assistant. Format:
+ * knowledge-assistants/{knowledge_assistant_id}
+ */
+ @JsonIgnore private String name;
+
+ public GetKnowledgeAssistantRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetKnowledgeAssistantRequest that = (GetKnowledgeAssistantRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetKnowledgeAssistantRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeSourceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeSourceRequest.java
new file mode 100755
index 000000000..268d77a0c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeSourceRequest.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetKnowledgeSourceRequest {
+ /**
+ * The resource name of the Knowledge Source. Format:
+ * knowledge-assistants/{knowledge_assistant_id}/knowledge-sources/{knowledge_source_id}
+ */
+ @JsonIgnore private String name;
+
+ public GetKnowledgeSourceRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetKnowledgeSourceRequest that = (GetKnowledgeSourceRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetKnowledgeSourceRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/IndexSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/IndexSpec.java
new file mode 100755
index 000000000..a30bde9c6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/IndexSpec.java
@@ -0,0 +1,75 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** IndexSpec specifies a vector search index source configuration. */
+@Generated
+public class IndexSpec {
+ /** The column that specifies a link or reference to where the information came from. */
+ @JsonProperty("doc_uri_col")
+ private String docUriCol;
+
+ /** Full UC name of the vector search index, in the format of {CATALOG}.{SCHEMA}.{INDEX_NAME}. */
+ @JsonProperty("index_name")
+ private String indexName;
+
+ /** The column that includes the document text for retrieval. */
+ @JsonProperty("text_col")
+ private String textCol;
+
+ public IndexSpec setDocUriCol(String docUriCol) {
+ this.docUriCol = docUriCol;
+ return this;
+ }
+
+ public String getDocUriCol() {
+ return docUriCol;
+ }
+
+ public IndexSpec setIndexName(String indexName) {
+ this.indexName = indexName;
+ return this;
+ }
+
+ public String getIndexName() {
+ return indexName;
+ }
+
+ public IndexSpec setTextCol(String textCol) {
+ this.textCol = textCol;
+ return this;
+ }
+
+ public String getTextCol() {
+ return textCol;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ IndexSpec that = (IndexSpec) o;
+ return Objects.equals(docUriCol, that.docUriCol)
+ && Objects.equals(indexName, that.indexName)
+ && Objects.equals(textCol, that.textCol);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(docUriCol, indexName, textCol);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(IndexSpec.class)
+ .add("docUriCol", docUriCol)
+ .add("indexName", indexName)
+ .add("textCol", textCol)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistant.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistant.java
new file mode 100755
index 000000000..b26ecda26
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistant.java
@@ -0,0 +1,223 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.protobuf.Timestamp;
+import java.util.Objects;
+
+/**
+ * Entity message that represents a knowledge assistant. Note: REQUIRED annotations below represent
+ * create-time requirements. For updates, required fields are determined by the update mask.
+ */
+@Generated
+public class KnowledgeAssistant {
+ /** Creation timestamp. */
+ @JsonProperty("create_time")
+ private Timestamp createTime;
+
+ /** The creator of the Knowledge Assistant. */
+ @JsonProperty("creator")
+ private String creator;
+
+ /**
+ * Description of what this agent can do (user-facing). Required when creating a Knowledge
+ * Assistant. When updating a Knowledge Assistant, optional unless included in update_mask.
+ */
+ @JsonProperty("description")
+ private String description;
+
+ /**
+ * The display name of the Knowledge Assistant, unique at workspace level. Required when creating
+ * a Knowledge Assistant. When updating a Knowledge Assistant, optional unless included in
+ * update_mask.
+ */
+ @JsonProperty("display_name")
+ private String displayName;
+
+ /** The name of the knowledge assistant agent endpoint. */
+ @JsonProperty("endpoint_name")
+ private String endpointName;
+
+ /** Error details when the Knowledge Assistant is in FAILED state. */
+ @JsonProperty("error_info")
+ private String errorInfo;
+
+ /** The MLflow experiment ID. */
+ @JsonProperty("experiment_id")
+ private String experimentId;
+
+ /** The universally unique identifier (UUID) of the Knowledge Assistant. */
+ @JsonProperty("id")
+ private String id;
+
+ /**
+ * Additional global instructions on how the agent should generate answers. Optional on create and
+ * update. When updating a Knowledge Assistant, include this field in update_mask to modify it.
+ */
+ @JsonProperty("instructions")
+ private String instructions;
+
+ /**
+ * The resource name of the Knowledge Assistant. Format:
+ * knowledge-assistants/{knowledge_assistant_id}
+ */
+ @JsonProperty("name")
+ private String name;
+
+ /** State of the Knowledge Assistant. Not returned in List responses. */
+ @JsonProperty("state")
+ private KnowledgeAssistantState state;
+
+ public KnowledgeAssistant setCreateTime(Timestamp createTime) {
+ this.createTime = createTime;
+ return this;
+ }
+
+ public Timestamp getCreateTime() {
+ return createTime;
+ }
+
+ public KnowledgeAssistant setCreator(String creator) {
+ this.creator = creator;
+ return this;
+ }
+
+ public String getCreator() {
+ return creator;
+ }
+
+ public KnowledgeAssistant setDescription(String description) {
+ this.description = description;
+ return this;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public KnowledgeAssistant setDisplayName(String displayName) {
+ this.displayName = displayName;
+ return this;
+ }
+
+ public String getDisplayName() {
+ return displayName;
+ }
+
+ public KnowledgeAssistant setEndpointName(String endpointName) {
+ this.endpointName = endpointName;
+ return this;
+ }
+
+ public String getEndpointName() {
+ return endpointName;
+ }
+
+ public KnowledgeAssistant setErrorInfo(String errorInfo) {
+ this.errorInfo = errorInfo;
+ return this;
+ }
+
+ public String getErrorInfo() {
+ return errorInfo;
+ }
+
+ public KnowledgeAssistant setExperimentId(String experimentId) {
+ this.experimentId = experimentId;
+ return this;
+ }
+
+ public String getExperimentId() {
+ return experimentId;
+ }
+
+ public KnowledgeAssistant setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public KnowledgeAssistant setInstructions(String instructions) {
+ this.instructions = instructions;
+ return this;
+ }
+
+ public String getInstructions() {
+ return instructions;
+ }
+
+ public KnowledgeAssistant setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public KnowledgeAssistant setState(KnowledgeAssistantState state) {
+ this.state = state;
+ return this;
+ }
+
+ public KnowledgeAssistantState getState() {
+ return state;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ KnowledgeAssistant that = (KnowledgeAssistant) o;
+ return Objects.equals(createTime, that.createTime)
+ && Objects.equals(creator, that.creator)
+ && Objects.equals(description, that.description)
+ && Objects.equals(displayName, that.displayName)
+ && Objects.equals(endpointName, that.endpointName)
+ && Objects.equals(errorInfo, that.errorInfo)
+ && Objects.equals(experimentId, that.experimentId)
+ && Objects.equals(id, that.id)
+ && Objects.equals(instructions, that.instructions)
+ && Objects.equals(name, that.name)
+ && Objects.equals(state, that.state);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ createTime,
+ creator,
+ description,
+ displayName,
+ endpointName,
+ errorInfo,
+ experimentId,
+ id,
+ instructions,
+ name,
+ state);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(KnowledgeAssistant.class)
+ .add("createTime", createTime)
+ .add("creator", creator)
+ .add("description", description)
+ .add("displayName", displayName)
+ .add("endpointName", endpointName)
+ .add("errorInfo", errorInfo)
+ .add("experimentId", experimentId)
+ .add("id", id)
+ .add("instructions", instructions)
+ .add("name", name)
+ .add("state", state)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantState.java
new file mode 100755
index 000000000..3a47b2cbb
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantState.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum KnowledgeAssistantState {
+ ACTIVE,
+ CREATING,
+ FAILED,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsAPI.java
new file mode 100755
index 000000000..47c062573
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsAPI.java
@@ -0,0 +1,129 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Manage Knowledge Assistants and related resources. */
+@Generated
+public class KnowledgeAssistantsAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(KnowledgeAssistantsAPI.class);
+
+ private final KnowledgeAssistantsService impl;
+
+ /** Regular-use constructor */
+ public KnowledgeAssistantsAPI(ApiClient apiClient) {
+ impl = new KnowledgeAssistantsImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public KnowledgeAssistantsAPI(KnowledgeAssistantsService mock) {
+ impl = mock;
+ }
+
+ /** Creates a Knowledge Assistant. */
+ public KnowledgeAssistant createKnowledgeAssistant(CreateKnowledgeAssistantRequest request) {
+ return impl.createKnowledgeAssistant(request);
+ }
+
+ /** Creates a Knowledge Source under a Knowledge Assistant. */
+ public KnowledgeSource createKnowledgeSource(CreateKnowledgeSourceRequest request) {
+ return impl.createKnowledgeSource(request);
+ }
+
+ public void deleteKnowledgeAssistant(String name) {
+ deleteKnowledgeAssistant(new DeleteKnowledgeAssistantRequest().setName(name));
+ }
+
+ /** Deletes a Knowledge Assistant. */
+ public void deleteKnowledgeAssistant(DeleteKnowledgeAssistantRequest request) {
+ impl.deleteKnowledgeAssistant(request);
+ }
+
+ public void deleteKnowledgeSource(String name) {
+ deleteKnowledgeSource(new DeleteKnowledgeSourceRequest().setName(name));
+ }
+
+ /** Deletes a Knowledge Source. */
+ public void deleteKnowledgeSource(DeleteKnowledgeSourceRequest request) {
+ impl.deleteKnowledgeSource(request);
+ }
+
+ public KnowledgeAssistant getKnowledgeAssistant(String name) {
+ return getKnowledgeAssistant(new GetKnowledgeAssistantRequest().setName(name));
+ }
+
+ /** Gets a Knowledge Assistant. */
+ public KnowledgeAssistant getKnowledgeAssistant(GetKnowledgeAssistantRequest request) {
+ return impl.getKnowledgeAssistant(request);
+ }
+
+ public KnowledgeSource getKnowledgeSource(String name) {
+ return getKnowledgeSource(new GetKnowledgeSourceRequest().setName(name));
+ }
+
+ /** Gets a Knowledge Source. */
+ public KnowledgeSource getKnowledgeSource(GetKnowledgeSourceRequest request) {
+ return impl.getKnowledgeSource(request);
+ }
+
+ /** List Knowledge Assistants */
+ public Iterable This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface KnowledgeAssistantsService {
+ /** Creates a Knowledge Assistant. */
+ KnowledgeAssistant createKnowledgeAssistant(
+ CreateKnowledgeAssistantRequest createKnowledgeAssistantRequest);
+
+ /** Creates a Knowledge Source under a Knowledge Assistant. */
+ KnowledgeSource createKnowledgeSource(CreateKnowledgeSourceRequest createKnowledgeSourceRequest);
+
+ /** Deletes a Knowledge Assistant. */
+ void deleteKnowledgeAssistant(DeleteKnowledgeAssistantRequest deleteKnowledgeAssistantRequest);
+
+ /** Deletes a Knowledge Source. */
+ void deleteKnowledgeSource(DeleteKnowledgeSourceRequest deleteKnowledgeSourceRequest);
+
+ /** Gets a Knowledge Assistant. */
+ KnowledgeAssistant getKnowledgeAssistant(
+ GetKnowledgeAssistantRequest getKnowledgeAssistantRequest);
+
+ /** Gets a Knowledge Source. */
+ KnowledgeSource getKnowledgeSource(GetKnowledgeSourceRequest getKnowledgeSourceRequest);
+
+ /** List Knowledge Assistants */
+ ListKnowledgeAssistantsResponse listKnowledgeAssistants(
+ ListKnowledgeAssistantsRequest listKnowledgeAssistantsRequest);
+
+ /** Lists Knowledge Sources under a Knowledge Assistant. */
+ ListKnowledgeSourcesResponse listKnowledgeSources(
+ ListKnowledgeSourcesRequest listKnowledgeSourcesRequest);
+
+ /**
+ * Sync all non-index Knowledge Sources for a Knowledge Assistant (index sources do not require
+ * sync)
+ */
+ void syncKnowledgeSources(SyncKnowledgeSourcesRequest syncKnowledgeSourcesRequest);
+
+ /** Updates a Knowledge Assistant. */
+ KnowledgeAssistant updateKnowledgeAssistant(
+ UpdateKnowledgeAssistantRequest updateKnowledgeAssistantRequest);
+
+ /** Updates a Knowledge Source. */
+ KnowledgeSource updateKnowledgeSource(UpdateKnowledgeSourceRequest updateKnowledgeSourceRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeSource.java
new file mode 100755
index 000000000..705511930
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeSource.java
@@ -0,0 +1,227 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.protobuf.Timestamp;
+import java.util.Objects;
+
+/**
+ * KnowledgeSource represents a source of knowledge for the KnowledgeAssistant. Used in
+ * create/update requests and returned in Get/List responses. Note: REQUIRED annotations below
+ * represent create-time requirements. For updates, required fields are determined by the update
+ * mask.
+ */
+@Generated
+public class KnowledgeSource {
+ /** Timestamp when this knowledge source was created. */
+ @JsonProperty("create_time")
+ private Timestamp createTime;
+
+ /**
+ * Description of the knowledge source. Required when creating a Knowledge Source. When updating a
+ * Knowledge Source, optional unless included in update_mask.
+ */
+ @JsonProperty("description")
+ private String description;
+
+ /**
+ * Human-readable display name of the knowledge source. Required when creating a Knowledge Source.
+ * When updating a Knowledge Source, optional unless included in update_mask.
+ */
+ @JsonProperty("display_name")
+ private String displayName;
+
+ /** */
+ @JsonProperty("file_table")
+ private FileTableSpec fileTable;
+
+ /** */
+ @JsonProperty("files")
+ private FilesSpec files;
+
+ /** */
+ @JsonProperty("id")
+ private String id;
+
+ /** */
+ @JsonProperty("index")
+ private IndexSpec index;
+
+ /**
+ * Timestamp representing the cutoff before which content in this knowledge source is being
+ * ingested.
+ */
+ @JsonProperty("knowledge_cutoff_time")
+ private Timestamp knowledgeCutoffTime;
+
+ /**
+ * Full resource name:
+ * knowledge-assistants/{knowledge_assistant_id}/knowledge-sources/{knowledge_source_id}
+ */
+ @JsonProperty("name")
+ private String name;
+
+ /**
+ * The type of the source: "index", "files", or "file_table". Required when creating a Knowledge
+ * Source. When updating a Knowledge Source, this field is ignored.
+ */
+ @JsonProperty("source_type")
+ private String sourceType;
+
+ /** */
+ @JsonProperty("state")
+ private KnowledgeSourceState state;
+
+ public KnowledgeSource setCreateTime(Timestamp createTime) {
+ this.createTime = createTime;
+ return this;
+ }
+
+ public Timestamp getCreateTime() {
+ return createTime;
+ }
+
+ public KnowledgeSource setDescription(String description) {
+ this.description = description;
+ return this;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public KnowledgeSource setDisplayName(String displayName) {
+ this.displayName = displayName;
+ return this;
+ }
+
+ public String getDisplayName() {
+ return displayName;
+ }
+
+ public KnowledgeSource setFileTable(FileTableSpec fileTable) {
+ this.fileTable = fileTable;
+ return this;
+ }
+
+ public FileTableSpec getFileTable() {
+ return fileTable;
+ }
+
+ public KnowledgeSource setFiles(FilesSpec files) {
+ this.files = files;
+ return this;
+ }
+
+ public FilesSpec getFiles() {
+ return files;
+ }
+
+ public KnowledgeSource setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public KnowledgeSource setIndex(IndexSpec index) {
+ this.index = index;
+ return this;
+ }
+
+ public IndexSpec getIndex() {
+ return index;
+ }
+
+ public KnowledgeSource setKnowledgeCutoffTime(Timestamp knowledgeCutoffTime) {
+ this.knowledgeCutoffTime = knowledgeCutoffTime;
+ return this;
+ }
+
+ public Timestamp getKnowledgeCutoffTime() {
+ return knowledgeCutoffTime;
+ }
+
+ public KnowledgeSource setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public KnowledgeSource setSourceType(String sourceType) {
+ this.sourceType = sourceType;
+ return this;
+ }
+
+ public String getSourceType() {
+ return sourceType;
+ }
+
+ public KnowledgeSource setState(KnowledgeSourceState state) {
+ this.state = state;
+ return this;
+ }
+
+ public KnowledgeSourceState getState() {
+ return state;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ KnowledgeSource that = (KnowledgeSource) o;
+ return Objects.equals(createTime, that.createTime)
+ && Objects.equals(description, that.description)
+ && Objects.equals(displayName, that.displayName)
+ && Objects.equals(fileTable, that.fileTable)
+ && Objects.equals(files, that.files)
+ && Objects.equals(id, that.id)
+ && Objects.equals(index, that.index)
+ && Objects.equals(knowledgeCutoffTime, that.knowledgeCutoffTime)
+ && Objects.equals(name, that.name)
+ && Objects.equals(sourceType, that.sourceType)
+ && Objects.equals(state, that.state);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ createTime,
+ description,
+ displayName,
+ fileTable,
+ files,
+ id,
+ index,
+ knowledgeCutoffTime,
+ name,
+ sourceType,
+ state);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(KnowledgeSource.class)
+ .add("createTime", createTime)
+ .add("description", description)
+ .add("displayName", displayName)
+ .add("fileTable", fileTable)
+ .add("files", files)
+ .add("id", id)
+ .add("index", index)
+ .add("knowledgeCutoffTime", knowledgeCutoffTime)
+ .add("name", name)
+ .add("sourceType", sourceType)
+ .add("state", state)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeSourceState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeSourceState.java
new file mode 100755
index 000000000..f831c5d19
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeSourceState.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum KnowledgeSourceState {
+ FAILED_UPDATE,
+ UPDATED,
+ UPDATING,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeAssistantsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeAssistantsRequest.java
new file mode 100755
index 000000000..228407b6f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeAssistantsRequest.java
@@ -0,0 +1,67 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListKnowledgeAssistantsRequest {
+ /**
+ * The maximum number of knowledge assistants to return. If unspecified, at most 100 knowledge
+ * assistants will be returned. The maximum value is 100; values above 100 will be coerced to 100.
+ */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * A page token, received from a previous `ListKnowledgeAssistants` call. Provide this to retrieve
+ * the subsequent page. If unspecified, the first page will be returned.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListKnowledgeAssistantsRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListKnowledgeAssistantsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListKnowledgeAssistantsRequest that = (ListKnowledgeAssistantsRequest) o;
+ return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListKnowledgeAssistantsRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeAssistantsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeAssistantsResponse.java
new file mode 100755
index 000000000..ec55ca4b4
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeAssistantsResponse.java
@@ -0,0 +1,65 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** A list of Knowledge Assistants. */
+@Generated
+public class ListKnowledgeAssistantsResponse {
+ /** */
+ @JsonProperty("knowledge_assistants")
+ private Collection