diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 0bba438e2..6489fea7c 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -1e3533f94335f0e6c5d9262bc1fea95b3ddcb0e1 \ No newline at end of file +b799a600ccf37b0776d99c315614f0d0f0f0ce54 \ No newline at end of file diff --git a/.codegen/workspace.java.tmpl b/.codegen/workspace.java.tmpl index a6ebdbc58..ced2d5bcb 100644 --- a/.codegen/workspace.java.tmpl +++ b/.codegen/workspace.java.tmpl @@ -10,6 +10,7 @@ import com.databricks.sdk.core.DatabricksConfig; defined in an outer scope (https://github.com/golang/go/issues/17454). */ -}} import com.databricks.sdk.mixin.ClustersExt; import com.databricks.sdk.mixin.DbfsExt; +import com.databricks.sdk.mixin.SecretsExt; {{range .Services}}{{if not .IsAccounts}} import com.databricks.sdk.service.{{.Package.Name}}.{{.PascalName}}API; import com.databricks.sdk.service.{{.Package.Name}}.{{.PascalName}}Service; @@ -17,7 +18,7 @@ import com.databricks.sdk.service.{{.Package.Name}}.{{.PascalName}}Service; import com.databricks.sdk.support.Generated; {{- define "api" -}} - {{- $mixins := dict "ClustersAPI" "ClustersExt" "DbfsAPI" "DbfsExt" -}} + {{- $mixins := dict "ClustersAPI" "ClustersExt" "DbfsAPI" "DbfsExt" "SecretsAPI" "SecretsExt" -}} {{- $genApi := concat .PascalName "API" -}} {{- getOrDefault $mixins $genApi $genApi -}} {{- end -}} diff --git a/.gitattributes b/.gitattributes index e73f7f23f..b62f8e02f 100755 --- a/.gitattributes +++ b/.gitattributes @@ -52,6 +52,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsSto databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastore.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredential.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactMatcher.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipal.java linguist-generated=true @@ -104,12 +110,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaRuntim databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableSchemaName.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectiveAutoMaintenanceFlag.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectiveAutoMaintenanceFlagInheritedFromType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePermissionsList.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlag.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlagInheritedFromType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePrivilege.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePrivilegeAssignment.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableAutoMaintenance.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnablePredictiveOptimization.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableSchemaName.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EncryptionDetails.java linguist-generated=true @@ -133,6 +139,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsSe databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreAssignmentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetArtifactAllowlistRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java linguist-generated=true @@ -169,6 +176,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesR databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesResponseContent.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MatchType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfoDeltaSharingScope.java linguist-generated=true @@ -191,6 +199,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasServ 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableOptionsMap.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurablePropertiesMap.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetArtifactAllowlist.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SseEncryptionDetails.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SseEncryptionDetailsAlgorithm.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java linguist-generated=true @@ -724,6 +733,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagRe databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRun.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRuns.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTag.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestStage.java linguist-generated=true @@ -812,6 +823,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelReque databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperiment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRun.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRuns.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Run.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunData.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfo.java linguist-generated=true @@ -1378,6 +1391,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPe databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionLevelsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetStatusRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsResponse.java linguist-generated=true diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index 73d1643f8..1fd2e48aa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -7,6 +7,9 @@ import com.databricks.sdk.core.DatabricksConfig; import com.databricks.sdk.mixin.ClustersExt; import com.databricks.sdk.mixin.DbfsExt; +import com.databricks.sdk.mixin.SecretsExt; +import com.databricks.sdk.service.catalog.ArtifactAllowlistsAPI; +import com.databricks.sdk.service.catalog.ArtifactAllowlistsService; import com.databricks.sdk.service.catalog.CatalogsAPI; import com.databricks.sdk.service.catalog.CatalogsService; import com.databricks.sdk.service.catalog.ConnectionsAPI; @@ -109,7 +112,6 @@ import com.databricks.sdk.service.workspace.GitCredentialsService; import com.databricks.sdk.service.workspace.ReposAPI; import com.databricks.sdk.service.workspace.ReposService; -import com.databricks.sdk.service.workspace.SecretsAPI; import com.databricks.sdk.service.workspace.SecretsService; import com.databricks.sdk.service.workspace.WorkspaceAPI; import com.databricks.sdk.service.workspace.WorkspaceService; @@ -123,6 +125,7 @@ public class WorkspaceClient { private AccountAccessControlProxyAPI accountAccessControlProxyAPI; private AlertsAPI alertsAPI; + private ArtifactAllowlistsAPI artifactAllowlistsAPI; private CatalogsAPI catalogsAPI; private CleanRoomsAPI cleanRoomsAPI; private ClusterPoliciesAPI clusterPoliciesAPI; @@ -158,7 +161,7 @@ public class WorkspaceClient { private RecipientsAPI recipientsAPI; private ReposAPI reposAPI; private SchemasAPI schemasAPI; - private SecretsAPI secretsAPI; + private SecretsExt secretsAPI; private ServicePrincipalsAPI servicePrincipalsAPI; private ServingEndpointsAPI servingEndpointsAPI; private SharesAPI sharesAPI; @@ -186,6 +189,7 @@ public WorkspaceClient(DatabricksConfig config) { accountAccessControlProxyAPI = new AccountAccessControlProxyAPI(apiClient); alertsAPI = new AlertsAPI(apiClient); + artifactAllowlistsAPI = new ArtifactAllowlistsAPI(apiClient); catalogsAPI = new CatalogsAPI(apiClient); cleanRoomsAPI = new CleanRoomsAPI(apiClient); clusterPoliciesAPI = new ClusterPoliciesAPI(apiClient); @@ -221,7 +225,7 @@ public WorkspaceClient(DatabricksConfig config) { recipientsAPI = new RecipientsAPI(apiClient); reposAPI = new ReposAPI(apiClient); schemasAPI = new SchemasAPI(apiClient); - secretsAPI = new SecretsAPI(apiClient); + secretsAPI = new SecretsExt(apiClient); servicePrincipalsAPI = new ServicePrincipalsAPI(apiClient); servingEndpointsAPI = new ServingEndpointsAPI(apiClient); sharesAPI = new SharesAPI(apiClient); @@ -266,6 +270,14 @@ public AlertsAPI alerts() { return alertsAPI; } + /** + * In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the `allowlist` + * in UC so that users can leverage these artifacts on compute configured with shared access mode. 
+ */ + public ArtifactAllowlistsAPI artifactAllowlists() { + return artifactAllowlistsAPI; + } + /** * A catalog is the first layer of Unity Catalog’s three-level namespace. It’s used to organize * your data assets. Users can see all catalogs on which they have been assigned the USE_CATALOG @@ -831,7 +843,7 @@ public SchemasAPI schemas() { * While Databricks makes an effort to redact secret values that might be displayed in notebooks, * it is not possible to prevent such users from reading secrets. */ - public SecretsAPI secrets() { + public SecretsExt secrets() { return secretsAPI; } @@ -1191,6 +1203,12 @@ public WorkspaceClient withAlertsImpl(AlertsService alerts) { return this; } + /** Replace ArtifactAllowlistsAPI implementation with mock */ + public WorkspaceClient withArtifactAllowlistsImpl(ArtifactAllowlistsService artifactAllowlists) { + artifactAllowlistsAPI = new ArtifactAllowlistsAPI(artifactAllowlists); + return this; + } + /** Replace CatalogsAPI implementation with mock */ public WorkspaceClient withCatalogsImpl(CatalogsService catalogs) { catalogsAPI = new CatalogsAPI(catalogs); @@ -1404,7 +1422,7 @@ public WorkspaceClient withSchemasImpl(SchemasService schemas) { /** Replace SecretsAPI implementation with mock */ public WorkspaceClient withSecretsImpl(SecretsService secrets) { - secretsAPI = new SecretsAPI(secrets); + secretsAPI = new SecretsExt(secrets); return this; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/mixin/SecretsExt.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/mixin/SecretsExt.java new file mode 100644 index 000000000..9d986e094 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/mixin/SecretsExt.java @@ -0,0 +1,29 @@ +package com.databricks.sdk.mixin; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.service.workspace.*; +import java.util.Base64; + +/** Remote equivalent of secrets util. */ +public class SecretsExt extends SecretsAPI { + + public SecretsExt(ApiClient apiClient) { + super(apiClient); + } + + public SecretsExt(SecretsService mock) { + super(mock); + } + + /** Gets the bytes representation of a secret value for the specified scope and key. */ + public byte[] getBytes(String scope, String key) { + GetSecretResponse response = this.getSecret(scope, key); + return Base64.getDecoder().decode(response.getValue()); + } + + /** Gets the string representation of a secret value for the specified secrets scope and key. */ + public String get(String scope, String key) { + byte[] val = this.getBytes(scope, key); + return new String(val); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistInfo.java new file mode 100755 index 000000000..8c146a9a4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistInfo.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ArtifactAllowlistInfo { + /** */ + @JsonProperty("artifact_matchers") + private ArtifactMatcher artifactMatchers; + + /** Time at which this artifact allowlist was set, in epoch milliseconds. 
*/ + @JsonProperty("created_at") + private Long createdAt; + + /** Username of the user who set the artifact allowlist. */ + @JsonProperty("created_by") + private String createdBy; + + /** Unique identifier of parent metastore. */ + @JsonProperty("metastore_id") + private String metastoreId; + + public ArtifactAllowlistInfo setArtifactMatchers(ArtifactMatcher artifactMatchers) { + this.artifactMatchers = artifactMatchers; + return this; + } + + public ArtifactMatcher getArtifactMatchers() { + return artifactMatchers; + } + + public ArtifactAllowlistInfo setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public ArtifactAllowlistInfo setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public ArtifactAllowlistInfo setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ArtifactAllowlistInfo that = (ArtifactAllowlistInfo) o; + return Objects.equals(artifactMatchers, that.artifactMatchers) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(metastoreId, that.metastoreId); + } + + @Override + public int hashCode() { + return Objects.hash(artifactMatchers, createdAt, createdBy, metastoreId); + } + + @Override + public String toString() { + return new ToStringer(ArtifactAllowlistInfo.class) + .add("artifactMatchers", artifactMatchers) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("metastoreId", metastoreId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsAPI.java new file mode 100755 index 000000000..5cf52d62b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsAPI.java @@ -0,0 +1,62 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the `allowlist` + * in UC so that users can leverage these artifacts on compute configured with shared access mode. + */ +@Generated +public class ArtifactAllowlistsAPI { + private static final Logger LOG = LoggerFactory.getLogger(ArtifactAllowlistsAPI.class); + + private final ArtifactAllowlistsService impl; + + /** Regular-use constructor */ + public ArtifactAllowlistsAPI(ApiClient apiClient) { + impl = new ArtifactAllowlistsImpl(apiClient); + } + + /** Constructor for mocks */ + public ArtifactAllowlistsAPI(ArtifactAllowlistsService mock) { + impl = mock; + } + + public ArtifactAllowlistInfo get(ArtifactType artifactType) { + return get(new GetArtifactAllowlistRequest().setArtifactType(artifactType)); + } + + /** + * Get an artifact allowlist. + * + *
<p>
Get the artifact allowlist of a certain artifact type. The caller must be a metastore admin. + */ + public ArtifactAllowlistInfo get(GetArtifactAllowlistRequest request) { + return impl.get(request); + } + + public ArtifactAllowlistInfo update(ArtifactMatcher artifactMatchers, ArtifactType artifactType) { + return update( + new SetArtifactAllowlist() + .setArtifactMatchers(artifactMatchers) + .setArtifactType(artifactType)); + } + + /** + * Set an artifact allowlist. + * + *
<p>
Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is + * replaced with the new allowlist. The caller must be a metastore admin. + */ + public ArtifactAllowlistInfo update(SetArtifactAllowlist request) { + return impl.update(request); + } + + public ArtifactAllowlistsService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsImpl.java new file mode 100755 index 000000000..b32cd2537 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsImpl.java @@ -0,0 +1,29 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; + +/** Package-local implementation of ArtifactAllowlists */ +@Generated +class ArtifactAllowlistsImpl implements ArtifactAllowlistsService { + private final ApiClient apiClient; + + public ArtifactAllowlistsImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public ArtifactAllowlistInfo get(GetArtifactAllowlistRequest request) { + String path = + String.format("/api/2.1/unity-catalog/artifact-allowlists/%s", request.getArtifactType()); + return apiClient.GET(path, request, ArtifactAllowlistInfo.class); + } + + @Override + public ArtifactAllowlistInfo update(SetArtifactAllowlist request) { + String path = + String.format("/api/2.1/unity-catalog/artifact-allowlists/%s", request.getArtifactType()); + return apiClient.PUT(path, request, ArtifactAllowlistInfo.class); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsService.java new file mode 100755 index 000000000..7b0302acf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsService.java @@ -0,0 +1,30 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** + * In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the `allowlist` + * in UC so that users can leverage these artifacts on compute configured with shared access mode. + * + *
<p>
This is the high-level interface, that contains generated methods. + * + *
<p>
Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface ArtifactAllowlistsService { + /** + * Get an artifact allowlist. + * + *
<p>
Get the artifact allowlist of a certain artifact type. The caller must be a metastore admin. + */ + ArtifactAllowlistInfo get(GetArtifactAllowlistRequest getArtifactAllowlistRequest); + + /** + * Set an artifact allowlist. + * + *
<p>
Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is + * replaced with the new allowlist. The caller must be a metastore admin. + */ + ArtifactAllowlistInfo update(SetArtifactAllowlist setArtifactAllowlist); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactMatcher.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactMatcher.java new file mode 100755 index 000000000..22cb8d72c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactMatcher.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ArtifactMatcher { + /** The artifact path or maven coordinate */ + @JsonProperty("artifact") + private String artifact; + + /** The pattern matching type of the artifact */ + @JsonProperty("match_type") + private MatchType matchType; + + public ArtifactMatcher setArtifact(String artifact) { + this.artifact = artifact; + return this; + } + + public String getArtifact() { + return artifact; + } + + public ArtifactMatcher setMatchType(MatchType matchType) { + this.matchType = matchType; + return this; + } + + public MatchType getMatchType() { + return matchType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ArtifactMatcher that = (ArtifactMatcher) o; + return Objects.equals(artifact, that.artifact) && Objects.equals(matchType, that.matchType); + } + + @Override + public int hashCode() { + return Objects.hash(artifact, matchType); + } + + @Override + public String toString() { + return new ToStringer(ArtifactMatcher.class) + .add("artifact", artifact) + .add("matchType", matchType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactType.java new file mode 100755 index 000000000..116937719 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactType.java @@ -0,0 +1,13 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** The artifact type */ +@Generated +public enum ArtifactType { + INIT_SCRIPT, + LIBRARY_JAR, + LIBRARY_MAVEN, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java index 046a74e29..6358d470f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java @@ -31,12 +31,12 @@ public class CatalogInfo { private String createdBy; /** */ - @JsonProperty("effective_auto_maintenance_flag") - private EffectiveAutoMaintenanceFlag effectiveAutoMaintenanceFlag; + @JsonProperty("effective_predictive_optimization_flag") + private EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag; - /** Whether auto maintenance should be enabled for this object and objects under it. 
*/ - @JsonProperty("enable_auto_maintenance") - private EnableAutoMaintenance enableAutoMaintenance; + /** Whether predictive optimization should be enabled for this object and objects under it. */ + @JsonProperty("enable_predictive_optimization") + private EnablePredictiveOptimization enablePredictiveOptimization; /** * Whether the current securable is accessible from all workspaces or a specific set of @@ -139,23 +139,24 @@ public String getCreatedBy() { return createdBy; } - public CatalogInfo setEffectiveAutoMaintenanceFlag( - EffectiveAutoMaintenanceFlag effectiveAutoMaintenanceFlag) { - this.effectiveAutoMaintenanceFlag = effectiveAutoMaintenanceFlag; + public CatalogInfo setEffectivePredictiveOptimizationFlag( + EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag) { + this.effectivePredictiveOptimizationFlag = effectivePredictiveOptimizationFlag; return this; } - public EffectiveAutoMaintenanceFlag getEffectiveAutoMaintenanceFlag() { - return effectiveAutoMaintenanceFlag; + public EffectivePredictiveOptimizationFlag getEffectivePredictiveOptimizationFlag() { + return effectivePredictiveOptimizationFlag; } - public CatalogInfo setEnableAutoMaintenance(EnableAutoMaintenance enableAutoMaintenance) { - this.enableAutoMaintenance = enableAutoMaintenance; + public CatalogInfo setEnablePredictiveOptimization( + EnablePredictiveOptimization enablePredictiveOptimization) { + this.enablePredictiveOptimization = enablePredictiveOptimization; return this; } - public EnableAutoMaintenance getEnableAutoMaintenance() { - return enableAutoMaintenance; + public EnablePredictiveOptimization getEnablePredictiveOptimization() { + return enablePredictiveOptimization; } public CatalogInfo setIsolationMode(IsolationMode isolationMode) { @@ -276,8 +277,9 @@ public boolean equals(Object o) { && Objects.equals(connectionName, that.connectionName) && Objects.equals(createdAt, that.createdAt) && Objects.equals(createdBy, that.createdBy) - && Objects.equals(effectiveAutoMaintenanceFlag, that.effectiveAutoMaintenanceFlag) - && Objects.equals(enableAutoMaintenance, that.enableAutoMaintenance) + && Objects.equals( + effectivePredictiveOptimizationFlag, that.effectivePredictiveOptimizationFlag) + && Objects.equals(enablePredictiveOptimization, that.enablePredictiveOptimization) && Objects.equals(isolationMode, that.isolationMode) && Objects.equals(metastoreId, that.metastoreId) && Objects.equals(name, that.name) @@ -300,8 +302,8 @@ public int hashCode() { connectionName, createdAt, createdBy, - effectiveAutoMaintenanceFlag, - enableAutoMaintenance, + effectivePredictiveOptimizationFlag, + enablePredictiveOptimization, isolationMode, metastoreId, name, @@ -324,8 +326,8 @@ public String toString() { .add("connectionName", connectionName) .add("createdAt", createdAt) .add("createdBy", createdBy) - .add("effectiveAutoMaintenanceFlag", effectiveAutoMaintenanceFlag) - .add("enableAutoMaintenance", enableAutoMaintenance) + .add("effectivePredictiveOptimizationFlag", effectivePredictiveOptimizationFlag) + .add("enablePredictiveOptimization", enablePredictiveOptimization) .add("isolationMode", isolationMode) .add("metastoreId", metastoreId) .add("name", name) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlag.java new file mode 100755 index 000000000..bdb08b43d --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlag.java @@ -0,0 +1,81 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class EffectivePredictiveOptimizationFlag { + /** + * The name of the object from which the flag was inherited. If there was no inheritance, this + * field is left blank. + */ + @JsonProperty("inherited_from_name") + private String inheritedFromName; + + /** + * The type of the object from which the flag was inherited. If there was no inheritance, this + * field is left blank. + */ + @JsonProperty("inherited_from_type") + private EffectivePredictiveOptimizationFlagInheritedFromType inheritedFromType; + + /** Whether predictive optimization should be enabled for this object and objects under it. */ + @JsonProperty("value") + private EnablePredictiveOptimization value; + + public EffectivePredictiveOptimizationFlag setInheritedFromName(String inheritedFromName) { + this.inheritedFromName = inheritedFromName; + return this; + } + + public String getInheritedFromName() { + return inheritedFromName; + } + + public EffectivePredictiveOptimizationFlag setInheritedFromType( + EffectivePredictiveOptimizationFlagInheritedFromType inheritedFromType) { + this.inheritedFromType = inheritedFromType; + return this; + } + + public EffectivePredictiveOptimizationFlagInheritedFromType getInheritedFromType() { + return inheritedFromType; + } + + public EffectivePredictiveOptimizationFlag setValue(EnablePredictiveOptimization value) { + this.value = value; + return this; + } + + public EnablePredictiveOptimization getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EffectivePredictiveOptimizationFlag that = (EffectivePredictiveOptimizationFlag) o; + return Objects.equals(inheritedFromName, that.inheritedFromName) + && Objects.equals(inheritedFromType, that.inheritedFromType) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(inheritedFromName, inheritedFromType, value); + } + + @Override + public String toString() { + return new ToStringer(EffectivePredictiveOptimizationFlag.class) + .add("inheritedFromName", inheritedFromName) + .add("inheritedFromType", inheritedFromType) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlagInheritedFromType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlagInheritedFromType.java new file mode 100755 index 000000000..b0bff3fd6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlagInheritedFromType.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** + * The type of the object from which the flag was inherited. If there was no inheritance, this field + * is left blank. 
+ */ +@Generated +public enum EffectivePredictiveOptimizationFlagInheritedFromType { + CATALOG, + SCHEMA, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnablePredictiveOptimization.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnablePredictiveOptimization.java new file mode 100755 index 000000000..801f54d44 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnablePredictiveOptimization.java @@ -0,0 +1,13 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** Whether predictive optimization should be enabled for this object and objects under it. */ +@Generated +public enum EnablePredictiveOptimization { + DISABLE, + ENABLE, + INHERIT, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetArtifactAllowlistRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetArtifactAllowlistRequest.java new file mode 100755 index 000000000..d1c96f18b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetArtifactAllowlistRequest.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Get an artifact allowlist */ +@Generated +public class GetArtifactAllowlistRequest { + /** The artifact type of the allowlist. */ + private ArtifactType artifactType; + + public GetArtifactAllowlistRequest setArtifactType(ArtifactType artifactType) { + this.artifactType = artifactType; + return this; + } + + public ArtifactType getArtifactType() { + return artifactType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetArtifactAllowlistRequest that = (GetArtifactAllowlistRequest) o; + return Objects.equals(artifactType, that.artifactType); + } + + @Override + public int hashCode() { + return Objects.hash(artifactType); + } + + @Override + public String toString() { + return new ToStringer(GetArtifactAllowlistRequest.class) + .add("artifactType", artifactType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MatchType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MatchType.java new file mode 100755 index 000000000..4639aa300 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MatchType.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** The artifact pattern matching type */ +@Generated +public enum MatchType { + PREFIX_MATCH, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java index 8d01aa841..1d010259a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java @@ -31,12 +31,12 @@ public class SchemaInfo { private String createdBy; /** */ - @JsonProperty("effective_auto_maintenance_flag") - private EffectiveAutoMaintenanceFlag effectiveAutoMaintenanceFlag; + @JsonProperty("effective_predictive_optimization_flag") + private EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag; - /** Whether auto maintenance should be enabled for this object and objects under it. */ - @JsonProperty("enable_auto_maintenance") - private EnableAutoMaintenance enableAutoMaintenance; + /** Whether predictive optimization should be enabled for this object and objects under it. */ + @JsonProperty("enable_predictive_optimization") + private EnablePredictiveOptimization enablePredictiveOptimization; /** Full name of schema, in form of __catalog_name__.__schema_name__. */ @JsonProperty("full_name") @@ -119,23 +119,24 @@ public String getCreatedBy() { return createdBy; } - public SchemaInfo setEffectiveAutoMaintenanceFlag( - EffectiveAutoMaintenanceFlag effectiveAutoMaintenanceFlag) { - this.effectiveAutoMaintenanceFlag = effectiveAutoMaintenanceFlag; + public SchemaInfo setEffectivePredictiveOptimizationFlag( + EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag) { + this.effectivePredictiveOptimizationFlag = effectivePredictiveOptimizationFlag; return this; } - public EffectiveAutoMaintenanceFlag getEffectiveAutoMaintenanceFlag() { - return effectiveAutoMaintenanceFlag; + public EffectivePredictiveOptimizationFlag getEffectivePredictiveOptimizationFlag() { + return effectivePredictiveOptimizationFlag; } - public SchemaInfo setEnableAutoMaintenance(EnableAutoMaintenance enableAutoMaintenance) { - this.enableAutoMaintenance = enableAutoMaintenance; + public SchemaInfo setEnablePredictiveOptimization( + EnablePredictiveOptimization enablePredictiveOptimization) { + this.enablePredictiveOptimization = enablePredictiveOptimization; return this; } - public EnableAutoMaintenance getEnableAutoMaintenance() { - return enableAutoMaintenance; + public EnablePredictiveOptimization getEnablePredictiveOptimization() { + return enablePredictiveOptimization; } public SchemaInfo setFullName(String fullName) { @@ -229,8 +230,9 @@ public boolean equals(Object o) { && Objects.equals(comment, that.comment) && Objects.equals(createdAt, that.createdAt) && Objects.equals(createdBy, that.createdBy) - && Objects.equals(effectiveAutoMaintenanceFlag, that.effectiveAutoMaintenanceFlag) - && Objects.equals(enableAutoMaintenance, that.enableAutoMaintenance) + && Objects.equals( + effectivePredictiveOptimizationFlag, that.effectivePredictiveOptimizationFlag) + && Objects.equals(enablePredictiveOptimization, that.enablePredictiveOptimization) && Objects.equals(fullName, that.fullName) && Objects.equals(metastoreId, that.metastoreId) && Objects.equals(name, that.name) @@ -250,8 +252,8 @@ public int hashCode() { comment, createdAt, createdBy, - effectiveAutoMaintenanceFlag, - 
enableAutoMaintenance, + effectivePredictiveOptimizationFlag, + enablePredictiveOptimization, fullName, metastoreId, name, @@ -271,8 +273,8 @@ public String toString() { .add("comment", comment) .add("createdAt", createdAt) .add("createdBy", createdBy) - .add("effectiveAutoMaintenanceFlag", effectiveAutoMaintenanceFlag) - .add("enableAutoMaintenance", enableAutoMaintenance) + .add("effectivePredictiveOptimizationFlag", effectivePredictiveOptimizationFlag) + .add("enablePredictiveOptimization", enablePredictiveOptimization) .add("fullName", fullName) .add("metastoreId", metastoreId) .add("name", name) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetArtifactAllowlist.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetArtifactAllowlist.java new file mode 100755 index 000000000..724fa4e1d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetArtifactAllowlist.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class SetArtifactAllowlist { + /** */ + @JsonProperty("artifact_matchers") + private ArtifactMatcher artifactMatchers; + + /** The artifact type of the allowlist. */ + private ArtifactType artifactType; + + public SetArtifactAllowlist setArtifactMatchers(ArtifactMatcher artifactMatchers) { + this.artifactMatchers = artifactMatchers; + return this; + } + + public ArtifactMatcher getArtifactMatchers() { + return artifactMatchers; + } + + public SetArtifactAllowlist setArtifactType(ArtifactType artifactType) { + this.artifactType = artifactType; + return this; + } + + public ArtifactType getArtifactType() { + return artifactType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SetArtifactAllowlist that = (SetArtifactAllowlist) o; + return Objects.equals(artifactMatchers, that.artifactMatchers) + && Objects.equals(artifactType, that.artifactType); + } + + @Override + public int hashCode() { + return Objects.hash(artifactMatchers, artifactType); + } + + @Override + public String toString() { + return new ToStringer(SetArtifactAllowlist.class) + .add("artifactMatchers", artifactMatchers) + .add("artifactType", artifactType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java index b28f1f07c..7c451fcc6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java @@ -55,12 +55,12 @@ public class TableInfo { private DeltaRuntimePropertiesKvPairs deltaRuntimePropertiesKvpairs; /** */ - @JsonProperty("effective_auto_maintenance_flag") - private EffectiveAutoMaintenanceFlag effectiveAutoMaintenanceFlag; + @JsonProperty("effective_predictive_optimization_flag") + private EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag; - /** Whether auto maintenance should be enabled for this object and objects under it. 
*/ - @JsonProperty("enable_auto_maintenance") - private EnableAutoMaintenance enableAutoMaintenance; + /** Whether predictive optimization should be enabled for this object and objects under it. */ + @JsonProperty("enable_predictive_optimization") + private EnablePredictiveOptimization enablePredictiveOptimization; /** Encryption options that apply to clients connecting to cloud storage. */ @JsonProperty("encryption_details") @@ -236,23 +236,24 @@ public DeltaRuntimePropertiesKvPairs getDeltaRuntimePropertiesKvpairs() { return deltaRuntimePropertiesKvpairs; } - public TableInfo setEffectiveAutoMaintenanceFlag( - EffectiveAutoMaintenanceFlag effectiveAutoMaintenanceFlag) { - this.effectiveAutoMaintenanceFlag = effectiveAutoMaintenanceFlag; + public TableInfo setEffectivePredictiveOptimizationFlag( + EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag) { + this.effectivePredictiveOptimizationFlag = effectivePredictiveOptimizationFlag; return this; } - public EffectiveAutoMaintenanceFlag getEffectiveAutoMaintenanceFlag() { - return effectiveAutoMaintenanceFlag; + public EffectivePredictiveOptimizationFlag getEffectivePredictiveOptimizationFlag() { + return effectivePredictiveOptimizationFlag; } - public TableInfo setEnableAutoMaintenance(EnableAutoMaintenance enableAutoMaintenance) { - this.enableAutoMaintenance = enableAutoMaintenance; + public TableInfo setEnablePredictiveOptimization( + EnablePredictiveOptimization enablePredictiveOptimization) { + this.enablePredictiveOptimization = enablePredictiveOptimization; return this; } - public EnableAutoMaintenance getEnableAutoMaintenance() { - return enableAutoMaintenance; + public EnablePredictiveOptimization getEnablePredictiveOptimization() { + return enablePredictiveOptimization; } public TableInfo setEncryptionDetails(EncryptionDetails encryptionDetails) { @@ -432,8 +433,9 @@ public boolean equals(Object o) { && Objects.equals(dataSourceFormat, that.dataSourceFormat) && Objects.equals(deletedAt, that.deletedAt) && Objects.equals(deltaRuntimePropertiesKvpairs, that.deltaRuntimePropertiesKvpairs) - && Objects.equals(effectiveAutoMaintenanceFlag, that.effectiveAutoMaintenanceFlag) - && Objects.equals(enableAutoMaintenance, that.enableAutoMaintenance) + && Objects.equals( + effectivePredictiveOptimizationFlag, that.effectivePredictiveOptimizationFlag) + && Objects.equals(enablePredictiveOptimization, that.enablePredictiveOptimization) && Objects.equals(encryptionDetails, that.encryptionDetails) && Objects.equals(fullName, that.fullName) && Objects.equals(metastoreId, that.metastoreId) @@ -467,8 +469,8 @@ public int hashCode() { dataSourceFormat, deletedAt, deltaRuntimePropertiesKvpairs, - effectiveAutoMaintenanceFlag, - enableAutoMaintenance, + effectivePredictiveOptimizationFlag, + enablePredictiveOptimization, encryptionDetails, fullName, metastoreId, @@ -502,8 +504,8 @@ public String toString() { .add("dataSourceFormat", dataSourceFormat) .add("deletedAt", deletedAt) .add("deltaRuntimePropertiesKvpairs", deltaRuntimePropertiesKvpairs) - .add("effectiveAutoMaintenanceFlag", effectiveAutoMaintenanceFlag) - .add("enableAutoMaintenance", enableAutoMaintenance) + .add("effectivePredictiveOptimizationFlag", effectivePredictiveOptimizationFlag) + .add("enablePredictiveOptimization", enablePredictiveOptimization) .add("encryptionDetails", encryptionDetails) .add("fullName", fullName) .add("metastoreId", metastoreId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRuns.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRuns.java new file mode 100755 index 000000000..4cbd65156 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRuns.java @@ -0,0 +1,80 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class DeleteRuns { + /** The ID of the experiment containing the runs to delete. */ + @JsonProperty("experiment_id") + private String experimentId; + + /** + * An optional positive integer indicating the maximum number of runs to delete. The maximum + * allowed value for max_runs is 10000. + */ + @JsonProperty("max_runs") + private Long maxRuns; + + /** + * The maximum creation timestamp in milliseconds since the UNIX epoch for deleting runs. Only + * runs created prior to or at this timestamp are deleted. + */ + @JsonProperty("max_timestamp_millis") + private Long maxTimestampMillis; + + public DeleteRuns setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + public DeleteRuns setMaxRuns(Long maxRuns) { + this.maxRuns = maxRuns; + return this; + } + + public Long getMaxRuns() { + return maxRuns; + } + + public DeleteRuns setMaxTimestampMillis(Long maxTimestampMillis) { + this.maxTimestampMillis = maxTimestampMillis; + return this; + } + + public Long getMaxTimestampMillis() { + return maxTimestampMillis; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteRuns that = (DeleteRuns) o; + return Objects.equals(experimentId, that.experimentId) + && Objects.equals(maxRuns, that.maxRuns) + && Objects.equals(maxTimestampMillis, that.maxTimestampMillis); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId, maxRuns, maxTimestampMillis); + } + + @Override + public String toString() { + return new ToStringer(DeleteRuns.class) + .add("experimentId", experimentId) + .add("maxRuns", maxRuns) + .add("maxTimestampMillis", maxTimestampMillis) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunsResponse.java new file mode 100755 index 000000000..92aa1f824 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunsResponse.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class DeleteRunsResponse { + /** The number of runs deleted. 
*/ + @JsonProperty("runs_deleted") + private Long runsDeleted; + + public DeleteRunsResponse setRunsDeleted(Long runsDeleted) { + this.runsDeleted = runsDeleted; + return this; + } + + public Long getRunsDeleted() { + return runsDeleted; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteRunsResponse that = (DeleteRunsResponse) o; + return Objects.equals(runsDeleted, that.runsDeleted); + } + + @Override + public int hashCode() { + return Objects.hash(runsDeleted); + } + + @Override + public String toString() { + return new ToStringer(DeleteRunsResponse.class).add("runsDeleted", runsDeleted).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java index 479df68b8..54c08295b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java @@ -87,6 +87,21 @@ public void deleteRun(DeleteRun request) { impl.deleteRun(request); } + public DeleteRunsResponse deleteRuns(String experimentId, long maxTimestampMillis) { + return deleteRuns( + new DeleteRuns().setExperimentId(experimentId).setMaxTimestampMillis(maxTimestampMillis)); + } + + /** + * Delete runs by creation time. + * + *
<p>
Bulk delete runs in an experiment that were created prior to or at the specified timestamp. + * Deletes at most max_runs per request. + */ + public DeleteRunsResponse deleteRuns(DeleteRuns request) { + return impl.deleteRuns(request); + } + public void deleteTag(String runId, String key) { deleteTag(new DeleteTag().setRunId(runId).setKey(key)); } @@ -355,6 +370,21 @@ public void restoreRun(RestoreRun request) { impl.restoreRun(request); } + public RestoreRunsResponse restoreRuns(String experimentId, long minTimestampMillis) { + return restoreRuns( + new RestoreRuns().setExperimentId(experimentId).setMinTimestampMillis(minTimestampMillis)); + } + + /** + * Restore runs by deletion time. + * + *
<p>
Bulk restore runs in an experiment that were deleted no earlier than the specified + * timestamp. Restores at most max_runs per request. + */ + public RestoreRunsResponse restoreRuns(RestoreRuns request) { + return impl.restoreRuns(request); + } + /** * Search experiments. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java index dc5fb0f8c..1d712d0e4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java @@ -37,6 +37,12 @@ public void deleteRun(DeleteRun request) { apiClient.POST(path, request, Void.class); } + @Override + public DeleteRunsResponse deleteRuns(DeleteRuns request) { + String path = "/api/2.0/mlflow/databricks/runs/delete-runs"; + return apiClient.POST(path, request, DeleteRunsResponse.class); + } + @Override public void deleteTag(DeleteTag request) { String path = "/api/2.0/mlflow/runs/delete-tag"; @@ -136,6 +142,12 @@ public void restoreRun(RestoreRun request) { apiClient.POST(path, request, Void.class); } + @Override + public RestoreRunsResponse restoreRuns(RestoreRuns request) { + String path = "/api/2.0/mlflow/databricks/runs/restore-runs"; + return apiClient.POST(path, request, RestoreRunsResponse.class); + } + @Override public SearchExperimentsResponse searchExperiments(SearchExperiments request) { String path = "/api/2.0/mlflow/experiments/search"; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java index 9d3a983e8..3d28056e2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java @@ -53,6 +53,14 @@ public interface ExperimentsService { */ void deleteRun(DeleteRun deleteRun); + /** + * Delete runs by creation time. + * + *
<p>
Bulk delete runs in an experiment that were created prior to or at the specified timestamp. + * Deletes at most max_runs per request. + */ + DeleteRunsResponse deleteRuns(DeleteRuns deleteRuns); + /** * Delete a tag. * @@ -224,6 +232,14 @@ ExperimentPermissions getExperimentPermissions( */ void restoreRun(RestoreRun restoreRun); + /** + * Restore runs by deletion time. + * + *
<p>
Bulk restore runs in an experiment that were deleted no earlier than the specified + * timestamp. Restores at most max_runs per request. + */ + RestoreRunsResponse restoreRuns(RestoreRuns restoreRuns); + /** * Search experiments. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRuns.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRuns.java new file mode 100755 index 000000000..0ce329136 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRuns.java @@ -0,0 +1,80 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class RestoreRuns { + /** The ID of the experiment containing the runs to restore. */ + @JsonProperty("experiment_id") + private String experimentId; + + /** + * An optional positive integer indicating the maximum number of runs to restore. The maximum + * allowed value for max_runs is 10000. + */ + @JsonProperty("max_runs") + private Long maxRuns; + + /** + * The minimum deletion timestamp in milliseconds since the UNIX epoch for restoring runs. Only + * runs deleted no earlier than this timestamp are restored. + */ + @JsonProperty("min_timestamp_millis") + private Long minTimestampMillis; + + public RestoreRuns setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + public RestoreRuns setMaxRuns(Long maxRuns) { + this.maxRuns = maxRuns; + return this; + } + + public Long getMaxRuns() { + return maxRuns; + } + + public RestoreRuns setMinTimestampMillis(Long minTimestampMillis) { + this.minTimestampMillis = minTimestampMillis; + return this; + } + + public Long getMinTimestampMillis() { + return minTimestampMillis; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RestoreRuns that = (RestoreRuns) o; + return Objects.equals(experimentId, that.experimentId) + && Objects.equals(maxRuns, that.maxRuns) + && Objects.equals(minTimestampMillis, that.minTimestampMillis); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId, maxRuns, minTimestampMillis); + } + + @Override + public String toString() { + return new ToStringer(RestoreRuns.class) + .add("experimentId", experimentId) + .add("maxRuns", maxRuns) + .add("minTimestampMillis", minTimestampMillis) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunsResponse.java new file mode 100755 index 000000000..45b2a692f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunsResponse.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class RestoreRunsResponse { + /** The number of runs restored. 
*/ + @JsonProperty("runs_restored") + private Long runsRestored; + + public RestoreRunsResponse setRunsRestored(Long runsRestored) { + this.runsRestored = runsRestored; + return this; + } + + public Long getRunsRestored() { + return runsRestored; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RestoreRunsResponse that = (RestoreRunsResponse) o; + return Objects.equals(runsRestored, that.runsRestored); + } + + @Override + public int hashCode() { + return Objects.hash(runsRestored); + } + + @Override + public String toString() { + return new ToStringer(RestoreRunsResponse.class).add("runsRestored", runsRestored).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java index 8e0f647a1..3b9d4cbb0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** Name of the channel */ @Generated public enum ChannelName { CHANNEL_NAME_CURRENT, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretRequest.java new file mode 100755 index 000000000..9eedfcfac --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretRequest.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Get a secret */ +@Generated +public class GetSecretRequest { + /** The key to fetch secret for. */ + @QueryParam("key") + private String key; + + /** The name of the scope to fetch secret information from. */ + @QueryParam("scope") + private String scope; + + public GetSecretRequest setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public GetSecretRequest setScope(String scope) { + this.scope = scope; + return this; + } + + public String getScope() { + return scope; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetSecretRequest that = (GetSecretRequest) o; + return Objects.equals(key, that.key) && Objects.equals(scope, that.scope); + } + + @Override + public int hashCode() { + return Objects.hash(key, scope); + } + + @Override + public String toString() { + return new ToStringer(GetSecretRequest.class).add("key", key).add("scope", scope).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretResponse.java new file mode 100755 index 000000000..e763f9ea6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretResponse.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GetSecretResponse { + /** A unique name to identify the secret. */ + @JsonProperty("key") + private String key; + + /** The value of the secret in its byte representation. */ + @JsonProperty("value") + private String value; + + public GetSecretResponse setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public GetSecretResponse setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetSecretResponse that = (GetSecretResponse) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(GetSecretResponse.class).add("key", key).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsAPI.java index d2c4ca1ac..5786ad7e2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsAPI.java @@ -114,6 +114,27 @@ public AclItem getAcl(GetAclRequest request) { return impl.getAcl(request); } + public GetSecretResponse getSecret(String scope, String key) { + return getSecret(new GetSecretRequest().setScope(scope).setKey(key)); + } + + /** + * Get a secret. + * + *
<p>
Gets the bytes representation of a secret value for the specified scope and key. + * + *
<p>
Users need the READ permission to make this call. + * + *
<p>
Note that the secret value returned is in bytes. The interpretation of the bytes is + * determined by the caller in DBUtils and the type the data is decoded into. + * + *
<p>
Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. + * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists. + */ + public GetSecretResponse getSecret(GetSecretRequest request) { + return impl.getSecret(request); + } + public Iterable<AclItem> listAcls(String scope) { return listAcls(new ListAclsRequest().setScope(scope)); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java index c4e6a8bcf..b4ea8b377 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java @@ -43,6 +43,12 @@ public AclItem getAcl(GetAclRequest request) { return apiClient.GET(path, request, AclItem.class); } + @Override + public GetSecretResponse getSecret(GetSecretRequest request) { + String path = "/api/2.0/secrets/get"; + return apiClient.GET(path, request, GetSecretResponse.class); + } + @Override public ListAclsResponse listAcls(ListAclsRequest request) { String path = "/api/2.0/secrets/acls/list"; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsService.java index 3f67acaf7..397dc40dc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsService.java @@ -71,6 +71,21 @@ public interface SecretsService { */ AclItem getAcl(GetAclRequest getAclRequest); + /** + * Get a secret. + * + *
<p>
Gets the bytes representation of a secret value for the specified scope and key. + * + *
<p>
Users need the READ permission to make this call. + * + *
<p>
Note that the secret value returned is in bytes. The interpretation of the bytes is + * determined by the caller in DBUtils and the type the data is decoded into. + * + *
<p>
Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. + * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists. + */ + GetSecretResponse getSecret(GetSecretRequest getSecretRequest); + /** * Lists ACLs. * diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/SecretsIT.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/SecretsIT.java new file mode 100644 index 000000000..096d5cd72 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/SecretsIT.java @@ -0,0 +1,93 @@ +package com.databricks.sdk.integration; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.databricks.sdk.WorkspaceClient; +import com.databricks.sdk.integration.framework.EnvContext; +import com.databricks.sdk.integration.framework.EnvTest; +import com.databricks.sdk.mixin.SecretsExt; +import com.databricks.sdk.service.workspace.PutSecret; +import com.databricks.sdk.service.workspace.SecretMetadata; +import com.databricks.sdk.service.workspace.SecretScope; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +@EnvContext("workspace") +@ExtendWith(EnvTest.class) +public class SecretsIT { + @Test + void createAndGetSecretsTest(WorkspaceClient w) { + SecretsExt secretsExt = w.secrets(); + int randomSuffix = (int) (Math.random() * 1000); + String scope = "testScope-" + randomSuffix; + String key = "testKey-" + randomSuffix; + String value = "testValue-" + randomSuffix; + + try (SecretsTestResource ignored = SecretsTestResource.makeScope(secretsExt, scope)) { + + try (SecretsTestResource ignored1 = + SecretsTestResource.makeSecret(secretsExt, scope, key, value)) { + + Iterable<SecretMetadata> response = secretsExt.listSecrets(scope); + boolean foundSecret = false; + for (SecretMetadata s : response) { + if (s.getKey().equals(key)) { + foundSecret = true; + break; + } + } + + assertTrue(foundSecret); + + // TODO: Uncomment once secrets.get is enabled + // String responseValue = secretResource.secretsExt.get(scope, key); + // assertEquals(value, responseValue); + } + } + } + + @Test + void createAndListScopesTest(WorkspaceClient w) { + SecretsExt secretsExt = w.secrets(); + int randomSuffix = (int) (Math.random() * 1000); + String scope = "testScope-" + randomSuffix; + + try (SecretsTestResource ignored = SecretsTestResource.makeScope(secretsExt, scope)) { + + Iterable<SecretScope> response = secretsExt.listScopes(); + boolean foundScope = false; + for (SecretScope s : response) { + if (s.getName().equals(scope)) { + foundScope = true; + break; + } + } + + assertTrue(foundScope); + } + } + + private static class SecretsTestResource implements AutoCloseable { + private final Runnable cleanup; + + private SecretsTestResource(Runnable cleanup) { + this.cleanup = cleanup; + } + + public static SecretsTestResource makeSecret( + SecretsExt secretsExt, String scope, String key, String value) { + secretsExt.putSecret(new PutSecret().setScope(scope).setKey(key).setStringValue(value)); + return new SecretsTestResource(() -> secretsExt.deleteSecret(scope, key)); + } + + public static SecretsTestResource makeScope(SecretsExt secretsExt, String scope) { + secretsExt.createScope(scope); + return new SecretsTestResource(() -> secretsExt.deleteScope(scope)); + } + + @Override + public void close() { + cleanup.run(); + } + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/mixin/SecretsExtTest.java 
b/databricks-sdk-java/src/test/java/com/databricks/sdk/mixin/SecretsExtTest.java new file mode 100644 index 000000000..9a924de0b --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/mixin/SecretsExtTest.java @@ -0,0 +1,61 @@ +package com.databricks.sdk.mixin; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.databricks.sdk.service.workspace.*; +import java.nio.charset.StandardCharsets; +import java.util.Base64; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; + +public class SecretsExtTest { + @Mock SecretsService mockedSecretsService; + + private AutoCloseable closeable; + + @BeforeEach + void setup() { + closeable = MockitoAnnotations.openMocks(this); + } + + @AfterEach + void tearDown() throws Exception { + if (closeable != null) { + closeable.close(); + } + } + + @Test + void getBytesTest() { + SecretsExt mockedSecretExt = new SecretsExt(mockedSecretsService); + String encodedValue = + Base64.getEncoder() + .encodeToString("testValueBase64Decoded".getBytes(StandardCharsets.UTF_8)); + + Mockito.doReturn(new GetSecretResponse().setKey("testKey").setValue(encodedValue)) + .when(mockedSecretsService) + .getSecret(new GetSecretRequest().setScope("abc").setKey("xyz")); + + byte[] response = mockedSecretExt.getBytes("abc", "xyz"); + assertEquals("testValueBase64Decoded", new String(response, StandardCharsets.UTF_8)); + } + + @Test + void getTest() { + SecretsExt mockedSecretExt = new SecretsExt(mockedSecretsService); + String encodedValue = + Base64.getEncoder() + .encodeToString("testValueBase64Decoded".getBytes(StandardCharsets.UTF_8)); + + Mockito.doReturn(new GetSecretResponse().setKey("testKey").setValue(encodedValue)) + .when(mockedSecretsService) + .getSecret(new GetSecretRequest().setScope("abc").setKey("xyz")); + + String response = mockedSecretExt.get("abc", "xyz"); + assertEquals("testValueBase64Decoded", response); + } +}
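
For reference, a minimal usage sketch of the new bulk-restore call introduced above. This snippet is not part of the diff; it assumes a configured WorkspaceClient and that the generated ExperimentsAPI wrapper exposes restoreRuns(RestoreRuns) alongside the ExperimentsService method added here. The class name and experiment ID are illustrative placeholders.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.ml.RestoreRuns;
import com.databricks.sdk.service.ml.RestoreRunsResponse;

public class RestoreRunsExample {
  public static void main(String[] args) {
    // Credentials are resolved from the environment or ~/.databrickscfg.
    WorkspaceClient w = new WorkspaceClient();

    // Restore runs deleted within the last 24 hours, at most 100 per request.
    long cutoffMillis = System.currentTimeMillis() - 24 * 60 * 60 * 1000L;
    RestoreRunsResponse response =
        w.experiments()
            .restoreRuns(
                new RestoreRuns()
                    .setExperimentId("1234567890") // placeholder experiment ID
                    .setMinTimestampMillis(cutoffMillis)
                    .setMaxRuns(100L));
    System.out.println("Runs restored: " + response.getRunsRestored());
  }
}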
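
Similarly, a minimal sketch of reading a secret through the new getSecret endpoint and the SecretsExt mixin, assuming w.secrets() returns the SecretsExt wrapper as in SecretsIT above. Scope and key names are placeholders; as exercised in SecretsExtTest, the raw response carries the value base64-encoded, while the mixin's get/getBytes helpers decode it.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.mixin.SecretsExt;
import com.databricks.sdk.service.workspace.GetSecretResponse;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class GetSecretExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    SecretsExt secrets = w.secrets();

    // Raw API call: the value field is base64-encoded.
    GetSecretResponse raw = secrets.getSecret("my-scope", "my-key"); // placeholder scope/key
    byte[] decoded = Base64.getDecoder().decode(raw.getValue());
    System.out.println(new String(decoded, StandardCharsets.UTF_8));

    // Mixin helpers (see SecretsExtTest above): decoding is handled for you.
    String asString = secrets.get("my-scope", "my-key");
    byte[] asBytes = secrets.getBytes("my-scope", "my-key");
    System.out.println(asString.equals(new String(asBytes, StandardCharsets.UTF_8)));
  }
}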