Skip to content

Commit

Permalink
add databricks_serving_endpoints data source
Browse files Browse the repository at this point in the history
  • Loading branch information
nkvuong authored and alexott committed Nov 15, 2024
1 parent 6e7ca4c commit 83815d6
Show file tree
Hide file tree
Showing 4 changed files with 146 additions and 8 deletions.
55 changes: 55 additions & 0 deletions docs/data-sources/serving_endpoints.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
---
subcategory: "Serving"
---
# databricks_serving_endpoints Data Source

-> This data source can only be used with a workspace-level provider!

This data source allows you to get information about [Model Serving](https://docs.databricks.com/machine-learning/model-serving/index.html) endpoints in Databricks.

## Example Usage

```hcl
data "databricks_serving_endpoints" "all" {
}
resource "databricks_permissions" "ml_serving_usage" {
for_each = data.databricks_serving_endpoints.all.endpoints
serving_endpoint_id = each.value.id
access_control {
group_name = "users"
permission_level = "CAN_VIEW"
}
access_control {
group_name = databricks_group.auto.display_name
permission_level = "CAN_MANAGE"
}
access_control {
group_name = databricks_group.eng.display_name
permission_level = "CAN_QUERY"
}
}
```

## Attribute Reference

The following attributes are exported:

* `endpoints` - List of objects describing the serving endpoints. Each object consists of the following attributes:
* `name` - The name of the model serving endpoint.
* `config` - The model serving endpoint configuration.
* `tags` - Tags to be attached to the serving endpoint and automatically propagated to billing logs.
* `rate_limits` - A list of rate limit blocks to be applied to the serving endpoint.
* `ai_gateway` - A block with AI Gateway configuration for the serving endpoint.
* `route_optimized` - A boolean enabling route optimization for the endpoint.

See the [`databricks_model_serving` resource](../resources/model_serving.md) for the full list of attributes for each block.

## Related Resources

The following resources are often used in the same context:

* [databricks_permissions](../resources/permissions.md#model-serving-usage) can control which groups or individual users can *Manage*, *Query* or *View* individual serving endpoints.
3 changes: 3 additions & 0 deletions internal/acceptance/model_serving_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,8 @@ func TestAccModelServing(t *testing.T) {
}
}
data "databricks_serving_endpoints" "all" {}
resource "databricks_permissions" "ml_serving_usage" {
serving_endpoint_id = databricks_model_serving.endpoint.serving_endpoint_id
Expand Down Expand Up @@ -77,6 +79,7 @@ func TestAccModelServing(t *testing.T) {
}
}
}
data "databricks_serving_endpoints" "all" {}
`, name),
},
)
Expand Down
18 changes: 10 additions & 8 deletions internal/providers/pluginfw/pluginfw_rollout_utils.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,14 +12,15 @@ import (
"slices"
"strings"

"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/catalog"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/cluster"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/library"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/notificationdestinations"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/qualitymonitor"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/registered_model"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/sharing"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/volume"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/catalog"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/cluster"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/library"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/notificationdestinations"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/qualitymonitor"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/registered_model"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/serving"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/sharing"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/volume"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/resource"
)
Expand All @@ -43,6 +44,7 @@ var pluginFwOnlyResources = []func() resource.Resource{

// List of data sources that have been onboarded to the plugin framework - not migrated from sdkv2.
var pluginFwOnlyDataSources = []func() datasource.DataSource{
serving.DataSourceServingEndpoints,
registered_model.DataSourceRegisteredModel,
notificationdestinations.DataSourceNotificationDestinations,
catalog.DataSourceFunctions,
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
package serving

import (
"context"

"github.com/databricks/databricks-sdk-go/apierr"
"github.com/databricks/terraform-provider-databricks/common"
pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
"github.com/databricks/terraform-provider-databricks/internal/service/serving_tf"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
)

// DataSourceServingEndpoints constructs the databricks_serving_endpoints data source.
func DataSourceServingEndpoints() datasource.DataSource {
	ds := &ServingEndpointsDataSource{}
	return ds
}

var _ datasource.DataSourceWithConfigure = &ServingEndpointsDataSource{}

// ServingEndpointsDataSource implements the databricks_serving_endpoints
// data source on the Terraform plugin framework.
type ServingEndpointsDataSource struct {
	// Client is the workspace-level Databricks client, populated by Configure.
	Client *common.DatabricksClient
}

// ServingEndpointsData is the Terraform state/config model for the
// databricks_serving_endpoints data source: a single computed list holding
// every model serving endpoint visible in the workspace.
type ServingEndpointsData struct {
	Endpoints []serving_tf.ServingEndpoint `tfsdk:"endpoints" tf:"optional,computed"`
}

// Metadata sets the type name under which this data source is exposed to
// Terraform configurations.
func (d *ServingEndpointsDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
	const typeName = "databricks_serving_endpoints"
	resp.TypeName = typeName
}

// Schema derives the Terraform schema for this data source from the
// ServingEndpointsData model struct.
func (d *ServingEndpointsDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
	attrs, blocks := tfschema.DataSourceStructToSchemaMap(ServingEndpointsData{}, nil)
	s := schema.Schema{
		Attributes: attrs,
		Blocks:     blocks,
	}
	resp.Schema = s
}

// Configure stores the provider-supplied Databricks client on the data
// source. It is a no-op if a client has already been set.
func (d *ServingEndpointsDataSource) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
	if d.Client != nil {
		return
	}
	d.Client = pluginfwcommon.ConfigureDataSource(req, resp)
}

// Read lists every model serving endpoint in the workspace, converts each
// SDK struct into its Terraform model, and writes the result to state.
func (d *ServingEndpointsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
	w, diags := d.Client.GetWorkspaceClient()
	resp.Diagnostics.Append(diags...)
	if resp.Diagnostics.HasError() {
		return
	}

	var endpoints ServingEndpointsData
	resp.Diagnostics.Append(req.Config.Get(ctx, &endpoints)...)
	if resp.Diagnostics.HasError() {
		return
	}

	endpointsInfoSdk, err := w.ServingEndpoints.ListAll(ctx)
	if err != nil {
		// On a "missing" API error, drop the stale state before reporting
		// the failure (matches this codebase's convention for Read).
		if apierr.IsMissing(err) {
			resp.State.RemoveResource(ctx)
		}
		resp.Diagnostics.AddError("failed to list endpoints", err.Error())
		return
	}

	// Pre-size the result slice: the final length is known, so avoid
	// repeated growth copies while appending.
	endpoints.Endpoints = make([]serving_tf.ServingEndpoint, 0, len(endpointsInfoSdk))
	for _, endpoint := range endpointsInfoSdk {
		var endpointTf serving_tf.ServingEndpoint
		resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, endpoint, &endpointTf)...)
		if resp.Diagnostics.HasError() {
			return
		}
		endpoints.Endpoints = append(endpoints.Endpoints, endpointTf)
	}
	resp.Diagnostics.Append(resp.State.Set(ctx, endpoints)...)
}

0 comments on commit 83815d6

Please sign in to comment.