From 0d6422b924bb9fe6eebb56d2ff915b9ff2784f94 Mon Sep 17 00:00:00 2001 From: Adrian Nackov Date: Tue, 3 Dec 2024 07:43:41 +0000 Subject: [PATCH] ref 635294: server update schedules Signed-off-by: Adrian Nackov --- docs/data-sources/resourcemanager_project.md | 12 +- docs/data-sources/server_update_schedule.md | 41 ++ docs/data-sources/server_update_schedules.md | 47 ++ docs/index.md | 1 + docs/resources/resourcemanager_project.md | 11 + docs/resources/server_update_schedule.md | 44 ++ .../data-source.tf | 5 + .../data-source.tf | 4 + .../resource.tf | 8 + go.mod | 5 +- go.sum | 6 + stackit/internal/core/core.go | 1 + .../serverupdate/schedule/resource.go | 452 ++++++++++++++++++ .../serverupdate/schedule/resource_test.go | 223 +++++++++ .../schedule/schedule_datasource.go | 182 +++++++ .../schedule/schedules_datasource.go | 233 +++++++++ .../schedule/schedules_datasource_test.go | 92 ++++ .../serverupdate/serverupdate_acc_test.go | 231 +++++++++ stackit/internal/testutil/testutil.go | 16 + stackit/provider.go | 10 + 20 files changed, 1622 insertions(+), 2 deletions(-) create mode 100644 docs/data-sources/server_update_schedule.md create mode 100644 docs/data-sources/server_update_schedules.md create mode 100644 docs/resources/server_update_schedule.md create mode 100644 examples/data-sources/stackit_server_update_schedule/data-source.tf create mode 100644 examples/data-sources/stackit_server_update_schedules/data-source.tf create mode 100644 examples/resources/stackit_server_update_schedule/resource.tf create mode 100644 stackit/internal/services/serverupdate/schedule/resource.go create mode 100644 stackit/internal/services/serverupdate/schedule/resource_test.go create mode 100644 stackit/internal/services/serverupdate/schedule/schedule_datasource.go create mode 100644 stackit/internal/services/serverupdate/schedule/schedules_datasource.go create mode 100644 stackit/internal/services/serverupdate/schedule/schedules_datasource_test.go create mode 100644 stackit/internal/services/serverupdate/serverupdate_acc_test.go diff --git a/docs/data-sources/resourcemanager_project.md b/docs/data-sources/resourcemanager_project.md index b2cd5e88..8e28332a 100644 --- a/docs/data-sources/resourcemanager_project.md +++ b/docs/data-sources/resourcemanager_project.md @@ -20,7 +20,6 @@ data "stackit_resourcemanager_project" "example" { ``` - ## Schema ### Optional @@ -33,5 +32,16 @@ data "stackit_resourcemanager_project" "example" { - `id` (String) Terraform's internal data source. ID. It is structured as "`container_id`". - `labels` (Map of String) Labels are key-value string pairs which can be attached to a resource container. A label key must match the regex [A-ZÄÜÖa-zäüöß0-9_-]{1,64}. A label value must match the regex ^$|[A-ZÄÜÖa-zäüöß0-9_-]{1,64} +- `members` (Attributes List, Deprecated) The members assigned to the project. At least one subject needs to be a user, and not a client or service account. This value is only considered during creation. Changing it afterwards will have no effect. + +!> The "members" field has been deprecated in favor of the "owner_email" field. Please use the "owner_email" field to assign the owner role to a user. (see [below for nested schema](#nestedatt--members)) - `name` (String) Project name. - `parent_container_id` (String) Parent resource identifier. Both container ID (user-friendly) and UUID are supported + + +### Nested Schema for `members` + +Read-Only: + +- `role` (String) The role of the member in the project. 
Legacy roles (`project.admin`, `project.auditor`, `project.member`, `project.owner`) are not supported. +- `subject` (String) Unique identifier of the user, service account or client. This is usually the email address for users or service accounts, and the name in case of clients. diff --git a/docs/data-sources/server_update_schedule.md b/docs/data-sources/server_update_schedule.md new file mode 100644 index 00000000..00b678db --- /dev/null +++ b/docs/data-sources/server_update_schedule.md @@ -0,0 +1,41 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "stackit_server_update_schedule Data Source - stackit" +subcategory: "" +description: |- + Server update schedule datasource schema. Must have a region specified in the provider configuration. + ~> This resource is in beta and may be subject to breaking changes in the future. Use with caution. See our guide https://registry.terraform.io/providers/stackitcloud/stackit/latest/docs/guides/opting_into_beta_resources for how to opt-in to use beta resources. +--- + +# stackit_server_update_schedule (Data Source) + +Server update schedule datasource schema. Must have a `region` specified in the provider configuration. + +~> This resource is in beta and may be subject to breaking changes in the future. Use with caution. See our [guide](https://registry.terraform.io/providers/stackitcloud/stackit/latest/docs/guides/opting_into_beta_resources) for how to opt-in to use beta resources. + +## Example Usage + +```terraform +data "stackit_server_update_schedule" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + server_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + update_schedule_id = xxxxx +} +``` + + +## Schema + +### Required + +- `project_id` (String) STACKIT Project ID to which the server is associated. +- `server_id` (String) Server ID for the update schedule. +- `update_schedule_id` (Number) Update schedule ID. + +### Read-Only + +- `enabled` (Boolean) Is the update schedule enabled or disabled. +- `id` (String) Terraform's internal resource identifier. It is structured as "`project_id`,`server_id`,`update_schedule_id`". +- `maintenance_window` (Number) Maintenance window [1..24]. +- `name` (String) The schedule name. +- `rrule` (String) Update schedule described in `rrule` (recurrence rule) format. diff --git a/docs/data-sources/server_update_schedules.md b/docs/data-sources/server_update_schedules.md new file mode 100644 index 00000000..be485026 --- /dev/null +++ b/docs/data-sources/server_update_schedules.md @@ -0,0 +1,47 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "stackit_server_update_schedules Data Source - stackit" +subcategory: "" +description: |- + Server update schedules datasource schema. Must have a region specified in the provider configuration. + ~> This resource is in beta and may be subject to breaking changes in the future. Use with caution. See our guide https://registry.terraform.io/providers/stackitcloud/stackit/latest/docs/guides/opting_into_beta_resources for how to opt-in to use beta resources. +--- + +# stackit_server_update_schedules (Data Source) + +Server update schedules datasource schema. Must have a `region` specified in the provider configuration. + +~> This resource is in beta and may be subject to breaking changes in the future. Use with caution. See our [guide](https://registry.terraform.io/providers/stackitcloud/stackit/latest/docs/guides/opting_into_beta_resources) for how to opt-in to use beta resources. 
+ +## Example Usage + +```terraform +data "stackit_server_update_schedules" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + server_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" +} +``` + + +## Schema + +### Required + +- `project_id` (String) STACKIT Project ID (UUID) to which the server is associated. +- `server_id` (String) Server ID (UUID) to which the update schedule is associated. + +### Read-Only + +- `id` (String) Terraform's internal data source identifier. It is structured as "`project_id`,`server_id`". +- `items` (Attributes List) (see [below for nested schema](#nestedatt--items)) + + +### Nested Schema for `items` + +Read-Only: + +- `enabled` (Boolean) Is the update schedule enabled or disabled. +- `maintenance_window` (Number) Maintenance window [1..24]. +- `name` (String) The update schedule name. +- `rrule` (String) Update schedule described in `rrule` (recurrence rule) format. +- `update_schedule_id` (Number) diff --git a/docs/index.md b/docs/index.md index 25d496a6..46b23106 100644 --- a/docs/index.md +++ b/docs/index.md @@ -173,6 +173,7 @@ Note: AWS specific checks must be skipped as they do not work on STACKIT. For de - `resourcemanager_custom_endpoint` (String) Custom endpoint for the Resource Manager service - `secretsmanager_custom_endpoint` (String) Custom endpoint for the Secrets Manager service - `server_backup_custom_endpoint` (String) Custom endpoint for the Server Backup service +- `server_update_custom_endpoint` (String) Custom endpoint for the Server Update service - `service_account_email` (String) Service account email. It can also be set using the environment variable STACKIT_SERVICE_ACCOUNT_EMAIL. It is required if you want to use the resource manager project resource. - `service_account_key` (String) Service account key used for authentication. If set, the key flow will be used to authenticate all operations. - `service_account_key_path` (String) Path for the service account key used for authentication. If set, the key flow will be used to authenticate all operations. diff --git a/docs/resources/resourcemanager_project.md b/docs/resources/resourcemanager_project.md index b64b4e5f..bfb8b261 100644 --- a/docs/resources/resourcemanager_project.md +++ b/docs/resources/resourcemanager_project.md @@ -35,9 +35,20 @@ resource "stackit_resourcemanager_project" "example" { ### Optional - `labels` (Map of String) Labels are key-value string pairs which can be attached to a resource container. A label key must match the regex [A-ZÄÜÖa-zäüöß0-9_-]{1,64}. A label value must match the regex ^$|[A-ZÄÜÖa-zäüöß0-9_-]{1,64} +- `members` (Attributes List, Deprecated) The members assigned to the project. At least one subject needs to be a user, and not a client or service account. This value is only considered during creation. Changing it afterwards will have no effect. + +!> The "members" field has been deprecated in favor of the "owner_email" field. Please use the "owner_email" field to assign the owner role to a user. (see [below for nested schema](#nestedatt--members)) ### Read-Only - `container_id` (String) Project container ID. Globally unique, user-friendly identifier. - `id` (String) Terraform's internal resource ID. It is structured as "`container_id`". - `project_id` (String) Project UUID identifier. This is the ID that can be used in most of the other resources to identify the project. + + +### Nested Schema for `members` + +Required: + +- `role` (String) The role of the member in the project. 
Possible values include, but are not limited to: `owner`, `editor`, `reader`. Legacy roles (`project.admin`, `project.auditor`, `project.member`, `project.owner`) are not supported. +- `subject` (String) Unique identifier of the user, service account or client. This is usually the email address for users or service accounts, and the name in case of clients. diff --git a/docs/resources/server_update_schedule.md b/docs/resources/server_update_schedule.md new file mode 100644 index 00000000..e824ab01 --- /dev/null +++ b/docs/resources/server_update_schedule.md @@ -0,0 +1,44 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "stackit_server_update_schedule Resource - stackit" +subcategory: "" +description: |- + Server update schedule resource schema. Must have a region specified in the provider configuration. + ~> This resource is in beta and may be subject to breaking changes in the future. Use with caution. See our guide https://registry.terraform.io/providers/stackitcloud/stackit/latest/docs/guides/opting_into_beta_resources for how to opt-in to use beta resources. +--- + +# stackit_server_update_schedule (Resource) + +Server update schedule resource schema. Must have a `region` specified in the provider configuration. + +~> This resource is in beta and may be subject to breaking changes in the future. Use with caution. See our [guide](https://registry.terraform.io/providers/stackitcloud/stackit/latest/docs/guides/opting_into_beta_resources) for how to opt-in to use beta resources. + +## Example Usage + +```terraform +resource "stackit_server_update_schedule" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + server_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + name = "example_update_schedule_name" + rrule = "DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1" + enabled = true + maintenance_window = 1 +} +``` + + +## Schema + +### Required + +- `enabled` (Boolean) Is the update schedule enabled or disabled. +- `maintenance_window` (Number) Maintenance window [1..24]. +- `name` (String) The schedule name. +- `project_id` (String) STACKIT Project ID to which the server is associated. +- `rrule` (String) Update schedule described in `rrule` (recurrence rule) format. +- `server_id` (String) Server ID for the update schedule. + +### Read-Only + +- `id` (String) Terraform's internal resource identifier. It is structured as "`project_id`,`server_id`,`update_schedule_id`". +- `update_schedule_id` (Number) Update schedule ID. 
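+
+## Import
+
+An existing server update schedule can be imported using the resource identifier described above, structured as "`project_id`,`server_id`,`update_schedule_id`". The snippet below is a sketch using a Terraform `import` block (Terraform 1.5+) with placeholder values:
+
+```terraform
+import {
+  to = stackit_server_update_schedule.example
+  id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx,xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx,1"
+}
+```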
diff --git a/examples/data-sources/stackit_server_update_schedule/data-source.tf b/examples/data-sources/stackit_server_update_schedule/data-source.tf new file mode 100644 index 00000000..694762a7 --- /dev/null +++ b/examples/data-sources/stackit_server_update_schedule/data-source.tf @@ -0,0 +1,5 @@ +data "stackit_server_update_schedule" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + server_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + update_schedule_id = xxxxx +} diff --git a/examples/data-sources/stackit_server_update_schedules/data-source.tf b/examples/data-sources/stackit_server_update_schedules/data-source.tf new file mode 100644 index 00000000..1d291643 --- /dev/null +++ b/examples/data-sources/stackit_server_update_schedules/data-source.tf @@ -0,0 +1,4 @@ +data "stackit_server_update_schedules" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + server_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" +} diff --git a/examples/resources/stackit_server_update_schedule/resource.tf b/examples/resources/stackit_server_update_schedule/resource.tf new file mode 100644 index 00000000..05731ecf --- /dev/null +++ b/examples/resources/stackit_server_update_schedule/resource.tf @@ -0,0 +1,8 @@ +resource "stackit_server_update_schedule" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + server_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + name = "example_update_schedule_name" + rrule = "DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1" + enabled = true + maintenance_window = 1 +} diff --git a/go.mod b/go.mod index c093e40e..933204b8 100644 --- a/go.mod +++ b/go.mod @@ -36,7 +36,10 @@ require ( golang.org/x/mod v0.22.0 ) -require github.com/hashicorp/go-retryablehttp v0.7.7 // indirect +require ( + github.com/hashicorp/go-retryablehttp v0.7.7 // indirect + github.com/stackitcloud/stackit-sdk-go/services/serverupdate v0.2.2 // indirect +) require ( github.com/ProtonMail/go-crypto v1.1.0-alpha.2 // indirect diff --git a/go.sum b/go.sum index e04d3c56..6914449b 100644 --- a/go.sum +++ b/go.sum @@ -185,6 +185,12 @@ github.com/stackitcloud/stackit-sdk-go/services/secretsmanager v0.10.0 h1:HrnEgR github.com/stackitcloud/stackit-sdk-go/services/secretsmanager v0.10.0/go.mod h1:268uoY2gKCa5xcDL169TGVjLUNTcZ2En77YdfYOcR1w= github.com/stackitcloud/stackit-sdk-go/services/serverbackup v0.3.0 h1:Tlps8vBQmQ1mx2YFbzOzMIyWtXGJy7X3N9Qk3qk88Cc= github.com/stackitcloud/stackit-sdk-go/services/serverbackup v0.3.0/go.mod h1:+807U5ZLXns+CEbyIg483wNEwV10vaN6GjMnSZhw/64= +github.com/stackitcloud/stackit-sdk-go/services/serverupdate v0.1.0 h1:jTGCE71TX/Hafp6FqM/wIiHcTWreAktnmqEi4g97SGk= +github.com/stackitcloud/stackit-sdk-go/services/serverupdate v0.1.0/go.mod h1:etidTptNDvvCPA1FGC7T9DXHxXA4bYW3qIUzWG8wVcc= +github.com/stackitcloud/stackit-sdk-go/services/serverupdate v0.2.1 h1:fzYC2JifJUJ37mAKjGOEbNFjgK+DQRwjkoJ7NDXaQvQ= +github.com/stackitcloud/stackit-sdk-go/services/serverupdate v0.2.1/go.mod h1:etidTptNDvvCPA1FGC7T9DXHxXA4bYW3qIUzWG8wVcc= +github.com/stackitcloud/stackit-sdk-go/services/serverupdate v0.2.2 h1:HXZ/hjO3shFPLrrmSb1G9eXDCSYrMeYveklm6rEUd0Y= +github.com/stackitcloud/stackit-sdk-go/services/serverupdate v0.2.2/go.mod h1:etidTptNDvvCPA1FGC7T9DXHxXA4bYW3qIUzWG8wVcc= github.com/stackitcloud/stackit-sdk-go/services/serviceenablement v0.3.0 h1:6IZBX9fyza9Eln3FHGHquvLNXQslk+dtkQp41G9+7+Y= github.com/stackitcloud/stackit-sdk-go/services/serviceenablement v0.3.0/go.mod h1:zyg0hpiNdZLRbelkJb2KDf9OHQKLqqcTpePQ1qHL5dE= 
github.com/stackitcloud/stackit-sdk-go/services/ske v0.20.0 h1:ssEywzCS8IdRtzyxweLUKBG5GFbgwjNWJh++wGqigJM= diff --git a/stackit/internal/core/core.go b/stackit/internal/core/core.go index ecdd3361..b0e9ec2a 100644 --- a/stackit/internal/core/core.go +++ b/stackit/internal/core/core.go @@ -35,6 +35,7 @@ type ProviderData struct { SecretsManagerCustomEndpoint string SQLServerFlexCustomEndpoint string ServerBackupCustomEndpoint string + ServerUpdateCustomEndpoint string SKECustomEndpoint string ServiceEnablementCustomEndpoint string EnableBetaResources bool diff --git a/stackit/internal/services/serverupdate/schedule/resource.go b/stackit/internal/services/serverupdate/schedule/resource.go new file mode 100644 index 00000000..246cfd42 --- /dev/null +++ b/stackit/internal/services/serverupdate/schedule/resource.go @@ -0,0 +1,452 @@ +package schedule + +import ( + "context" + "fmt" + "net/http" + "strconv" + "strings" + + "github.com/hashicorp/terraform-plugin-framework-validators/int64validator" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + + "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/conversion" + "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/core" + "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/features" + "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/validate" + + "github.com/stackitcloud/stackit-sdk-go/core/config" + "github.com/stackitcloud/stackit-sdk-go/core/oapierror" + "github.com/stackitcloud/stackit-sdk-go/services/serverupdate" +) + +// resourceBetaCheckDone is used to prevent multiple checks for beta resources. +// This is a workaround for the lack of a global state in the provider and +// needs to exist because the Configure method is called twice. +var resourceBetaCheckDone bool + +// Ensure the implementation satisfies the expected interfaces. +var ( + _ resource.Resource = &scheduleResource{} + _ resource.ResourceWithConfigure = &scheduleResource{} + _ resource.ResourceWithImportState = &scheduleResource{} +) + +type Model struct { + ID types.String `tfsdk:"id"` + ProjectId types.String `tfsdk:"project_id"` + ServerId types.String `tfsdk:"server_id"` + UpdateScheduleId types.Int64 `tfsdk:"update_schedule_id"` + Name types.String `tfsdk:"name"` + Rrule types.String `tfsdk:"rrule"` + Enabled types.Bool `tfsdk:"enabled"` + MaintenanceWindow types.Int64 `tfsdk:"maintenance_window"` +} + +// NewScheduleResource is a helper function to simplify the provider implementation. +func NewScheduleResource() resource.Resource { + return &scheduleResource{} +} + +// scheduleResource is the resource implementation. +type scheduleResource struct { + client *serverupdate.APIClient +} + +// Metadata returns the resource type name. 
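+// With the "stackit" provider prefix this resolves to the stackit_server_update_schedule resource type.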
+func (r *scheduleResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_server_update_schedule" +} + +// Configure adds the provider configured client to the resource. +func (r *scheduleResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + providerData, ok := req.ProviderData.(core.ProviderData) + if !ok { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Expected configure type stackit.ProviderData, got %T", req.ProviderData)) + return + } + + if !resourceBetaCheckDone { + features.CheckBetaResourcesEnabled(ctx, &providerData, &resp.Diagnostics, "stackit_server_update_schedule", "resource") + if resp.Diagnostics.HasError() { + return + } + resourceBetaCheckDone = true + } + + var apiClient *serverupdate.APIClient + var err error + if providerData.ServerUpdateCustomEndpoint != "" { + ctx = tflog.SetField(ctx, "server_update_custom_endpoint", providerData.ServerUpdateCustomEndpoint) + apiClient, err = serverupdate.NewAPIClient( + config.WithCustomAuth(providerData.RoundTripper), + config.WithEndpoint(providerData.ServerUpdateCustomEndpoint), + ) + } else { + apiClient, err = serverupdate.NewAPIClient( + config.WithCustomAuth(providerData.RoundTripper), + config.WithRegion(providerData.Region), + ) + } + + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", err)) + return + } + + r.client = apiClient + tflog.Info(ctx, "Server update client configured.") +} + +// Schema defines the schema for the resource. +func (r *scheduleResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + Description: "Server update schedule resource schema. Must have a `region` specified in the provider configuration.", + MarkdownDescription: features.AddBetaDescription("Server update schedule resource schema. Must have a `region` specified in the provider configuration."), + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + Description: "Terraform's internal resource identifier. 
It is structured as \"`project_id`,`server_id`,`update_schedule_id`\".", + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.UseStateForUnknown(), + }, + }, + "name": schema.StringAttribute{ + Description: "The schedule name.", + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + stringplanmodifier.UseStateForUnknown(), + }, + Validators: []validator.String{ + stringvalidator.LengthBetween(1, 255), + }, + }, + "update_schedule_id": schema.Int64Attribute{ + Description: "Update schedule ID.", + Computed: true, + PlanModifiers: []planmodifier.Int64{ + int64planmodifier.UseStateForUnknown(), + }, + Validators: []validator.Int64{ + int64validator.AtLeast(1), + }, + }, + "project_id": schema.StringAttribute{ + Description: "STACKIT Project ID to which the server is associated.", + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + stringplanmodifier.UseStateForUnknown(), + }, + Validators: []validator.String{ + validate.UUID(), + validate.NoSeparator(), + }, + }, + "server_id": schema.StringAttribute{ + Description: "Server ID for the update schedule.", + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + stringplanmodifier.UseStateForUnknown(), + }, + Validators: []validator.String{ + validate.UUID(), + validate.NoSeparator(), + }, + }, + "rrule": schema.StringAttribute{ + Description: "Update schedule described in `rrule` (recurrence rule) format.", + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + stringplanmodifier.UseStateForUnknown(), + }, + Validators: []validator.String{ + validate.Rrule(), + validate.NoSeparator(), + }, + }, + "enabled": schema.BoolAttribute{ + Description: "Is the update schedule enabled or disabled.", + Required: true, + }, + "maintenance_window": schema.Int64Attribute{ + Description: "Maintenance window [1..24].", + Required: true, + Validators: []validator.Int64{ + int64validator.AtLeast(1), + int64validator.AtMost(24), + }, + }, + }, + } +} + +// Create creates the resource and sets the initial Terraform state. +func (r *scheduleResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { // nolint:gocritic // function signature required by Terraform + var model Model + diags := req.Plan.Get(ctx, &model) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } + projectId := model.ProjectId.ValueString() + serverId := model.ServerId.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "server_id", serverId) + + // Enable updates if not already enabled + err := enableUpdatesService(ctx, &model, r.client) + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating server update schedule", fmt.Sprintf("Enabling server update project before creation: %v", err)) + return + } + + // Create new schedule + payload, err := toCreatePayload(&model) + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating server update schedule", fmt.Sprintf("Creating API payload: %v", err)) + return + } + scheduleResp, err := r.client.CreateUpdateSchedule(ctx, projectId, serverId).CreateUpdateSchedulePayload(*payload).Execute() + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating server update schedule", fmt.Sprintf("Calling API: %v", err)) + return + } + ctx = tflog.SetField(ctx, "update_schedule_id", *scheduleResp.Id) + + // Map response body to schema + err = mapFields(scheduleResp, &model) + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating server update schedule", fmt.Sprintf("Processing API payload: %v", err)) + return + } + diags = resp.State.Set(ctx, model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + tflog.Info(ctx, "Server update schedule created.") +} + +// Read refreshes the Terraform state with the latest data. +func (r *scheduleResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { // nolint:gocritic // function signature required by Terraform + var model Model + diags := req.State.Get(ctx, &model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + projectId := model.ProjectId.ValueString() + serverId := model.ServerId.ValueString() + updateScheduleId := model.UpdateScheduleId.ValueInt64() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "server_id", serverId) + ctx = tflog.SetField(ctx, "update_schedule_id", updateScheduleId) + + scheduleResp, err := r.client.GetUpdateSchedule(ctx, projectId, serverId, strconv.FormatInt(updateScheduleId, 10)).Execute() + if err != nil { + oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped + if ok && oapiErr.StatusCode == http.StatusNotFound { + resp.State.RemoveResource(ctx) + return + } + core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading update schedule", fmt.Sprintf("Calling API: %v", err)) + return + } + + // Map response body to schema + err = mapFields(scheduleResp, &model) + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading update schedule", fmt.Sprintf("Processing API payload: %v", err)) + return + } + + // Set refreshed state + diags = resp.State.Set(ctx, model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + tflog.Info(ctx, "Server update schedule read.") +} + +// Update updates the resource and sets the updated Terraform state on success. 
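+// Note: project_id, server_id, name and rrule use the RequiresReplace plan modifier in the
+// schema above, so in practice only changes to enabled and maintenance_window are applied
+// here; changes to the other attributes recreate the resource.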
+func (r *scheduleResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { // nolint:gocritic // function signature required by Terraform + var model Model + diags := req.Plan.Get(ctx, &model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + projectId := model.ProjectId.ValueString() + serverId := model.ServerId.ValueString() + updateScheduleId := model.UpdateScheduleId.ValueInt64() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "server_id", serverId) + ctx = tflog.SetField(ctx, "update_schedule_id", updateScheduleId) + + // Update schedule + payload, err := toUpdatePayload(&model) + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating server update schedule", fmt.Sprintf("Creating API payload: %v", err)) + return + } + + scheduleResp, err := r.client.UpdateUpdateSchedule(ctx, projectId, serverId, strconv.FormatInt(updateScheduleId, 10)).UpdateUpdateSchedulePayload(*payload).Execute() + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating server update schedule", fmt.Sprintf("Calling API: %v", err)) + return + } + + // Map response body to schema + err = mapFields(scheduleResp, &model) + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating server update schedule", fmt.Sprintf("Processing API payload: %v", err)) + return + } + diags = resp.State.Set(ctx, model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + tflog.Info(ctx, "Server update schedule updated.") +} + +// Delete deletes the resource and removes the Terraform state on success. +func (r *scheduleResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { // nolint:gocritic // function signature required by Terraform + var model Model + diags := req.State.Get(ctx, &model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + projectId := model.ProjectId.ValueString() + serverId := model.ServerId.ValueString() + updateScheduleId := model.UpdateScheduleId.ValueInt64() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "server_id", serverId) + ctx = tflog.SetField(ctx, "update_schedule_id", updateScheduleId) + + err := r.client.DeleteUpdateSchedule(ctx, projectId, serverId, strconv.FormatInt(updateScheduleId, 10)).Execute() + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting server update schedule", fmt.Sprintf("Calling API: %v", err)) + return + } + tflog.Info(ctx, "Server update schedule deleted.") +} + +// ImportState imports a resource into the Terraform state on success. 
+// The expected format of the resource import identifier is: // project_id,server_id,schedule_id +func (r *scheduleResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + idParts := strings.Split(req.ID, core.Separator) + if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" { + core.LogAndAddError(ctx, &resp.Diagnostics, + "Error importing server update schedule", + fmt.Sprintf("Expected import identifier with format [project_id],[server_id],[update_schedule_id], got %q", req.ID), + ) + return + } + + intId, err := strconv.ParseInt(idParts[2], 10, 64) + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, + "Error importing server update schedule", + fmt.Sprintf("Expected update_schedule_id to be int64, got %q", idParts[2]), + ) + return + } + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("server_id"), idParts[1])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("update_schedule_id"), intId)...) + tflog.Info(ctx, "Server update schedule state imported.") +} + +func mapFields(schedule *serverupdate.UpdateSchedule, model *Model) error { + if schedule == nil { + return fmt.Errorf("response input is nil") + } + if model == nil { + return fmt.Errorf("model input is nil") + } + if schedule.Id == nil { + return fmt.Errorf("response id is nil") + } + + model.UpdateScheduleId = types.Int64PointerValue(schedule.Id) + idParts := []string{ + model.ProjectId.ValueString(), + model.ServerId.ValueString(), + strconv.FormatInt(model.UpdateScheduleId.ValueInt64(), 10), + } + model.ID = types.StringValue( + strings.Join(idParts, core.Separator), + ) + model.Name = types.StringPointerValue(schedule.Name) + model.Rrule = types.StringPointerValue(schedule.Rrule) + model.Enabled = types.BoolPointerValue(schedule.Enabled) + model.MaintenanceWindow = types.Int64PointerValue(schedule.MaintenanceWindow) + return nil +} + +// If already enabled, just continues +func enableUpdatesService(ctx context.Context, model *Model, client *serverupdate.APIClient) error { + projectId := model.ProjectId.ValueString() + serverId := model.ServerId.ValueString() + enableServicePayload := serverupdate.EnableServicePayload{} + + tflog.Debug(ctx, "Enabling server update service") + err := client.EnableService(ctx, projectId, serverId).EnableServicePayload(enableServicePayload).Execute() + if err != nil { + if strings.Contains(err.Error(), "Tried to activate already active service") { + tflog.Debug(ctx, "Service for server update already enabled") + return nil + } + return fmt.Errorf("enable server update service: %w", err) + } + tflog.Info(ctx, "Enabled server update service") + return nil +} + +func toCreatePayload(model *Model) (*serverupdate.CreateUpdateSchedulePayload, error) { + if model == nil { + return nil, fmt.Errorf("nil model") + } + + return &serverupdate.CreateUpdateSchedulePayload{ + Enabled: conversion.BoolValueToPointer(model.Enabled), + Name: conversion.StringValueToPointer(model.Name), + Rrule: conversion.StringValueToPointer(model.Rrule), + MaintenanceWindow: conversion.Int64ValueToPointer(model.MaintenanceWindow), + }, nil +} + +func toUpdatePayload(model *Model) (*serverupdate.UpdateUpdateSchedulePayload, error) { + if model == nil { + return nil, fmt.Errorf("nil model") + } + + return &serverupdate.UpdateUpdateSchedulePayload{ + Enabled: conversion.BoolValueToPointer(model.Enabled), + 
Name: conversion.StringValueToPointer(model.Name), + Rrule: conversion.StringValueToPointer(model.Rrule), + MaintenanceWindow: conversion.Int64ValueToPointer(model.MaintenanceWindow), + }, nil +} diff --git a/stackit/internal/services/serverupdate/schedule/resource_test.go b/stackit/internal/services/serverupdate/schedule/resource_test.go new file mode 100644 index 00000000..aa91d4c7 --- /dev/null +++ b/stackit/internal/services/serverupdate/schedule/resource_test.go @@ -0,0 +1,223 @@ +package schedule + +import ( + "context" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/stackitcloud/stackit-sdk-go/core/utils" + sdk "github.com/stackitcloud/stackit-sdk-go/services/serverupdate" +) + +func TestMapFields(t *testing.T) { + tests := []struct { + description string + input *sdk.UpdateSchedule + expected Model + isValid bool + }{ + { + "default_values", + &sdk.UpdateSchedule{ + Id: utils.Ptr(int64(5)), + }, + Model{ + ID: types.StringValue("project_uid,server_uid,5"), + ProjectId: types.StringValue("project_uid"), + ServerId: types.StringValue("server_uid"), + UpdateScheduleId: types.Int64Value(5), + }, + true, + }, + { + "simple_values", + &sdk.UpdateSchedule{ + Id: utils.Ptr(int64(5)), + Enabled: utils.Ptr(true), + Name: utils.Ptr("update_schedule_name_1"), + Rrule: utils.Ptr("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"), + MaintenanceWindow: utils.Ptr(int64(1)), + }, + Model{ + ServerId: types.StringValue("server_uid"), + ProjectId: types.StringValue("project_uid"), + UpdateScheduleId: types.Int64Value(5), + ID: types.StringValue("project_uid,server_uid,5"), + Name: types.StringValue("update_schedule_name_1"), + Rrule: types.StringValue("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"), + Enabled: types.BoolValue(true), + MaintenanceWindow: types.Int64Value(1), + }, + true, + }, + { + "nil_response", + nil, + Model{}, + false, + }, + { + "no_resource_id", + &sdk.UpdateSchedule{}, + Model{}, + false, + }, + } + for _, tt := range tests { + t.Run(tt.description, func(t *testing.T) { + state := &Model{ + ProjectId: tt.expected.ProjectId, + ServerId: tt.expected.ServerId, + } + ctx := context.TODO() + err := mapFields(ctx, tt.input, state) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + if tt.isValid { + diff := cmp.Diff(state, &tt.expected) + if diff != "" { + t.Fatalf("Data does not match: %s", diff) + } + } + }) + } +} + +func TestToCreatePayload(t *testing.T) { + tests := []struct { + description string + input *Model + expected *sdk.CreateUpdateSchedulePayload + isValid bool + }{ + { + "default_values", + &Model{}, + &sdk.CreateUpdateSchedulePayload{}, + true, + }, + { + "simple_values", + &Model{ + Name: types.StringValue("name"), + Enabled: types.BoolValue(true), + Rrule: types.StringValue("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"), + MaintenanceWindow: types.Int64Value(1), + }, + &sdk.CreateUpdateSchedulePayload{ + Name: utils.Ptr("name"), + Enabled: utils.Ptr(true), + Rrule: utils.Ptr("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"), + MaintenanceWindow: utils.Ptr(int64(1)), + }, + true, + }, + { + "null_fields_and_int_conversions", + &Model{ + Name: types.StringValue(""), + Rrule: types.StringValue(""), + }, + &sdk.CreateUpdateSchedulePayload{ + Name: utils.Ptr(""), + Rrule: utils.Ptr(""), + }, + true, + }, + 
{ + "nil_model", + nil, + nil, + false, + }, + } + for _, tt := range tests { + t.Run(tt.description, func(t *testing.T) { + output, err := toCreatePayload(tt.input) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + if tt.isValid { + diff := cmp.Diff(output, tt.expected) + if diff != "" { + t.Fatalf("Data does not match: %s", diff) + } + } + }) + } +} + +func TestToUpdatePayload(t *testing.T) { + tests := []struct { + description string + input *Model + expected *sdk.UpdateUpdateSchedulePayload + isValid bool + }{ + { + "default_values", + &Model{}, + &sdk.UpdateUpdateSchedulePayload{}, + true, + }, + { + "simple_values", + &Model{ + Name: types.StringValue("name"), + Enabled: types.BoolValue(true), + Rrule: types.StringValue("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"), + MaintenanceWindow: types.Int64Value(1), + }, + &sdk.UpdateUpdateSchedulePayload{ + Name: utils.Ptr("name"), + Enabled: utils.Ptr(true), + Rrule: utils.Ptr("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"), + MaintenanceWindow: utils.Ptr(int64(1)), + }, + true, + }, + { + "null_fields_and_int_conversions", + &Model{ + Name: types.StringValue(""), + Rrule: types.StringValue(""), + }, + &sdk.UpdateUpdateSchedulePayload{ + Name: utils.Ptr(""), + Rrule: utils.Ptr(""), + }, + true, + }, + { + "nil_model", + nil, + nil, + false, + }, + } + for _, tt := range tests { + t.Run(tt.description, func(t *testing.T) { + output, err := toUpdatePayload(tt.input) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + if tt.isValid { + diff := cmp.Diff(output, tt.expected) + if diff != "" { + t.Fatalf("Data does not match: %s", diff) + } + } + }) + } +} diff --git a/stackit/internal/services/serverupdate/schedule/schedule_datasource.go b/stackit/internal/services/serverupdate/schedule/schedule_datasource.go new file mode 100644 index 00000000..e0170c37 --- /dev/null +++ b/stackit/internal/services/serverupdate/schedule/schedule_datasource.go @@ -0,0 +1,182 @@ +package schedule + +import ( + "context" + "fmt" + "net/http" + "strconv" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-log/tflog" + + "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/core" + "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/features" + "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/validate" + + "github.com/stackitcloud/stackit-sdk-go/core/config" + "github.com/stackitcloud/stackit-sdk-go/core/oapierror" + "github.com/stackitcloud/stackit-sdk-go/services/serverupdate" +) + +// scheduleDataSourceBetaCheckDone is used to prevent multiple checks for beta resources. +// This is a workaround for the lack of a global state in the provider and +// needs to exist because the Configure method is called twice. +var scheduleDataSourceBetaCheckDone bool + +// Ensure the implementation satisfies the expected interfaces. +var ( + _ datasource.DataSource = &scheduleDataSource{} +) + +// NewScheduleDataSource is a helper function to simplify the provider implementation. 
+func NewScheduleDataSource() datasource.DataSource { + return &scheduleDataSource{} +} + +// scheduleDataSource is the data source implementation. +type scheduleDataSource struct { + client *serverupdate.APIClient +} + +// Metadata returns the data source type name. +func (r *scheduleDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_server_update_schedule" +} + +// Configure adds the provider configured client to the data source. +func (r *scheduleDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + providerData, ok := req.ProviderData.(core.ProviderData) + if !ok { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Expected configure type stackit.ProviderData, got %T", req.ProviderData)) + return + } + + if !scheduleDataSourceBetaCheckDone { + features.CheckBetaResourcesEnabled(ctx, &providerData, &resp.Diagnostics, "stackit_server_update_schedule", "data source") + if resp.Diagnostics.HasError() { + return + } + scheduleDataSourceBetaCheckDone = true + } + + var apiClient *serverupdate.APIClient + var err error + if providerData.ServerUpdateCustomEndpoint != "" { + ctx = tflog.SetField(ctx, "server_update_custom_endpoint", providerData.ServerUpdateCustomEndpoint) + apiClient, err = serverupdate.NewAPIClient( + config.WithCustomAuth(providerData.RoundTripper), + config.WithEndpoint(providerData.ServerUpdateCustomEndpoint), + ) + } else { + apiClient, err = serverupdate.NewAPIClient( + config.WithCustomAuth(providerData.RoundTripper), + config.WithRegion(providerData.Region), + ) + } + + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Configuring client: %v. This is an error related to the provider configuration, not to the data source configuration", err)) + return + } + + r.client = apiClient + tflog.Info(ctx, "Server update client configured") +} + +// Schema defines the schema for the data source. +func (r *scheduleDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + Description: "Server update schedule datasource schema. Must have a `region` specified in the provider configuration.", + MarkdownDescription: features.AddBetaDescription("Server update schedule datasource schema. Must have a `region` specified in the provider configuration."), + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + Description: "Terraform's internal resource identifier. 
It is structured as \"`project_id`,`server_id`,`update_schedule_id`\".", + Computed: true, + }, + "name": schema.StringAttribute{ + Description: "The schedule name.", + Computed: true, + }, + "update_schedule_id": schema.Int64Attribute{ + Description: "Update schedule ID.", + Required: true, + }, + "project_id": schema.StringAttribute{ + Description: "STACKIT Project ID to which the server is associated.", + Required: true, + Validators: []validator.String{ + validate.UUID(), + validate.NoSeparator(), + }, + }, + "server_id": schema.StringAttribute{ + Description: "Server ID for the update schedule.", + Required: true, + Validators: []validator.String{ + validate.UUID(), + validate.NoSeparator(), + }, + }, + "rrule": schema.StringAttribute{ + Description: "Update schedule described in `rrule` (recurrence rule) format.", + Computed: true, + }, + "enabled": schema.BoolAttribute{ + Description: "Is the update schedule enabled or disabled.", + Computed: true, + }, + "maintenance_window": schema.Int64Attribute{ + Description: "Maintenance window [1..24].", + Computed: true, + }, + }, + } +} + +// Read refreshes the Terraform state with the latest data. +func (r *scheduleDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { // nolint:gocritic // function signature required by Terraform + var model Model + diags := req.Config.Get(ctx, &model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + projectId := model.ProjectId.ValueString() + serverId := model.ServerId.ValueString() + updateScheduleId := model.UpdateScheduleId.ValueInt64() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "server_id", serverId) + ctx = tflog.SetField(ctx, "update_schedule_id", updateScheduleId) + + scheduleResp, err := r.client.GetUpdateSchedule(ctx, projectId, serverId, strconv.FormatInt(updateScheduleId, 10)).Execute() + if err != nil { + oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped + if ok && oapiErr.StatusCode == http.StatusNotFound { + resp.State.RemoveResource(ctx) + } + core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading server update schedule", fmt.Sprintf("Calling API: %v", err)) + return + } + + // Map response body to schema + err = mapFields(scheduleResp, &model) + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading server update schedule", fmt.Sprintf("Processing API payload: %v", err)) + return + } + + // Set refreshed state + diags = resp.State.Set(ctx, model) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } + tflog.Info(ctx, "Server update schedule read") +} diff --git a/stackit/internal/services/serverupdate/schedule/schedules_datasource.go b/stackit/internal/services/serverupdate/schedule/schedules_datasource.go new file mode 100644 index 00000000..86cc7698 --- /dev/null +++ b/stackit/internal/services/serverupdate/schedule/schedules_datasource.go @@ -0,0 +1,233 @@ +package schedule + +import ( + "context" + "fmt" + "net/http" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/core" + "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/features" + "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/validate" + + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/stackitcloud/stackit-sdk-go/core/config" + "github.com/stackitcloud/stackit-sdk-go/core/oapierror" + "github.com/stackitcloud/stackit-sdk-go/services/serverupdate" +) + +// scheduleDataSourceBetaCheckDone is used to prevent multiple checks for beta resources. +// This is a workaround for the lack of a global state in the provider and +// needs to exist because the Configure method is called twice. +var schedulesDataSourceBetaCheckDone bool + +// Ensure the implementation satisfies the expected interfaces. +var ( + _ datasource.DataSource = &schedulesDataSource{} +) + +// NewSchedulesDataSource is a helper function to simplify the provider implementation. +func NewSchedulesDataSource() datasource.DataSource { + return &schedulesDataSource{} +} + +// schedulesDataSource is the data source implementation. +type schedulesDataSource struct { + client *serverupdate.APIClient +} + +// Metadata returns the data source type name. +func (r *schedulesDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_server_update_schedules" +} + +// Configure adds the provider configured client to the data source. +func (r *schedulesDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. 
+ if req.ProviderData == nil { + return + } + + providerData, ok := req.ProviderData.(core.ProviderData) + if !ok { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Expected configure type stackit.ProviderData, got %T", req.ProviderData)) + return + } + + if !schedulesDataSourceBetaCheckDone { + features.CheckBetaResourcesEnabled(ctx, &providerData, &resp.Diagnostics, "stackit_server_update_schedules", "data source") + if resp.Diagnostics.HasError() { + return + } + schedulesDataSourceBetaCheckDone = true + } + + var apiClient *serverupdate.APIClient + var err error + if providerData.ServerUpdateCustomEndpoint != "" { + ctx = tflog.SetField(ctx, "server_update_custom_endpoint", providerData.ServerUpdateCustomEndpoint) + apiClient, err = serverupdate.NewAPIClient( + config.WithCustomAuth(providerData.RoundTripper), + config.WithEndpoint(providerData.ServerUpdateCustomEndpoint), + ) + } else { + apiClient, err = serverupdate.NewAPIClient( + config.WithCustomAuth(providerData.RoundTripper), + config.WithRegion(providerData.Region), + ) + } + + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Configuring client: %v. This is an error related to the provider configuration, not to the data source configuration", err)) + return + } + + r.client = apiClient + tflog.Info(ctx, "Server update client configured") +} + +// Schema defines the schema for the data source. +func (r *schedulesDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + Description: "Server update schedules datasource schema. Must have a `region` specified in the provider configuration.", + MarkdownDescription: features.AddBetaDescription("Server update schedules datasource schema. Must have a `region` specified in the provider configuration."), + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + Description: "Terraform's internal data source identifier. It is structured as \"`project_id`,`server_id`\".", + Computed: true, + }, + "project_id": schema.StringAttribute{ + Description: "STACKIT Project ID (UUID) to which the server is associated.", + Required: true, + Validators: []validator.String{ + validate.UUID(), + validate.NoSeparator(), + }, + }, + "server_id": schema.StringAttribute{ + Description: "Server ID (UUID) to which the update schedule is associated.", + Required: true, + Validators: []validator.String{ + validate.UUID(), + validate.NoSeparator(), + }, + }, + "items": schema.ListNestedAttribute{ + Computed: true, + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "update_schedule_id": schema.Int64Attribute{ + Computed: true, + }, + "name": schema.StringAttribute{ + Description: "The update schedule name.", + Computed: true, + }, + "rrule": schema.StringAttribute{ + Description: "Update schedule described in `rrule` (recurrence rule) format.", + Computed: true, + }, + "enabled": schema.BoolAttribute{ + Description: "Is the update schedule enabled or disabled.", + Computed: true, + }, + "maintenance_window": schema.Int64Attribute{ + Description: "Maintenance window [1..24].", + Computed: true, + }, + }, + }, + }, + }, + } +} + +// schedulesDataSourceModel maps the data source schema data. 
+type schedulesDataSourceModel struct { + ID types.String `tfsdk:"id"` + ProjectId types.String `tfsdk:"project_id"` + ServerId types.String `tfsdk:"server_id"` + Items []schedulesDatasourceItemModel `tfsdk:"items"` +} + +// schedulesDatasourceItemModel maps schedule schema data. +type schedulesDatasourceItemModel struct { + UpdateScheduleId types.Int64 `tfsdk:"update_schedule_id"` + Name types.String `tfsdk:"name"` + Rrule types.String `tfsdk:"rrule"` + Enabled types.Bool `tfsdk:"enabled"` + MaintenanceWindow types.Int64 `tfsdk:"maintenance_window"` +} + +// Read refreshes the Terraform state with the latest data. +func (r *schedulesDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { // nolint:gocritic // function signature required by Terraform + var model schedulesDataSourceModel + diags := req.Config.Get(ctx, &model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + projectId := model.ProjectId.ValueString() + serverId := model.ServerId.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "server_id", serverId) + + schedules, err := r.client.ListUpdateSchedules(ctx, projectId, serverId).Execute() + if err != nil { + oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped + if ok && oapiErr.StatusCode == http.StatusNotFound { + resp.State.RemoveResource(ctx) + } + core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading server update schedules", fmt.Sprintf("Calling API: %v", err)) + return + } + + // Map response body to schema + err = mapSchedulesDatasourceFields(ctx, schedules, &model) + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading server update schedules", fmt.Sprintf("Processing API payload: %v", err)) + return + } + + // Set refreshed state + diags = resp.State.Set(ctx, model) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } + tflog.Info(ctx, "Server update schedules read") +} + +func mapSchedulesDatasourceFields(ctx context.Context, schedules *serverupdate.GetUpdateSchedulesResponse, model *schedulesDataSourceModel) error { + if schedules == nil { + return fmt.Errorf("response input is nil") + } + if model == nil { + return fmt.Errorf("model input is nil") + } + + tflog.Debug(ctx, "response", map[string]any{"schedules": schedules}) + projectId := model.ProjectId.ValueString() + serverId := model.ServerId.ValueString() + + idParts := []string{projectId, serverId} + model.ID = types.StringValue( + strings.Join(idParts, core.Separator), + ) + + for _, schedule := range *schedules.Items { + scheduleState := schedulesDatasourceItemModel{ + UpdateScheduleId: types.Int64Value(*schedule.Id), + Name: types.StringValue(*schedule.Name), + Rrule: types.StringValue(*schedule.Rrule), + Enabled: types.BoolValue(*schedule.Enabled), + MaintenanceWindow: types.Int64Value(*schedule.MaintenanceWindow), + } + model.Items = append(model.Items, scheduleState) + } + return nil +} diff --git a/stackit/internal/services/serverupdate/schedule/schedules_datasource_test.go b/stackit/internal/services/serverupdate/schedule/schedules_datasource_test.go new file mode 100644 index 00000000..6ba79105 --- /dev/null +++ b/stackit/internal/services/serverupdate/schedule/schedules_datasource_test.go @@ -0,0 +1,92 @@ +package schedule + +import ( + "context" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/stackitcloud/stackit-sdk-go/core/utils" + sdk "github.com/stackitcloud/stackit-sdk-go/services/serverupdate" +) + +func TestMapSchedulesDataSourceFields(t *testing.T) { + tests := []struct { + description string + input *sdk.GetUpdateSchedulesResponse + expected schedulesDataSourceModel + isValid bool + }{ + { + "empty response", + &sdk.GetUpdateSchedulesResponse{ + Items: &[]sdk.UpdateSchedule{}, + }, + schedulesDataSourceModel{ + ID: types.StringValue("project_uid,server_uid"), + ProjectId: types.StringValue("project_uid"), + ServerId: types.StringValue("server_uid"), + Items: nil, + }, + true, + }, + { + "simple_values", + &sdk.GetUpdateSchedulesResponse{ + Items: &[]sdk.UpdateSchedule{ + { + Id: utils.Ptr(int64(5)), + Enabled: utils.Ptr(true), + Name: utils.Ptr("update_schedule_name_1"), + Rrule: utils.Ptr("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"), + MaintenanceWindow: utils.Ptr(int64(1)), + }, + }, + }, + schedulesDataSourceModel{ + ID: types.StringValue("project_uid,server_uid"), + ServerId: types.StringValue("server_uid"), + ProjectId: types.StringValue("project_uid"), + Items: []schedulesDatasourceItemModel{ + { + UpdateScheduleId: types.Int64Value(5), + Name: types.StringValue("update_schedule_name_1"), + Rrule: types.StringValue("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"), + Enabled: types.BoolValue(true), + MaintenanceWindow: types.Int64Value(1), + }, + }, + }, + true, + }, + { + "nil_response", + nil, + schedulesDataSourceModel{}, + false, + }, + } + for _, tt := range tests { + t.Run(tt.description, func(t *testing.T) { + state := &schedulesDataSourceModel{ + ProjectId: tt.expected.ProjectId, + ServerId: tt.expected.ServerId, + } + ctx := context.TODO() + err := mapSchedulesDatasourceFields(ctx, tt.input, state) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if 
tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + if tt.isValid { + diff := cmp.Diff(state, &tt.expected) + if diff != "" { + t.Fatalf("Data does not match: %s", diff) + } + } + }) + } +} diff --git a/stackit/internal/services/serverupdate/serverupdate_acc_test.go b/stackit/internal/services/serverupdate/serverupdate_acc_test.go new file mode 100644 index 00000000..a1a71804 --- /dev/null +++ b/stackit/internal/services/serverupdate/serverupdate_acc_test.go @@ -0,0 +1,231 @@ +package serverupdate_test + +import ( + "context" + "fmt" + "regexp" + "strconv" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/stackitcloud/stackit-sdk-go/core/config" + "github.com/stackitcloud/stackit-sdk-go/core/utils" + "github.com/stackitcloud/stackit-sdk-go/services/serverupdate" + "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/core" + "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/testutil" +) + +// Server update schedule resource data +var serverUpdateScheduleResource = map[string]string{ + "project_id": testutil.ProjectId, + "server_id": testutil.ServerId, + "update_schedule_name": testutil.ResourceNameWithDateTime("server-update-schedule"), + "rrule": "DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1", + "update_name": testutil.ResourceNameWithDateTime("server-update-schedule-update"), +} + +func resourceConfig(retentionPeriod int64) string { + return fmt.Sprintf(` + %s + + resource "stackit_server_update_schedule" "test_schedule" { + project_id = "%s" + server_id = "%s" + name = "%s" + rrule = "%s" + enabled = true + update_properties = { + name = "%s" + retention_period = %d + volume_ids = null + } + } + `, + testutil.ServerUpdateProviderConfig(), + serverUpdateScheduleResource["project_id"], + serverUpdateScheduleResource["server_id"], + serverUpdateScheduleResource["update_schedule_name"], + serverUpdateScheduleResource["rrule"], + serverUpdateScheduleResource["update_name"], + retentionPeriod, + ) +} + +func resourceConfigWithUpdate() string { + return fmt.Sprintf(` + %s + + resource "stackit_server_update_schedule" "test_schedule" { + project_id = "%s" + server_id = "%s" + name = "%s" + rrule = "%s" + enabled = false + update_properties = { + name = "%s" + retention_period = 20 + volume_ids = null + } + } + `, + testutil.ServerUpdateProviderConfig(), + serverUpdateScheduleResource["project_id"], + serverUpdateScheduleResource["server_id"], + serverUpdateScheduleResource["update_schedule_name"], + serverUpdateScheduleResource["rrule"], + serverUpdateScheduleResource["update_name"], + ) +} + +func TestAccServerUpdateScheduleResource(t *testing.T) { + if testutil.ServerId == "" { + fmt.Println("TF_ACC_SERVER_ID not set, skipping test") + return + } + var invalidRetentionPeriod int64 = 0 + var validRetentionPeriod int64 = 15 + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, + CheckDestroy: testAccCheckServerUpdateScheduleDestroy, + Steps: []resource.TestStep{ + // Creation fail + { + Config: resourceConfig(invalidRetentionPeriod), + ExpectError: regexp.MustCompile(`.*update_properties.retention_period value must be at least 1*`), + }, + // Creation + { + Config: resourceConfig(validRetentionPeriod), + Check: resource.ComposeAggregateTestCheckFunc( + // Update schedule data + 
resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "project_id", serverUpdateScheduleResource["project_id"]), + resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "server_id", serverUpdateScheduleResource["server_id"]), + resource.TestCheckResourceAttrSet("stackit_server_update_schedule.test_schedule", "update_schedule_id"), + resource.TestCheckResourceAttrSet("stackit_server_update_schedule.test_schedule", "id"), + resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "name", serverUpdateScheduleResource["update_schedule_name"]), + resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "rrule", serverUpdateScheduleResource["rrule"]), + resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "enabled", strconv.FormatBool(true)), + resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "update_properties.name", serverUpdateScheduleResource["update_name"]), + ), + }, + // data source + { + Config: fmt.Sprintf(` + %s + + data "stackit_server_update_schedules" "schedules_data_test" { + project_id = stackit_server_update_schedule.test_schedule.project_id + server_id = stackit_server_update_schedule.test_schedule.server_id + } + + data "stackit_server_update_schedule" "schedule_data_test" { + project_id = stackit_server_update_schedule.test_schedule.project_id + server_id = stackit_server_update_schedule.test_schedule.server_id + update_schedule_id = stackit_server_update_schedule.test_schedule.update_schedule_id + }`, + resourceConfig(validRetentionPeriod), + ), + Check: resource.ComposeAggregateTestCheckFunc( + // Server update schedule data + resource.TestCheckResourceAttr("data.stackit_server_update_schedule.schedule_data_test", "project_id", serverUpdateScheduleResource["project_id"]), + resource.TestCheckResourceAttr("data.stackit_server_update_schedule.schedule_data_test", "server_id", serverUpdateScheduleResource["server_id"]), + resource.TestCheckResourceAttrSet("data.stackit_server_update_schedule.schedule_data_test", "update_schedule_id"), + resource.TestCheckResourceAttrSet("data.stackit_server_update_schedule.schedule_data_test", "id"), + resource.TestCheckResourceAttr("data.stackit_server_update_schedule.schedule_data_test", "name", serverUpdateScheduleResource["update_schedule_name"]), + resource.TestCheckResourceAttr("data.stackit_server_update_schedule.schedule_data_test", "rrule", serverUpdateScheduleResource["rrule"]), + resource.TestCheckResourceAttr("data.stackit_server_update_schedule.schedule_data_test", "enabled", strconv.FormatBool(true)), + resource.TestCheckResourceAttr("data.stackit_server_update_schedule.schedule_data_test", "update_properties.name", serverUpdateScheduleResource["update_name"]), + + // Server update schedules data + resource.TestCheckResourceAttr("data.stackit_server_update_schedules.schedules_data_test", "project_id", serverUpdateScheduleResource["project_id"]), + resource.TestCheckResourceAttr("data.stackit_server_update_schedules.schedules_data_test", "server_id", serverUpdateScheduleResource["server_id"]), + resource.TestCheckResourceAttrSet("data.stackit_server_update_schedules.schedules_data_test", "id"), + ), + }, + // Import + { + ResourceName: "stackit_server_update_schedule.test_schedule", + ImportStateIdFunc: func(s *terraform.State) (string, error) { + r, ok := s.RootModule().Resources["stackit_server_update_schedule.test_schedule"] + if !ok { + return "", fmt.Errorf("couldn't find 
resource stackit_server_update_schedule.test_schedule") + } + scheduleId, ok := r.Primary.Attributes["update_schedule_id"] + if !ok { + return "", fmt.Errorf("couldn't find attribute update_schedule_id") + } + return fmt.Sprintf("%s,%s,%s", testutil.ProjectId, testutil.ServerId, scheduleId), nil + }, + ImportState: true, + ImportStateVerify: true, + }, + // Update + { + Config: resourceConfigWithUpdate(), + Check: resource.ComposeAggregateTestCheckFunc( + // Update schedule data + resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "project_id", serverUpdateScheduleResource["project_id"]), + resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "server_id", serverUpdateScheduleResource["server_id"]), + resource.TestCheckResourceAttrSet("stackit_server_update_schedule.test_schedule", "update_schedule_id"), + resource.TestCheckResourceAttrSet("stackit_server_update_schedule.test_schedule", "id"), + resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "name", serverUpdateScheduleResource["update_schedule_name"]), + resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "rrule", serverUpdateScheduleResource["rrule"]), + resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "enabled", strconv.FormatBool(false)), + resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "update_properties.retention_period", strconv.FormatInt(20, 10)), + resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "update_properties.name", serverUpdateScheduleResource["update_name"]), + ), + }, + // Deletion is done by the framework implicitly + }, + }) +} + +func testAccCheckServerUpdateScheduleDestroy(s *terraform.State) error { + ctx := context.Background() + var client *serverupdate.APIClient + var err error + if testutil.ServerUpdateCustomEndpoint == "" { + client, err = serverupdate.NewAPIClient( + config.WithRegion("eu01"), + ) + } else { + client, err = serverupdate.NewAPIClient( + config.WithEndpoint(testutil.ServerUpdateCustomEndpoint), + ) + } + if err != nil { + return fmt.Errorf("creating client: %w", err) + } + + schedulesToDestroy := []string{} + for _, rs := range s.RootModule().Resources { + if rs.Type != "stackit_server_update_schedule" { + continue + } + // server update schedule terraform ID: "[project_id],[server_id],[update_schedule_id]" + scheduleId := strings.Split(rs.Primary.ID, core.Separator)[2] + schedulesToDestroy = append(schedulesToDestroy, scheduleId) + } + + schedulesResp, err := client.ListUpdateSchedules(ctx, testutil.ProjectId, testutil.ServerId).Execute() + if err != nil { + return fmt.Errorf("getting schedulesResp: %w", err) + } + + schedules := *schedulesResp.Items + for i := range schedules { + if schedules[i].Id == nil { + continue + } + scheduleId := strconv.FormatInt(*schedules[i].Id, 10) + if utils.Contains(schedulesToDestroy, scheduleId) { + err := client.DeleteUpdateScheduleExecute(ctx, testutil.ProjectId, testutil.ServerId, scheduleId) + if err != nil { + return fmt.Errorf("destroying server update schedule %s during CheckDestroy: %w", scheduleId, err) + } + } + } + return nil +} diff --git a/stackit/internal/testutil/testutil.go b/stackit/internal/testutil/testutil.go index 9ef4c3ee..0b0786c7 100644 --- a/stackit/internal/testutil/testutil.go +++ b/stackit/internal/testutil/testutil.go @@ -65,6 +65,7 @@ var ( SecretsManagerCustomEndpoint = os.Getenv("TF_ACC_SECRETSMANAGER_CUSTOM_ENDPOINT") 
SQLServerFlexCustomEndpoint = os.Getenv("TF_ACC_SQLSERVERFLEX_CUSTOM_ENDPOINT") ServerBackupCustomEndpoint = os.Getenv("TF_ACC_SERVER_BACKUP_CUSTOM_ENDPOINT") + ServerUpdateCustomEndpoint = os.Getenv("TF_ACC_SERVER_UPDATE_CUSTOM_ENDPOINT") SKECustomEndpoint = os.Getenv("TF_ACC_SKE_CUSTOM_ENDPOINT") // OpenStack user domain name @@ -341,6 +342,21 @@ func ServerBackupProviderConfig() string { ) } +func ServerUpdateProviderConfig() string { + if ServerUpdateCustomEndpoint == "" { + return ` + provider "stackit" { + region = "eu01" + }` + } + return fmt.Sprintf(` + provider "stackit" { + server_update_custom_endpoint = "%s" + }`, + ServerUpdateCustomEndpoint, + ) +} + func SKEProviderConfig() string { if SKECustomEndpoint == "" { return ` diff --git a/stackit/provider.go b/stackit/provider.go index 12257682..914080c1 100644 --- a/stackit/provider.go +++ b/stackit/provider.go @@ -56,6 +56,7 @@ import ( secretsManagerInstance "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/secretsmanager/instance" secretsManagerUser "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/secretsmanager/user" serverBackupSchedule "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/serverbackup/schedule" + serverUpdateSchedule "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/serverupdate/schedule" skeCluster "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/ske/cluster" skeKubeconfig "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/ske/kubeconfig" skeProject "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/ske/project" @@ -119,6 +120,7 @@ type providerModel struct { SQLServerFlexCustomEndpoint types.String `tfsdk:"sqlserverflex_custom_endpoint"` SKECustomEndpoint types.String `tfsdk:"ske_custom_endpoint"` ServerBackupCustomEndpoint types.String `tfsdk:"server_backup_custom_endpoint"` + ServerUpdateCustomEndpoint types.String `tfsdk:"server_update_custom_endpoint"` ResourceManagerCustomEndpoint types.String `tfsdk:"resourcemanager_custom_endpoint"` TokenCustomEndpoint types.String `tfsdk:"token_custom_endpoint"` EnableBetaResources types.Bool `tfsdk:"enable_beta_resources"` @@ -151,6 +153,7 @@ func (p *Provider) Schema(_ context.Context, _ provider.SchemaRequest, resp *pro "postgresflex_custom_endpoint": "Custom endpoint for the PostgresFlex service", "redis_custom_endpoint": "Custom endpoint for the Redis service", "server_backup_custom_endpoint": "Custom endpoint for the Server Backup service", + "server_update_custom_endpoint": "Custom endpoint for the Server Update service", "resourcemanager_custom_endpoint": "Custom endpoint for the Resource Manager service", "secretsmanager_custom_endpoint": "Custom endpoint for the Secrets Manager service", "sqlserverflex_custom_endpoint": "Custom endpoint for the SQL Server Flex service", @@ -271,6 +274,10 @@ func (p *Provider) Schema(_ context.Context, _ provider.SchemaRequest, resp *pro Optional: true, Description: descriptions["server_backup_custom_endpoint"], }, + "server_update_custom_endpoint": schema.StringAttribute{ + Optional: true, + Description: descriptions["server_update_custom_endpoint"], + }, "service_enablement_custom_endpoint": schema.StringAttribute{ Optional: true, Description: descriptions["service_enablement_custom_endpoint"], @@ -446,6 +453,8 @@ func (p *Provider) DataSources(_ context.Context) []func() datasource.DataSource sqlServerFlexUser.NewUserDataSource, 
serverBackupSchedule.NewScheduleDataSource, serverBackupSchedule.NewSchedulesDataSource, + serverUpdateSchedule.NewScheduleDataSource, + serverUpdateSchedule.NewSchedulesDataSource, skeProject.NewProjectDataSource, skeCluster.NewClusterDataSource, } @@ -503,6 +512,7 @@ func (p *Provider) Resources(_ context.Context) []func() resource.Resource { sqlServerFlexInstance.NewInstanceResource, sqlServerFlexUser.NewUserResource, serverBackupSchedule.NewScheduleResource, + serverUpdateSchedule.NewScheduleResource, skeProject.NewProjectResource, skeCluster.NewClusterResource, skeKubeconfig.NewKubeconfigResource,