diff --git a/docs/data-sources/objectstorage_bucket.md b/docs/data-sources/objectstorage_bucket.md index f96f4cf7..f880af6f 100644 --- a/docs/data-sources/objectstorage_bucket.md +++ b/docs/data-sources/objectstorage_bucket.md @@ -3,12 +3,12 @@ page_title: "stackit_objectstorage_bucket Data Source - stackit" subcategory: "" description: |- - ObjectStorage credential data source schema. + ObjectStorage bucket data source schema. --- # stackit_objectstorage_bucket (Data Source) -ObjectStorage credential data source schema. +ObjectStorage bucket data source schema. diff --git a/docs/data-sources/objectstorage_credentials_group.md b/docs/data-sources/objectstorage_credentials_group.md new file mode 100644 index 00000000..d611c9d5 --- /dev/null +++ b/docs/data-sources/objectstorage_credentials_group.md @@ -0,0 +1,30 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs page_title: "stackit_objectstorage_credentials_group Data Source - stackit" subcategory: "" description: |- + ObjectStorage credentials group data source schema. +--- + +# stackit_objectstorage_credentials_group (Data Source) + +ObjectStorage credentials group data source schema. + + + + +## Schema + +### Required + +- `project_id` (String) Object Storage Project ID to which the credentials group is associated. + +### Optional + +- `credentials_group_id` (String) The credentials group ID +- `name` (String) The credentials group's display name. + +### Read-Only + +- `id` (String) Terraform's internal data source identifier. It is structured as "`project_id`,`credentials_group_id`". +- `urn` (String) Credentials group uniform resource name (URN) diff --git a/docs/resources/objectstorage_credentials_group.md b/docs/resources/objectstorage_credentials_group.md new file mode 100644 index 00000000..7732da13 --- /dev/null +++ b/docs/resources/objectstorage_credentials_group.md @@ -0,0 +1,27 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs page_title: "stackit_objectstorage_credentials_group Resource - stackit" subcategory: "" description: |- + ObjectStorage credentials group resource schema. +--- + +# stackit_objectstorage_credentials_group (Resource) + +ObjectStorage credentials group resource schema. + + + + +## Schema + +### Required + +- `name` (String) The credentials group's display name. +- `project_id` (String) Project ID to which the credentials group is associated. + +### Read-Only + +- `credentials_group_id` (String) The credentials group ID +- `id` (String) Terraform's internal data source identifier. It is structured as "`project_id`,`credentials_group_id`". 
+- `urn` (String) Credentials group uniform resource name (URN) diff --git a/examples/data-sources/stackit_object_storage_bucket/data-source.tf b/examples/data-sources/stackit_object_storage_bucket/data-source.tf new file mode 100644 index 00000000..97201b7f --- /dev/null +++ b/examples/data-sources/stackit_object_storage_bucket/data-source.tf @@ -0,0 +1,4 @@ +data "stackit_objectstorage_bucket" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + bucket_name = "example-name" +} diff --git a/examples/data-sources/stackit_object_storage_credentials_group/data-source.tf b/examples/data-sources/stackit_object_storage_credentials_group/data-source.tf new file mode 100644 index 00000000..250795f8 --- /dev/null +++ b/examples/data-sources/stackit_object_storage_credentials_group/data-source.tf @@ -0,0 +1,4 @@ +data "stackit_objectstorage_credentials_group" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + credentials_group_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" +} diff --git a/examples/resources/stackit_object_storage_bucket/resource.tf b/examples/resources/stackit_object_storage_bucket/resource.tf new file mode 100644 index 00000000..47522ea6 --- /dev/null +++ b/examples/resources/stackit_object_storage_bucket/resource.tf @@ -0,0 +1,4 @@ +resource "stackit_objectstorage_bucket" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + bucket_name = "example-bucket" +} diff --git a/examples/resources/stackit_object_storage_credentials_group/resource.tf b/examples/resources/stackit_object_storage_credentials_group/resource.tf new file mode 100644 index 00000000..0b347866 --- /dev/null +++ b/examples/resources/stackit_object_storage_credentials_group/resource.tf @@ -0,0 +1,4 @@ +resource "stackit_objectstorage_credentials_group" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + name = "example-credentials-group" +} diff --git a/stackit/internal/services/objectstorage/bucket/datasource.go b/stackit/internal/services/objectstorage/bucket/datasource.go index b7f72835..58fef520 100644 --- a/stackit/internal/services/objectstorage/bucket/datasource.go +++ b/stackit/internal/services/objectstorage/bucket/datasource.go @@ -74,7 +74,7 @@ func (r *bucketDataSource) Configure(ctx context.Context, req datasource.Configu // Schema defines the schema for the data source. func (r *bucketDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { descriptions := map[string]string{ - "main": "ObjectStorage credential data source schema.", + "main": "ObjectStorage bucket data source schema.", "id": "Terraform's internal data source identifier. It is structured as \"`project_id`,`bucket_name`\".", "bucket_name": "The bucket name. 
It must be DNS conform.", "project_id": "STACKIT Project ID to which the bucket is associated.", diff --git a/stackit/internal/services/objectstorage/bucket/resource.go b/stackit/internal/services/objectstorage/bucket/resource.go index 1a851d5a..fd3c4b7f 100644 --- a/stackit/internal/services/objectstorage/bucket/resource.go +++ b/stackit/internal/services/objectstorage/bucket/resource.go @@ -155,8 +155,15 @@ func (r *bucketResource) Create(ctx context.Context, req resource.CreateRequest, ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "bucket_name", bucketName) - // Create new recordset - _, err := r.client.CreateBucket(ctx, projectId, bucketName).Execute() + // Handle project init + err := enableProject(ctx, &model, r.client) + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating bucket", fmt.Sprintf("Enabling object storage project before creation: %v", err)) + return + } + + // Create new bucket + _, err = r.client.CreateBucket(ctx, projectId, bucketName).Execute() if err != nil { core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating bucket", fmt.Sprintf("Calling API: %v", err)) return @@ -294,3 +301,19 @@ func mapFields(bucketResp *objectstorage.GetBucketResponse, model *Model) error model.URLVirtualHostedStyle = types.StringPointerValue(bucket.UrlVirtualHostedStyle) return nil } + +type objectStorageClient interface { + CreateProjectExecute(ctx context.Context, projectId string) (*objectstorage.GetProjectResponse, error) +} + +// enableProject enables object storage for the specified project. If the project is already enabled, nothing happens +func enableProject(ctx context.Context, model *Model, client objectStorageClient) error { + projectId := model.ProjectId.ValueString() + + // From the object storage OAS: Creation will also be successful if the project is already enabled, but will not create a duplicate + _, err := client.CreateProjectExecute(ctx, projectId) + if err != nil { + return fmt.Errorf("failed to create object storage project: %w", err) + } + return nil +} diff --git a/stackit/internal/services/objectstorage/bucket/resource_test.go b/stackit/internal/services/objectstorage/bucket/resource_test.go index 3330d7d8..aee118e6 100644 --- a/stackit/internal/services/objectstorage/bucket/resource_test.go +++ b/stackit/internal/services/objectstorage/bucket/resource_test.go @@ -1,6 +1,8 @@ package objectstorage import ( + "context" + "fmt" "testing" "github.com/google/go-cmp/cmp" @@ -9,6 +11,20 @@ import ( "github.com/stackitcloud/stackit-sdk-go/services/objectstorage" ) +type objectStorageClientMocked struct { + returnError bool +} + +func (c *objectStorageClientMocked) CreateProjectExecute(_ context.Context, projectId string) (*objectstorage.GetProjectResponse, error) { + if c.returnError { + return nil, fmt.Errorf("create project failed") + } + + return &objectstorage.GetProjectResponse{ + Project: utils.Ptr(projectId), + }, nil +} + func TestMapFields(t *testing.T) { tests := []struct { description string @@ -99,3 +115,36 @@ func TestMapFields(t *testing.T) { }) } } + +func TestEnableProject(t *testing.T) { + tests := []struct { + description string + enableFails bool + isValid bool + }{ + { + "default_values", + false, + true, + }, + { + "error_response", + true, + false, + }, + } + for _, tt := range tests { + t.Run(tt.description, func(t *testing.T) { + client := &objectStorageClientMocked{ + returnError: tt.enableFails, + } + err := enableProject(context.Background(), &Model{}, client) + if 
!tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + }) + } +} diff --git a/stackit/internal/services/objectstorage/credentialsgroup/datasource.go b/stackit/internal/services/objectstorage/credentialsgroup/datasource.go new file mode 100644 index 00000000..5d576e77 --- /dev/null +++ b/stackit/internal/services/objectstorage/credentialsgroup/datasource.go @@ -0,0 +1,144 @@ +package objectstorage + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/core" + "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/validate" + + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/stackitcloud/stackit-sdk-go/core/config" + "github.com/stackitcloud/stackit-sdk-go/services/objectstorage" +) + +// Ensure the implementation satisfies the expected interfaces. +var ( + _ datasource.DataSource = &credentialsGroupDataSource{} +) + +// NewCredentialsGroupDataSource is a helper function to simplify the provider implementation. +func NewCredentialsGroupDataSource() datasource.DataSource { + return &credentialsGroupDataSource{} +} + +// credentialsGroupDataSource is the data source implementation. +type credentialsGroupDataSource struct { + client *objectstorage.APIClient +} + +// Metadata returns the data source type name. +func (r *credentialsGroupDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_objectstorage_credentials_group" +} + +// Configure adds the provider configured client to the data source. +func (r *credentialsGroupDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + providerData, ok := req.ProviderData.(core.ProviderData) + if !ok { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Expected configure type stackit.ProviderData, got %T", req.ProviderData)) + return + } + + var apiClient *objectstorage.APIClient + var err error + if providerData.ObjectStorageCustomEndpoint != "" { + apiClient, err = objectstorage.NewAPIClient( + config.WithCustomAuth(providerData.RoundTripper), + config.WithEndpoint(providerData.ObjectStorageCustomEndpoint), + ) + } else { + apiClient, err = objectstorage.NewAPIClient( + config.WithCustomAuth(providerData.RoundTripper), + config.WithRegion(providerData.Region), + ) + } + + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Configuring client: %v", err)) + return + } + + r.client = apiClient + tflog.Info(ctx, "ObjectStorage credentials group client configured") +} + +// Schema defines the schema for the data source. +func (r *credentialsGroupDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { + descriptions := map[string]string{ + "main": "ObjectStorage credentials group data source schema.", + "id": "Terraform's internal data source identifier. 
It is structured as \"`project_id`,`credentials_group_id`\".", + "credentials_group_id": "The credentials group ID", + "name": "The credentials group's display name.", + "project_id": "Object Storage Project ID to which the credentials group is associated.", + "urn": "Credentials group uniform resource name (URN)", + } + + resp.Schema = schema.Schema{ + Description: descriptions["main"], + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + Description: descriptions["id"], + Computed: true, + }, + "credentials_group_id": schema.StringAttribute{ + Description: descriptions["credentials_group_id"], + Optional: true, + Computed: true, + }, + "project_id": schema.StringAttribute{ + Description: descriptions["project_id"], + Required: true, + Validators: []validator.String{ + validate.UUID(), + validate.NoSeparator(), + }, + }, + "name": schema.StringAttribute{ + Description: descriptions["name"], + Optional: true, + Computed: true, + }, + "urn": schema.StringAttribute{ + Computed: true, + Description: descriptions["urn"], + }, + }, + } +} + +// Read refreshes the Terraform state with the latest data. +func (r *credentialsGroupDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { // nolint:gocritic // function signature required by Terraform + var model Model + diags := req.Config.Get(ctx, &model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + projectId := model.ProjectId.ValueString() + credentialsGroupId := model.CredentialsGroupId.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "credentials_group_id", credentialsGroupId) + + err := readCredentialsGroups(ctx, &model, r.client) + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading credentialsGroup", fmt.Sprintf("getting credential group from list of credentials groups: %v", err)) + return + } + + // Set refreshed state + diags = resp.State.Set(ctx, model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + tflog.Info(ctx, "ObjectStorage credentials group read") +} diff --git a/stackit/internal/services/objectstorage/credentialsgroup/resource.go b/stackit/internal/services/objectstorage/credentialsgroup/resource.go new file mode 100644 index 00000000..7fdb21f3 --- /dev/null +++ b/stackit/internal/services/objectstorage/credentialsgroup/resource.go @@ -0,0 +1,339 @@ +package objectstorage + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/core" + "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/validate" + + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/stackitcloud/stackit-sdk-go/core/config" + "github.com/stackitcloud/stackit-sdk-go/core/utils" + "github.com/stackitcloud/stackit-sdk-go/services/objectstorage" +) + +// Ensure the implementation satisfies the expected interfaces. 
+var ( + _ resource.Resource = &credentialsGroupResource{} + _ resource.ResourceWithConfigure = &credentialsGroupResource{} + _ resource.ResourceWithImportState = &credentialsGroupResource{} +) + +type Model struct { + Id types.String `tfsdk:"id"` // needed by TF + CredentialsGroupId types.String `tfsdk:"credentials_group_id"` + Name types.String `tfsdk:"name"` + ProjectId types.String `tfsdk:"project_id"` + URN types.String `tfsdk:"urn"` +} + +// NewCredentialsGroupResource is a helper function to simplify the provider implementation. +func NewCredentialsGroupResource() resource.Resource { + return &credentialsGroupResource{} +} + +// credentialsGroupResource is the resource implementation. +type credentialsGroupResource struct { + client *objectstorage.APIClient +} + +// Metadata returns the resource type name. +func (r *credentialsGroupResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_objectstorage_credentials_group" +} + +// Configure adds the provider configured client to the resource. +func (r *credentialsGroupResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + providerData, ok := req.ProviderData.(core.ProviderData) + if !ok { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Expected configure type stackit.ProviderData, got %T", req.ProviderData)) + return + } + + var apiClient *objectstorage.APIClient + var err error + if providerData.ObjectStorageCustomEndpoint != "" { + apiClient, err = objectstorage.NewAPIClient( + config.WithCustomAuth(providerData.RoundTripper), + config.WithEndpoint(providerData.ObjectStorageCustomEndpoint), + ) + } else { + apiClient, err = objectstorage.NewAPIClient( + config.WithCustomAuth(providerData.RoundTripper), + config.WithRegion(providerData.Region), + ) + } + + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Configuring client: %v", err)) + return + } + + r.client = apiClient + tflog.Info(ctx, "ObjectStorage credentials group client configured") +} + +// Schema defines the schema for the resource. +func (r *credentialsGroupResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + descriptions := map[string]string{ + "main": "ObjectStorage credentials group resource schema.", + "id": "Terraform's internal data source identifier. 
It is structured as \"`project_id`,`credentials_group_id`\".", + "credentials_group_id": "The credentials group ID", + "name": "The credentials group's display name.", + "project_id": "Project ID to which the credentials group is associated.", + "urn": "Credentials group uniform resource name (URN)", + } + + resp.Schema = schema.Schema{ + Description: descriptions["main"], + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + Description: descriptions["id"], + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.UseStateForUnknown(), + }, + }, + "name": schema.StringAttribute{ + Description: descriptions["name"], + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + stringplanmodifier.UseStateForUnknown(), + }, + }, + "credentials_group_id": schema.StringAttribute{ + Description: descriptions["credentials_group_id"], + Computed: true, + }, + "project_id": schema.StringAttribute{ + Description: descriptions["project_id"], + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + stringplanmodifier.UseStateForUnknown(), + }, + Validators: []validator.String{ + validate.UUID(), + validate.NoSeparator(), + }, + }, + "urn": schema.StringAttribute{ + Description: descriptions["urn"], + Computed: true, + }, + }, + } +} + +// Create creates the resource and sets the initial Terraform state. +func (r *credentialsGroupResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { // nolint:gocritic // function signature required by Terraform + var model Model + diags := req.Plan.Get(ctx, &model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + projectId := model.ProjectId.ValueString() + credentialsGroupName := model.Name.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "name", credentialsGroupName) + + createCredentialsGroupPayload := objectstorage.CreateCredentialsGroupPayload{ + DisplayName: utils.Ptr(credentialsGroupName), + } + + // Handle project init + err := enableProject(ctx, &model, r.client) + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating credentials group", fmt.Sprintf("Enabling object storage project before creation: %v", err)) + return + } + + // Create new credentials group + got, err := r.client.CreateCredentialsGroup(ctx, projectId).CreateCredentialsGroupPayload(createCredentialsGroupPayload).Execute() + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating credentials group", fmt.Sprintf("Calling API: %v", err)) + return + } + + // Map response body to schema + err = mapFields(got, &model) + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating credentialsGroup", fmt.Sprintf("Processing API payload: %v", err)) + return + } + diags = resp.State.Set(ctx, model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + tflog.Info(ctx, "ObjectStorage credentials group created") +} + +// Read refreshes the Terraform state with the latest data. +func (r *credentialsGroupResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { // nolint:gocritic // function signature required by Terraform + var model Model + diags := req.State.Get(ctx, &model) + resp.Diagnostics.Append(diags...) 
if resp.Diagnostics.HasError() { + return + } + projectId := model.ProjectId.ValueString() + credentialsGroupId := model.CredentialsGroupId.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "credentials_group_id", credentialsGroupId) + + err := readCredentialsGroups(ctx, &model, r.client) + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading credentialsGroup", fmt.Sprintf("getting credential group from list of credentials groups: %v", err)) + return + } + + // Set refreshed state + diags = resp.State.Set(ctx, model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + tflog.Info(ctx, "ObjectStorage credentials group read") +} + +// Update updates the resource and sets the updated Terraform state on success. +func (r *credentialsGroupResource) Update(ctx context.Context, _ resource.UpdateRequest, resp *resource.UpdateResponse) { // nolint:gocritic // function signature required by Terraform + // Update shouldn't be called + core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating credentials group", "CredentialsGroup can't be updated") +} + +// Delete deletes the resource and removes the Terraform state on success. +func (r *credentialsGroupResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { // nolint:gocritic // function signature required by Terraform + var model Model + diags := req.State.Get(ctx, &model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + projectId := model.ProjectId.ValueString() + credentialsGroupId := model.CredentialsGroupId.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "credentials_group_id", credentialsGroupId) + + // Delete existing credentials group + _, err := r.client.DeleteCredentialsGroup(ctx, projectId, credentialsGroupId).Execute() + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting credentials group", fmt.Sprintf("Calling API: %v", err)) + return + } + + tflog.Info(ctx, "ObjectStorage credentials group deleted") +} + +// ImportState imports a resource into the Terraform state on success. +// The expected format of the resource import identifier is: project_id, credentials_group_id +func (r *credentialsGroupResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + idParts := strings.Split(req.ID, core.Separator) + if len(idParts) != 2 || idParts[0] == "" || idParts[1] == "" { + core.LogAndAddError(ctx, &resp.Diagnostics, + "Error importing credentialsGroup", + fmt.Sprintf("Expected import identifier with format [project_id],[credentials_group_id], got %q", req.ID), + ) + return + } + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("credentials_group_id"), idParts[1])...) 
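+ // Only project_id and credentials_group_id are set here; the refresh Terraform runs after import (via Read) fills in the remaining attributes (id, name, urn)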
+ tflog.Info(ctx, "ObjectStorage credentials group state imported") +} + +func mapFields(credentialsGroupResp *objectstorage.CreateCredentialsGroupResponse, model *Model) error { + if credentialsGroupResp == nil { + return fmt.Errorf("response input is nil") + } + if credentialsGroupResp.CredentialsGroup == nil { + return fmt.Errorf("response credentialsGroup is nil") + } + if model == nil { + return fmt.Errorf("model input is nil") + } + credentialsGroup := credentialsGroupResp.CredentialsGroup + + mapCredentialsGroup(*credentialsGroup, model) + return nil +} + +func mapCredentialsGroup(credentialsGroup objectstorage.CredentialsGroup, model *Model) { + model.URN = types.StringPointerValue(credentialsGroup.Urn) + model.Name = types.StringPointerValue(credentialsGroup.DisplayName) + + idParts := []string{ + model.ProjectId.ValueString(), + model.CredentialsGroupId.ValueString(), + } + model.Id = types.StringValue( + strings.Join(idParts, core.Separator), + ) +} + +type objectStorageClient interface { + CreateProjectExecute(ctx context.Context, projectId string) (*objectstorage.GetProjectResponse, error) + GetCredentialsGroupsExecute(ctx context.Context, projectId string) (*objectstorage.GetCredentialsGroupsResponse, error) +} + +// enableProject enables object storage for the specified project. If the project is already enabled, nothing happens +func enableProject(ctx context.Context, model *Model, client objectStorageClient) error { + projectId := model.ProjectId.ValueString() + + // From the object storage OAS: Creation will also be successful if the project is already enabled, but will not create a duplicate + _, err := client.CreateProjectExecute(ctx, projectId) + if err != nil { + return fmt.Errorf("failed to create object storage project: %w", err) + } + return nil +} + +// readCredentialsGroups gets all the existing credentials groups for the specified project, +// finds the credentials group that is being read and updates the state. 
If the credentials group cannot be found, it throws an error +func readCredentialsGroups(ctx context.Context, model *Model, client objectStorageClient) error { + found := false + + if model.CredentialsGroupId.ValueString() == "" && model.Name.ValueString() == "" { + return fmt.Errorf("missing configuration: either name or credentials group id must be provided") + } + + credentialsGroupsResp, err := client.GetCredentialsGroupsExecute(ctx, model.ProjectId.ValueString()) + if err != nil { + return fmt.Errorf("getting credentials groups: %w", err) + } + + if credentialsGroupsResp == nil || credentialsGroupsResp.CredentialsGroups == nil { + return fmt.Errorf("nil response from GET credentials groups") + } + + for _, credentialsGroup := range *credentialsGroupsResp.CredentialsGroups { + if *credentialsGroup.CredentialsGroupId != model.CredentialsGroupId.ValueString() && *credentialsGroup.DisplayName != model.Name.ValueString() { + continue + } + found = true + mapCredentialsGroup(credentialsGroup, model) + break + } + + if !found { + return fmt.Errorf("credentials group could not be found") + } + + return nil +} diff --git a/stackit/internal/services/objectstorage/credentialsgroup/resource_test.go b/stackit/internal/services/objectstorage/credentialsgroup/resource_test.go new file mode 100644 index 00000000..635b55fe --- /dev/null +++ b/stackit/internal/services/objectstorage/credentialsgroup/resource_test.go @@ -0,0 +1,298 @@ +package objectstorage + +import ( + "context" + "fmt" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/stackitcloud/stackit-sdk-go/core/utils" + "github.com/stackitcloud/stackit-sdk-go/services/objectstorage" +) + +type objectStorageClientMocked struct { + returnError bool + getCredentialsGroupsResp *objectstorage.GetCredentialsGroupsResponse +} + +func (c *objectStorageClientMocked) CreateProjectExecute(_ context.Context, projectId string) (*objectstorage.GetProjectResponse, error) { + if c.returnError { + return nil, fmt.Errorf("create project failed") + } + + return &objectstorage.GetProjectResponse{ + Project: utils.Ptr(projectId), + }, nil +} + +func (c *objectStorageClientMocked) GetCredentialsGroupsExecute(_ context.Context, _ string) (*objectstorage.GetCredentialsGroupsResponse, error) { + if c.returnError { + return nil, fmt.Errorf("get credentials groups failed") + } + + return c.getCredentialsGroupsResp, nil +} + +func TestMapFields(t *testing.T) { + tests := []struct { + description string + input *objectstorage.CreateCredentialsGroupResponse + expected Model + isValid bool + }{ + { + "default_values", + &objectstorage.CreateCredentialsGroupResponse{ + CredentialsGroup: &objectstorage.CredentialsGroup{}, + }, + Model{ + Id: types.StringValue("pid,cid"), + Name: types.StringNull(), + ProjectId: types.StringValue("pid"), + CredentialsGroupId: types.StringValue("cid"), + URN: types.StringNull(), + }, + true, + }, + { + "simple_values", + &objectstorage.CreateCredentialsGroupResponse{ + CredentialsGroup: &objectstorage.CredentialsGroup{ + DisplayName: utils.Ptr("name"), + Urn: utils.Ptr("urn"), + }, + }, + Model{ + Id: types.StringValue("pid,cid"), + Name: types.StringValue("name"), + ProjectId: types.StringValue("pid"), + CredentialsGroupId: types.StringValue("cid"), + URN: types.StringValue("urn"), + }, + true, + }, + { + "empty_strings", + &objectstorage.CreateCredentialsGroupResponse{ + CredentialsGroup: &objectstorage.CredentialsGroup{ + DisplayName: utils.Ptr(""), + Urn: utils.Ptr(""), + }, + }, + Model{ + Id: 
types.StringValue("pid,cid"), + Name: types.StringValue(""), + ProjectId: types.StringValue("pid"), + CredentialsGroupId: types.StringValue("cid"), + URN: types.StringValue(""), + }, + true, + }, + { + "nil_response", + nil, + Model{}, + false, + }, + { + "no_bucket", + &objectstorage.CreateCredentialsGroupResponse{}, + Model{}, + false, + }, + } + for _, tt := range tests { + t.Run(tt.description, func(t *testing.T) { + model := &Model{ + ProjectId: tt.expected.ProjectId, + CredentialsGroupId: tt.expected.CredentialsGroupId, + } + err := mapFields(tt.input, model) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + if tt.isValid { + diff := cmp.Diff(model, &tt.expected) + if diff != "" { + t.Fatalf("Data does not match: %s", diff) + } + } + }) + } +} + +func TestEnableProject(t *testing.T) { + tests := []struct { + description string + enableFails bool + isValid bool + }{ + { + "default_values", + false, + true, + }, + { + "error_response", + true, + false, + }, + } + for _, tt := range tests { + t.Run(tt.description, func(t *testing.T) { + client := &objectStorageClientMocked{ + returnError: tt.enableFails, + } + err := enableProject(context.Background(), &Model{}, client) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + }) + } +} + +func TestReadCredentialsGroups(t *testing.T) { + tests := []struct { + description string + mockedResp *objectstorage.GetCredentialsGroupsResponse + expected Model + getCredentialsGroupsFails bool + isValid bool + }{ + { + "default_values", + &objectstorage.GetCredentialsGroupsResponse{ + CredentialsGroups: &[]objectstorage.CredentialsGroup{ + { + CredentialsGroupId: utils.Ptr("cid"), + }, + { + CredentialsGroupId: utils.Ptr("foo-id"), + }, + }, + }, + Model{ + Id: types.StringValue("pid,cid"), + Name: types.StringNull(), + ProjectId: types.StringValue("pid"), + CredentialsGroupId: types.StringValue("cid"), + URN: types.StringNull(), + }, + false, + true, + }, + { + "simple_values", + &objectstorage.GetCredentialsGroupsResponse{ + CredentialsGroups: &[]objectstorage.CredentialsGroup{ + { + CredentialsGroupId: utils.Ptr("cid"), + DisplayName: utils.Ptr("name"), + Urn: utils.Ptr("urn"), + }, + { + CredentialsGroupId: utils.Ptr("foo-cid"), + DisplayName: utils.Ptr("foo-name"), + Urn: utils.Ptr("foo-urn"), + }, + }, + }, + Model{ + Id: types.StringValue("pid,cid"), + Name: types.StringValue("name"), + ProjectId: types.StringValue("pid"), + CredentialsGroupId: types.StringValue("cid"), + URN: types.StringValue("urn"), + }, + false, + true, + }, + { + "empty_credentials_groups", + &objectstorage.GetCredentialsGroupsResponse{ + CredentialsGroups: &[]objectstorage.CredentialsGroup{}, + }, + Model{}, + false, + false, + }, + { + "nil_credentials_groups", + &objectstorage.GetCredentialsGroupsResponse{ + CredentialsGroups: nil, + }, + Model{}, + false, + false, + }, + { + "nil_response", + nil, + Model{}, + false, + false, + }, + { + "non_matching_credentials_group", + &objectstorage.GetCredentialsGroupsResponse{ + CredentialsGroups: &[]objectstorage.CredentialsGroup{ + { + CredentialsGroupId: utils.Ptr("foo-other"), + DisplayName: utils.Ptr("foo-name"), + Urn: utils.Ptr("foo-urn"), + }, + }, + }, + Model{}, + false, + false, + }, + { + "error_response", + &objectstorage.GetCredentialsGroupsResponse{ + CredentialsGroups: &[]objectstorage.CredentialsGroup{ + { + 
CredentialsGroupId: utils.Ptr("other_id"), + DisplayName: utils.Ptr("name"), + Urn: utils.Ptr("urn"), + }, + }, + }, + Model{}, + true, + false, + }, + } + for _, tt := range tests { + t.Run(tt.description, func(t *testing.T) { + client := &objectStorageClientMocked{ + returnError: tt.getCredentialsGroupsFails, + getCredentialsGroupsResp: tt.mockedResp, + } + model := &Model{ + ProjectId: tt.expected.ProjectId, + CredentialsGroupId: tt.expected.CredentialsGroupId, + } + err := readCredentialsGroups(context.Background(), model, client) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + if tt.isValid { + diff := cmp.Diff(model, &tt.expected) + if diff != "" { + t.Fatalf("Data does not match: %s", diff) + } + } + }) + } +} diff --git a/stackit/internal/services/objectstorage/objectstorage_acc_test.go b/stackit/internal/services/objectstorage/objectstorage_acc_test.go index e0631648..fcdeb833 100644 --- a/stackit/internal/services/objectstorage/objectstorage_acc_test.go +++ b/stackit/internal/services/objectstorage/objectstorage_acc_test.go @@ -24,6 +24,12 @@ var bucketResource = map[string]string{ "bucket_name": fmt.Sprintf("acc-test-%s", acctest.RandStringFromCharSet(20, acctest.CharSetAlpha)), } +// Credentials group resource data +var credentialsGroupResource = map[string]string{ + "project_id": testutil.ProjectId, + "name": fmt.Sprintf("acc-test-%s", acctest.RandStringFromCharSet(20, acctest.CharSetAlpha)), +} + func resourceConfig() string { return fmt.Sprintf(` %s @@ -32,10 +38,17 @@ func resourceConfig() string { project_id = "%s" bucket_name = "%s" } + + resource "stackit_objectstorage_credentials_group" "credentials_group" { + project_id = "%s" + name = "%s" + } `, testutil.ObjectStorageProviderConfig(), bucketResource["project_id"], bucketResource["bucket_name"], + credentialsGroupResource["project_id"], + credentialsGroupResource["name"], ) } @@ -49,11 +62,17 @@ func TestAccObjectStorageResource(t *testing.T) { { Config: resourceConfig(), Check: resource.ComposeAggregateTestCheckFunc( - // Instance data + // Bucket data resource.TestCheckResourceAttr("stackit_objectstorage_bucket.bucket", "project_id", bucketResource["project_id"]), resource.TestCheckResourceAttr("stackit_objectstorage_bucket.bucket", "bucket_name", bucketResource["bucket_name"]), resource.TestCheckResourceAttrSet("stackit_objectstorage_bucket.bucket", "url_path_style"), resource.TestCheckResourceAttrSet("stackit_objectstorage_bucket.bucket", "url_virtual_hosted_style"), + + // Credentials group data + resource.TestCheckResourceAttr("stackit_objectstorage_credentials_group.credentials_group", "project_id", credentialsGroupResource["project_id"]), + resource.TestCheckResourceAttr("stackit_objectstorage_credentials_group.credentials_group", "name", credentialsGroupResource["name"]), + resource.TestCheckResourceAttrSet("stackit_objectstorage_credentials_group.credentials_group", "credentials_group_id"), + resource.TestCheckResourceAttrSet("stackit_objectstorage_credentials_group.credentials_group", "urn"), ), }, // Data source @@ -64,11 +83,16 @@ func TestAccObjectStorageResource(t *testing.T) { data "stackit_objectstorage_bucket" "bucket" { project_id = stackit_objectstorage_bucket.bucket.project_id bucket_name = stackit_objectstorage_bucket.bucket.bucket_name + } + + data "stackit_objectstorage_credentials_group" "credentials_group" { + project_id = 
stackit_objectstorage_credentials_group.credentials_group.project_id + credentials_group_id = stackit_objectstorage_credentials_group.credentials_group.credentials_group_id }`, resourceConfig(), ), Check: resource.ComposeAggregateTestCheckFunc( - // Instance data + // Bucket data resource.TestCheckResourceAttr("data.stackit_objectstorage_bucket.bucket", "project_id", bucketResource["project_id"]), resource.TestCheckResourceAttrPair( "stackit_objectstorage_bucket.bucket", "bucket_name", @@ -82,22 +106,37 @@ func TestAccObjectStorageResource(t *testing.T) { "stackit_objectstorage_bucket.bucket", "url_virtual_hosted_style", "data.stackit_objectstorage_bucket.bucket", "url_virtual_hosted_style", ), + + // Credentials group data + resource.TestCheckResourceAttr("data.stackit_objectstorage_credentials_group.credentials_group", "project_id", credentialsGroupResource["project_id"]), + resource.TestCheckResourceAttrPair( + "stackit_objectstorage_credentials_group.credentials_group", "credentials_group_id", + "data.stackit_objectstorage_credentials_group.credentials_group", "credentials_group_id", + ), + resource.TestCheckResourceAttrPair( + "stackit_objectstorage_credentials_group.credentials_group", "name", + "data.stackit_objectstorage_credentials_group.credentials_group", "name", + ), + resource.TestCheckResourceAttrPair( + "stackit_objectstorage_credentials_group.credentials_group", "urn", + "data.stackit_objectstorage_credentials_group.credentials_group", "urn", + ), ), }, // Import { - ResourceName: "stackit_objectstorage_bucket.bucket", + ResourceName: "stackit_objectstorage_credentials_group.credentials_group", ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_objectstorage_bucket.bucket"] + r, ok := s.RootModule().Resources["stackit_objectstorage_credentials_group.credentials_group"] if !ok { - return "", fmt.Errorf("couldn't find resource stackit_objectstorage_bucket.bucket") + return "", fmt.Errorf("couldn't find resource stackit_objectstorage_credentials_group.credentials_group") } - bucketName, ok := r.Primary.Attributes["bucket_name"] + credentialsGroupId, ok := r.Primary.Attributes["credentials_group_id"] if !ok { - return "", fmt.Errorf("couldn't find attribute bucket_name") + return "", fmt.Errorf("couldn't find attribute credentials_group_id") } - return fmt.Sprintf("%s,%s", testutil.ProjectId, bucketName), nil + return fmt.Sprintf("%s,%s", testutil.ProjectId, credentialsGroupId), nil }, ImportState: true, ImportStateVerify: true, @@ -145,7 +184,7 @@ func testAccCheckObjectStorageDestroy(s *terraform.State) error { continue } bucketName := *bucket.Name - if utils.Contains(bucketsToDestroy, *bucket.Name) { + if utils.Contains(bucketsToDestroy, bucketName) { _, err := client.DeleteBucketExecute(ctx, testutil.ProjectId, bucketName) if err != nil { return fmt.Errorf("destroying bucket %s during CheckDestroy: %w", bucketName, err) @@ -156,5 +195,34 @@ func testAccCheckObjectStorageDestroy(s *terraform.State) error { } } } + + credentialsGroupsToDestroy := []string{} + for _, rs := range s.RootModule().Resources { + if rs.Type != "stackit_objectstorage_credentials_group" { + continue + } + // credentials group terraform ID: "[project_id],[credentials_group_id]" + credentialsGroupId := strings.Split(rs.Primary.ID, core.Separator)[1] + credentialsGroupsToDestroy = append(credentialsGroupsToDestroy, credentialsGroupId) + } + + credentialsGroupsResp, err := client.GetCredentialsGroups(ctx, testutil.ProjectId).Execute() + if err != nil 
{ + return fmt.Errorf("getting credentialsGroupsResp: %w", err) + } + + groups := *credentialsGroupsResp.CredentialsGroups + for _, group := range groups { + if group.CredentialsGroupId == nil { + continue + } + groupId := *group.CredentialsGroupId + if utils.Contains(credentialsGroupsToDestroy, groupId) { + _, err := client.DeleteCredentialsGroupExecute(ctx, testutil.ProjectId, groupId) + if err != nil { + return fmt.Errorf("destroying credentials group %s during CheckDestroy: %w", groupId, err) + } + } + } return nil } diff --git a/stackit/provider.go b/stackit/provider.go index 00f3f0b2..629c2802 100644 --- a/stackit/provider.go +++ b/stackit/provider.go @@ -19,6 +19,7 @@ import ( mariaDBCredential "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/mariadb/credential" mariaDBInstance "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/mariadb/instance" objectStorageBucket "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/objectstorage/bucket" + objectStorageCredentialsGroup "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/objectstorage/credentialsgroup" openSearchCredential "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/opensearch/credential" openSearchInstance "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/opensearch/instance" postgresFlexInstance "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/postgresflex/instance" @@ -313,6 +314,7 @@ func (p *Provider) DataSources(_ context.Context) []func() datasource.DataSource mariaDBInstance.NewInstanceDataSource, mariaDBCredential.NewCredentialDataSource, objectStorageBucket.NewBucketDataSource, + objectStorageCredentialsGroup.NewCredentialsGroupDataSource, openSearchInstance.NewInstanceDataSource, openSearchCredential.NewCredentialDataSource, rabbitMQInstance.NewInstanceDataSource, @@ -341,6 +343,7 @@ func (p *Provider) Resources(_ context.Context) []func() resource.Resource { mariaDBInstance.NewInstanceResource, mariaDBCredential.NewCredentialResource, objectStorageBucket.NewBucketResource, + objectStorageCredentialsGroup.NewCredentialsGroupResource, openSearchInstance.NewInstanceResource, openSearchCredential.NewCredentialResource, rabbitMQInstance.NewInstanceResource,
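For context, a minimal configuration that exercises the new credentials group resource and data source together (a sketch based on the example files and the acceptance test above; the project ID placeholder is illustrative):

```hcl
# Creating the group also enables Object Storage for the project if it is not enabled yet (see enableProject)
resource "stackit_objectstorage_credentials_group" "example" {
  project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
  name       = "example-credentials-group"
}

# The data source looks the group up by its ID; alternatively, `name` can be used for the lookup
data "stackit_objectstorage_credentials_group" "example" {
  project_id           = stackit_objectstorage_credentials_group.example.project_id
  credentials_group_id = stackit_objectstorage_credentials_group.example.credentials_group_id
}
```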