diff --git a/.changelog/3610.txt b/.changelog/3610.txt new file mode 100644 index 0000000000..df0639c44f --- /dev/null +++ b/.changelog/3610.txt @@ -0,0 +1,11 @@ +```release-note:note +resource/mongodbatlas_stream_connection: Deprecates the `instance_name` attribute. All configurations using `instance_name` should be updated to use the new `workspace_name` attribute instead +``` + +```release-note:note +data-source/mongodbatlas_stream_connection: Deprecates the `instance_name` attribute. All configurations using `instance_name` should be updated to use the new `workspace_name` attribute instead +``` + +```release-note:note +data-source/mongodbatlas_stream_connections: Deprecates the `instance_name` attribute. All configurations using `instance_name` should be updated to use the new `workspace_name` attribute instead +``` \ No newline at end of file diff --git a/docs/data-sources/stream_connection.md b/docs/data-sources/stream_connection.md index a1445c0f2c..e1c7e8ee51 100644 --- a/docs/data-sources/stream_connection.md +++ b/docs/data-sources/stream_connection.md @@ -11,7 +11,17 @@ subcategory: "Streams" ```terraform data "mongodbatlas_stream_connection" "example" { project_id = "" instance_name = "" connection_name = "" } ``` + +### Example using workspace_name + +```terraform +data "mongodbatlas_stream_connection" "example" { + project_id = "" + workspace_name = "" + connection_name = "" +} +``` @@ -19,9 +29,12 @@ data "mongodbatlas_stream_connection" "example" { ## Argument Reference * `project_id` - (Required) Unique 24-hexadecimal digit string that identifies your project. -* `instance_name` - (Required) Human-readable label that identifies the stream instance. +* `instance_name` - (Deprecated) Human-readable label that identifies the stream instance. This attribute is deprecated and will be removed in a future major version in favor of `workspace_name`. +* `workspace_name` - (Optional) Human-readable label that identifies the stream instance. Conflicts with `instance_name`. * `connection_name` - (Required) Human-readable label that identifies the stream connection. In the case of the Sample type, this is the name of the sample source. +~> **NOTE:** Either `workspace_name` or `instance_name` must be provided, but not both. These fields are functionally identical and `workspace_name` is an alias for `instance_name`. `workspace_name` should be used instead of `instance_name`. + ## Attributes Reference * `type` - Type of connection. Can be `AWSLambda`, `Cluster`, `Https`, `Kafka` or `Sample`. diff --git a/docs/data-sources/stream_connections.md b/docs/data-sources/stream_connections.md index 45a4bec8d1..df209043d9 100644 --- a/docs/data-sources/stream_connections.md +++ b/docs/data-sources/stream_connections.md @@ -11,14 +11,17 @@ subcategory: "Streams" ```terraform data "mongodbatlas_stream_connections" "test" { project_id = "" - instance_name = "" + workspace_name = "" } ``` ## Argument Reference * `project_id` - (Required) Unique 24-hexadecimal digit string that identifies your project. -* `instance_name` - (Required) Human-readable label that identifies the stream instance. +* `instance_name` - (Deprecated) Human-readable label that identifies the stream instance. This attribute is deprecated and will be removed in a future major version in favor of `workspace_name`. +* `workspace_name` - (Optional) Human-readable label that identifies the stream instance. Conflicts with `instance_name`. 
+ +~> **NOTE:** Either `workspace_name` or `instance_name` must be provided, but not both. These fields are functionally identical and `workspace_name` is an alias for `instance_name`. `workspace_name` should be used instead of `instance_name`. * `page_num` - (Optional) Number of the page that displays the current set of the total objects that the response returns. Defaults to `1`. * `items_per_page` - (Optional) Number of items that the response returns per page, up to a maximum of `500`. Defaults to `100`. @@ -34,7 +37,7 @@ In addition to all arguments above, it also exports the following attributes: ### Stream Connection * `project_id` - Unique 24-hexadecimal digit string that identifies your project. -* `instance_name` - Human-readable label that identifies the stream instance. +* `workspace_name` - Human-readable label that identifies the stream instance. * `connection_name` - Human-readable label that identifies the stream connection. In the case of the Sample type, this is the name of the sample source. * `type` - Type of connection. `AWSLambda`, `Cluster`, `Https`, `Kafka` or `Sample`. diff --git a/docs/resources/stream_connection.md b/docs/resources/stream_connection.md index becfc6eff5..faacd8b3fe 100644 --- a/docs/resources/stream_connection.md +++ b/docs/resources/stream_connection.md @@ -16,7 +16,7 @@ subcategory: "Streams" ```terraform resource "mongodbatlas_stream_connection" "test" { project_id = var.project_id - instance_name = "InstanceName" + workspace_name = "WorkspaceName" connection_name = "ConnectionName" type = "Cluster" cluster_name = "Cluster0" @@ -31,7 +31,7 @@ resource "mongodbatlas_stream_connection" "test" { ```terraform resource "mongodbatlas_stream_connection" "test" { project_id = var.project_id - instance_name = "InstanceName" + workspace_name = "WorkspaceName" connection_name = "ConnectionName" type = "Cluster" cluster_name = "OtherCluster" @@ -44,7 +44,7 @@ resource "mongodbatlas_stream_connection" "test" { ```terraform resource "mongodbatlas_stream_connection" "test" { project_id = var.project_id - instance_name = "NewInstance" + workspace_name = "NewWorkspace" connection_name = "KafkaConnection" type = "Kafka" authentication = { @@ -99,7 +99,7 @@ resource "mongodbatlas_stream_connection" "example-kafka-oauthbearer" { ```terraform resource "mongodbatlas_stream_connection" "test" { project_id = var.project_id - instance_name = "NewInstance" + workspace_name = "NewWorkspace" connection_name = "KafkaConnection" type = "Kafka" authentication = { @@ -123,7 +123,7 @@ resource "mongodbatlas_stream_connection" "test" { ```terraform resource "mongodbatlas_stream_connection" "test" { project_id = var.project_id - instance_name = "NewInstance" + workspace_name = "NewWorkspace" connection_name = "AWSLambdaConnection" type = "AWSLambda" aws = { @@ -138,7 +138,7 @@ resource "mongodbatlas_stream_connection" "test" { ```terraform resource "mongodbatlas_stream_connection" "example-https" { project_id = var.project_id - instance_name = mongodbatlas_stream_instance.example.instance_name + workspace_name = mongodbatlas_stream_instance.example.instance_name connection_name = "https_connection_tf_new" type = "Https" url = "https://example.com" @@ -152,10 +152,13 @@ resource "mongodbatlas_stream_connection" "example-https" { ## Argument Reference * `project_id` - (Required) Unique 24-hexadecimal digit string that identifies your project. -* `instance_name` - (Required) Human-readable label that identifies the stream instance. 
+* `instance_name` - (Deprecated) Human-readable label that identifies the stream instance. This attribute is deprecated and will be removed in a future major version in favor of `workspace_name`. +* `workspace_name` - (Optional) Human-readable label that identifies the stream instance. Conflicts with `instance_name`. * `connection_name` - (Required) Human-readable label that identifies the stream connection. In the case of the Sample type, this is the name of the sample source. * `type` - (Required) Type of connection. Can be `AWSLambda`, `Cluster`, `Https`, `Kafka` or `Sample`. +~> **NOTE:** Either `workspace_name` or `instance_name` must be provided, but not both. These fields are functionally identical and `workspace_name` is an alias for `instance_name`. `workspace_name` should be used instead of `instance_name`. + If `type` is of value `Cluster` the following additional arguments are defined: * `cluster_name` - Name of the cluster configured for this connection. * `db_role_to_execute` - The name of a Built in or Custom DB Role to connect to an Atlas Cluster. See [DBRoleToExecute](#DBRoleToExecute). @@ -209,7 +212,7 @@ If `type` is of value `Https` the following additional attributes are defined: ## Import -You can import a stream connection resource using the instance name, project ID, and connection name. The format must be `INSTANCE_NAME-PROJECT_ID-CONNECTION_NAME`. For example: +You can import a stream connection resource using the workspace name, project ID, and connection name. The format must be `WORKSPACE_NAME-PROJECT_ID-CONNECTION_NAME`. For example: ``` $ terraform import mongodbatlas_stream_connection.test "DefaultInstance-12251446ae5f3f6ec7968b13-NewConnection" diff --git a/examples/mongodbatlas_stream_connection/main.tf b/examples/mongodbatlas_stream_connection/main.tf index e871e1a26f..8d096e8f2e 100644 --- a/examples/mongodbatlas_stream_connection/main.tf +++ b/examples/mongodbatlas_stream_connection/main.tf @@ -9,7 +9,7 @@ resource "mongodbatlas_stream_instance" "example" { resource "mongodbatlas_stream_connection" "example-cluster" { project_id = var.project_id - instance_name = mongodbatlas_stream_instance.example.instance_name + workspace_name = mongodbatlas_stream_instance.example.instance_name connection_name = "ClusterConnection" type = "Cluster" cluster_name = var.cluster_name @@ -21,7 +21,7 @@ resource "mongodbatlas_stream_connection" "example-cluster" { resource "mongodbatlas_stream_connection" "example-cross-project-cluster" { project_id = var.project_id - instance_name = mongodbatlas_stream_instance.example.instance_name + workspace_name = mongodbatlas_stream_instance.example.instance_name connection_name = "ClusterCrossProjectConnection" type = "Cluster" cluster_name = var.other_cluster @@ -34,7 +34,7 @@ resource "mongodbatlas_stream_connection" "example-cross-project-cluster" { resource "mongodbatlas_stream_connection" "example-kafka-plaintext" { project_id = var.project_id - instance_name = mongodbatlas_stream_instance.example.instance_name + workspace_name = mongodbatlas_stream_instance.example.instance_name connection_name = "KafkaPlaintextConnection" type = "Kafka" authentication = { @@ -86,7 +86,7 @@ resource "mongodbatlas_stream_connection" "example-kafka-oauthbearer" { resource "mongodbatlas_stream_connection" "example-kafka-ssl" { project_id = var.project_id - instance_name = mongodbatlas_stream_instance.example.instance_name + workspace_name = mongodbatlas_stream_instance.example.instance_name connection_name = "KafkaSSLConnection" type = "Kafka" 
authentication = { @@ -106,14 +106,14 @@ resource "mongodbatlas_stream_connection" "example-kafka-ssl" { resource "mongodbatlas_stream_connection" "example-sample" { project_id = var.project_id - instance_name = mongodbatlas_stream_instance.example.instance_name + workspace_name = mongodbatlas_stream_instance.example.instance_name connection_name = "sample_stream_solar" type = "Sample" } resource "mongodbatlas_stream_connection" "example-aws-lambda" { project_id = var.project_id - instance_name = mongodbatlas_stream_instance.example.instance_name + workspace_name = mongodbatlas_stream_instance.example.instance_name connection_name = "AWSLambdaConnection" type = "AWSLambda" aws = { @@ -135,7 +135,7 @@ resource "mongodbatlas_stream_connection" "example-https" { data "mongodbatlas_stream_connection" "example-kafka-ssl" { project_id = var.project_id - instance_name = mongodbatlas_stream_instance.example.instance_name + workspace_name = mongodbatlas_stream_instance.example.instance_name connection_name = mongodbatlas_stream_connection.example-kafka-ssl.connection_name } diff --git a/internal/service/streamconnection/data_source_stream_connection.go b/internal/service/streamconnection/data_source_stream_connection.go index 925cda7096..3fa725cba6 100644 --- a/internal/service/streamconnection/data_source_stream_connection.go +++ b/internal/service/streamconnection/data_source_stream_connection.go @@ -2,8 +2,14 @@ package streamconnection import ( "context" + "fmt" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" "github.com/hashicorp/terraform-plugin-framework/datasource" + dsschema "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/constant" "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion" "github.com/mongodb/terraform-provider-mongodbatlas/internal/config" ) @@ -25,7 +31,24 @@ type streamConnectionDS struct { func (d *streamConnectionDS) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = conversion.DataSourceSchemaFromResource(ResourceSchema(ctx), &conversion.DataSourceSchemaRequest{ - RequiredFields: []string{"project_id", "instance_name", "connection_name"}, + RequiredFields: []string{"project_id", "connection_name"}, + OverridenFields: map[string]dsschema.Attribute{ + "instance_name": dsschema.StringAttribute{ + Optional: true, + MarkdownDescription: "Human-readable label that identifies the stream instance. Conflicts with `workspace_name`.", + DeprecationMessage: fmt.Sprintf(constant.DeprecationParamWithReplacement, "workspace_name"), + Validators: []validator.String{ + stringvalidator.ConflictsWith(path.MatchRoot("workspace_name")), + }, + }, + "workspace_name": dsschema.StringAttribute{ + Optional: true, + MarkdownDescription: "Human-readable label that identifies the stream instance. This is an alias for `instance_name`. 
Conflicts with `instance_name`.", + Validators: []validator.String{ + stringvalidator.ConflictsWith(path.MatchRoot("instance_name")), + }, + }, + }, }) } @@ -38,15 +61,21 @@ func (d *streamConnectionDS) Read(ctx context.Context, req datasource.ReadReques connV2 := d.Client.AtlasV2 projectID := streamConnectionConfig.ProjectID.ValueString() - instanceName := streamConnectionConfig.InstanceName.ValueString() + workspaceOrInstanceName := getWorkspaceOrInstanceName(&streamConnectionConfig) + if workspaceOrInstanceName == "" { + resp.Diagnostics.AddError("validation error", "workspace_name must be provided") + return + } connectionName := streamConnectionConfig.ConnectionName.ValueString() - apiResp, _, err := connV2.StreamsApi.GetStreamConnection(ctx, projectID, instanceName, connectionName).Execute() + apiResp, _, err := connV2.StreamsApi.GetStreamConnection(ctx, projectID, workspaceOrInstanceName, connectionName).Execute() if err != nil { resp.Diagnostics.AddError("error fetching resource", err.Error()) return } - newStreamConnectionModel, diags := NewTFStreamConnection(ctx, projectID, instanceName, nil, apiResp) + instanceName := streamConnectionConfig.InstanceName.ValueString() + workspaceName := streamConnectionConfig.WorkspaceName.ValueString() + newStreamConnectionModel, diags := NewTFStreamConnection(ctx, projectID, instanceName, workspaceName, nil, apiResp) if diags.HasError() { resp.Diagnostics.Append(diags...) return diff --git a/internal/service/streamconnection/data_source_stream_connections.go b/internal/service/streamconnection/data_source_stream_connections.go index 7df0edfd8c..986adf11bc 100644 --- a/internal/service/streamconnection/data_source_stream_connections.go +++ b/internal/service/streamconnection/data_source_stream_connections.go @@ -4,8 +4,13 @@ import ( "context" "fmt" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" "github.com/hashicorp/terraform-plugin-framework/datasource" + dsschema "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/constant" "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion" "github.com/mongodb/terraform-provider-mongodbatlas/internal/config" "go.mongodb.org/atlas-sdk/v20250312008/admin" @@ -28,11 +33,39 @@ type streamConnectionsDS struct { func (d *streamConnectionsDS) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = conversion.PluralDataSourceSchemaFromResource(ResourceSchema(ctx), &conversion.PluralDataSourceSchemaRequest{ - RequiredFields: []string{"project_id", "instance_name"}, + RequiredFields: []string{"project_id"}, HasLegacyFields: true, + OverridenRootFields: map[string]dsschema.Attribute{ + "instance_name": dsschema.StringAttribute{ + Optional: true, + MarkdownDescription: "Human-readable label that identifies the stream instance. Conflicts with `workspace_name`.", + DeprecationMessage: fmt.Sprintf(constant.DeprecationParamWithReplacement, "workspace_name"), + Validators: []validator.String{ + stringvalidator.ConflictsWith(path.MatchRoot("workspace_name")), + }, + }, + "workspace_name": dsschema.StringAttribute{ + Optional: true, + MarkdownDescription: "Human-readable label that identifies the stream instance. This is an alias for `instance_name`. 
Conflicts with `instance_name`.", + Validators: []validator.String{ + stringvalidator.ConflictsWith(path.MatchRoot("instance_name")), + }, + }, + }, }) } +// getWorkspaceOrInstanceNameForDS returns the workspace name from either instance_name or workspace_name field for datasource model +func getWorkspaceOrInstanceNameForDS(model *TFStreamConnectionsDSModel) string { + if !model.WorkspaceName.IsNull() && !model.WorkspaceName.IsUnknown() { + return model.WorkspaceName.ValueString() + } + if !model.InstanceName.IsNull() && !model.InstanceName.IsUnknown() { + return model.InstanceName.ValueString() + } + return "" +} + func (d *streamConnectionsDS) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { var streamConnectionsConfig TFStreamConnectionsDSModel resp.Diagnostics.Append(req.Config.Get(ctx, &streamConnectionsConfig)...) @@ -42,13 +75,17 @@ func (d *streamConnectionsDS) Read(ctx context.Context, req datasource.ReadReque connV2 := d.Client.AtlasV2 projectID := streamConnectionsConfig.ProjectID.ValueString() - instanceName := streamConnectionsConfig.InstanceName.ValueString() + workspaceOrInstanceName := getWorkspaceOrInstanceNameForDS(&streamConnectionsConfig) + if workspaceOrInstanceName == "" { + resp.Diagnostics.AddError("validation error", "workspace_name must be provided") + return + } itemsPerPage := streamConnectionsConfig.ItemsPerPage.ValueInt64Pointer() pageNum := streamConnectionsConfig.PageNum.ValueInt64Pointer() apiResp, _, err := connV2.StreamsApi.ListStreamConnectionsWithParams(ctx, &admin.ListStreamConnectionsApiParams{ GroupId: projectID, - TenantName: instanceName, + TenantName: workspaceOrInstanceName, ItemsPerPage: conversion.Int64PtrToIntPtr(itemsPerPage), PageNum: conversion.Int64PtrToIntPtr(pageNum), }).Execute() @@ -67,11 +104,12 @@ func (d *streamConnectionsDS) Read(ctx context.Context, req datasource.ReadReque } type TFStreamConnectionsDSModel struct { - ID types.String `tfsdk:"id"` - ProjectID types.String `tfsdk:"project_id"` - InstanceName types.String `tfsdk:"instance_name"` - Results []TFStreamConnectionModel `tfsdk:"results"` - PageNum types.Int64 `tfsdk:"page_num"` - ItemsPerPage types.Int64 `tfsdk:"items_per_page"` - TotalCount types.Int64 `tfsdk:"total_count"` + ID types.String `tfsdk:"id"` + ProjectID types.String `tfsdk:"project_id"` + InstanceName types.String `tfsdk:"instance_name"` + WorkspaceName types.String `tfsdk:"workspace_name"` + Results []TFStreamConnectionModel `tfsdk:"results"` + PageNum types.Int64 `tfsdk:"page_num"` + ItemsPerPage types.Int64 `tfsdk:"items_per_page"` + TotalCount types.Int64 `tfsdk:"total_count"` } diff --git a/internal/service/streamconnection/model_stream_connection.go b/internal/service/streamconnection/model_stream_connection.go index 3283108427..6d877f6a72 100644 --- a/internal/service/streamconnection/model_stream_connection.go +++ b/internal/service/streamconnection/model_stream_connection.go @@ -123,12 +123,18 @@ func NewStreamConnectionUpdateReq(ctx context.Context, plan *TFStreamConnectionM return streamConnection, nil } -func NewTFStreamConnection(ctx context.Context, projID, instanceName string, currAuthConfig *types.Object, apiResp *admin.StreamsConnection) (*TFStreamConnectionModel, diag.Diagnostics) { - rID := fmt.Sprintf("%s-%s-%s", instanceName, projID, conversion.SafeString(apiResp.Name)) +// NewTFStreamConnection determines if the original model was created with instance_name or workspace_name and sets the appropriate field. 
+func NewTFStreamConnection(ctx context.Context, projID, instanceName, workspaceName string, currAuthConfig *types.Object, apiResp *admin.StreamsConnection) (*TFStreamConnectionModel, diag.Diagnostics) { + streamWorkspaceName := workspaceName + if instanceName != "" { + streamWorkspaceName = instanceName + } + + rID := fmt.Sprintf("%s-%s-%s", streamWorkspaceName, projID, conversion.SafeString(apiResp.Name)) + connectionModel := TFStreamConnectionModel{ ID: types.StringValue(rID), ProjectID: types.StringValue(projID), - InstanceName: types.StringValue(instanceName), ConnectionName: types.StringPointerValue(apiResp.Name), Type: types.StringPointerValue(apiResp.Type), ClusterName: types.StringPointerValue(apiResp.ClusterName), @@ -137,6 +143,16 @@ func NewTFStreamConnection(ctx context.Context, projID, instanceName string, cur URL: types.StringPointerValue(apiResp.Url), } + // Set the appropriate field based on the original model + if workspaceName != "" { + connectionModel.WorkspaceName = types.StringValue(workspaceName) + connectionModel.InstanceName = types.StringNull() + } else { + // Default to instance_name for backward compatibility + connectionModel.InstanceName = types.StringValue(instanceName) + connectionModel.WorkspaceName = types.StringNull() + } + authModel, diags := newTFConnectionAuthenticationModel(ctx, currAuthConfig, apiResp.Authentication) if diags.HasError() { return nil, diags @@ -254,22 +270,27 @@ func NewTFStreamConnections(ctx context.Context, paginatedResult *admin.PaginatedApiStreamsConnection) (*TFStreamConnectionsDSModel, diag.Diagnostics) { input := paginatedResult.GetResults() results := make([]TFStreamConnectionModel, len(input)) + + workspaceName := streamConnectionsConfig.WorkspaceName.ValueString() + instanceName := streamConnectionsConfig.InstanceName.ValueString() + for i := range input { projectID := streamConnectionsConfig.ProjectID.ValueString() - instanceName := streamConnectionsConfig.InstanceName.ValueString() - connectionModel, diags := NewTFStreamConnection(ctx, projectID, instanceName, nil, &input[i]) + connectionModel, diags := NewTFStreamConnection(ctx, projectID, instanceName, workspaceName, nil, &input[i]) if diags.HasError() { return nil, diags } results[i] = *connectionModel } + return &TFStreamConnectionsDSModel{ - ID: types.StringValue(id.UniqueId()), - ProjectID: streamConnectionsConfig.ProjectID, - InstanceName: streamConnectionsConfig.InstanceName, - Results: results, - PageNum: streamConnectionsConfig.PageNum, - ItemsPerPage: streamConnectionsConfig.ItemsPerPage, - TotalCount: types.Int64PointerValue(conversion.IntPtrToInt64Ptr(paginatedResult.TotalCount)), + ID: types.StringValue(id.UniqueId()), + ProjectID: streamConnectionsConfig.ProjectID, + InstanceName: streamConnectionsConfig.InstanceName, + WorkspaceName: streamConnectionsConfig.WorkspaceName, + Results: results, + PageNum: streamConnectionsConfig.PageNum, + ItemsPerPage: streamConnectionsConfig.ItemsPerPage, + TotalCount: types.Int64PointerValue(conversion.IntPtrToInt64Ptr(paginatedResult.TotalCount)), }, nil } diff --git a/internal/service/streamconnection/model_stream_connection_test.go b/internal/service/streamconnection/model_stream_connection_test.go index afefd09668..a024057bb4 100644 --- a/internal/service/streamconnection/model_stream_connection_test.go +++ b/internal/service/streamconnection/model_stream_connection_test.go @@ -76,7 +76,7 @@ func TestStreamConnectionSDKToTFModel(t *testing.T) { providedAuthConfig: nil, expectedTFModel: 
&streamconnection.TFStreamConnectionModel{ ProjectID: types.StringValue(dummyProjectID), - InstanceName: types.StringValue(instanceName), + WorkspaceName: types.StringValue(instanceName), ConnectionName: types.StringValue(connectionName), Type: types.StringValue("Cluster"), ClusterName: types.StringValue(clusterName), @@ -106,7 +106,7 @@ func TestStreamConnectionSDKToTFModel(t *testing.T) { providedAuthConfig: nil, expectedTFModel: &streamconnection.TFStreamConnectionModel{ ProjectID: types.StringValue(dummyProjectID), - InstanceName: types.StringValue(instanceName), + WorkspaceName: types.StringValue(instanceName), ConnectionName: types.StringValue(connectionName), Type: types.StringValue("Cluster"), ClusterName: types.StringValue(clusterName), @@ -141,7 +141,7 @@ func TestStreamConnectionSDKToTFModel(t *testing.T) { providedAuthConfig: &authConfigWithPasswordDefined, expectedTFModel: &streamconnection.TFStreamConnectionModel{ ProjectID: types.StringValue(dummyProjectID), - InstanceName: types.StringValue(instanceName), + WorkspaceName: types.StringValue(instanceName), ConnectionName: types.StringValue(connectionName), Type: types.StringValue("Kafka"), Authentication: tfAuthenticationObject(t, authMechanism, authUsername, "raw password"), // password value is obtained from config, not api resp. @@ -179,7 +179,7 @@ func TestStreamConnectionSDKToTFModel(t *testing.T) { providedAuthConfig: &authConfigWithOAuth, expectedTFModel: &streamconnection.TFStreamConnectionModel{ ProjectID: types.StringValue(dummyProjectID), - InstanceName: types.StringValue(instanceName), + WorkspaceName: types.StringValue(instanceName), ConnectionName: types.StringValue(connectionName), Type: types.StringValue("Kafka"), Authentication: tfAuthenticationObjectForOAuth(t, authMechanism, clientID, clientSecret, tokenEndpointURL, scope, saslOauthbearerExtentions, method), // password value is obtained from config, not api resp. 
@@ -203,7 +203,7 @@ func TestStreamConnectionSDKToTFModel(t *testing.T) { providedAuthConfig: nil, expectedTFModel: &streamconnection.TFStreamConnectionModel{ ProjectID: types.StringValue(dummyProjectID), - InstanceName: types.StringValue(instanceName), + WorkspaceName: types.StringValue(instanceName), ConnectionName: types.StringValue(connectionName), Type: types.StringValue("Kafka"), Authentication: types.ObjectNull(streamconnection.ConnectionAuthenticationObjectType.AttrTypes), @@ -236,7 +236,7 @@ func TestStreamConnectionSDKToTFModel(t *testing.T) { providedAuthConfig: nil, expectedTFModel: &streamconnection.TFStreamConnectionModel{ ProjectID: types.StringValue(dummyProjectID), - InstanceName: types.StringValue(instanceName), + WorkspaceName: types.StringValue(instanceName), ConnectionName: types.StringValue(connectionName), Type: types.StringValue("Kafka"), Authentication: tfAuthenticationObjectWithNoPassword(t, authMechanism, authUsername), @@ -259,7 +259,7 @@ func TestStreamConnectionSDKToTFModel(t *testing.T) { providedInstanceName: instanceName, expectedTFModel: &streamconnection.TFStreamConnectionModel{ ProjectID: types.StringValue(dummyProjectID), - InstanceName: types.StringValue(instanceName), + WorkspaceName: types.StringValue(instanceName), ConnectionName: types.StringValue(sampleConnectionName), Type: types.StringValue("Sample"), Authentication: types.ObjectNull(streamconnection.ConnectionAuthenticationObjectType.AttrTypes), @@ -282,7 +282,7 @@ func TestStreamConnectionSDKToTFModel(t *testing.T) { providedInstanceName: instanceName, expectedTFModel: &streamconnection.TFStreamConnectionModel{ ProjectID: types.StringValue(dummyProjectID), - InstanceName: types.StringValue(instanceName), + WorkspaceName: types.StringValue(instanceName), ConnectionName: types.StringValue(awslambdaConnectionName), Type: types.StringValue("AWSLambda"), Authentication: types.ObjectNull(streamconnection.ConnectionAuthenticationObjectType.AttrTypes), @@ -298,7 +298,7 @@ func TestStreamConnectionSDKToTFModel(t *testing.T) { for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { - resultModel, diags := streamconnection.NewTFStreamConnection(t.Context(), tc.providedProjID, tc.providedInstanceName, tc.providedAuthConfig, tc.SDKResp) + resultModel, diags := streamconnection.NewTFStreamConnection(t.Context(), tc.providedProjID, "", tc.providedInstanceName, tc.providedAuthConfig, tc.SDKResp) if diags.HasError() { t.Fatalf("unexpected errors found: %s", diags.Errors()[0].Summary()) } @@ -482,6 +482,25 @@ func TestStreamConnectionsSDKToTFModel(t *testing.T) { Results: []streamconnection.TFStreamConnectionModel{}, }, }, + { + name: "With workspace name and no page options", + SDKResp: &admin.PaginatedApiStreamsConnection{ + Results: &[]admin.StreamsConnection{}, + TotalCount: admin.PtrInt(0), + }, + providedConfig: &streamconnection.TFStreamConnectionsDSModel{ + ProjectID: types.StringValue(dummyProjectID), + WorkspaceName: types.StringValue(instanceName), + }, + expectedTFModel: &streamconnection.TFStreamConnectionsDSModel{ + ProjectID: types.StringValue(dummyProjectID), + WorkspaceName: types.StringValue(instanceName), + PageNum: types.Int64Null(), + ItemsPerPage: types.Int64Null(), + TotalCount: types.Int64Value(0), + Results: []streamconnection.TFStreamConnectionModel{}, + }, + }, } for _, tc := range testCases { diff --git a/internal/service/streamconnection/resource_schema.go b/internal/service/streamconnection/resource_schema.go index 4effb025d7..42b0f1ed6b 100644 --- 
a/internal/service/streamconnection/resource_schema.go +++ b/internal/service/streamconnection/resource_schema.go @@ -2,14 +2,17 @@ package streamconnection import ( "context" + "fmt" "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource/schema" "github.com/hashicorp/terraform-plugin-framework/resource/schema/objectplanmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" "github.com/hashicorp/terraform-plugin-framework/schema/validator" "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/constant" ) func ResourceSchema(ctx context.Context) schema.Schema { @@ -28,10 +31,23 @@ func ResourceSchema(ctx context.Context) schema.Schema { }, }, "instance_name": schema.StringAttribute{ - Required: true, + Optional: true, PlanModifiers: []planmodifier.String{ stringplanmodifier.RequiresReplace(), }, + Validators: []validator.String{ + stringvalidator.ConflictsWith(path.MatchRoot("workspace_name")), + }, + DeprecationMessage: fmt.Sprintf(constant.DeprecationParamWithReplacement, "workspace_name"), + }, + "workspace_name": schema.StringAttribute{ + Optional: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + Validators: []validator.String{ + stringvalidator.ConflictsWith(path.MatchRoot("instance_name")), + }, + }, "connection_name": schema.StringAttribute{ Required: true, diff --git a/internal/service/streamconnection/resource_stream_connection.go b/internal/service/streamconnection/resource_stream_connection.go index 4c16b41c57..85c9164a1f 100644 --- a/internal/service/streamconnection/resource_stream_connection.go +++ b/internal/service/streamconnection/resource_stream_connection.go @@ -36,6 +36,7 @@ type streamConnectionRS struct { type TFStreamConnectionModel struct { ID types.String `tfsdk:"id"` ProjectID types.String `tfsdk:"project_id"` + WorkspaceName types.String `tfsdk:"workspace_name"` InstanceName types.String `tfsdk:"instance_name"` ConnectionName types.String `tfsdk:"connection_name"` Type types.String `tfsdk:"type"` @@ -128,6 +129,17 @@ func (r *streamConnectionRS) Schema(ctx context.Context, req resource.SchemaRequ conversion.UpdateSchemaDescription(&resp.Schema) } +// getWorkspaceOrInstanceName returns the value of workspace_name if set, otherwise the value of the deprecated instance_name +func getWorkspaceOrInstanceName(model *TFStreamConnectionModel) string { + if !model.WorkspaceName.IsNull() && !model.WorkspaceName.IsUnknown() { + return model.WorkspaceName.ValueString() + } + if !model.InstanceName.IsNull() && !model.InstanceName.IsUnknown() { + return model.InstanceName.ValueString() + } + return "" +} + func (r *streamConnectionRS) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { var streamConnectionPlan TFStreamConnectionModel resp.Diagnostics.Append(req.Plan.Get(ctx, &streamConnectionPlan)...) 
@@ -137,19 +149,27 @@ func (r *streamConnectionRS) Create(ctx context.Context, req resource.CreateRequ connV2 := r.Client.AtlasV2 projectID := streamConnectionPlan.ProjectID.ValueString() - instanceName := streamConnectionPlan.InstanceName.ValueString() + workspaceOrInstanceName := getWorkspaceOrInstanceName(&streamConnectionPlan) + if workspaceOrInstanceName == "" { + resp.Diagnostics.AddError("validation error", "either workspace_name or instance_name must be provided") + return + } + streamConnectionReq, diags := NewStreamConnectionReq(ctx, &streamConnectionPlan) if diags.HasError() { resp.Diagnostics.Append(diags...) return } - apiResp, _, err := connV2.StreamsApi.CreateStreamConnection(ctx, projectID, instanceName, streamConnectionReq).Execute() + apiResp, _, err := connV2.StreamsApi.CreateStreamConnection(ctx, projectID, workspaceOrInstanceName, streamConnectionReq).Execute() if err != nil { resp.Diagnostics.AddError("error creating resource", err.Error()) return } - newStreamConnectionModel, diags := NewTFStreamConnection(ctx, projectID, instanceName, &streamConnectionPlan.Authentication, apiResp) + instanceName := streamConnectionPlan.InstanceName.ValueString() + workspaceName := streamConnectionPlan.WorkspaceName.ValueString() + + newStreamConnectionModel, diags := NewTFStreamConnection(ctx, projectID, instanceName, workspaceName, &streamConnectionPlan.Authentication, apiResp) if diags.HasError() { resp.Diagnostics.Append(diags...) return @@ -166,9 +186,13 @@ func (r *streamConnectionRS) Read(ctx context.Context, req resource.ReadRequest, connV2 := r.Client.AtlasV2 projectID := streamConnectionState.ProjectID.ValueString() - instanceName := streamConnectionState.InstanceName.ValueString() + workspaceOrInstanceName := getWorkspaceOrInstanceName(&streamConnectionState) + if workspaceOrInstanceName == "" { + resp.Diagnostics.AddError("validation error", "either workspace_name or instance_name must be provided") + return + } connectionName := streamConnectionState.ConnectionName.ValueString() - apiResp, getResp, err := connV2.StreamsApi.GetStreamConnection(ctx, projectID, instanceName, connectionName).Execute() + apiResp, getResp, err := connV2.StreamsApi.GetStreamConnection(ctx, projectID, workspaceOrInstanceName, connectionName).Execute() if err != nil { if validate.StatusNotFound(getResp) { resp.State.RemoveResource(ctx) @@ -178,7 +202,9 @@ func (r *streamConnectionRS) Read(ctx context.Context, req resource.ReadRequest, return } - newStreamConnectionModel, diags := NewTFStreamConnection(ctx, projectID, instanceName, &streamConnectionState.Authentication, apiResp) + instanceName := streamConnectionState.InstanceName.ValueString() + workspaceName := streamConnectionState.WorkspaceName.ValueString() + newStreamConnectionModel, diags := NewTFStreamConnection(ctx, projectID, instanceName, workspaceName, &streamConnectionState.Authentication, apiResp) if diags.HasError() { resp.Diagnostics.Append(diags...) 
return @@ -195,20 +221,26 @@ func (r *streamConnectionRS) Update(ctx context.Context, req resource.UpdateRequ connV2 := r.Client.AtlasV2 projectID := streamConnectionPlan.ProjectID.ValueString() - instanceName := streamConnectionPlan.InstanceName.ValueString() + workspaceOrInstanceName := getWorkspaceOrInstanceName(&streamConnectionPlan) + if workspaceOrInstanceName == "" { + resp.Diagnostics.AddError("validation error", "either workspace_name or instance_name must be provided") + return + } connectionName := streamConnectionPlan.ConnectionName.ValueString() streamConnectionReq, diags := NewStreamConnectionUpdateReq(ctx, &streamConnectionPlan) if diags.HasError() { resp.Diagnostics.Append(diags...) return } - apiResp, _, err := connV2.StreamsApi.UpdateStreamConnection(ctx, projectID, instanceName, connectionName, streamConnectionReq).Execute() + apiResp, _, err := connV2.StreamsApi.UpdateStreamConnection(ctx, projectID, workspaceOrInstanceName, connectionName, streamConnectionReq).Execute() if err != nil { resp.Diagnostics.AddError("error updating resource", err.Error()) return } - newStreamConnectionModel, diags := NewTFStreamConnection(ctx, projectID, instanceName, &streamConnectionPlan.Authentication, apiResp) + instanceName := streamConnectionPlan.InstanceName.ValueString() + workspaceName := streamConnectionPlan.WorkspaceName.ValueString() + newStreamConnectionModel, diags := NewTFStreamConnection(ctx, projectID, instanceName, workspaceName, &streamConnectionPlan.Authentication, apiResp) if diags.HasError() { resp.Diagnostics.Append(diags...) return @@ -225,7 +257,11 @@ func (r *streamConnectionRS) Delete(ctx context.Context, req resource.DeleteRequ connV2 := r.Client.AtlasV2 projectID := streamConnectionState.ProjectID.ValueString() - instanceName := streamConnectionState.InstanceName.ValueString() + instanceName := getWorkspaceOrInstanceName(streamConnectionState) + if instanceName == "" { + resp.Diagnostics.AddError("validation error", "either workspace_name or instance_name must be provided") + return + } connectionName := streamConnectionState.ConnectionName.ValueString() if err := DeleteStreamConnection(ctx, connV2.StreamsApi, projectID, instanceName, connectionName, 10*time.Minute); err != nil { resp.Diagnostics.AddError("error deleting resource", err.Error()) @@ -234,27 +270,27 @@ } func (r *streamConnectionRS) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { - instanceName, projectID, connectionName, err := splitStreamConnectionImportID(req.ID) + workspaceName, projectID, connectionName, err := splitStreamConnectionImportID(req.ID) if err != nil { resp.Diagnostics.AddError("error splitting stream connection import ID", err.Error()) return } - - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_name"), instanceName)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_name"), workspaceName)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("workspace_name"), workspaceName)...) resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectID)...) resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("connection_name"), connectionName)...) 
} -func splitStreamConnectionImportID(id string) (instanceName, projectID, connectionName string, err error) { +func splitStreamConnectionImportID(id string) (workspaceName, projectID, connectionName string, err error) { var re = regexp.MustCompile(`^(.*)-([0-9a-fA-F]{24})-(.*)$`) parts := re.FindStringSubmatch(id) if len(parts) != 4 { - err = errors.New("use the format {instance_name}-{project_id}-{connection_name}") + err = errors.New("use the format {workspace_name}-{project_id}-{connection_name}") return } - instanceName = parts[1] + workspaceName = parts[1] projectID = parts[2] connectionName = parts[3] return diff --git a/internal/service/streamconnection/resource_stream_connection_migration_test.go b/internal/service/streamconnection/resource_stream_connection_migration_test.go index c0f8b18e93..ddf497b903 100644 --- a/internal/service/streamconnection/resource_stream_connection_migration_test.go +++ b/internal/service/streamconnection/resource_stream_connection_migration_test.go @@ -9,10 +9,10 @@ import ( func TestMigStreamRSStreamConnection_kafkaPlaintext(t *testing.T) { mig.SkipIfVersionBelow(t, "1.16.0") // when reached GA - mig.CreateAndRunTest(t, testCaseKafkaPlaintext(t, "-mig")) + mig.CreateAndRunTest(t, testCaseKafkaPlaintextMigration(t)) } func TestMigStreamRSStreamConnection_cluster(t *testing.T) { mig.SkipIfVersionBelow(t, "1.16.0") // when reached GA - mig.CreateAndRunTest(t, testCaseCluster(t, "-mig")) + mig.CreateAndRunTest(t, testCaseClusterMigration(t)) } diff --git a/internal/service/streamconnection/resource_stream_connection_test.go b/internal/service/streamconnection/resource_stream_connection_test.go index e7152df52d..d972386ffe 100644 --- a/internal/service/streamconnection/resource_stream_connection_test.go +++ b/internal/service/streamconnection/resource_stream_connection_test.go @@ -19,7 +19,7 @@ const ( dataSourceConfig = ` data "mongodbatlas_stream_connection" "test" { project_id = mongodbatlas_stream_connection.test.project_id - instance_name = mongodbatlas_stream_connection.test.instance_name + workspace_name = mongodbatlas_stream_connection.test.workspace_name connection_name = mongodbatlas_stream_connection.test.connection_name } ` @@ -27,10 +27,35 @@ data "mongodbatlas_stream_connection" "test" { dataSourcePluralConfig = ` data "mongodbatlas_stream_connections" "test" { project_id = mongodbatlas_stream_connection.test.project_id - instance_name = mongodbatlas_stream_connection.test.instance_name + workspace_name = mongodbatlas_stream_connection.test.workspace_name } ` dataSourcePluralConfigWithPage = ` +data "mongodbatlas_stream_connections" "test" { + project_id = mongodbatlas_stream_connection.test.project_id + workspace_name = mongodbatlas_stream_connection.test.workspace_name + page_num = 2 # no specific reason for 2, just to test pagination + items_per_page = 1 + } + ` +) + +const ( + dataSourceConfigMigration = ` +data "mongodbatlas_stream_connection" "test" { + project_id = mongodbatlas_stream_connection.test.project_id + instance_name = mongodbatlas_stream_connection.test.instance_name + connection_name = mongodbatlas_stream_connection.test.connection_name +} +` + + dataSourcePluralConfigMigration = ` +data "mongodbatlas_stream_connections" "test" { + project_id = mongodbatlas_stream_connection.test.project_id + instance_name = mongodbatlas_stream_connection.test.instance_name +} +` + dataSourcePluralConfigWithPageMigration = ` data "mongodbatlas_stream_connections" "test" { project_id = mongodbatlas_stream_connection.test.project_id 
instance_name = mongodbatlas_stream_connection.test.instance_name @@ -65,15 +90,15 @@ var ( ) func TestAccStreamRSStreamConnection_kafkaPlaintext(t *testing.T) { - testCase := testCaseKafkaPlaintext(t, "") + testCase := testCaseKafkaPlaintext(t) resource.ParallelTest(t, *testCase) } -func testCaseKafkaPlaintext(t *testing.T, nameSuffix string) *resource.TestCase { +func testCaseKafkaPlaintext(t *testing.T) *resource.TestCase { t.Helper() var ( projectID, instanceName = acc.ProjectIDExecutionWithStreamInstance(t) - connectionName = "kafka-conn-plaintext" + nameSuffix + connectionName = "kafka-conn-plaintext" ) return &resource.TestCase{ PreCheck: func() { acc.PreCheckBasic(t) }, @@ -83,17 +108,55 @@ func testCaseKafkaPlaintext(t *testing.T, nameSuffix string) *resource.TestCase { Config: dataSourcesConfig + configureKafka(fmt.Sprintf("%q", projectID), instanceName, connectionName, getKafkaAuthenticationConfig("PLAIN", "user", "rawpassword", "", "", "", "", "", ""), "localhost:9092,localhost:9092", "earliest", "", false), Check: resource.ComposeAggregateTestCheckFunc( - checkKafkaAttributes(resourceName, instanceName, connectionName, "user", "rawpassword", "localhost:9092,localhost:9092", "earliest", networkingTypePublic, false, true), - checkKafkaAttributes(dataSourceName, instanceName, connectionName, "user", "rawpassword", "localhost:9092,localhost:9092", "earliest", networkingTypePublic, false, false), - streamConnectionsAttributeChecks(pluralDataSourceName, nil, nil), + checkKafkaAttributesAcceptance(resourceName, instanceName, connectionName, "user", "rawpassword", "localhost:9092,localhost:9092", "earliest", networkingTypePublic, false, true), + checkKafkaAttributesAcceptance(dataSourceName, instanceName, connectionName, "user", "rawpassword", "localhost:9092,localhost:9092", "earliest", networkingTypePublic, false, false), + streamConnectionsAttributeChecksAcceptance(pluralDataSourceName, nil, nil), ), }, { Config: dataSourcesWithPagination + configureKafka(fmt.Sprintf("%q", projectID), instanceName, connectionName, getKafkaAuthenticationConfig("PLAIN", "user2", "otherpassword", "", "", "", "", "", ""), "localhost:9093", "latest", kafkaNetworkingPublic, false), Check: resource.ComposeAggregateTestCheckFunc( - checkKafkaAttributes(resourceName, instanceName, connectionName, "user2", "otherpassword", "localhost:9093", "latest", networkingTypePublic, false, true), - checkKafkaAttributes(dataSourceName, instanceName, connectionName, "user2", "otherpassword", "localhost:9093", "latest", networkingTypePublic, false, false), - streamConnectionsAttributeChecks(pluralDataSourceName, conversion.Pointer(2), conversion.Pointer(1)), + checkKafkaAttributesAcceptance(resourceName, instanceName, connectionName, "user2", "otherpassword", "localhost:9093", "latest", networkingTypePublic, false, true), + checkKafkaAttributesAcceptance(dataSourceName, instanceName, connectionName, "user2", "otherpassword", "localhost:9093", "latest", networkingTypePublic, false, false), + streamConnectionsAttributeChecksAcceptance(pluralDataSourceName, conversion.Pointer(2), conversion.Pointer(1)), + ), + }, + { + ResourceName: resourceName, + ImportStateIdFunc: checkStreamConnectionImportStateIDFunc(resourceName), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"authentication.password"}, + }, + }, + } +} + +func testCaseKafkaPlaintextMigration(t *testing.T) *resource.TestCase { + t.Helper() + var ( + projectID, instanceName = acc.ProjectIDExecutionWithStreamInstance(t) + 
connectionName = "kafka-conn-plaintext-mig" + ) + return &resource.TestCase{ + PreCheck: func() { acc.PreCheckBasic(t) }, + ProtoV6ProviderFactories: acc.TestAccProviderV6Factories, + CheckDestroy: CheckDestroyStreamConnection, + Steps: []resource.TestStep{ + { + Config: dataSourceConfigMigration + dataSourcePluralConfigMigration + configureKafkaMigration(fmt.Sprintf("%q", projectID), instanceName, connectionName, getKafkaAuthenticationConfig("PLAIN", "user", "rawpassword", "", "", "", "", "", ""), "localhost:9092,localhost:9092", "earliest", "", false), + Check: resource.ComposeAggregateTestCheckFunc( + checkKafkaAttributesMigration(resourceName, instanceName, connectionName, "user", "rawpassword", "localhost:9092,localhost:9092", "earliest", networkingTypePublic, false, true), + checkKafkaAttributesMigration(dataSourceName, instanceName, connectionName, "user", "rawpassword", "localhost:9092,localhost:9092", "earliest", networkingTypePublic, false, false), + streamConnectionsAttributeChecksMigration(pluralDataSourceName, nil, nil), + ), + }, + { + Config: dataSourceConfigMigration + dataSourcePluralConfigWithPageMigration + configureKafka(fmt.Sprintf("%q", projectID), instanceName, connectionName, getKafkaAuthenticationConfig("PLAIN", "user2", "otherpassword", "", "", "", "", "", ""), "localhost:9093", "latest", kafkaNetworkingPublic, false), + Check: resource.ComposeAggregateTestCheckFunc( + checkKafkaAttributesMigration(resourceName, instanceName, connectionName, "user2", "otherpassword", "localhost:9093", "latest", networkingTypePublic, false, true), + checkKafkaAttributesMigration(dataSourceName, instanceName, connectionName, "user2", "otherpassword", "localhost:9093", "latest", networkingTypePublic, false, false), + streamConnectionsAttributeChecksMigration(pluralDataSourceName, conversion.Pointer(2), conversion.Pointer(1)), ), }, { @@ -122,16 +185,16 @@ func TestAccStreamRSStreamConnection_kafkaOAuthBearer(t *testing.T) { { Config: dataSourcesConfig + configureKafka(fmt.Sprintf("%q", projectID), instanceName, connectionName, getKafkaAuthenticationConfig("OAUTHBEARER", "", "", tokenEndpointURL, clientID, clientSecret, scope, saslOauthbearerExtentions, method), "localhost:9092,localhost:9092", "earliest", "", false), Check: resource.ComposeAggregateTestCheckFunc( - checkKafkaOAuthAttributes(resourceName, instanceName, connectionName, tokenEndpointURL, clientID, clientSecret, scope, saslOauthbearerExtentions, method, "localhost:9092,localhost:9092", "earliest", networkingTypePublic, false, true), - checkKafkaOAuthAttributes(dataSourceName, instanceName, connectionName, tokenEndpointURL, clientID, clientSecret, scope, saslOauthbearerExtentions, method, "localhost:9092,localhost:9092", "earliest", networkingTypePublic, false, false), + checkKafkaOAuthAttributes(resourceName, connectionName, tokenEndpointURL, clientID, clientSecret, scope, saslOauthbearerExtentions, method, "localhost:9092,localhost:9092", "earliest", networkingTypePublic, false, true), + checkKafkaOAuthAttributes(dataSourceName, connectionName, tokenEndpointURL, clientID, clientSecret, scope, saslOauthbearerExtentions, method, "localhost:9092,localhost:9092", "earliest", networkingTypePublic, false, false), streamConnectionsAttributeChecks(pluralDataSourceName, nil, nil), ), }, { Config: dataSourcesWithPagination + configureKafka(fmt.Sprintf("%q", projectID), instanceName, connectionName, getKafkaAuthenticationConfig("OAUTHBEARER", "", "", tokenEndpointURL, "clientId2", "clientSecret", scope, saslOauthbearerExtentions, 
method), "localhost:9093", "latest", kafkaNetworkingPublic, false), Check: resource.ComposeAggregateTestCheckFunc( - checkKafkaOAuthAttributes(resourceName, instanceName, connectionName, tokenEndpointURL, "clientId2", "clientSecret", scope, saslOauthbearerExtentions, method, "localhost:9093", "latest", networkingTypePublic, false, true), - checkKafkaOAuthAttributes(dataSourceName, instanceName, connectionName, tokenEndpointURL, "clientId2", "clientSecret", scope, saslOauthbearerExtentions, method, "localhost:9093", "latest", networkingTypePublic, false, false), + checkKafkaOAuthAttributes(resourceName, connectionName, tokenEndpointURL, "clientId2", "clientSecret", scope, saslOauthbearerExtentions, method, "localhost:9093", "latest", networkingTypePublic, false, true), + checkKafkaOAuthAttributes(dataSourceName, connectionName, tokenEndpointURL, "clientId2", "clientSecret", scope, saslOauthbearerExtentions, method, "localhost:9093", "latest", networkingTypePublic, false, false), streamConnectionsAttributeChecks(pluralDataSourceName, conversion.Pointer(2), conversion.Pointer(1)), ), }, @@ -166,7 +229,7 @@ func TestAccStreamRSStreamConnection_kafkaNetworkingVPC(t *testing.T) { Steps: []resource.TestStep{ { Config: networkPeeringConfig + configureKafka("mongodbatlas_network_peering.test.project_id", instanceName, "kafka-conn-vpc", getKafkaAuthenticationConfig("PLAIN", "user", "rawpassword", "", "", "", "", "", ""), "localhost:9092", "earliest", kafkaNetworkingVPC, true), - Check: checkKafkaAttributes(resourceName, instanceName, "kafka-conn-vpc", "user", "rawpassword", "localhost:9092", "earliest", networkingTypeVPC, true, true), + Check: checkKafkaAttributesAcceptance(resourceName, instanceName, "kafka-conn-vpc", "user", "rawpassword", "localhost:9092", "earliest", networkingTypeVPC, true, true), }, { ResourceName: resourceName, @@ -198,8 +261,8 @@ func TestAccStreamRSStreamConnection_kafkaSSL(t *testing.T) { { Config: fmt.Sprintf("%s\n%s", configureKafka(fmt.Sprintf("%q", projectID), instanceName, "kafka-conn-ssl", getKafkaAuthenticationConfig("PLAIN", "user", "rawpassword", "", "", "", "", "", ""), "localhost:9092", "earliest", kafkaNetworkingPublic, true), dataSourceConfig), Check: resource.ComposeAggregateTestCheckFunc( - checkKafkaAttributes(resourceName, instanceName, "kafka-conn-ssl", "user", "rawpassword", "localhost:9092", "earliest", networkingTypePublic, true, true), - checkKafkaAttributes(dataSourceName, instanceName, "kafka-conn-ssl", "user", "rawpassword", "localhost:9092", "earliest", networkingTypePublic, true, false), + checkKafkaAttributesAcceptance(resourceName, instanceName, "kafka-conn-ssl", "user", "rawpassword", "localhost:9092", "earliest", networkingTypePublic, true, true), + checkKafkaAttributesAcceptance(dataSourceName, instanceName, "kafka-conn-ssl", "user", "rawpassword", "localhost:9092", "earliest", networkingTypePublic, true, false), ), }, // cannot change networking access type once set @@ -219,16 +282,16 @@ func TestAccStreamRSStreamConnection_kafkaSSL(t *testing.T) { } func TestAccStreamRSStreamConnection_cluster(t *testing.T) { - testCase := testCaseCluster(t, "") + testCase := testCaseCluster(t) resource.ParallelTest(t, *testCase) } -func testCaseCluster(t *testing.T, nameSuffix string) *resource.TestCase { +func testCaseCluster(t *testing.T) *resource.TestCase { t.Helper() var ( projectID, clusterName = acc.ClusterNameExecution(t, false) _, instanceName = acc.ProjectIDExecutionWithStreamInstance(t) - connectionName = "conn-cluster" + nameSuffix + 
connectionName = "conn-cluster" ) return &resource.TestCase{ PreCheck: func() { acc.PreCheckBasic(t) }, @@ -238,8 +301,37 @@ func testCaseCluster(t *testing.T, nameSuffix string) *resource.TestCase { { Config: dataSourcesConfig + configureCluster(projectID, instanceName, connectionName, clusterName), Check: resource.ComposeAggregateTestCheckFunc( - checkClusterAttributes(resourceName, clusterName), - checkClusterAttributes(dataSourceName, clusterName), + checkClusterAttributesAcceptance(resourceName, clusterName), + checkClusterAttributesAcceptance(dataSourceName, clusterName), + ), + }, + { + ResourceName: resourceName, + ImportStateIdFunc: checkStreamConnectionImportStateIDFunc(resourceName), + ImportState: true, + ImportStateVerify: true, + }, + }, + } +} + +func testCaseClusterMigration(t *testing.T) *resource.TestCase { + t.Helper() + var ( + projectID, clusterName = acc.ClusterNameExecution(t, false) + _, instanceName = acc.ProjectIDExecutionWithStreamInstance(t) + connectionName = "conn-cluster-mig" + ) + return &resource.TestCase{ + PreCheck: func() { acc.PreCheckBasic(t) }, + ProtoV6ProviderFactories: acc.TestAccProviderV6Factories, + CheckDestroy: CheckDestroyStreamConnection, + Steps: []resource.TestStep{ + { + Config: dataSourceConfigMigration + dataSourcePluralConfigMigration + configureClusterMigration(projectID, instanceName, connectionName, clusterName), + Check: resource.ComposeAggregateTestCheckFunc( + checkClusterAttributesMigration(resourceName, clusterName), + checkClusterAttributesMigration(dataSourceName, clusterName), ), }, { @@ -282,10 +374,10 @@ func TestAccStreamRSStreamConnection_sample(t *testing.T) { func TestAccStreamStreamConnection_https(t *testing.T) { var ( - projectID, instanceName = acc.ProjectIDExecutionWithStreamInstance(t) - url = "https://example.com" - updatedURL = "https://example2.com" - headerStr = `headers = { + projectID, workspaceName = acc.ProjectIDExecutionWithStreamInstance(t) + url = "https://example.com" + updatedURL = "https://example2.com" + headerStr = `headers = { Authorization = "Bearer token" key1 = "value1" }` @@ -300,26 +392,26 @@ func TestAccStreamStreamConnection_https(t *testing.T) { CheckDestroy: CheckDestroyStreamConnection, Steps: []resource.TestStep{ { - Config: configureHTTPS(projectID, instanceName, url, headerStr), + Config: configureHTTPS(projectID, workspaceName, url, headerStr), Check: resource.ComposeAggregateTestCheckFunc( - checkHTTPSAttributes(instanceName, url), + checkHTTPSAttributes(workspaceName, url), resource.TestCheckResourceAttr(resourceName, "headers.%", "2"), resource.TestCheckResourceAttr(resourceName, "headers.Authorization", "Bearer token"), resource.TestCheckResourceAttr(resourceName, "headers.key1", "value1"), ), }, { - Config: configureHTTPS(projectID, instanceName, updatedURL, updatedHeaderStr), + Config: configureHTTPS(projectID, workspaceName, updatedURL, updatedHeaderStr), Check: resource.ComposeAggregateTestCheckFunc( - checkHTTPSAttributes(instanceName, updatedURL), + checkHTTPSAttributes(workspaceName, updatedURL), resource.TestCheckResourceAttr(resourceName, "headers.%", "1"), resource.TestCheckResourceAttr(resourceName, "headers.updatedKey", "updatedValue"), ), }, { - Config: configureHTTPS(projectID, instanceName, updatedURL, emptyHeaders), + Config: configureHTTPS(projectID, workspaceName, updatedURL, emptyHeaders), Check: resource.ComposeAggregateTestCheckFunc( - checkHTTPSAttributes(instanceName, updatedURL), + checkHTTPSAttributes(workspaceName, updatedURL), 
resource.TestCheckResourceAttr(resourceName, "headers.%", "0"), ), }, @@ -362,7 +454,7 @@ func TestAccStreamPrivatelinkEndpoint_streamConnection(t *testing.T) { %[1]s %[2]s `, privatelinkConfig, configureKafka(fmt.Sprintf("%q", projectID), instanceName, "kafka-conn-privatelink", getKafkaAuthenticationConfig("PLAIN", "user", "rawpassword", "", "", "", "", "", ""), "localhost:9092", "earliest", kafkaNetworkingPrivatelink, true)), - Check: checkKafkaAttributes(resourceName, instanceName, "kafka-conn-privatelink", "user", "rawpassword", "localhost:9092", "earliest", networkingTypePrivatelink, true, true), + Check: checkKafkaAttributesAcceptance(resourceName, instanceName, "kafka-conn-privatelink", "user", "rawpassword", "localhost:9092", "earliest", networkingTypePrivatelink, true, true), }, { ResourceName: resourceName, @@ -401,6 +493,58 @@ func TestAccStreamRSStreamConnection_AWSLambda(t *testing.T) { }) } +func TestAccStreamRSStreamConnection_instanceName(t *testing.T) { + var ( + projectID, instanceName = acc.ProjectIDExecutionWithStreamInstance(t) + connectionName = acc.RandomName() + ) + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acc.PreCheckBasic(t) }, + ProtoV6ProviderFactories: acc.TestAccProviderV6Factories, + CheckDestroy: CheckDestroyStreamConnection, + Steps: []resource.TestStep{ + { + Config: configureKafkaWithInstanceName(projectID, instanceName, connectionName, "user", "password", "localhost:9092"), + Check: resource.ComposeAggregateTestCheckFunc( + checkStreamConnectionExists(), + resource.TestCheckResourceAttr(resourceName, "instance_name", instanceName), + resource.TestCheckResourceAttr(resourceName, "connection_name", connectionName), + resource.TestCheckResourceAttr(resourceName, "type", "Kafka"), + resource.TestCheckNoResourceAttr(resourceName, "workspace_name"), + ), + }, + { + ResourceName: resourceName, + ImportStateIdFunc: checkStreamConnectionImportStateIDFunc(resourceName), + ImportState: true, + ImportStateVerify: true, + // When the new resource is imported, it will contain workspace_name instead of instance_name. 
This is expected, so both fields are ignored here. + ImportStateVerifyIgnore: []string{"authentication.password", "instance_name", "workspace_name"}, + }, + }, + }) +} + +func TestAccStreamRSStreamConnection_conflictingFields(t *testing.T) { + var ( + projectID, instanceName = acc.ProjectIDExecutionWithStreamInstance(t) + connectionName = "conflict-test" + ) + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acc.PreCheckBasic(t) }, + ProtoV6ProviderFactories: acc.TestAccProviderV6Factories, + CheckDestroy: CheckDestroyStreamConnection, + Steps: []resource.TestStep{ + { + Config: configureKafkaWithInstanceAndWorkspaceName(projectID, instanceName, connectionName, "user", "password", "localhost:9092"), + ExpectError: regexp.MustCompile("Attribute \"workspace_name\" cannot be specified when \"instance_name\" is\n.*specified"), + }, + }, + }) +} + func getKafkaAuthenticationConfig(mechanism, username, password, tokenEndpointURL, clientID, clientSecret, scope, saslOauthbearerExtensions, method string) string { if mechanism == "PLAIN" { return fmt.Sprintf(`authentication = { @@ -420,7 +564,38 @@ func getKafkaAuthenticationConfig(mechanism, username, password, tokenEndpointUR }`, mechanism, method, tokenEndpointURL, clientID, clientSecret, scope, saslOauthbearerExtensions) } -func configureKafka(projectRef, instanceName, connectionName, authenticationConfig, bootstrapServers, configValue, networkingConfig string, useSSL bool) string { +func configureKafka(projectRef, workspaceName, connectionName, authenticationConfig, bootstrapServers, configValue, networkingConfig string, useSSL bool) string { + securityConfig := ` + security = { + protocol = "SASL_PLAINTEXT" + }` + + if useSSL { + securityConfig = fmt.Sprintf(` + security = { + broker_public_certificate = %q + protocol = "SASL_SSL" + }`, DummyCACert) + } + return fmt.Sprintf(` + resource "mongodbatlas_stream_connection" "test" { + project_id = %[1]s + workspace_name = %[2]q + connection_name = %[3]q + type = "Kafka" + %[4]s + bootstrap_servers = %[5]q + config = { + "auto.offset.reset": %[6]q + } + %[7]s + %[8]s + } + `, projectRef, workspaceName, connectionName, authenticationConfig, bootstrapServers, configValue, networkingConfig, securityConfig) +} + +// configureKafkaMigration uses instance_name for compatibility with older provider versions +func configureKafkaMigration(projectRef, instanceName, connectionName, authenticationConfig, bootstrapServers, configValue, networkingConfig string, useSSL bool) string { securityConfig := ` security = { protocol = "SASL_PLAINTEXT" @@ -450,37 +625,85 @@ func configureKafka(projectRef, instanceName, connectionName, authenticationConf `, projectRef, instanceName, connectionName, authenticationConfig, bootstrapServers, configValue, networkingConfig, securityConfig) } -func configureSampleStream(projectID, instanceName, sampleName string) string { - streamInstanceConfig := acc.StreamInstanceConfig(projectID, instanceName, "VIRGINIA_USA", "AWS") +func configureSampleStream(projectID, workspaceName, sampleName string) string { + streamInstanceConfig := acc.StreamInstanceConfig(projectID, workspaceName, "VIRGINIA_USA", "AWS") return fmt.Sprintf(` %[1]s resource "mongodbatlas_stream_connection" "test" { project_id = mongodbatlas_stream_instance.test.project_id - instance_name = mongodbatlas_stream_instance.test.instance_name + workspace_name = mongodbatlas_stream_instance.test.instance_name # the stream instance resource still exposes instance_name connection_name = %[2]q type = "Sample" } `, streamInstanceConfig, sampleName) }
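+// The helpers below intentionally keep using the deprecated instance_name
+// attribute so that backwards compatibility and the conflict validation
+// against workspace_name stay covered.
+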
+// configureKafkaWithInstanceName verifies that the deprecated instance_name field is still functional +func configureKafkaWithInstanceName(projectID, instanceName, connectionName, username, password, bootstrapServers string) string { + return fmt.Sprintf(` + resource "mongodbatlas_stream_connection" "test" { + project_id = %[1]q + instance_name = %[2]q + connection_name = %[3]q + type = "Kafka" + authentication = { + mechanism = "PLAIN" + username = %[4]q + password = %[5]q + } + bootstrap_servers = %[6]q + config = { + "auto.offset.reset": "earliest" + } + security = { + protocol = "SASL_PLAINTEXT" + } + } + `, projectID, instanceName, connectionName, username, password, bootstrapServers) +} + +// configureKafkaWithInstanceAndWorkspaceName sets both fields so the conflict validation can be asserted +func configureKafkaWithInstanceAndWorkspaceName(projectID, instanceName, connectionName, username, password, bootstrapServers string) string { + return fmt.Sprintf(` + resource "mongodbatlas_stream_connection" "test" { + project_id = %[1]q + instance_name = %[2]q + workspace_name = %[2]q + connection_name = %[3]q + type = "Kafka" + authentication = { + mechanism = "PLAIN" + username = %[4]q + password = %[5]q + } + bootstrap_servers = %[6]q + config = { + "auto.offset.reset": "earliest" + } + security = { + protocol = "SASL_PLAINTEXT" + } + } + `, projectID, instanceName, connectionName, username, password, bootstrapServers) +} + func checkSampleStreamAttributes( - resourceName, instanceName, sampleName string) resource.TestCheckFunc { + resourceName, workspaceName, sampleName string) resource.TestCheckFunc { resourceChecks := []resource.TestCheckFunc{ checkStreamConnectionExists(), resource.TestCheckResourceAttrSet(resourceName, "project_id"), - resource.TestCheckResourceAttr(resourceName, "instance_name", instanceName), + resource.TestCheckResourceAttr(resourceName, "workspace_name", workspaceName), resource.TestCheckResourceAttr(resourceName, "connection_name", sampleName), resource.TestCheckResourceAttr(resourceName, "type", "Sample"), } return resource.ComposeAggregateTestCheckFunc(resourceChecks...)
} -func checkHTTPSAttributes(instanceName, url string) resource.TestCheckFunc { +func checkHTTPSAttributes(workspaceName, url string) resource.TestCheckFunc { setChecks := []string{"project_id"} mapChecks := map[string]string{ - "instance_name": instanceName, + "workspace_name": workspaceName, "connection_name": "ConnectionNameHttps", "type": "Https", "url": url, @@ -490,7 +713,7 @@ func checkHTTPSAttributes(instanceName, url string) resource.TestCheckFunc { } func checkKafkaAttributes( - resourceName, instanceName, connectionName, username, password, bootstrapServers, configValue, networkingType string, usesSSL, checkPassword bool) resource.TestCheckFunc { + resourceName, connectionName, username, password, bootstrapServers, configValue, networkingType string, usesSSL, checkPassword bool) resource.TestCheckFunc { authAttrs := map[string]string{ "authentication.mechanism": "PLAIN", "authentication.username": username, @@ -498,11 +721,11 @@ func checkKafkaAttributes( if checkPassword { authAttrs["authentication.password"] = password } - return checkKafkaConnectionAttributes(resourceName, instanceName, connectionName, bootstrapServers, configValue, networkingType, usesSSL, authAttrs) + return checkKafkaConnectionAttributes(resourceName, connectionName, bootstrapServers, configValue, networkingType, usesSSL, authAttrs) } func checkKafkaOAuthAttributes( - resourceName, instanceName, connectionName, tokenEndpointURL, clientID, clientSecret, scope, saslOauthbearerExtensions, method, bootstrapServers, configValue, networkingType string, usesSSL, checkClientSecret bool) resource.TestCheckFunc { + resourceName, connectionName, tokenEndpointURL, clientID, clientSecret, scope, saslOauthbearerExtensions, method, bootstrapServers, configValue, networkingType string, usesSSL, checkClientSecret bool) resource.TestCheckFunc { authAttrs := map[string]string{ "authentication.mechanism": "OAUTHBEARER", "authentication.method": method, @@ -514,16 +737,15 @@ func checkKafkaOAuthAttributes( if checkClientSecret { authAttrs["authentication.client_secret"] = clientSecret } - return checkKafkaConnectionAttributes(resourceName, instanceName, connectionName, bootstrapServers, configValue, networkingType, usesSSL, authAttrs) + return checkKafkaConnectionAttributes(resourceName, connectionName, bootstrapServers, configValue, networkingType, usesSSL, authAttrs) } -func checkKafkaConnectionAttributes(resourceName, instanceName, connectionName, bootstrapServers, configValue, networkingType string, usesSSL bool, authAttrs map[string]string) resource.TestCheckFunc { +func checkKafkaConnectionAttributes(resourceName, connectionName, bootstrapServers, configValue, networkingType string, usesSSL bool, authAttrs map[string]string) resource.TestCheckFunc { resourceChecks := []resource.TestCheckFunc{ checkStreamConnectionExists(), resource.TestCheckResourceAttrSet(resourceName, "project_id"), resource.TestCheckResourceAttr(resourceName, "connection_name", connectionName), resource.TestCheckResourceAttr(resourceName, "type", "Kafka"), - resource.TestCheckResourceAttr(resourceName, "instance_name", instanceName), resource.TestCheckResourceAttr(resourceName, "bootstrap_servers", bootstrapServers), resource.TestCheckResourceAttr(resourceName, "config.auto.offset.reset", configValue), } @@ -544,7 +766,36 @@ func checkKafkaConnectionAttributes(resourceName, instanceName, connectionName, return resource.ComposeAggregateTestCheckFunc(resourceChecks...) 
} -func configureCluster(projectID, instanceName, connectionName, clusterName string) string { +func checkKafkaAttributesMigration( + resourceName, instanceName, connectionName, username, password, bootstrapServers, configValue, networkingType string, usesSSL, checkPassword bool) resource.TestCheckFunc { + commonTests := checkKafkaAttributes(resourceName, connectionName, username, password, bootstrapServers, configValue, networkingType, usesSSL, checkPassword) + return resource.ComposeAggregateTestCheckFunc(commonTests, resource.TestCheckResourceAttr(resourceName, "instance_name", instanceName)) +} + +func checkKafkaAttributesAcceptance( + resourceName, workspaceName, connectionName, username, password, bootstrapServers, configValue, networkingType string, usesSSL, checkPassword bool) resource.TestCheckFunc { + commonTests := checkKafkaAttributes(resourceName, connectionName, username, password, bootstrapServers, configValue, networkingType, usesSSL, checkPassword) + return resource.ComposeAggregateTestCheckFunc(commonTests, resource.TestCheckResourceAttr(resourceName, "workspace_name", workspaceName)) +} + +func configureCluster(projectID, workspaceName, connectionName, clusterName string) string { + return fmt.Sprintf(` + resource "mongodbatlas_stream_connection" "test" { + project_id = %[1]q + workspace_name = %[2]q + connection_name = %[3]q + type = "Cluster" + cluster_name = %[4]q + db_role_to_execute = { + role = "atlasAdmin" + type = "BUILT_IN" + } + } + `, projectID, workspaceName, connectionName, clusterName) +} + +// configureClusterMigration uses instance_name for compatibility with older provider versions +func configureClusterMigration(projectID, instanceName, connectionName, clusterName string) string { return fmt.Sprintf(` resource "mongodbatlas_stream_connection" "test" { project_id = %[1]q @@ -560,11 +811,11 @@ func configureCluster(projectID, instanceName, connectionName, clusterName strin `, projectID, instanceName, connectionName, clusterName) } -func configureHTTPS(projectID, instanceName, url, headers string) string { +func configureHTTPS(projectID, workspaceName, url, headers string) string { return fmt.Sprintf(` resource "mongodbatlas_stream_connection" "test" { project_id = %[1]q - instance_name = %[2]q + workspace_name = %[2]q connection_name = "ConnectionNameHttps" type = "Https" url = %[3]q @@ -573,17 +824,16 @@ func configureHTTPS(projectID, instanceName, url, headers string) string { data "mongodbatlas_stream_connection" "test" { project_id = %[1]q - instance_name = %[2]q + workspace_name = %[2]q connection_name = mongodbatlas_stream_connection.test.connection_name } - `, projectID, instanceName, url, headers) + `, projectID, workspaceName, url, headers) } func checkClusterAttributes(resourceName, clusterName string) resource.TestCheckFunc { resourceChecks := []resource.TestCheckFunc{ checkStreamConnectionExists(), resource.TestCheckResourceAttrSet(resourceName, "project_id"), - resource.TestCheckResourceAttrSet(resourceName, "instance_name"), resource.TestCheckResourceAttrSet(resourceName, "connection_name"), resource.TestCheckResourceAttr(resourceName, "type", "Cluster"), resource.TestCheckResourceAttr(resourceName, "cluster_name", clusterName), @@ -593,13 +843,23 @@ func checkClusterAttributes(resourceName, clusterName string) resource.TestCheck return resource.ComposeAggregateTestCheckFunc(resourceChecks...) 
} +func checkClusterAttributesAcceptance(resourceName, clusterName string) resource.TestCheckFunc { + commonTests := checkClusterAttributes(resourceName, clusterName) + return resource.ComposeAggregateTestCheckFunc(commonTests, resource.TestCheckResourceAttrSet(resourceName, "workspace_name")) +} + +func checkClusterAttributesMigration(resourceName, clusterName string) resource.TestCheckFunc { + commonTests := checkClusterAttributes(resourceName, clusterName) + return resource.ComposeAggregateTestCheckFunc(commonTests, resource.TestCheckResourceAttrSet(resourceName, "instance_name")) +} + func checkStreamConnectionImportStateIDFunc(resourceName string) resource.ImportStateIdFunc { return func(s *terraform.State) (string, error) { rs, ok := s.RootModule().Resources[resourceName] if !ok { return "", fmt.Errorf("not found: %s", resourceName) } - return fmt.Sprintf("%s-%s-%s", rs.Primary.Attributes["instance_name"], rs.Primary.Attributes["project_id"], rs.Primary.Attributes["connection_name"]), nil + return rs.Primary.ID, nil } } @@ -610,11 +870,14 @@ func checkStreamConnectionExists() resource.TestCheckFunc { continue } projectID := rs.Primary.Attributes["project_id"] - instanceName := rs.Primary.Attributes["instance_name"] + workspaceName := rs.Primary.Attributes["workspace_name"] + if workspaceName == "" { + workspaceName = rs.Primary.Attributes["instance_name"] + } connectionName := rs.Primary.Attributes["connection_name"] - _, _, err := acc.ConnV2().StreamsApi.GetStreamConnection(context.Background(), projectID, instanceName, connectionName).Execute() + _, _, err := acc.ConnV2().StreamsApi.GetStreamConnection(context.Background(), projectID, workspaceName, connectionName).Execute() if err != nil { - return fmt.Errorf("stream connection (%s:%s:%s) does not exist", projectID, instanceName, connectionName) + return fmt.Errorf("stream connection (%s:%s:%s) does not exist", projectID, workspaceName, connectionName) } } return nil @@ -630,11 +893,14 @@ func CheckDestroyStreamConnection(state *terraform.State) error { continue } projectID := rs.Primary.Attributes["project_id"] - instanceName := rs.Primary.Attributes["instance_name"] + workspaceName := rs.Primary.Attributes["workspace_name"] + if workspaceName == "" { + workspaceName = rs.Primary.Attributes["instance_name"] + } connectionName := rs.Primary.Attributes["connection_name"] - _, _, err := acc.ConnV2().StreamsApi.GetStreamConnection(context.Background(), projectID, instanceName, connectionName).Execute() + _, _, err := acc.ConnV2().StreamsApi.GetStreamConnection(context.Background(), projectID, workspaceName, connectionName).Execute() if err == nil { - return fmt.Errorf("stream connection (%s:%s:%s) still exists", projectID, instanceName, connectionName) + return fmt.Errorf("stream connection (%s:%s:%s) still exists", projectID, workspaceName, connectionName) } } return nil @@ -701,7 +967,7 @@ func configureAWSLambda(projectID, instanceName, connectionName, awsIamRoleName resource "mongodbatlas_stream_connection" "test" { project_id = %[1]q - instance_name = %[2]q + workspace_name = %[2]q connection_name = %[3]q type = "AWSLambda" aws = { @@ -712,11 +978,11 @@ func configureAWSLambda(projectID, instanceName, connectionName, awsIamRoleName return config } -func checkAWSLambdaAttributes(resourceName, instanceName, connectionName string) resource.TestCheckFunc { +func checkAWSLambdaAttributes(resourceName, workspaceName, connectionName string) resource.TestCheckFunc { resourceChecks := []resource.TestCheckFunc{ 
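+ // project_id and aws.role_arn vary per execution, so the checks below only assert that they are set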
checkStreamConnectionExists(), resource.TestCheckResourceAttrSet(resourceName, "project_id"), - resource.TestCheckResourceAttr(resourceName, "instance_name", instanceName), + resource.TestCheckResourceAttr(resourceName, "workspace_name", workspaceName), resource.TestCheckResourceAttr(resourceName, "connection_name", connectionName), resource.TestCheckResourceAttr(resourceName, "type", "AWSLambda"), resource.TestCheckResourceAttrSet(resourceName, "aws.role_arn"), @@ -727,7 +993,6 @@ func checkAWSLambdaAttributes(resourceName, instanceName, connectionName string) func streamConnectionsAttributeChecks(resourceName string, pageNum, itemsPerPage *int) resource.TestCheckFunc { resourceChecks := []resource.TestCheckFunc{ resource.TestCheckResourceAttrSet(resourceName, "project_id"), - resource.TestCheckResourceAttrSet(resourceName, "instance_name"), resource.TestCheckResourceAttrSet(resourceName, "total_count"), resource.TestCheckResourceAttrSet(resourceName, "results.#"), } @@ -739,3 +1004,13 @@ func streamConnectionsAttributeChecks(resourceName string, pageNum, itemsPerPage } return resource.ComposeAggregateTestCheckFunc(resourceChecks...) } + +func streamConnectionsAttributeChecksAcceptance(resourceName string, pageNum, itemsPerPage *int) resource.TestCheckFunc { + commonTests := streamConnectionsAttributeChecks(resourceName, pageNum, itemsPerPage) + return resource.ComposeAggregateTestCheckFunc(commonTests, resource.TestCheckResourceAttrSet(resourceName, "workspace_name")) +} + +func streamConnectionsAttributeChecksMigration(resourceName string, pageNum, itemsPerPage *int) resource.TestCheckFunc { + commonTests := streamConnectionsAttributeChecks(resourceName, pageNum, itemsPerPage) + return resource.ComposeAggregateTestCheckFunc(commonTests, resource.TestCheckResourceAttrSet(resourceName, "instance_name")) +}
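+
+// workspaceNameFromState sketches a possible follow-up refactor (not wired into
+// the checks above, which currently inline the same logic): it captures the
+// workspace_name/instance_name fallback shared by checkStreamConnectionExists
+// and CheckDestroyStreamConnection.
+func workspaceNameFromState(rs *terraform.ResourceState) string {
+	if name := rs.Primary.Attributes["workspace_name"]; name != "" {
+		return name
+	}
+	return rs.Primary.Attributes["instance_name"]
+}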