Commits (25)
31304ee
doc: update stream_connection docs for workspace_name nomenclature
kpatel71716 Sep 24, 2025
4d06c1e
feat: add workspace_name to stream connection schema and models
kpatel71716 Sep 25, 2025
4530ef5
feat: add workspace_name to stream connection resource and datasource
kpatel71716 Sep 25, 2025
bdeb3ef
fix ds schema and model tests
kpatel71716 Sep 25, 2025
096bc32
doc: add changelog and fix example
kpatel71716 Sep 25, 2025
d04b471
doc: clean up documentation
kpatel71716 Sep 25, 2025
4254b34
Merge remote-tracking branch 'upstream' into CLOUDP-339895
kpatel71716 Oct 1, 2025
77e7b9e
address PR feedback
kpatel71716 Oct 2, 2025
f5f3d27
add deprecation message and add overriddden fields
kpatel71716 Oct 2, 2025
0ff807b
fix examples and tests
kpatel71716 Oct 3, 2025
7b9bd5a
add missing legacy InstanceName field in model
kpatel71716 Oct 3, 2025
65d04b1
add compute field to schema and update integration tests
kpatel71716 Oct 6, 2025
088c340
fix import id and acceptance tests
kpatel71716 Oct 9, 2025
9585758
fix schema and remaining acceptance tests
kpatel71716 Oct 10, 2025
6956236
refactor migration tests to still use instance_name
kpatel71716 Oct 13, 2025
4ab07bc
cleanup instance_name usage and refactor test assertions
kpatel71716 Oct 13, 2025
c9de513
use common streamsconnections attribute func
kpatel71716 Oct 13, 2025
f293a11
update migration test config contants
kpatel71716 Oct 13, 2025
ce30a96
reduce shadowing of instanceName var and fix Acc test
kpatel71716 Oct 13, 2025
437b816
refactor NewTFStreamConnection method
kpatel71716 Oct 14, 2025
aef4ffe
remove custom id mapping logic
kpatel71716 Oct 14, 2025
38991cd
fix comment syntax
kpatel71716 Oct 15, 2025
77badc2
Merge remote-tracking branch 'upstream' into CLOUDP-339895
kpatel71716 Oct 15, 2025
6ac12af
fix lint from merge
kpatel71716 Oct 15, 2025
4c667ae
fix migration test broken in merge resolution
kpatel71716 Oct 16, 2025
11 changes: 11 additions & 0 deletions .changelog/3610.txt
@@ -0,0 +1,11 @@
```release-note:note
resource/mongodbatlas_stream_connection: Deprecates the `instance_name` attribute. All configurations using `instance_name` should be updated to use the new `workspace_name` attribute instead

```
```release-note:note
data-source/mongodbatlas_stream_connection: Deprecates the `instance_name` attribute. All configurations using `instance_name` should be updated to use the new `workspace_name` attribute instead
```

```release-note:note
data-source/mongodbatlas_stream_connections: Deprecates the `instance_name` attribute. All configurations using `instance_name` should be updated to use the new `workspace_name` attribute instead
```
17 changes: 15 additions & 2 deletions docs/data-sources/stream_connection.md
@@ -11,17 +11,30 @@ subcategory: "Streams"
```terraform
data "mongodbatlas_stream_connection" "example" {
project_id = "<PROJECT_ID>"
instance_name = "<INSTANCE_NAME>"
workspace_name = "<WORKSPACE_NAME>"
connection_name = "<CONNECTION_NAME>"
}
```

### Example using workspace_name

```terraform
data "mongodbatlas_stream_connection" "example" {
project_id = "<PROJECT_ID>"
workspace_name = "<WORKSPACE_NAME>"
connection_name = "<CONNECTION_NAME>"
}
```

## Argument Reference

* `project_id` - (Required) Unique 24-hexadecimal digit string that identifies your project.
* `instance_name` - (Required) Human-readable label that identifies the stream instance.
* `instance_name` - (Deprecated) Human-readable label that identifies the stream instance. This attribute is deprecated and will be removed in a future major version in favor of `workspace_name`.
* `workspace_name` - (Optional) Human-readable label that identifies the stream instance. Conflicts with `instance_name`.
* `connection_name` - (Required) Human-readable label that identifies the stream connection. In the case of the Sample type, this is the name of the sample source.

~> **NOTE:** Either `workspace_name` or `instance_name` must be provided, but not both. These fields are functionally identical and `workspace_name` is an alias for `instance_name`. `workspace_name` should be used instead of `instance_name`.
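
For configurations that have not yet migrated, the deprecated attribute is still accepted; a minimal sketch with placeholder values (the `legacy` label is illustrative, and setting `instance_name` emits a deprecation warning at plan time):

```terraform
# Legacy form: `instance_name` is a deprecated alias for `workspace_name`.
data "mongodbatlas_stream_connection" "legacy" {
  project_id      = "<PROJECT_ID>"
  instance_name   = "<WORKSPACE_NAME>"
  connection_name = "<CONNECTION_NAME>"
}
```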

## Attributes Reference

* `type` - Type of connection. Can be `AWSLambda`, `Cluster`, `Https`, `Kafka` or `Sample`.
9 changes: 6 additions & 3 deletions docs/data-sources/stream_connections.md
@@ -11,14 +11,17 @@ subcategory: "Streams"
```terraform
data "mongodbatlas_stream_connections" "test" {
project_id = "<PROJECT_ID>"
instance_name = "<INSTANCE_NAME>"
workspace_name = "<WORKSPACE_NAME>"
}
```

## Argument Reference

* `project_id` - (Required) Unique 24-hexadecimal digit string that identifies your project.
* `instance_name` - (Required) Human-readable label that identifies the stream instance.
* `instance_name` - (Deprecated) Human-readable label that identifies the stream instance. This attribute is deprecated and will be removed in a future major version in favor of `workspace_name`.
* `workspace_name` - (Optional) Human-readable label that identifies the stream instance. Conflicts with `instance_name`.

~> **NOTE:** Either `workspace_name` or `instance_name` must be provided, but not both. These fields are functionally identical and `workspace_name` is an alias for `instance_name`. `workspace_name` should be used instead of `instance_name`.

* `page_num` - (Optional) Number of the page that displays the current set of the total objects that the response returns. Defaults to `1`.
* `items_per_page` - (Optional) Number of items that the response returns per page, up to a maximum of `500`. Defaults to `100`.
@@ -34,7 +37,7 @@ In addition to all arguments above, it also exports the following attributes:
### Stream Connection

* `project_id` - Unique 24-hexadecimal digit string that identifies your project.
* `instance_name` - Human-readable label that identifies the stream instance.
* `workspace_name` - Human-readable label that identifies the stream instance.
* `connection_name` - Human-readable label that identifies the stream connection. In the case of the Sample type, this is the name of the sample source.
* `type` - Type of connection. `AWSLambda`, `Cluster`, `Https`, `Kafka` or `Sample`.
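
A minimal sketch of consuming the returned list, assuming the `data "mongodbatlas_stream_connections" "test"` block from the example above (the output name is illustrative):

```terraform
# Collect the name of every connection in the stream workspace.
output "stream_connection_names" {
  value = [for c in data.mongodbatlas_stream_connections.test.results : c.connection_name]
}
```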

19 changes: 11 additions & 8 deletions docs/resources/stream_connection.md
@@ -16,7 +16,7 @@ subcategory: "Streams"
```terraform
resource "mongodbatlas_stream_connection" "test" {
project_id = var.project_id
instance_name = "InstanceName"
workspace_name = "WorkspaceName"
connection_name = "ConnectionName"
type = "Cluster"
cluster_name = "Cluster0"
@@ -31,7 +31,7 @@ resource "mongodbatlas_stream_connection" "test" {
```terraform
resource "mongodbatlas_stream_connection" "test" {
project_id = var.project_id
instance_name = "InstanceName"
workspace_name = "WorskpaceName"
connection_name = "ConnectionName"
type = "Cluster"
cluster_name = "OtherCluster"
@@ -44,7 +44,7 @@ resource "mongodbatlas_stream_connection" "test" {
```terraform
resource "mongodbatlas_stream_connection" "test" {
project_id = var.project_id
instance_name = "NewInstance"
workspace_name = "NewWorkspace"
connection_name = "KafkaConnection"
type = "Kafka"
authentication = {
@@ -99,7 +99,7 @@ resource "mongodbatlas_stream_connection" "example-kafka-oauthbearer" {
```terraform
resource "mongodbatlas_stream_connection" "test" {
project_id = var.project_id
instance_name = "NewInstance"
workspace_name = "NewWorkspace"
connection_name = "KafkaConnection"
type = "Kafka"
authentication = {
@@ -123,7 +123,7 @@ resource "mongodbatlas_stream_connection" "test" {
```terraform
resource "mongodbatlas_stream_connection" "test" {
project_id = var.project_id
instance_name = "NewInstance"
workspace_name = "NewWorkspace"
connection_name = "AWSLambdaConnection"
type = "AWSLambda"
aws = {
@@ -138,7 +138,7 @@ resource "mongodbatlas_stream_connection" "test" {
```terraform
resource "mongodbatlas_stream_connection" "example-https" {
project_id = var.project_id
instance_name = mongodbatlas_stream_instance.example.instance_name
workspace_name = mongodbatlas_stream_instance.example.instance_name
connection_name = "https_connection_tf_new"
type = "Https"
url = "https://example.com"
@@ -152,10 +152,13 @@ resource "mongodbatlas_stream_connection" "example-https" {
## Argument Reference

* `project_id` - (Required) Unique 24-hexadecimal digit string that identifies your project.
* `instance_name` - (Required) Human-readable label that identifies the stream instance.
* `instance_name` - (Deprecated) Human-readable label that identifies the stream instance. This attribute is deprecated and will be removed in a future major version in favor of `workspace_name`.
* `workspace_name` - (Optional) Human-readable label that identifies the stream instance. Conflicts with `instance_name`.
* `connection_name` - (Required) Human-readable label that identifies the stream connection. In the case of the Sample type, this is the name of the sample source.
* `type` - (Required) Type of connection. Can be `AWSLambda`, `Cluster`, `Https`, `Kafka` or `Sample`.

~> **NOTE:** Either `workspace_name` or `instance_name` must be provided, but not both. These fields are functionally identical and `workspace_name` is an alias for `instance_name`. `workspace_name` should be used instead of `instance_name`.
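
For an existing configuration, the migration is intended to be a one-line rename of the attribute; an illustrative sketch using the `Sample` connection type (the resource label and values are placeholders):

```terraform
resource "mongodbatlas_stream_connection" "solar_sample" {
  project_id      = var.project_id
  workspace_name  = "WorkspaceName" # previously: instance_name = "WorkspaceName"
  connection_name = "sample_stream_solar"
  type            = "Sample"
}
```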

If `type` is of value `Cluster` the following additional arguments are defined:
* `cluster_name` - Name of the cluster configured for this connection.
* `db_role_to_execute` - The name of a Built in or Custom DB Role to connect to an Atlas Cluster. See [DBRoleToExecute](#DBRoleToExecute).
@@ -209,7 +212,7 @@ If `type` is of value `Https` the following additional attributes are defined:

## Import

You can import a stream connection resource using the instance name, project ID, and connection name. The format must be `INSTANCE_NAME-PROJECT_ID-CONNECTION_NAME`. For example:
You can import a stream connection resource using the workspace name, project ID, and connection name. The format must be `WORKSPACE_NAME-PROJECT_ID-CONNECTION_NAME`. For example:

```
$ terraform import mongodbatlas_stream_connection.test "DefaultInstance-12251446ae5f3f6ec7968b13-NewConnection"
```
14 changes: 7 additions & 7 deletions examples/mongodbatlas_stream_connection/main.tf
@@ -9,7 +9,7 @@ resource "mongodbatlas_stream_instance" "example" {

resource "mongodbatlas_stream_connection" "example-cluster" {
project_id = var.project_id
instance_name = mongodbatlas_stream_instance.example.instance_name
workspace_name = mongodbatlas_stream_instance.example.instance_name
connection_name = "ClusterConnection"
type = "Cluster"
cluster_name = var.cluster_name
@@ -21,7 +21,7 @@ resource "mongodbatlas_stream_connection" "example-cluster" {

resource "mongodbatlas_stream_connection" "example-cross-project-cluster" {
project_id = var.project_id
instance_name = mongodbatlas_stream_instance.example.instance_name
workspace_name = mongodbatlas_stream_instance.example.instance_name
connection_name = "ClusterCrossProjectConnection"
type = "Cluster"
cluster_name = var.other_cluster
@@ -34,7 +34,7 @@ resource "mongodbatlas_stream_connection" "example-cross-project-cluster" {

resource "mongodbatlas_stream_connection" "example-kafka-plaintext" {
project_id = var.project_id
instance_name = mongodbatlas_stream_instance.example.instance_name
workspace_name = mongodbatlas_stream_instance.example.instance_name
connection_name = "KafkaPlaintextConnection"
type = "Kafka"
authentication = {
@@ -86,7 +86,7 @@ resource "mongodbatlas_stream_connection" "example-kafka-oauthbearer" {

resource "mongodbatlas_stream_connection" "example-kafka-ssl" {
project_id = var.project_id
instance_name = mongodbatlas_stream_instance.example.instance_name
workspace_name = mongodbatlas_stream_instance.example.instance_name
connection_name = "KafkaSSLConnection"
type = "Kafka"
authentication = {
@@ -106,14 +106,14 @@ resource "mongodbatlas_stream_connection" "example-kafka-ssl" {

resource "mongodbatlas_stream_connection" "example-sample" {
project_id = var.project_id
instance_name = mongodbatlas_stream_instance.example.instance_name
workspace_name = mongodbatlas_stream_instance.example.instance_name
connection_name = "sample_stream_solar"
type = "Sample"
}

resource "mongodbatlas_stream_connection" "example-aws-lambda" {
project_id = var.project_id
instance_name = mongodbatlas_stream_instance.example.instance_name
workspace_name = mongodbatlas_stream_instance.example.instance_name
connection_name = "AWSLambdaConnection"
type = "AWSLambda"
aws = {
@@ -135,7 +135,7 @@ resource "mongodbatlas_stream_connection" "example-https" {

data "mongodbatlas_stream_connection" "example-kafka-ssl" {
project_id = var.project_id
instance_name = mongodbatlas_stream_instance.example.instance_name
workspace_name = mongodbatlas_stream_instance.example.instance_name
connection_name = mongodbatlas_stream_connection.example-kafka-ssl.connection_name
}

@@ -2,8 +2,14 @@ package streamconnection

import (
"context"
"fmt"

"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
dsschema "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/mongodb/terraform-provider-mongodbatlas/internal/common/constant"
"github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion"
"github.com/mongodb/terraform-provider-mongodbatlas/internal/config"
)
@@ -25,7 +31,24 @@ type streamConnectionDS struct {

func (d *streamConnectionDS) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
resp.Schema = conversion.DataSourceSchemaFromResource(ResourceSchema(ctx), &conversion.DataSourceSchemaRequest{
RequiredFields: []string{"project_id", "instance_name", "connection_name"},
RequiredFields: []string{"project_id", "connection_name"},
OverridenFields: map[string]dsschema.Attribute{
"instance_name": dsschema.StringAttribute{
Optional: true,
MarkdownDescription: "Human-readable label that identifies the stream instance. Conflicts with `workspace_name`.",
DeprecationMessage: fmt.Sprintf(constant.DeprecationParamWithReplacement, "workspace_name"),
Validators: []validator.String{
stringvalidator.ConflictsWith(path.MatchRoot("workspace_name")),
},
},
"workspace_name": dsschema.StringAttribute{
Optional: true,
MarkdownDescription: "Human-readable label that identifies the stream instance. This is an alias for `instance_name`. Conflicts with `instance_name`.",
Validators: []validator.String{
stringvalidator.ConflictsWith(path.MatchRoot("instance_name")),
},
},
},
})
}

@@ -38,15 +61,21 @@ func (d *streamConnectionDS) Read(ctx context.Context, req datasource.ReadReques

connV2 := d.Client.AtlasV2
projectID := streamConnectionConfig.ProjectID.ValueString()
instanceName := streamConnectionConfig.InstanceName.ValueString()
workspaceOrInstanceName := getWorkspaceOrInstanceName(&streamConnectionConfig)
if workspaceOrInstanceName == "" {
resp.Diagnostics.AddError("validation error", "workspace_name must be provided")
return
}
connectionName := streamConnectionConfig.ConnectionName.ValueString()
apiResp, _, err := connV2.StreamsApi.GetStreamConnection(ctx, projectID, instanceName, connectionName).Execute()
apiResp, _, err := connV2.StreamsApi.GetStreamConnection(ctx, projectID, workspaceOrInstanceName, connectionName).Execute()
if err != nil {
resp.Diagnostics.AddError("error fetching resource", err.Error())
return
}

newStreamConnectionModel, diags := NewTFStreamConnection(ctx, projectID, instanceName, nil, apiResp)
instanceName := streamConnectionConfig.InstanceName.ValueString()
workspaceName := streamConnectionConfig.WorkspaceName.ValueString()
newStreamConnectionModel, diags := NewTFStreamConnection(ctx, projectID, instanceName, workspaceName, nil, apiResp)
if diags.HasError() {
resp.Diagnostics.Append(diags...)
return
@@ -4,8 +4,13 @@ import (
"context"
"fmt"

"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
dsschema "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/mongodb/terraform-provider-mongodbatlas/internal/common/constant"
"github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion"
"github.com/mongodb/terraform-provider-mongodbatlas/internal/config"
"go.mongodb.org/atlas-sdk/v20250312008/admin"
@@ -28,11 +33,39 @@ type streamConnectionsDS struct {

func (d *streamConnectionsDS) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
resp.Schema = conversion.PluralDataSourceSchemaFromResource(ResourceSchema(ctx), &conversion.PluralDataSourceSchemaRequest{
RequiredFields: []string{"project_id", "instance_name"},
RequiredFields: []string{"project_id"},
HasLegacyFields: true,
OverridenRootFields: map[string]dsschema.Attribute{
"instance_name": dsschema.StringAttribute{
Optional: true,
MarkdownDescription: "Human-readable label that identifies the stream instance. Conflicts with `workspace_name`.",
DeprecationMessage: fmt.Sprintf(constant.DeprecationParamWithReplacement, "workspace_name"),
Validators: []validator.String{
stringvalidator.ConflictsWith(path.MatchRoot("workspace_name")),
},
},
"workspace_name": dsschema.StringAttribute{
Optional: true,
MarkdownDescription: "Human-readable label that identifies the stream instance. This is an alias for `instance_name`. Conflicts with `instance_name`.",
Validators: []validator.String{
stringvalidator.ConflictsWith(path.MatchRoot("instance_name")),
},
},
},
})
}

// getWorkspaceOrInstanceNameForDS returns the workspace name from either instance_name or workspace_name field for datasource model
func getWorkspaceOrInstanceNameForDS(model *TFStreamConnectionsDSModel) string {
if !model.WorkspaceName.IsNull() && !model.WorkspaceName.IsUnknown() {
return model.WorkspaceName.ValueString()
}
if !model.InstanceName.IsNull() && !model.InstanceName.IsUnknown() {
return model.InstanceName.ValueString()
}
return ""
}

func (d *streamConnectionsDS) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
var streamConnectionsConfig TFStreamConnectionsDSModel
resp.Diagnostics.Append(req.Config.Get(ctx, &streamConnectionsConfig)...)
@@ -42,13 +75,17 @@ func (d *streamConnectionsDS) Read(ctx context.Context, req datasource.ReadReque

connV2 := d.Client.AtlasV2
projectID := streamConnectionsConfig.ProjectID.ValueString()
instanceName := streamConnectionsConfig.InstanceName.ValueString()
workspaceOrInstanceName := getWorkspaceOrInstanceNameForDS(&streamConnectionsConfig)
if workspaceOrInstanceName == "" {
resp.Diagnostics.AddError("validation error", "workspace_name must be provided")
return
}
itemsPerPage := streamConnectionsConfig.ItemsPerPage.ValueInt64Pointer()
pageNum := streamConnectionsConfig.PageNum.ValueInt64Pointer()

apiResp, _, err := connV2.StreamsApi.ListStreamConnectionsWithParams(ctx, &admin.ListStreamConnectionsApiParams{
GroupId: projectID,
TenantName: instanceName,
TenantName: workspaceOrInstanceName,
ItemsPerPage: conversion.Int64PtrToIntPtr(itemsPerPage),
PageNum: conversion.Int64PtrToIntPtr(pageNum),
}).Execute()
@@ -67,11 +104,12 @@ func (d *streamConnectionsDS) Read(ctx context.Context, req datasource.ReadReque
}

type TFStreamConnectionsDSModel struct {
ID types.String `tfsdk:"id"`
ProjectID types.String `tfsdk:"project_id"`
InstanceName types.String `tfsdk:"instance_name"`
Results []TFStreamConnectionModel `tfsdk:"results"`
PageNum types.Int64 `tfsdk:"page_num"`
ItemsPerPage types.Int64 `tfsdk:"items_per_page"`
TotalCount types.Int64 `tfsdk:"total_count"`
ID types.String `tfsdk:"id"`
ProjectID types.String `tfsdk:"project_id"`
InstanceName types.String `tfsdk:"instance_name"`
WorkspaceName types.String `tfsdk:"workspace_name"`
Results []TFStreamConnectionModel `tfsdk:"results"`
PageNum types.Int64 `tfsdk:"page_num"`
ItemsPerPage types.Int64 `tfsdk:"items_per_page"`
Review comment (Member): not probably for this PR, but we're preferring not to expose pagination parameter and instead just return the whole list of items

Review comment (Collaborator, author): that sgtm, I just took the existing pattern but will make a JIRA ticket for us to track cleaning this up

TotalCount types.Int64 `tfsdk:"total_count"`
}