diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 3b44e65..0ea40b4 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -53,7 +53,8 @@ jobs: name: Terraform Provider Acceptance Tests needs: build runs-on: ubuntu-latest - timeout-minutes: 15 + environment: Acceptance Testing + timeout-minutes: 30 strategy: fail-fast: false matrix: diff --git a/CHANGELOG.md b/CHANGELOG.md index 836dc1e..2305dbb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,7 @@ ## 1.0.0 (Initial Release) FEATURES: - * Ambar initial Terraform support. \ No newline at end of file + * Ambar initial Terraform support. + * Support for Ambar DataSource resources like the Postgres DataSourceType. + * Support for Ambar Filter resources, allowing you to define a record sequence filter to be applied to a DataSource. + * Support for Ambar DataDestination resources, allowing delivery of one or more filtered record sequences to one or more DataDestinations. \ No newline at end of file diff --git a/README.md b/README.md index b8e1ba3..7ac61a5 100644 --- a/README.md +++ b/README.md @@ -1,64 +1,17 @@ -# Terraform Provider Scaffolding (Terraform Plugin Framework) - -_This template repository is built on the [Terraform Plugin Framework](https://github.com/hashicorp/terraform-plugin-framework). The template repository built on the [Terraform Plugin SDK](https://github.com/hashicorp/terraform-plugin-sdk) can be found at [terraform-provider-scaffolding](https://github.com/hashicorp/terraform-provider-scaffolding). See [Which SDK Should I Use?](https://developer.hashicorp.com/terraform/plugin/framework-benefits) in the Terraform documentation for additional information._ - -This repository is a *template* for a [Terraform](https://www.terraform.io) provider. 
It is intended as a starting point for creating Terraform providers, containing: - -- A resource and a data source (`internal/provider/`), -- Examples (`examples/`) and generated documentation (`docs/`), -- Miscellaneous meta files. - -These files contain boilerplate code that you will need to edit to create your own Terraform provider. Tutorials for creating Terraform providers can be found on the [HashiCorp Developer](https://developer.hashicorp.com/terraform/tutorials/providers-plugin-framework) platform. _Terraform Plugin Framework specific guides are titled accordingly._ - -Please see the [GitHub template repository documentation](https://help.github.com/en/github/creating-cloning-and-archiving-repositories/creating-a-repository-from-a-template) for how to create a new repository from this template on GitHub. - -Once you've written your provider, you'll want to [publish it on the Terraform Registry](https://developer.hashicorp.com/terraform/registry/providers/publishing) so that others can use it. - -## Requirements - -- [Terraform](https://developer.hashicorp.com/terraform/downloads) >= 1.0 -- [Go](https://golang.org/doc/install) >= 1.20 - -## Building The Provider - -1. Clone the repository -1. Enter the repository directory -1. Build the provider using the Go `install` command: - -```shell -go install -``` - -## Adding Dependencies - -This provider uses [Go modules](https://github.com/golang/go/wiki/Modules). -Please see the Go documentation for the most up to date information about using Go modules. - -To add a new dependency `github.com/author/dependency` to your Terraform provider: - -```shell -go get github.com/author/dependency -go mod tidy -``` - -Then commit the changes to `go.mod` and `go.sum`. - -## Using the provider - -Fill this in for each provider - -## Developing the Provider - -If you wish to work on the provider, you'll first need [Go](http://www.golang.org) installed on your machine (see [Requirements](#requirements) above). 
- -To compile the provider, run `go install`. This will build the provider and put the provider binary in the `$GOPATH/bin` directory. - -To generate or update documentation, run `go generate`. - -In order to run the full suite of Acceptance tests, run `make testacc`. - -*Note:* Acceptance tests create real resources, and often cost money to run. - -```shell -make testacc -``` + + + + + + Terraform logo + + + +# Terraform Ambar Provider + +The [Ambar Provider](https://registry.terraform.io/providers/ambarltd/ambar/latest/docs) allows [Terraform](https://terraform.io) to manage [Ambar](https://ambar.cloud) resources. + +- [Contributing guide] *coming soon* +- [FAQ] *coming soon* +- [Tutorials and Examples] *coming soon* +- [Help and Support] *coming soon* \ No newline at end of file diff --git a/docs/index.md b/docs/index.md index eabde48..0cd2dca 100644 --- a/docs/index.md +++ b/docs/index.md @@ -16,7 +16,7 @@ Interact with your regional Ambar environment. terraform { required_providers { ambar = { - source = "ambar.cloud/terraform/ambar" + source = "ambarltd/ambar" } } } diff --git a/docs/resources/data_destination.md b/docs/resources/data_destination.md index 6577c7c..a2df416 100644 --- a/docs/resources/data_destination.md +++ b/docs/resources/data_destination.md @@ -51,5 +51,7 @@ Import is supported using the following syntax: ```shell # Ambar DataDestinations can be imported by specifying the resource identifier. +# Note: Some sensitive fields like usernames and passwords will not get imported into Terraform state +# from existing resources and may require further action to manage via Terraform templates. 
terraform import ambar_data_destination.example_data_destination AMBAR-1234567890 ``` diff --git a/docs/resources/data_source.md b/docs/resources/data_source.md index 912193a..304ca8f 100644 --- a/docs/resources/data_source.md +++ b/docs/resources/data_source.md @@ -20,13 +20,17 @@ resource "ambar_data_source" "example_data_source" { serial_column = "serial" username = "username" password = "password" + # data_source_config key-values depend on the type of DataSource being created. + # See Ambar docs for more details. data_source_config = { "hostname" : "host", "hostPort" : "5432", "databaseName" : "postgres", "tableName" : "events", "publicationName" : "example_pub", - "additionalColumns" : "some,other,column" + # columns should include all columns to be read from the database + # including the partition and serial columns + "columns" : "partition,serial,some,other,column" } } ``` @@ -58,5 +62,7 @@ Import is supported using the following syntax: ```shell # Ambar DataSources can be imported by specifying the resource identifier. +# Note: Some sensitive fields like usernames and passwords will not get imported into Terraform state +# from existing resources and may require further action to manage via Terraform templates. terraform import ambar_data_source.example_data_source AMBAR-1234567890 ``` diff --git a/examples/provider/provider.tf b/examples/provider/provider.tf index 688a981..d4a11d3 100644 --- a/examples/provider/provider.tf +++ b/examples/provider/provider.tf @@ -1,7 +1,7 @@ terraform { required_providers { ambar = { - source = "ambar.cloud/terraform/ambar" + source = "ambarltd/ambar" } } } diff --git a/examples/resources/ambar_data_destination/import.sh b/examples/resources/ambar_data_destination/import.sh index d81d762..c693b04 100644 --- a/examples/resources/ambar_data_destination/import.sh +++ b/examples/resources/ambar_data_destination/import.sh @@ -1,2 +1,4 @@ # Ambar DataDestinations can be imported by specifying the resource identifier. 
+# Note: Some sensitive fields like usernames and passwords will not get imported into Terraform state +# from existing resources and may require further action to manage via Terraform templates. terraform import ambar_data_destination.example_data_destination AMBAR-1234567890 \ No newline at end of file diff --git a/examples/resources/ambar_data_source/import.sh b/examples/resources/ambar_data_source/import.sh index b9c9878..0426aa8 100644 --- a/examples/resources/ambar_data_source/import.sh +++ b/examples/resources/ambar_data_source/import.sh @@ -1,2 +1,4 @@ # Ambar DataSources can be imported by specifying the resource identifier. +# Note: Some sensitive fields like usernames and passwords will not get imported into Terraform state +# from existing resources and may require further action to manage via Terraform templates. terraform import ambar_data_source.example_data_source AMBAR-1234567890 \ No newline at end of file diff --git a/examples/resources/ambar_data_source/resource.tf b/examples/resources/ambar_data_source/resource.tf index 4848f1d..88e2beb 100644 --- a/examples/resources/ambar_data_source/resource.tf +++ b/examples/resources/ambar_data_source/resource.tf @@ -5,12 +5,16 @@ resource "ambar_data_source" "example_data_source" { serial_column = "serial" username = "username" password = "password" + # data_source_config key-values depend on the type of DataSource being created. + # See Ambar docs for more details. 
data_source_config = { "hostname" : "host", "hostPort" : "5432", "databaseName" : "postgres", "tableName" : "events", "publicationName" : "example_pub", - "additionalColumns" : "some,other,column" + # columns should include all columns to be read from the database + # including the partition and serial columns + "columns" : "partition,serial,some,other,column" } } \ No newline at end of file diff --git a/internal/provider/data_destination_resource.go b/internal/provider/data_destination_resource.go index a78e610..4271bf3 100644 --- a/internal/provider/data_destination_resource.go +++ b/internal/provider/data_destination_resource.go @@ -168,6 +168,12 @@ func (r *DataDestinationResource) Create(ctx context.Context, req resource.Creat return } + plan.ResourceId = types.StringValue(createResourceResponse.ResourceId) + plan.State = types.StringValue(createResourceResponse.ResourceState) + + diags = resp.State.Set(ctx, plan) + resp.Diagnostics.Append(diags...) + var describeDataDestination Ambar.DescribeResourceRequest describeDataDestination.ResourceId = createResourceResponse.ResourceId @@ -193,9 +199,6 @@ func (r *DataDestinationResource) Create(ctx context.Context, req resource.Creat // Set state to fully populated data diags = resp.State.Set(ctx, plan) resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } } func (r *DataDestinationResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { @@ -257,6 +260,10 @@ func (r *DataDestinationResource) Update(ctx context.Context, req resource.Updat return } + // partial state save in case of interrupt + data.State = types.StringValue(updateResourceResponse.ResourceState) + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
+ // Wait for the update to complete var describeResourceResponse *Ambar.DataDestination var describeDataDestination Ambar.DescribeResourceRequest diff --git a/internal/provider/data_source_resource.go b/internal/provider/data_source_resource.go index d6d6965..2995afb 100644 --- a/internal/provider/data_source_resource.go +++ b/internal/provider/data_source_resource.go @@ -180,6 +180,14 @@ func (r *dataSourceResource) Create(ctx context.Context, req resource.CreateRequ return } + // Map response body to schema and populate Computed attribute values + plan.ResourceId = types.StringValue(createResourceResponse.ResourceId) + plan.State = types.StringValue(createResourceResponse.ResourceState) + + // Set state to fully populated data + diags = resp.State.Set(ctx, plan) + resp.Diagnostics.Append(diags...) + var describeDataSource Ambar.DescribeResourceRequest describeDataSource.ResourceId = createResourceResponse.ResourceId @@ -199,15 +207,11 @@ func (r *dataSourceResource) Create(ctx context.Context, req resource.CreateRequ } // Map response body to schema and populate Computed attribute values - plan.ResourceId = types.StringValue(createResourceResponse.ResourceId) plan.State = types.StringValue(describeResourceResponse.State) // Set state to fully populated data diags = resp.State.Set(ctx, plan) resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } } func (r *dataSourceResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { @@ -274,6 +278,10 @@ func (r *dataSourceResource) Update(ctx context.Context, req resource.UpdateRequ return } + // partial state save in case of interrupt + data.State = types.StringValue(updateResourceResponse.ResourceState) + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
+ // Wait for the update to complete var describeDataSource Ambar.DescribeResourceRequest describeDataSource.ResourceId = data.ResourceId.ValueString() diff --git a/internal/provider/data_source_resource_test.go b/internal/provider/data_source_resource_test.go index 46a5282..8ce0030 100644 --- a/internal/provider/data_source_resource_test.go +++ b/internal/provider/data_source_resource_test.go @@ -9,18 +9,18 @@ const ( exampleDataSourceConfig = ` resource "ambar_data_source" "test_data_source" { data_source_type = "postgres" - description = "My Terraform DataSource" - partitioning_column = "partition" - serial_column = "serial" - username = "postgres" - password = "password" + description = "My Terraform Acceptance Test DataSource" + # partitioning_column = "partition" + # serial_column = "serial" + # username = "postgres" + # password = "password" data_source_config = { - "hostname": "host", + # "hostname": "host", "hostPort": "5432", "databaseName": "postgres", "tableName": "events", - "publicationName": "example_pub", - "additionalColumns": "seqid,seqnum,value" + "publicationName": "acceptance_test_pub", + # "additionalColumns": "seqid,seqnum,value" } }` ) diff --git a/internal/provider/provider_test.go b/internal/provider/provider_test.go index 697e068..46582fd 100644 --- a/internal/provider/provider_test.go +++ b/internal/provider/provider_test.go @@ -4,21 +4,19 @@ package provider import ( - "testing" - "github.com/hashicorp/terraform-plugin-framework/providerserver" "github.com/hashicorp/terraform-plugin-go/tfprotov6" ) const ( // providerConfig is a shared configuration to combine with the actual - // test configuration so the HashiCups client is properly configured. - // It is also possible to use the HASHICUPS_ environment variables instead, + // test configuration so the Ambar client is properly configured. + // It is also possible to use the AMBAR_ environment variables instead, // such as updating the Makefile and running the testing through that tool. 
providerConfig = ` provider "ambar" { - endpoint = "region.api.ambar.cloud" - api_key = "your-key" + # endpoint = "region.api.ambar.cloud" + # api_key = "your-key" } ` ) @@ -30,9 +28,3 @@ const ( var testAccProtoV6ProviderFactories = map[string]func() (tfprotov6.ProviderServer, error){ "ambar": providerserver.NewProtocol6WithError(New("test")()), } - -func testAccPreCheck(t *testing.T) { - // You can add code here to run prior to any test case execution, for example assertions - // about the appropriate environment variables being set are common to see in a pre-check - // function. -} diff --git a/main.go b/main.go index 7104e10..03808ea 100644 --- a/main.go +++ b/main.go @@ -8,7 +8,7 @@ import ( "flag" "log" - // Provider framework backend server, allows us to connect to hasicorp and do local testing + // Provider framework backend server, allows us to connect to hashicorp and do local testing. "github.com/hashicorp/terraform-plugin-framework/providerserver" "terraform-provider-ambar/internal/provider" @@ -41,8 +41,7 @@ func main() { opts := providerserver.ServeOpts{ Address: "registry.terraform.io/hashicorp/ambar", - // Stealing this value from the tutorial for a moment. Todo: replace this with the above. - Debug: debug, + Debug: debug, } err := providerserver.Serve(context.Background(), provider.New(version), opts)