Merge branch 'main' into dependabot/github_actions/github-actions-120bda3975
This commit is contained in:
commit
dd5077c245
|
|
@ -0,0 +1,31 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "file_snapshot Data Source - file"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
File Snapshot data source.
|
||||
This data source retrieves the contents of a file from the output of a file_snapshot datasource.Warning! Using this resource places the plain text contents of the snapshot in your state file.
|
||||
---
|
||||
|
||||
# file_snapshot (Data Source)
|
||||
|
||||
File Snapshot data source.
|
||||
This data source retrieves the contents of a file from the output of a file_snapshot datasource.Warning! Using this resource places the plain text contents of the snapshot in your state file.
|
||||
|
||||
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `contents` (String) The contents of the snapshot to retrieve. This could be any gzip compressed base64 encoded data. If the data isn't compressed, set the decompress argument to false, or leave it blank. If the decompress argument is false, the data will be the base64 decoded contents.
|
||||
|
||||
### Optional
|
||||
|
||||
- `decompress` (Boolean) Whether or not to decompress the contents. If left empty, this will default to false.
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `data` (String) The resulting data output. This is the plain text representation of the contents attribute. This is computed by first decoding the data from base64, then decompressing the resulting gzip. If decompress is false, then this will be the base64 decoded version of the contents.
|
||||
- `id` (String) Unique identifier for the datasource. The SHA256 hash of the contents.
|
||||
|
|
@ -4,29 +4,40 @@ page_title: "file_snapshot Resource - file"
|
|||
subcategory: ""
|
||||
description: |-
|
||||
File Snapshot resource.
|
||||
This resource saves some content in state and doesn't update it until the trigger argument changes. Importantly, this resource ignores changes in the configuration for the contents argument.The refresh phase doesn't update state, instead the state can only change on create or update and only when the update_trigger argument changes.
|
||||
This resource saves some content in state and doesn't update it until the trigger argument changes. The refresh phase doesn't update state, instead the state can only change on create or update and only when the update_trigger argument changes.
|
||||
---
|
||||
|
||||
# file_snapshot (Resource)
|
||||
|
||||
File Snapshot resource.
|
||||
This resource saves some content in state and doesn't update it until the trigger argument changes. Importantly, this resource ignores changes in the configuration for the contents argument.The refresh phase doesn't update state, instead the state can only change on create or update and only when the update_trigger argument changes.
|
||||
This resource saves some content in state and doesn't update it until the trigger argument changes. The refresh phase doesn't update state, instead the state can only change on create or update and only when the update_trigger argument changes.
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
# basic use case
|
||||
resource "file_local" "snapshot_file_basic_example" {
|
||||
name = "snapshot_resource_basic_example.txt"
|
||||
contents = "this is an example file that is used to show how snapshots work"
|
||||
}
|
||||
resource "file_snapshot" "basic_example" {
|
||||
contents = "An example implementation, saving contents to state."
|
||||
depends_on = [
|
||||
file_local.snapshot_file_basic_example,
|
||||
]
|
||||
name = "snapshot_resource_basic_example.txt"
|
||||
update_trigger = "an arbitrary string"
|
||||
}
|
||||
|
||||
output "snapshot_basic" {
|
||||
value = file_snapshot.basic_example.snapshot
|
||||
sensitive = true
|
||||
}
|
||||
|
||||
# A more advanced use case:
|
||||
# We use a terraform_data resource to write a file
|
||||
# We use a file_local resource to write a local file in the current directory
|
||||
# then we create a snapshot of the file using file_snapshot
|
||||
# then we update the file using a terraform_data resource
|
||||
# then we get the contents of the file using a file_local resource
|
||||
# then we output both the file_local and file_snapshot, observing that they are different
|
||||
# then we get the contents of the file using a file_local datasource
|
||||
# then we output both the file_local datasource and file_snapshot resource, observing that they are different
|
||||
resource "file_local" "snapshot_file_example" {
|
||||
name = "snapshot_resource_test.txt"
|
||||
contents = "this is an example file that is used to show how snapshots work"
|
||||
|
|
@ -35,7 +46,7 @@ resource "file_snapshot" "file_example" {
|
|||
depends_on = [
|
||||
file_local.snapshot_file_example,
|
||||
]
|
||||
contents = file_local.snapshot_file_example.contents
|
||||
name = "snapshot_resource_test.txt"
|
||||
update_trigger = "code-change-necessary"
|
||||
}
|
||||
resource "terraform_data" "update_file" {
|
||||
|
|
@ -56,7 +67,6 @@ data "file_local" "snapshot_file_example_after_update" {
|
|||
terraform_data.update_file,
|
||||
]
|
||||
name = "snapshot_resource_test.txt"
|
||||
directory = "."
|
||||
}
|
||||
|
||||
output "file" {
|
||||
|
|
@ -64,7 +74,8 @@ output "file" {
|
|||
# this updates a file that is used to show how snapshots work
|
||||
}
|
||||
output "snapshot" {
|
||||
value = file_snapshot.file_example.contents
|
||||
value = base64decode(file_snapshot.file_example.snapshot)
|
||||
sensitive = true
|
||||
# this is an example file that is used to show how snapshots work
|
||||
}
|
||||
```
|
||||
|
|
@ -74,13 +85,18 @@ output "snapshot" {
|
|||
|
||||
### Required
|
||||
|
||||
- `contents` (String) Contents to save. While this argument is exposed, you shouldn't use its output. Instead use the snapshot attribute to get the data saved in the snapshot.
|
||||
- `update_trigger` (String) When this argument changes the snapshot will be updated to whatever is in the contents.
|
||||
- `name` (String) Name of the file to save. Changing this forces recreate, moving the file isn't supported.
|
||||
- `update_trigger` (String) When this argument changes the snapshot will be updated.
|
||||
|
||||
### Optional
|
||||
|
||||
- `compress` (Boolean) Whether the provider should compress the contents and snapshot or not. Defaults to 'false'. When set to 'true' the provider will compress the contents and snapshot attributes using the gzip compression algorithm. Changing this attribute forces recreate, compressing snapshots which are already saved in state isn't supported. Warning! To prevent memory errors the provider generates temporary files to facilitate encoding and compression.
|
||||
- `directory` (String) Path of the file to save. Changing this forces recreate, moving the file isn't supported.
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `id` (String) Unique identifier for the resource. The SHA256 hash of the base64 encoded contents.
|
||||
- `snapshot` (String) Saved contents. This will match the contents during create and whenever the update_trigger changes.
|
||||
- `snapshot` (String, Sensitive) Base64 encoded contents of the file specified in the name and directory fields. This data will be added on create and only updated when the update_trigger field changes. Warning! To prevent memory errors the provider generates temporary files to facilitate encoding and compression.
|
||||
|
||||
## Import
|
||||
|
||||
|
|
@ -90,7 +106,6 @@ The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/c
|
|||
|
||||
```shell
|
||||
# IDENTIFIER="$(echo -n "these contents are the default for testing" | base64 -w 0 | sha256sum | awk '{print $1}')"
|
||||
|
||||
terraform import file_snapshot.example "IDENTIFIER"
|
||||
|
||||
# after this is run you will need to refine the resource more by defining the contents and update_trigger
|
||||
|
|
|
|||
|
|
@ -1,6 +1,4 @@
|
|||
|
||||
|
||||
|
||||
terraform {
|
||||
backend "local" {}
|
||||
}
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
|
||||
terraform {
|
||||
backend "local" {}
|
||||
}
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
# IDENTIFIER="$(echo -n "these contents are the default for testing" | base64 -w 0 | sha256sum | awk '{print $1}')"
|
||||
|
||||
# IDENTIFIER="$(echo -n "these contents are the default for testing" | base64 -w 0 | sha256sum | awk '{print $1}')"
|
||||
terraform import file_snapshot.example "IDENTIFIER"
|
||||
|
||||
# after this is run you will need to refine the resource more by defining the contents and update_trigger
|
||||
|
|
|
|||
|
|
@ -1,17 +1,27 @@
|
|||
|
||||
|
||||
# basic use case
|
||||
resource "file_local" "snapshot_file_basic_example" {
|
||||
name = "snapshot_resource_basic_example.txt"
|
||||
contents = "this is an example file that is used to show how snapshots work"
|
||||
}
|
||||
resource "file_snapshot" "basic_example" {
|
||||
contents = "An example implementation, saving contents to state."
|
||||
depends_on = [
|
||||
file_local.snapshot_file_basic_example,
|
||||
]
|
||||
name = "snapshot_resource_basic_example.txt"
|
||||
update_trigger = "an arbitrary string"
|
||||
}
|
||||
|
||||
output "snapshot_basic" {
|
||||
value = file_snapshot.basic_example.snapshot
|
||||
sensitive = true
|
||||
}
|
||||
|
||||
# A more advanced use case:
|
||||
# We use a terraform_data resource to write a file
|
||||
# We use a file_local resource to write a local file in the current directory
|
||||
# then we create a snapshot of the file using file_snapshot
|
||||
# then we update the file using a terraform_data resource
|
||||
# then we get the contents of the file using a file_local resource
|
||||
# then we output both the file_local and file_snapshot, observing that they are different
|
||||
# then we get the contents of the file using a file_local datasource
|
||||
# then we output both the file_local datasource and file_snapshot resource, observing that they are different
|
||||
resource "file_local" "snapshot_file_example" {
|
||||
name = "snapshot_resource_test.txt"
|
||||
contents = "this is an example file that is used to show how snapshots work"
|
||||
|
|
@ -20,7 +30,7 @@ resource "file_snapshot" "file_example" {
|
|||
depends_on = [
|
||||
file_local.snapshot_file_example,
|
||||
]
|
||||
contents = file_local.snapshot_file_example.contents
|
||||
name = "snapshot_resource_test.txt"
|
||||
update_trigger = "code-change-necessary"
|
||||
}
|
||||
resource "terraform_data" "update_file" {
|
||||
|
|
@ -41,7 +51,6 @@ data "file_local" "snapshot_file_example_after_update" {
|
|||
terraform_data.update_file,
|
||||
]
|
||||
name = "snapshot_resource_test.txt"
|
||||
directory = "."
|
||||
}
|
||||
|
||||
output "file" {
|
||||
|
|
@ -49,6 +58,7 @@ output "file" {
|
|||
# this updates a file that is used to show how snapshots work
|
||||
}
|
||||
output "snapshot" {
|
||||
value = file_snapshot.file_example.contents
|
||||
value = base64decode(file_snapshot.file_example.snapshot)
|
||||
sensitive = true
|
||||
# this is an example file that is used to show how snapshots work
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,12 +1,23 @@
|
|||
# Basic Snapshot Use Case
|
||||
|
||||
This is a basic example of how you could use the file_snapshot resource.
|
||||
This config creates a file, takes a snapshot of the file, updates the file,
|
||||
retrieves in the updated contents, then outputs the file's contents and the snapshot contents.
|
||||
This is an example of how you could use the file_snapshot resource.
|
||||
WARNING! Please remember that Terraform must load the entire state into memory,
|
||||
ensure you have enough memory on the server running Terraform to store or retrieve the data you are storing.
|
||||
For larger files, please see the snapshot_compressed use-case for more details.
|
||||
|
||||
We use the uuid() function for testing purposes, every update the file will be changed and the snapshot will remain the same.
|
||||
We use the uuid() function for testing purposes.
|
||||
Every update, the file will be changed and the snapshot will remain the same.
|
||||
|
||||
# Updating the snapshot
|
||||
|
||||
To get the snapshot to update you can send in the "update" argument and change it.
|
||||
The snapshot will update on that apply and remain static until the update argument is changed again.
|
||||
|
||||
# Base 64 Decode
|
||||
|
||||
Notice that the snapshot outputs use base64decode to return the actual file's value.
|
||||
|
||||
# Snapshots are Sensitive
|
||||
|
||||
You could achieve the goals of this resource using a terraform_data with some life-cycle options, except for this part.
|
||||
The Snapshot resource's "snapshot" attribute is sensitive, this keeps sensitive or long files from being spewed into the logs.
|
||||
|
|
|
|||
|
|
@ -1,6 +1,4 @@
|
|||
|
||||
|
||||
|
||||
terraform {
|
||||
backend "local" {}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,11 +4,23 @@ provider "file" {}
|
|||
locals {
|
||||
update = (var.update == "" ? "code-change-necessary" : var.update)
|
||||
pesky_id = uuid()
|
||||
name = var.name
|
||||
directory = var.directory
|
||||
}
|
||||
# on first update the pesky_id and the snapshot will match
|
||||
# on subsequent updates the snapshot will remain as the first id and the pesky_id will change
|
||||
# when the update input is changed, then the snapshot will match again
|
||||
resource "file_snapshot" "example" {
|
||||
|
||||
resource "file_local" "snapshot_use_case_basic" {
|
||||
name = local.name
|
||||
directory = local.directory
|
||||
contents = local.pesky_id
|
||||
}
|
||||
resource "file_snapshot" "use_case_basic" {
|
||||
depends_on = [
|
||||
file_local.snapshot_use_case_basic,
|
||||
]
|
||||
name = local.name
|
||||
directory = local.directory
|
||||
update_trigger = local.update
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,5 +4,6 @@ output "pesky_id" {
|
|||
}
|
||||
|
||||
output "snapshot" {
|
||||
value = file_snapshot.example.snapshot
|
||||
value = base64decode(file_snapshot.use_case_basic.snapshot)
|
||||
sensitive = true
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,3 +3,13 @@ variable "update" {
|
|||
type = string
|
||||
default = "code-change-necessary"
|
||||
}
|
||||
|
||||
variable "directory" {
|
||||
type = string
|
||||
default = "."
|
||||
}
|
||||
|
||||
variable "name" {
|
||||
type = string
|
||||
default = "snapshot_resource_basic_use_case.txt"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,22 @@
|
|||
# Compressed Snapshot Use Case
|
||||
|
||||
This is an example of how you could use the file_snapshot resource.
|
||||
WARNING! Please remember that Terraform must load the entire state into memory,
|
||||
make sure you have the resources available on the machine running Terraform to handle any file you save like this.
|
||||
|
||||
This shows how to use the compress argument.
|
||||
We wanted a way to compress the data that we are saving into the state so that we can store larger files
|
||||
without running the machine running Terraform out of memory.
|
||||
|
||||
We use the uuid() function for testing purposes, every update the file will be changed and the snapshot will remain the same.
|
||||
|
||||
# Updating the snapshot
|
||||
|
||||
To get the snapshot to update you can send in the "update" argument and change it.
|
||||
The snapshot will update on that apply and remain static until the update argument is changed again.
|
||||
|
||||
# Getting the data back out of the file
|
||||
|
||||
The snapshot data will be compressed and base64 encoded, so retrieving the actual contents is a little bit harder.
|
||||
|
||||
This is why we made the Snapshot datasource, given a snapshot output it can decode the contents into usable text?
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
|
||||
terraform {
|
||||
backend "local" {}
|
||||
}
|
||||
|
|
@ -0,0 +1,35 @@
|
|||
|
||||
provider "file" {}
|
||||
|
||||
locals {
|
||||
pesky_id = uuid() // this is for testing purposes only, any file which changes outside of Terraform could be used
|
||||
update = var.update
|
||||
name = var.name
|
||||
directory = var.directory
|
||||
}
|
||||
# on first update the pesky_id and the snapshot output will match
|
||||
# on subsequent updates the snapshot output will stay the same and the pesky_id will change
|
||||
# when the update input is changed, then the snapshot output will match the pesky_id again
|
||||
|
||||
resource "file_local" "snapshot_use_case_compressed" {
|
||||
name = local.name
|
||||
directory = local.directory
|
||||
contents = local.pesky_id
|
||||
}
|
||||
resource "file_snapshot" "use_case_compressed" {
|
||||
depends_on = [
|
||||
file_local.snapshot_use_case_compressed,
|
||||
]
|
||||
name = local.name
|
||||
directory = local.directory
|
||||
update_trigger = local.update
|
||||
compress = true
|
||||
}
|
||||
data "file_snapshot" "use_case_compressed" {
|
||||
depends_on = [
|
||||
file_local.snapshot_use_case_compressed,
|
||||
file_snapshot.use_case_compressed,
|
||||
]
|
||||
contents = file_snapshot.use_case_compressed.snapshot
|
||||
decompress = true
|
||||
}
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
|
||||
output "pesky_id" {
|
||||
value = local.pesky_id
|
||||
}
|
||||
|
||||
output "snapshot" {
|
||||
value = data.file_snapshot.use_case_compressed.data
|
||||
sensitive = true
|
||||
}
|
||||
|
|
@ -1,4 +1,8 @@
|
|||
|
||||
variable "update" {
|
||||
type = string
|
||||
default = "code-change-necessary"
|
||||
}
|
||||
|
||||
variable "directory" {
|
||||
type = string
|
||||
|
|
@ -7,10 +11,5 @@ variable "directory" {
|
|||
|
||||
variable "name" {
|
||||
type = string
|
||||
default = "snapshot_use_case_test.txt"
|
||||
}
|
||||
|
||||
variable "update" {
|
||||
type = string
|
||||
default = "code-change-necessary"
|
||||
default = "snapshot_resource_basic_use_case.txt"
|
||||
}
|
||||
|
|
@ -1,12 +0,0 @@
|
|||
# Basic Snapshot Use Case
|
||||
|
||||
This is a basic example of how you could use the file_snapshot resource.
|
||||
This config creates a file, takes a snapshot of the file, updates the file,
|
||||
retrieves in the updated contents, then outputs the file's contents and the snapshot contents.
|
||||
|
||||
We use the uuid() function for testing purposes, every update the file will be changed and the snapshot will remain the same.
|
||||
|
||||
# Updating the snapshot
|
||||
|
||||
To get the snapshot to update you can send in the "update" argument and change it.
|
||||
The snapshot will update on that apply and remain static until the update argument is changed again.
|
||||
|
|
@ -1,59 +0,0 @@
|
|||
|
||||
|
||||
provider "file" {}
|
||||
|
||||
locals {
|
||||
directory = (var.directory == "" ? "." : var.directory)
|
||||
name = (var.name == "" ? "snapshot_use_case_test.txt" : var.name)
|
||||
update = (var.update == "" ? "code-change-necessary" : var.update)
|
||||
pesky_id = uuid()
|
||||
}
|
||||
|
||||
# We use a terraform_data resource to write a file
|
||||
# then we update the file using a terraform_data resource
|
||||
# then we get the contents of the file using a file_local resource
|
||||
# then we snapshot the contents using a file_snapshot resource
|
||||
# then we output both the file_local and file_snapshot
|
||||
# On first run the outputs will match, on subsequent runs the outputs won't match.
|
||||
# the output for the file will always be a new uuid, while the snapshot will be the first uuid
|
||||
# When the update argument is changed, then the snapshot will match the file_local again
|
||||
# and again on subsequent runs the snapshot will remain the same while the file always changes
|
||||
resource "file_local" "example" {
|
||||
name = local.name
|
||||
contents = "this is an example file that is used to show how snapshots work"
|
||||
}
|
||||
# this always updates the file
|
||||
resource "terraform_data" "update_file" {
|
||||
depends_on = [
|
||||
file_local.example,
|
||||
]
|
||||
triggers_replace = [
|
||||
local.pesky_id
|
||||
]
|
||||
provisioner "local-exec" {
|
||||
command = <<-EOT
|
||||
printf '${local.pesky_id}' > ${local.name}
|
||||
EOT
|
||||
}
|
||||
}
|
||||
# since the update will always run, make sure to always get the data
|
||||
data "file_local" "example_after_update" {
|
||||
depends_on = [
|
||||
file_local.example,
|
||||
terraform_data.update_file,
|
||||
]
|
||||
name = local.name
|
||||
directory = local.directory
|
||||
}
|
||||
|
||||
# the snapshot will always be a uuid because the pesky update_file runs before it in the chain
|
||||
# however, it will always be the same uuid until the update argument is changed
|
||||
resource "file_snapshot" "example" {
|
||||
depends_on = [
|
||||
file_local.example,
|
||||
terraform_data.update_file,
|
||||
data.file_local.example_after_update,
|
||||
]
|
||||
contents = data.file_local.example_after_update.contents
|
||||
update_trigger = local.update
|
||||
}
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
|
||||
output "file" {
|
||||
value = data.file_local.example_after_update.contents
|
||||
}
|
||||
|
||||
output "snapshot" {
|
||||
value = file_snapshot.example.contents
|
||||
}
|
||||
|
|
@ -20,11 +20,11 @@
|
|||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1756819007,
|
||||
"narHash": "sha256-12V64nKG/O/guxSYnr5/nq1EfqwJCdD2+cIGmhz3nrE=",
|
||||
"lastModified": 1757967192,
|
||||
"narHash": "sha256-/aA9A/OBmnuOMgwfzdsXRusqzUpd8rQnQY8jtrHK+To=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "aaff8c16d7fc04991cac6245bee1baa31f72b1e1",
|
||||
"rev": "0d7c15863b251a7a50265e57c1dca1a7add2e291",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
|
|
|||
40
go.mod
40
go.mod
|
|
@ -11,65 +11,25 @@ require (
|
|||
)
|
||||
|
||||
require (
|
||||
github.com/BurntSushi/toml v1.2.1 // indirect
|
||||
github.com/Kunde21/markdownfmt/v3 v3.1.0 // indirect
|
||||
github.com/Masterminds/goutils v1.1.1 // indirect
|
||||
github.com/Masterminds/semver/v3 v3.2.0 // indirect
|
||||
github.com/Masterminds/sprig/v3 v3.2.3 // indirect
|
||||
github.com/ProtonMail/go-crypto v1.1.6 // indirect
|
||||
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
|
||||
github.com/armon/go-radix v1.0.0 // indirect
|
||||
github.com/bgentry/speakeasy v0.1.0 // indirect
|
||||
github.com/bmatcuk/doublestar/v4 v4.8.1 // indirect
|
||||
github.com/cloudflare/circl v1.6.1 // indirect
|
||||
github.com/fatih/color v1.16.0 // indirect
|
||||
github.com/golang/protobuf v1.5.4 // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/hashicorp/cli v1.1.7 // indirect
|
||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||
github.com/hashicorp/go-checkpoint v0.5.0 // indirect
|
||||
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
|
||||
github.com/hashicorp/go-hclog v1.6.3 // indirect
|
||||
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
||||
github.com/hashicorp/go-plugin v1.6.3 // indirect
|
||||
github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
|
||||
github.com/hashicorp/go-uuid v1.0.3 // indirect
|
||||
github.com/hashicorp/go-version v1.7.0 // indirect
|
||||
github.com/hashicorp/hc-install v0.9.2 // indirect
|
||||
github.com/hashicorp/terraform-exec v0.23.0 // indirect
|
||||
github.com/hashicorp/terraform-json v0.25.0 // indirect
|
||||
github.com/hashicorp/terraform-plugin-docs v0.22.0 // indirect
|
||||
github.com/hashicorp/terraform-registry-address v0.2.5 // indirect
|
||||
github.com/hashicorp/terraform-svchost v0.1.1 // indirect
|
||||
github.com/hashicorp/yamux v0.1.1 // indirect
|
||||
github.com/huandu/xstrings v1.3.3 // indirect
|
||||
github.com/imdario/mergo v0.3.15 // indirect
|
||||
github.com/mattn/go-colorable v0.1.14 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.9 // indirect
|
||||
github.com/mitchellh/copystructure v1.2.0 // indirect
|
||||
github.com/mitchellh/go-testing-interface v1.14.1 // indirect
|
||||
github.com/mitchellh/reflectwalk v1.0.2 // indirect
|
||||
github.com/oklog/run v1.0.0 // indirect
|
||||
github.com/posener/complete v1.2.3 // indirect
|
||||
github.com/shopspring/decimal v1.3.1 // indirect
|
||||
github.com/spf13/cast v1.5.0 // indirect
|
||||
github.com/stretchr/testify v1.10.0 // indirect
|
||||
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
|
||||
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
||||
github.com/yuin/goldmark v1.7.7 // indirect
|
||||
github.com/yuin/goldmark-meta v1.1.0 // indirect
|
||||
github.com/zclconf/go-cty v1.16.3 // indirect
|
||||
go.abhg.dev/goldmark/frontmatter v0.2.0 // indirect
|
||||
golang.org/x/crypto v0.38.0 // indirect
|
||||
golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df // indirect
|
||||
golang.org/x/mod v0.25.0 // indirect
|
||||
golang.org/x/net v0.40.0 // indirect
|
||||
golang.org/x/sys v0.33.0 // indirect
|
||||
golang.org/x/text v0.26.0 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250218202821-56aae31c358a // indirect
|
||||
google.golang.org/grpc v1.72.1 // indirect
|
||||
google.golang.org/protobuf v1.36.6 // indirect
|
||||
gopkg.in/yaml.v2 v2.3.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
)
|
||||
|
|
|
|||
122
go.sum
122
go.sum
|
|
@ -1,27 +1,5 @@
|
|||
github.com/BurntSushi/toml v1.2.1 h1:9F2/+DoOYIOksmaJFPw1tGFy1eDnIJXg+UHjuD8lTak=
|
||||
github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
|
||||
github.com/Kunde21/markdownfmt/v3 v3.1.0 h1:KiZu9LKs+wFFBQKhrZJrFZwtLnCCWJahL+S+E/3VnM0=
|
||||
github.com/Kunde21/markdownfmt/v3 v3.1.0/go.mod h1:tPXN1RTyOzJwhfHoon9wUr4HGYmWgVxSQN6VBJDkrVc=
|
||||
github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI=
|
||||
github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
|
||||
github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g=
|
||||
github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
|
||||
github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA=
|
||||
github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM=
|
||||
github.com/ProtonMail/go-crypto v1.1.6 h1:ZcV+Ropw6Qn0AX9brlQLAUXfqLBc7Bl+f/DmNxpLfdw=
|
||||
github.com/ProtonMail/go-crypto v1.1.6/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE=
|
||||
github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY=
|
||||
github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4=
|
||||
github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI=
|
||||
github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
|
||||
github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY=
|
||||
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
|
||||
github.com/bmatcuk/doublestar/v4 v4.8.1 h1:54Bopc5c2cAvhLRAzqOGCYHYyhcDHsFF4wWIR5wKP38=
|
||||
github.com/bmatcuk/doublestar/v4 v4.8.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
|
||||
github.com/bufbuild/protocompile v0.4.0 h1:LbFKd2XowZvQ/kajzguUp2DC9UEIQhIq77fZZlaQsNA=
|
||||
github.com/bufbuild/protocompile v0.4.0/go.mod h1:3v93+mbWn/v3xzN+31nwkJfrEpAUwp+BagBSZWx+TP8=
|
||||
github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0=
|
||||
github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
|
|
@ -36,41 +14,14 @@ github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek
|
|||
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/hashicorp/cli v1.1.7 h1:/fZJ+hNdwfTSfsxMBa9WWMlfjUZbX8/LnUxgAd7lCVU=
|
||||
github.com/hashicorp/cli v1.1.7/go.mod h1:e6Mfpga9OCT1vqzFuoGZiiF/KaG9CbUfO5s3ghU3YgU=
|
||||
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
||||
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
|
||||
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
||||
github.com/hashicorp/go-checkpoint v0.5.0 h1:MFYpPZCnQqQTE18jFwSII6eUQrD/oxMFp3mlgcqk5mU=
|
||||
github.com/hashicorp/go-checkpoint v0.5.0/go.mod h1:7nfLNL10NsxqO4iWuW6tWW0HjZuDrwkBuEQsVcpCOgg=
|
||||
github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
|
||||
github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
|
||||
github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
|
||||
github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k=
|
||||
github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M=
|
||||
github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
|
||||
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
|
||||
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
|
||||
github.com/hashicorp/go-plugin v1.6.3 h1:xgHB+ZUSYeuJi96WtxEjzi23uh7YQpznjGh0U0UUrwg=
|
||||
github.com/hashicorp/go-plugin v1.6.3/go.mod h1:MRobyh+Wc/nYy1V4KAXUiYfzxoYhs7V1mlH1Z7iY2h0=
|
||||
github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU=
|
||||
github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk=
|
||||
github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
|
||||
github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8=
|
||||
github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
|
||||
github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY=
|
||||
github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
|
||||
github.com/hashicorp/hc-install v0.9.2 h1:v80EtNX4fCVHqzL9Lg/2xkp62bbvQMnvPQ0G+OmtO24=
|
||||
github.com/hashicorp/hc-install v0.9.2/go.mod h1:XUqBQNnuT4RsxoxiM9ZaUk0NX8hi2h+Lb6/c0OZnC/I=
|
||||
github.com/hashicorp/terraform-exec v0.23.0 h1:MUiBM1s0CNlRFsCLJuM5wXZrzA3MnPYEsiXmzATMW/I=
|
||||
github.com/hashicorp/terraform-exec v0.23.0/go.mod h1:mA+qnx1R8eePycfwKkCRk3Wy65mwInvlpAeOwmA7vlY=
|
||||
github.com/hashicorp/terraform-json v0.25.0 h1:rmNqc/CIfcWawGiwXmRuiXJKEiJu1ntGoxseG1hLhoQ=
|
||||
github.com/hashicorp/terraform-json v0.25.0/go.mod h1:sMKS8fiRDX4rVlR6EJUMudg1WcanxCMoWwTLkgZP/vc=
|
||||
github.com/hashicorp/terraform-plugin-docs v0.22.0 h1:fwIDStbFel1PPNkM+mDPnpB4efHZBdGoMz/zt5FbTDw=
|
||||
github.com/hashicorp/terraform-plugin-docs v0.22.0/go.mod h1:55DJVyZ7BNK4t/lANcQ1YpemRuS6KsvIO1BbGA+xzGE=
|
||||
github.com/hashicorp/terraform-plugin-framework v1.15.1 h1:2mKDkwb8rlx/tvJTlIcpw0ykcmvdWv+4gY3SIgk8Pq8=
|
||||
github.com/hashicorp/terraform-plugin-framework v1.15.1/go.mod h1:hxrNI/GY32KPISpWqlCoTLM9JZsGH3CyYlir09bD/fI=
|
||||
github.com/hashicorp/terraform-plugin-framework-validators v0.18.0 h1:OQnlOt98ua//rCw+QhBbSqfW3QbwtVrcdWeQN5gI3Hw=
|
||||
|
|
@ -85,50 +36,23 @@ github.com/hashicorp/terraform-svchost v0.1.1 h1:EZZimZ1GxdqFRinZ1tpJwVxxt49xc/S
|
|||
github.com/hashicorp/terraform-svchost v0.1.1/go.mod h1:mNsjQfZyf/Jhz35v6/0LWcv26+X7JPS+buii2c9/ctc=
|
||||
github.com/hashicorp/yamux v0.1.1 h1:yrQxtgseBDrq9Y652vSRDvsKCJKOUD+GzTS4Y0Y8pvE=
|
||||
github.com/hashicorp/yamux v0.1.1/go.mod h1:CtWFDAQgb7dxtzFs4tWbplKIe2jSi3+5vKbgIO0SLnQ=
|
||||
github.com/huandu/xstrings v1.3.3 h1:/Gcsuc1x8JVbJ9/rlye4xZnVAbEkGauT8lbebqcQws4=
|
||||
github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
|
||||
github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
|
||||
github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM=
|
||||
github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
|
||||
github.com/jhump/protoreflect v1.15.1 h1:HUMERORf3I3ZdX05WaQ6MIpd/NJ434hTp5YiKgfCL6c=
|
||||
github.com/jhump/protoreflect v1.15.1/go.mod h1:jD/2GMKKE6OqX8qTjhADU1e6DShO+gavG9e0Q693nKo=
|
||||
github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
|
||||
github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
|
||||
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
|
||||
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
|
||||
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
|
||||
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
|
||||
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
|
||||
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
|
||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
|
||||
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
|
||||
github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw=
|
||||
github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
|
||||
github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
|
||||
github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJJ2JqpQmpLJOu07cU=
|
||||
github.com/mitchellh/go-testing-interface v1.14.1/go.mod h1:gfgS7OtZj6MA4U1UrDRp04twqAjfvlZyCfX3sDjEym8=
|
||||
github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
|
||||
github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
|
||||
github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
|
||||
github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw=
|
||||
github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/posener/complete v1.2.3 h1:NP0eAhjcjImqslEwo/1hq7gpajME0fTLTezBKDqfXqo=
|
||||
github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s=
|
||||
github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
|
||||
github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8=
|
||||
github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
|
||||
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
|
||||
github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w=
|
||||
github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
|
||||
github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals=
|
||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
|
|
@ -136,15 +60,6 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU
|
|||
github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok=
|
||||
github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
|
||||
github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
github.com/yuin/goldmark v1.7.7 h1:5m9rrB1sW3JUMToKFQfb+FGt1U7r57IHu5GrYrG2nqU=
|
||||
github.com/yuin/goldmark v1.7.7/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E=
|
||||
github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc=
|
||||
github.com/yuin/goldmark-meta v1.1.0/go.mod h1:U4spWENafuA7Zyg+Lj5RqK/MF+ovMYtBvXi1lBb2VP0=
|
||||
github.com/zclconf/go-cty v1.16.3 h1:osr++gw2T61A8KVYHoQiFbFd1Lh3JOCXc/jFLJXKTxk=
|
||||
github.com/zclconf/go-cty v1.16.3/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE=
|
||||
go.abhg.dev/goldmark/frontmatter v0.2.0 h1:P8kPG0YkL12+aYk2yU3xHv4tcXzeVnN+gU0tJ5JnxRw=
|
||||
go.abhg.dev/goldmark/frontmatter v0.2.0/go.mod h1:XqrEkZuM57djk7zrlRUB02x8I5J0px76YjkOzhB4YlU=
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
|
||||
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
|
||||
go.opentelemetry.io/otel v1.34.0 h1:zRLXxLCgL1WyKsPVrgbSdMN4c0FMkDAskSTQP+0hdUY=
|
||||
|
|
@ -157,52 +72,18 @@ go.opentelemetry.io/otel/sdk/metric v1.34.0 h1:5CeK9ujjbFVL5c1PhLuStg1wxA7vQv7ce
|
|||
go.opentelemetry.io/otel/sdk/metric v1.34.0/go.mod h1:jQ/r8Ze28zRKoNRdkjCZxfs6YvBTG1+YIqyFVFYec5w=
|
||||
go.opentelemetry.io/otel/trace v1.34.0 h1:+ouXS2V8Rd4hp4580a8q23bg0azF2nI8cqLYnC8mh/k=
|
||||
go.opentelemetry.io/otel/trace v1.34.0/go.mod h1:Svm7lSjQD7kG7KJ/MUHPVXSDGz2OX4h0M2jHBhmSfRE=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
|
||||
golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8=
|
||||
golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw=
|
||||
golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df h1:UA2aFVmmsIlefxMk29Dp2juaUSth8Pyn3Tq5Y5mJGME=
|
||||
golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w=
|
||||
golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
|
||||
golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY=
|
||||
golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
|
||||
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M=
|
||||
golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250218202821-56aae31c358a h1:51aaUVRocpvUOSQKM6Q7VuoaktNIaMCLuhZB6DKksq4=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250218202821-56aae31c358a/go.mod h1:uRxBH1mhmO8PGhU89cMcHaXKZqO+OfakD8QQO0oYwlQ=
|
||||
google.golang.org/grpc v1.72.1 h1:HR03wO6eyZ7lknl75XlxABNVLLFc2PAb6mHlYh756mA=
|
||||
|
|
@ -210,8 +91,5 @@ google.golang.org/grpc v1.72.1/go.mod h1:wH5Aktxcg25y1I3w7H69nHfXdOG3UiadoBtjh3i
|
|||
google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
|
||||
google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
|
||||
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
|
|
|
|||
|
|
@ -0,0 +1,12 @@
|
|||
package file_client
|
||||
|
||||
// FileClient is an interface for defining custom file managers.
// Implementations back the provider's resources and data sources so the
// underlying file operations can be swapped out (e.g. an in-memory client
// for unit tests, the os-backed client in production).
type FileClient interface {
	// Create writes a new file with the given contents and permissions.
	Create(directory string, name string, data string, permissions string) error
	// Read returns the file's permissions and contents.
	// If file isn't found the error message must have err.Error() == "file not found"
	Read(directory string, name string) (string, string, error) // permissions, contents, error
	// Update moves and/or rewrites the file with new data and permissions.
	Update(currentDirectory string, currentName string, newDirectory string, newName string, data string, permissions string) error
	// Delete removes the file.
	Delete(directory string, name string) error
	// Compress gzip-compresses the file, writing the result to compressedName.
	Compress(directory string, name string, compressedName string) error
	// Encode base64-encodes the file, writing the result to encodedName.
	Encode(directory string, name string, encodedName string) error
	// Hash returns the SHA256 hash of the file's contents, hex encoded.
	Hash(directory string, name string) (string, error) // Sha256Hash, error
}
|
||||
|
|
@ -0,0 +1,85 @@
|
|||
package file_client
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// MemoryFileClient is an in-memory FileClient implementation used for tests.
// It tracks a single file as a map of attribute name to value; the keys used
// by its methods are "directory", "name", "contents", "permissions", and
// "compressed".
type MemoryFileClient struct {
	// file holds the single tracked file's attributes; nil until Create is called.
	file map[string]string
}

var _ FileClient = &MemoryFileClient{} // make sure the MemoryFileClient implements the FileClient
||||
|
||||
func (c *MemoryFileClient) Create(directory string, name string, data string, permissions string) error {
|
||||
|
||||
c.file = make(map[string]string)
|
||||
c.file["directory"] = directory
|
||||
c.file["name"] = name
|
||||
c.file["contents"] = data
|
||||
c.file["permissions"] = permissions
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *MemoryFileClient) Read(directory string, name string) (string, string, error) {
|
||||
if c.file["directory"] == "" || c.file["name"] == "" {
|
||||
return "", "", fmt.Errorf("file not found")
|
||||
}
|
||||
return c.file["permissions"], c.file["contents"], nil
|
||||
}
|
||||
|
||||
func (c *MemoryFileClient) Update(currentDirectory string, currentName string, newDirectory string, newName string, data string, permissions string) error {
|
||||
c.file["directory"] = newDirectory
|
||||
c.file["name"] = newName
|
||||
c.file["contents"] = data
|
||||
c.file["permissions"] = permissions
|
||||
c.file["compressed"] = "false"
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *MemoryFileClient) Delete(directory string, name string) error {
|
||||
if c.file["directory"] == directory && c.file["name"] == name {
|
||||
c.file = nil
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *MemoryFileClient) Encode(directory string, name string, encodedName string) error {
|
||||
contents := []byte(c.file["contents"])
|
||||
encoded := make([]byte, base64.StdEncoding.EncodedLen(len(contents)))
|
||||
base64.StdEncoding.Encode(encoded, contents)
|
||||
c.file["contents"] = string(encoded)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *MemoryFileClient) Compress(directory string, name string, compressedName string) error {
|
||||
c.file["compressed"] = "true"
|
||||
contents := []byte(c.file["contents"])
|
||||
var compressedBuffer bytes.Buffer
|
||||
gzipWriter := gzip.NewWriter(&compressedBuffer)
|
||||
_, err := gzipWriter.Write(contents)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := gzipWriter.Close(); err != nil {
|
||||
return err
|
||||
}
|
||||
contents = compressedBuffer.Bytes()
|
||||
c.file["content"] = string(contents)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *MemoryFileClient) Hash(directory string, name string) (string, error) {
|
||||
contents := []byte(c.file["contents"])
|
||||
|
||||
hasher := sha256.New()
|
||||
hasher.Write(contents)
|
||||
hashBytes := hasher.Sum(nil)
|
||||
hashString := hex.EncodeToString(hashBytes)
|
||||
|
||||
return hashString, nil
|
||||
}
|
||||
|
|
@ -0,0 +1,144 @@
|
|||
package file_client
|
||||
|
||||
import (
|
||||
"compress/gzip"
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// OsFileClient is the default FileClient, using the os package to manage
// real files on the local filesystem. It is stateless; every method derives
// the target path from its arguments.
type OsFileClient struct{}

var _ FileClient = &OsFileClient{} // make sure the OsFileClient implements the FileClient
|
||||
|
||||
func (c *OsFileClient) Create(directory string, name string, data string, permissions string) error {
|
||||
path := filepath.Join(directory, name)
|
||||
modeInt, err := strconv.ParseUint(permissions, 8, 32)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return os.WriteFile(path, []byte(data), os.FileMode(modeInt))
|
||||
}
|
||||
|
||||
func (c *OsFileClient) Read(directory string, name string) (string, string, error) {
|
||||
path := filepath.Join(directory, name)
|
||||
info, err := os.Stat(path)
|
||||
if err != nil && os.IsNotExist(err) {
|
||||
return "", "", fmt.Errorf("file not found")
|
||||
}
|
||||
if err != nil {
|
||||
return "", "", err
|
||||
}
|
||||
mode := fmt.Sprintf("%#o", info.Mode().Perm())
|
||||
contents, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
return "", "", err
|
||||
}
|
||||
return mode, string(contents), nil
|
||||
}
|
||||
|
||||
func (c *OsFileClient) Update(currentDirectory string, currentName string, newDirectory string, newName string, data string, permissions string) error {
|
||||
currentPath := filepath.Join(currentDirectory, currentName)
|
||||
newPath := filepath.Join(newDirectory, newName)
|
||||
if currentPath != newPath {
|
||||
err := os.Rename(currentPath, newPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
modeInt, err := strconv.ParseUint(permissions, 8, 32)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err = os.WriteFile(newPath, []byte(data), os.FileMode(modeInt)); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *OsFileClient) Delete(directory string, name string) error {
|
||||
path := filepath.Join(directory, name)
|
||||
return os.Remove(path)
|
||||
}
|
||||
|
||||
func (c *OsFileClient) Compress(directory string, name string, outputName string) error {
|
||||
inFilePath := filepath.Join(directory, name)
|
||||
inFile, err := os.Open(inFilePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer inFile.Close()
|
||||
|
||||
// Create a tmp file to hold compressed data during conversion
|
||||
outFilePath := filepath.Join(directory, outputName)
|
||||
outFile, err := os.Create(outFilePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer outFile.Close()
|
||||
|
||||
// copy inFile to gzip writer, which writes to outFile
|
||||
// use the best compression ratio possible
|
||||
gzipWriter, err := gzip.NewWriterLevel(outFile, gzip.BestCompression)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = io.Copy(gzipWriter, inFile)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return gzipWriter.Close()
|
||||
}
|
||||
|
||||
// base64 encodes a file.
|
||||
func (c *OsFileClient) Encode(directory string, name string, outputName string) error {
|
||||
inFilePath := filepath.Join(directory, name)
|
||||
inFile, err := os.Open(inFilePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer inFile.Close()
|
||||
|
||||
// Create a tmp file to hold encoded data during conversion
|
||||
outFilePath := filepath.Join(directory, outputName)
|
||||
outFile, err := os.Create(outFilePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer outFile.Close()
|
||||
|
||||
// Copy writes to the base64Encoder, which writes to the outFile
|
||||
base64Encoder := base64.NewEncoder(base64.StdEncoding, outFile)
|
||||
_, err = io.Copy(base64Encoder, inFile)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return base64Encoder.Close()
|
||||
}
|
||||
|
||||
// get the sha256 hash of the file, formatted as hex.
|
||||
func (c *OsFileClient) Hash(directory string, name string) (string, error) {
|
||||
filePath := filepath.Join(directory, name)
|
||||
file, err := os.Open(filePath)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
hasher := sha256.New()
|
||||
_, err = io.Copy(hasher, file)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
contentsHash := hasher.Sum(nil)
|
||||
hexContents := hex.EncodeToString(contentsHash)
|
||||
return hexContents, nil
|
||||
}
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
// SPDX-License-Identifier: MPL-2.0
|
||||
|
||||
package local
|
||||
package file_local
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
|
@ -11,6 +11,7 @@ import (
|
|||
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
|
||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||
"github.com/hashicorp/terraform-plugin-log/tflog"
|
||||
c "github.com/rancher/terraform-provider-file/internal/provider/file_client"
|
||||
)
|
||||
|
||||
// The `var _` is a special Go construct that results in an unusable variable.
|
||||
|
|
@ -23,7 +24,7 @@ func NewLocalDataSource() datasource.DataSource {
|
|||
}
|
||||
|
||||
type LocalDataSource struct {
|
||||
client fileClient
|
||||
client c.FileClient
|
||||
}
|
||||
|
||||
type LocalDataSourceModel struct {
|
||||
|
|
@ -87,10 +88,10 @@ func (r *LocalDataSource) Configure(ctx context.Context, req datasource.Configur
|
|||
func (r *LocalDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
|
||||
tflog.Debug(ctx, fmt.Sprintf("Request Object: %#v", req))
|
||||
|
||||
// Allow the ability to inject a file client, but use the osFileClient by default.
|
||||
// Allow the ability to inject a file client, but use the OsFileClient by default.
|
||||
if r.client == nil {
|
||||
tflog.Debug(ctx, "Configuring client with default osFileClient.")
|
||||
r.client = &osFileClient{}
|
||||
tflog.Debug(ctx, "Configuring client with default OsFileClient.")
|
||||
r.client = &c.OsFileClient{}
|
||||
}
|
||||
|
||||
var config LocalDataSourceModel
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
// SPDX-License-Identifier: MPL-2.0
|
||||
|
||||
package local
|
||||
package file_local
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
|
@ -12,6 +12,7 @@ import (
|
|||
"github.com/hashicorp/terraform-plugin-framework/datasource"
|
||||
"github.com/hashicorp/terraform-plugin-framework/tfsdk"
|
||||
"github.com/hashicorp/terraform-plugin-go/tftypes"
|
||||
c "github.com/rancher/terraform-provider-file/internal/provider/file_client"
|
||||
)
|
||||
|
||||
func TestLocalDataSourceMetadata(t *testing.T) {
|
||||
|
|
@ -47,7 +48,7 @@ func TestLocalDataSourceRead(t *testing.T) {
|
|||
}{
|
||||
{
|
||||
"Unprotected",
|
||||
LocalDataSource{client: &memoryFileClient{}},
|
||||
LocalDataSource{client: &c.MemoryFileClient{}},
|
||||
// have
|
||||
getDataSourceReadRequest(t, map[string]string{
|
||||
"id": "60cef95046105ff4522c0c1f1aeeeba43d0d729dbcabdd8846c317c98cac60a2",
|
||||
|
|
@ -66,6 +67,7 @@ func TestLocalDataSourceRead(t *testing.T) {
|
|||
"contents": "this is an unprotected read test",
|
||||
"hmac_secret_key": defaultHmacSecretKey,
|
||||
}),
|
||||
// setup
|
||||
map[string]string{
|
||||
"mode": defaultPerm,
|
||||
"directory": defaultDirectory,
|
||||
|
|
@ -75,7 +77,7 @@ func TestLocalDataSourceRead(t *testing.T) {
|
|||
},
|
||||
{
|
||||
"Protected",
|
||||
LocalDataSource{client: &memoryFileClient{}},
|
||||
LocalDataSource{client: &c.MemoryFileClient{}},
|
||||
// have
|
||||
getDataSourceReadRequest(t, map[string]string{
|
||||
"id": "ec4407ba53b2c40ac2ac18ff7372a6fe6e4f7f8aa04f340503aefc7d9a5fa4e1",
|
||||
|
|
@ -104,7 +106,7 @@ func TestLocalDataSourceRead(t *testing.T) {
|
|||
},
|
||||
{
|
||||
"Protected with content update",
|
||||
LocalDataSource{client: &memoryFileClient{}},
|
||||
LocalDataSource{client: &c.MemoryFileClient{}},
|
||||
// have
|
||||
getDataSourceReadRequest(t, map[string]string{
|
||||
"id": "ec4407ba53b2c40ac2ac18ff7372a6fe6e4f7f8aa04f340503aefc7d9a5fa4e1",
|
||||
|
|
@ -133,7 +135,7 @@ func TestLocalDataSourceRead(t *testing.T) {
|
|||
},
|
||||
{
|
||||
"Protected with mode update",
|
||||
LocalDataSource{client: &memoryFileClient{}},
|
||||
LocalDataSource{client: &c.MemoryFileClient{}},
|
||||
// have
|
||||
getDataSourceReadRequest(t, map[string]string{
|
||||
"id": "ec4407ba53b2c40ac2ac18ff7372a6fe6e4f7f8aa04f340503aefc7d9a5fa4e1",
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
// SPDX-License-Identifier: MPL-2.0
|
||||
|
||||
package local
|
||||
package file_local
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
|
@ -24,6 +24,7 @@ import (
|
|||
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
|
||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||
"github.com/hashicorp/terraform-plugin-log/tflog"
|
||||
c "github.com/rancher/terraform-provider-file/internal/provider/file_client"
|
||||
)
|
||||
|
||||
// The `var _` is a special Go construct that results in an unusable variable.
|
||||
|
|
@ -34,21 +35,12 @@ var _ resource.ResourceWithImportState = &LocalResource{}
|
|||
|
||||
const unprotectedHmacSecret = "this-is-the-hmac-secret-key-that-will-be-used-to-calculate-the-hash-of-unprotected-files"
|
||||
|
||||
// An interface for defining custom file managers.
|
||||
type fileClient interface {
|
||||
Create(directory string, name string, data string, permissions string) error
|
||||
// If file isn't found the error message must have err.Error() == "file not found"
|
||||
Read(directory string, name string) (string, string, error) // permissions, contents, error
|
||||
Update(currentDirectory string, currentName string, newDirectory string, newName string, data string, permissions string) error
|
||||
Delete(directory string, name string) error
|
||||
}
|
||||
|
||||
func NewLocalResource() resource.Resource {
|
||||
return &LocalResource{}
|
||||
}
|
||||
|
||||
type LocalResource struct {
|
||||
client fileClient
|
||||
client c.FileClient
|
||||
}
|
||||
|
||||
// LocalResourceModel describes the resource data model.
|
||||
|
|
@ -159,11 +151,9 @@ func (r *LocalResource) Create(ctx context.Context, req resource.CreateRequest,
|
|||
tflog.Debug(ctx, fmt.Sprintf("Request Object: %#v", req))
|
||||
var err error
|
||||
|
||||
// Allow the ability to inject a file client, but use the osFileClient by default.
|
||||
// see file_os_client.go
|
||||
if r.client == nil {
|
||||
tflog.Debug(ctx, "Configuring client with default osFileClient.")
|
||||
r.client = &osFileClient{}
|
||||
tflog.Debug(ctx, "Configuring client with default OsFileClient.")
|
||||
r.client = &c.OsFileClient{}
|
||||
}
|
||||
|
||||
var plan LocalResourceModel
|
||||
|
|
@ -221,10 +211,10 @@ func (r *LocalResource) Create(ctx context.Context, req resource.CreateRequest,
|
|||
func (r *LocalResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
|
||||
tflog.Debug(ctx, fmt.Sprintf("Request Object: %#v", req))
|
||||
|
||||
// Allow the ability to inject a file client, but use the osFileClient by default.
|
||||
// Allow the ability to inject a file client, but use the OsFileClient by default.
|
||||
if r.client == nil {
|
||||
tflog.Debug(ctx, "Configuring client with default osFileClient.")
|
||||
r.client = &osFileClient{}
|
||||
tflog.Debug(ctx, "Configuring client with default OsFileClient.")
|
||||
r.client = &c.OsFileClient{}
|
||||
}
|
||||
|
||||
var state LocalResourceModel
|
||||
|
|
@ -286,10 +276,10 @@ func (r *LocalResource) Read(ctx context.Context, req resource.ReadRequest, resp
|
|||
func (r *LocalResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
|
||||
tflog.Debug(ctx, fmt.Sprintf("Request Object: %#v", req))
|
||||
|
||||
// Allow the ability to inject a file client, but use the osFileClient by default.
|
||||
// Allow the ability to inject a file client, but use the OsFileClient by default.
|
||||
if r.client == nil {
|
||||
tflog.Debug(ctx, "Configuring client with default osFileClient.")
|
||||
r.client = &osFileClient{}
|
||||
tflog.Debug(ctx, "Configuring client with default OsFileClient.")
|
||||
r.client = &c.OsFileClient{}
|
||||
}
|
||||
|
||||
var config LocalResourceModel
|
||||
|
|
@ -379,10 +369,10 @@ func (r *LocalResource) Update(ctx context.Context, req resource.UpdateRequest,
|
|||
func (r *LocalResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
|
||||
tflog.Debug(ctx, fmt.Sprintf("Request Object: %#v", req))
|
||||
|
||||
// Allow the ability to inject a file client, but use the osFileClient by default.
|
||||
// Allow the ability to inject a file client, but use the OsFileClient by default.
|
||||
if r.client == nil {
|
||||
tflog.Debug(ctx, "Configuring client with default osFileClient.")
|
||||
r.client = &osFileClient{}
|
||||
tflog.Debug(ctx, "Configuring client with default OsFileClient.")
|
||||
r.client = &c.OsFileClient{}
|
||||
}
|
||||
|
||||
var state LocalResourceModel
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
// SPDX-License-Identifier: MPL-2.0
|
||||
|
||||
package local
|
||||
package file_local
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
|
@ -12,6 +12,7 @@ import (
|
|||
"github.com/hashicorp/terraform-plugin-framework/resource"
|
||||
"github.com/hashicorp/terraform-plugin-framework/tfsdk"
|
||||
"github.com/hashicorp/terraform-plugin-go/tftypes"
|
||||
c "github.com/rancher/terraform-provider-file/internal/provider/file_client"
|
||||
)
|
||||
|
||||
const (
|
||||
|
|
@ -78,7 +79,7 @@ func TestLocalResourceCreate(t *testing.T) {
|
|||
}{
|
||||
{
|
||||
"Basic",
|
||||
LocalResource{client: &memoryFileClient{}},
|
||||
LocalResource{client: &c.MemoryFileClient{}},
|
||||
// have
|
||||
getCreateRequest(t, map[string]string{
|
||||
"id": defaultId,
|
||||
|
|
@ -102,7 +103,7 @@ func TestLocalResourceCreate(t *testing.T) {
|
|||
},
|
||||
{
|
||||
"Protected",
|
||||
LocalResource{client: &osFileClient{}},
|
||||
LocalResource{client: &c.MemoryFileClient{}},
|
||||
// have
|
||||
getCreateRequest(t, map[string]string{
|
||||
"id": "4ccd8ec7ea24e0524c8aba459fbf3a2649ec3cd96a1c8f9dfb326cc57a9d3127",
|
||||
|
|
@ -126,7 +127,7 @@ func TestLocalResourceCreate(t *testing.T) {
|
|||
},
|
||||
{
|
||||
"Protected using key from environment",
|
||||
LocalResource{client: &memoryFileClient{}},
|
||||
LocalResource{client: &c.MemoryFileClient{}},
|
||||
// have
|
||||
getCreateRequest(t, map[string]string{
|
||||
"id": "59fed8691a76c7693fc9dcd4fda28390a1fd3090114bc64f3e5a3abe312a92f5",
|
||||
|
|
@ -189,7 +190,7 @@ func TestLocalResourceRead(t *testing.T) {
|
|||
}{
|
||||
{
|
||||
"Unprotected",
|
||||
LocalResource{client: &memoryFileClient{}},
|
||||
LocalResource{client: &c.MemoryFileClient{}},
|
||||
// have
|
||||
getReadRequest(t, map[string]string{
|
||||
"id": "60cef95046105ff4522c0c1f1aeeeba43d0d729dbcabdd8846c317c98cac60a2",
|
||||
|
|
@ -219,7 +220,7 @@ func TestLocalResourceRead(t *testing.T) {
|
|||
},
|
||||
{
|
||||
"Protected",
|
||||
LocalResource{client: &memoryFileClient{}},
|
||||
LocalResource{client: &c.MemoryFileClient{}},
|
||||
// have
|
||||
getReadRequest(t, map[string]string{
|
||||
"id": "ec4407ba53b2c40ac2ac18ff7372a6fe6e4f7f8aa04f340503aefc7d9a5fa4e1",
|
||||
|
|
@ -250,7 +251,7 @@ func TestLocalResourceRead(t *testing.T) {
|
|||
},
|
||||
{
|
||||
"Protected with content update",
|
||||
LocalResource{client: &memoryFileClient{}},
|
||||
LocalResource{client: &c.MemoryFileClient{}},
|
||||
// have
|
||||
getReadRequest(t, map[string]string{
|
||||
"id": "ec4407ba53b2c40ac2ac18ff7372a6fe6e4f7f8aa04f340503aefc7d9a5fa4e1",
|
||||
|
|
@ -281,7 +282,7 @@ func TestLocalResourceRead(t *testing.T) {
|
|||
},
|
||||
{
|
||||
"Protected with mode update",
|
||||
LocalResource{client: &memoryFileClient{}},
|
||||
LocalResource{client: &c.MemoryFileClient{}},
|
||||
// have
|
||||
getReadRequest(t, map[string]string{
|
||||
"id": "ec4407ba53b2c40ac2ac18ff7372a6fe6e4f7f8aa04f340503aefc7d9a5fa4e1",
|
||||
|
|
@ -342,7 +343,7 @@ func TestLocalResourceUpdate(t *testing.T) {
|
|||
}{
|
||||
{
|
||||
"Basic test",
|
||||
LocalResource{client: &memoryFileClient{}},
|
||||
LocalResource{client: &c.MemoryFileClient{}},
|
||||
// have
|
||||
getUpdateRequest(t, map[string]map[string]string{
|
||||
"priorState": {
|
||||
|
|
@ -427,7 +428,7 @@ func TestLocalResourceDelete(t *testing.T) {
|
|||
}{
|
||||
{
|
||||
"Basic test",
|
||||
LocalResource{client: &memoryFileClient{}},
|
||||
LocalResource{client: &c.MemoryFileClient{}},
|
||||
// have
|
||||
getDeleteRequest(t, map[string]string{
|
||||
"id": "fd6fb8621c4850c228190f4d448ce30881a32609d6b4c7341d48d0027e597567",
|
||||
|
|
@ -0,0 +1,127 @@
|
|||
package file_snapshot
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"context"
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/hashicorp/terraform-plugin-framework/datasource"
|
||||
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
|
||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||
"github.com/hashicorp/terraform-plugin-log/tflog"
|
||||
)
|
||||
|
||||
// The `var _` is a special Go construct that results in an unusable variable.
|
||||
// The purpose of these lines is to make sure our LocalFileDataSource correctly implements the `datasource.DataSource“ interface.
|
||||
// These will fail at compilation time if the implementation is not satisfied.
|
||||
var _ datasource.DataSource = &SnapshotDataSource{}
|
||||
|
||||
func NewSnapshotDataSource() datasource.DataSource {
|
||||
return &SnapshotDataSource{}
|
||||
}
|
||||
|
||||
type SnapshotDataSource struct{}
|
||||
|
||||
type SnapshotDataSourceModel struct {
|
||||
Id types.String `tfsdk:"id"`
|
||||
Contents types.String `tfsdk:"contents"`
|
||||
Data types.String `tfsdk:"data"`
|
||||
Decompress types.Bool `tfsdk:"decompress"`
|
||||
}
|
||||
|
||||
func (r *SnapshotDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
|
||||
resp.TypeName = req.ProviderTypeName + "_snapshot" // file_snapshot
|
||||
}
|
||||
|
||||
func (r *SnapshotDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
|
||||
resp.Schema = schema.Schema{
|
||||
MarkdownDescription: "File Snapshot data source. \n" +
|
||||
"This data source retrieves the contents of a file from the output of a file_snapshot datasource." +
|
||||
"Warning! Using this resource places the plain text contents of the snapshot in your state file.",
|
||||
|
||||
Attributes: map[string]schema.Attribute{
|
||||
"contents": schema.StringAttribute{
|
||||
MarkdownDescription: "The contents of the snapshot to retrieve. " +
|
||||
"This could be any gzip compressed base64 encoded data. " +
|
||||
"If the data isn't compressed, set the decompress argument to false, or leave it blank. " +
|
||||
"If the decompress argument is false, the data will be the base64 decoded contents.",
|
||||
Required: true,
|
||||
},
|
||||
"decompress": schema.BoolAttribute{
|
||||
MarkdownDescription: "Whether or not to decompress the contents. " +
|
||||
"If left empty, this will default to false.",
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
"id": schema.StringAttribute{
|
||||
MarkdownDescription: "Unique identifier for the datasource. The SHA256 hash of the contents.",
|
||||
Computed: true,
|
||||
},
|
||||
"data": schema.StringAttribute{
|
||||
MarkdownDescription: "The resulting data output. This is the plain text representation of the contents attribute. " +
|
||||
"This is computed by first decoding the data from base64, then decompressing the resulting gzip. " +
|
||||
"If decompress is false, then this will be the base64 decoded version of the contents.",
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (r *SnapshotDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
|
||||
// Prevent panic if the provider has not been configured.
|
||||
// This only configures the provider, so anything here must be available in the provider package to configure.
|
||||
// If you want to configure a client, do that in the Create/Read/Update/Delete functions.
|
||||
if req.ProviderData == nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func (r *SnapshotDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
|
||||
tflog.Debug(ctx, fmt.Sprintf("Read Request Object: %+v", req))
|
||||
|
||||
var config SnapshotDataSourceModel
|
||||
resp.Diagnostics.Append(req.Config.Get(ctx, &config)...)
|
||||
if resp.Diagnostics.HasError() {
|
||||
return
|
||||
}
|
||||
contents := config.Contents.ValueString()
|
||||
decompress := config.Decompress.ValueBool()
|
||||
|
||||
hasher := sha256.New()
|
||||
hasher.Write([]byte(contents))
|
||||
hashBytes := hasher.Sum(nil)
|
||||
hashString := hex.EncodeToString(hashBytes)
|
||||
|
||||
config.Id = types.StringValue(hashString)
|
||||
d, err := base64.StdEncoding.DecodeString(contents)
|
||||
if err != nil {
|
||||
resp.Diagnostics.AddError("Error decoding file: ", err.Error())
|
||||
return
|
||||
}
|
||||
contents = string(d)
|
||||
|
||||
if decompress {
|
||||
gzipReader, err := gzip.NewReader(bytes.NewReader([]byte(contents)))
|
||||
if err != nil {
|
||||
resp.Diagnostics.AddError("Error creating gzip reader: ", err.Error())
|
||||
return
|
||||
}
|
||||
defer gzipReader.Close()
|
||||
decompressedBytes, err := io.ReadAll(gzipReader)
|
||||
if err != nil {
|
||||
resp.Diagnostics.AddError("Error reading compressed bytes: ", err.Error())
|
||||
return
|
||||
}
|
||||
contents = string(decompressedBytes)
|
||||
}
|
||||
|
||||
config.Data = types.StringValue(contents)
|
||||
|
||||
resp.Diagnostics.Append(resp.State.Set(ctx, &config)...)
|
||||
tflog.Debug(ctx, fmt.Sprintf("Read Response Object: %+v", *resp))
|
||||
}
|
||||
|
|
@ -0,0 +1,205 @@
|
|||
package file_snapshot
|
||||
|
||||
import (
|
||||
"context"
|
||||
"slices"
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/hashicorp/terraform-plugin-framework/datasource"
|
||||
"github.com/hashicorp/terraform-plugin-framework/tfsdk"
|
||||
"github.com/hashicorp/terraform-plugin-go/tftypes"
|
||||
)
|
||||
|
||||
const (
|
||||
testDataContents = "these contents are the default for testing"
|
||||
// echo -n "these contents are the default for testing" | base64 -w 0 #.
|
||||
testDataEncoded = "dGhlc2UgY29udGVudHMgYXJlIHRoZSBkZWZhdWx0IGZvciB0ZXN0aW5n"
|
||||
// echo -n "these contents are the default for testing" | gzip -c | base64 -w 0 #.
|
||||
testDataCompressed = "H4sIAAAAAAAAAwXBAQoAIAgDwK/sa1KzglDQ9f/utNnEyBBDDStCm5h0e1fwLIitE+sDr6miHioAAAA="
|
||||
|
||||
// echo -n "these contents are the default for testing" | base64 -w 0 | sha256sum | awk '{print $1}' #.
|
||||
testDataEncodedId = "ba8cd27d74eb572956e09da49530c5ab2dd66ee946956e9d55a4cd09b76ab527"
|
||||
|
||||
// echo -n "these contents are the default for testing" | gzip -c | base64 -w 0 | sha256sum | awk '{print $1}' #.
|
||||
testDataCompressedId = "a358aafd3bebe1731735516b321d55bd8a58a64e0e2d92646a6a6fdb63751c5d"
|
||||
|
||||
defaultDecompress = "false"
|
||||
)
|
||||
|
||||
var snapshotDataSourceBooleanFields = []string{"decompress"}
|
||||
|
||||
func TestSnapshotDataSourceMetadata(t *testing.T) {
|
||||
t.Run("Metadata function", func(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
fit SnapshotDataSource
|
||||
want datasource.MetadataResponse
|
||||
}{
|
||||
{"Basic test", SnapshotDataSource{}, datasource.MetadataResponse{TypeName: "file_snapshot"}},
|
||||
}
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
res := datasource.MetadataResponse{}
|
||||
tc.fit.Metadata(context.Background(), datasource.MetadataRequest{ProviderTypeName: "file"}, &res)
|
||||
got := res
|
||||
if got != tc.want {
|
||||
t.Errorf("%+v.Metadata() is %+v; want %+v", tc.fit, got, tc.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestSnapshotDataSourceSchema(t *testing.T) {
|
||||
t.Run("Schema function", func(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
fit SnapshotDataSource
|
||||
want datasource.SchemaResponse
|
||||
}{
|
||||
{"Basic test", SnapshotDataSource{}, *getSnapshotDataSourceSchema()},
|
||||
}
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
r := datasource.SchemaResponse{}
|
||||
tc.fit.Schema(context.Background(), datasource.SchemaRequest{}, &r)
|
||||
got := r
|
||||
if diff := cmp.Diff(tc.want, got); diff != "" {
|
||||
t.Errorf("Schema() mismatch (-want +got):\n%+v", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestSnapshotDataSourceRead(t *testing.T) {
|
||||
t.Run("Read function", func(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
fit SnapshotDataSource
|
||||
have datasource.ReadRequest
|
||||
want datasource.ReadResponse
|
||||
}{
|
||||
{
|
||||
"Basic",
|
||||
SnapshotDataSource{},
|
||||
// have
|
||||
getSnapshotDataSourceReadRequest(t, map[string]string{
|
||||
"id": "", // id is computed.
|
||||
"data": "", // data is computed.
|
||||
"contents": testDataEncoded,
|
||||
"decompress": defaultDecompress,
|
||||
}),
|
||||
// want
|
||||
getSnapshotDataSourceReadResponse(t, map[string]string{
|
||||
"id": testDataEncodedId,
|
||||
"data": testDataContents,
|
||||
"contents": testDataEncoded,
|
||||
"decompress": defaultDecompress,
|
||||
}),
|
||||
},
|
||||
{
|
||||
"Compressed",
|
||||
SnapshotDataSource{},
|
||||
// have
|
||||
getSnapshotDataSourceReadRequest(t, map[string]string{
|
||||
"id": "", // id is computed.
|
||||
"data": "", // data is computed.
|
||||
"contents": testDataCompressed,
|
||||
"decompress": "true",
|
||||
}),
|
||||
// want
|
||||
getSnapshotDataSourceReadResponse(t, map[string]string{
|
||||
"id": testDataCompressedId,
|
||||
"data": testDataContents,
|
||||
"contents": testDataCompressed,
|
||||
"decompress": "true",
|
||||
}),
|
||||
},
|
||||
}
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
r := getSnapshotDataSourceReadResponseContainer()
|
||||
tc.fit.Read(context.Background(), tc.have, &r)
|
||||
got := r
|
||||
if diff := cmp.Diff(tc.want, got); diff != "" {
|
||||
t.Errorf("Read() mismatch (-want +got):\n%+v", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// *** Test Helper Functions *** //
|
||||
|
||||
// Read.
|
||||
func getSnapshotDataSourceReadRequest(t *testing.T, data map[string]string) datasource.ReadRequest {
|
||||
stateMap := make(map[string]tftypes.Value)
|
||||
for key, value := range data {
|
||||
if slices.Contains(snapshotDataSourceBooleanFields, key) { // snapshotDataSourceBooleanFields is a constant
|
||||
v, err := strconv.ParseBool(value)
|
||||
if err != nil {
|
||||
t.Errorf("Error converting %s to bool %s: ", value, err.Error())
|
||||
}
|
||||
stateMap[key] = tftypes.NewValue(tftypes.Bool, v)
|
||||
} else {
|
||||
stateMap[key] = tftypes.NewValue(tftypes.String, value)
|
||||
}
|
||||
}
|
||||
stateValue := tftypes.NewValue(getSnapshotDataSourceAttributeTypes(), stateMap)
|
||||
return datasource.ReadRequest{
|
||||
Config: tfsdk.Config{
|
||||
Raw: stateValue,
|
||||
Schema: getSnapshotDataSourceSchema().Schema,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func getSnapshotDataSourceReadResponseContainer() datasource.ReadResponse {
|
||||
return datasource.ReadResponse{
|
||||
State: tfsdk.State{Schema: getSnapshotDataSourceSchema().Schema},
|
||||
}
|
||||
}
|
||||
|
||||
func getSnapshotDataSourceReadResponse(t *testing.T, data map[string]string) datasource.ReadResponse {
|
||||
stateMap := make(map[string]tftypes.Value)
|
||||
for key, value := range data {
|
||||
if slices.Contains(snapshotDataSourceBooleanFields, key) { // snapshotDataSourceBooleanFields is a constant
|
||||
v, err := strconv.ParseBool(value)
|
||||
if err != nil {
|
||||
t.Errorf("Error converting %s to bool %s: ", value, err.Error())
|
||||
}
|
||||
stateMap[key] = tftypes.NewValue(tftypes.Bool, v)
|
||||
} else {
|
||||
stateMap[key] = tftypes.NewValue(tftypes.String, value)
|
||||
}
|
||||
}
|
||||
stateValue := tftypes.NewValue(getSnapshotDataSourceAttributeTypes(), stateMap)
|
||||
return datasource.ReadResponse{
|
||||
State: tfsdk.State{
|
||||
Raw: stateValue,
|
||||
Schema: getSnapshotDataSourceSchema().Schema,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// The helpers helpers.
|
||||
func getSnapshotDataSourceAttributeTypes() tftypes.Object {
|
||||
return tftypes.Object{
|
||||
AttributeTypes: map[string]tftypes.Type{
|
||||
"id": tftypes.String,
|
||||
"data": tftypes.String,
|
||||
"contents": tftypes.String,
|
||||
"decompress": tftypes.Bool,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func getSnapshotDataSourceSchema() *datasource.SchemaResponse {
|
||||
var testDataSource SnapshotDataSource
|
||||
r := &datasource.SchemaResponse{}
|
||||
testDataSource.Schema(context.Background(), datasource.SchemaRequest{}, r)
|
||||
return r
|
||||
}
|
||||
|
|
@ -0,0 +1,289 @@
|
|||
package file_snapshot
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/hashicorp/terraform-plugin-framework/path"
|
||||
"github.com/hashicorp/terraform-plugin-framework/resource"
|
||||
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
|
||||
"github.com/hashicorp/terraform-plugin-framework/resource/schema/booldefault"
|
||||
"github.com/hashicorp/terraform-plugin-framework/resource/schema/boolplanmodifier"
|
||||
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
|
||||
"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault"
|
||||
"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
|
||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||
"github.com/hashicorp/terraform-plugin-log/tflog"
|
||||
c "github.com/rancher/terraform-provider-file/internal/provider/file_client"
|
||||
)
|
||||
|
||||
// The `var _` is a special Go construct that results in an unusable variable.
|
||||
// The purpose of these lines is to make sure our LocalFileResource correctly implements the `resource.Resource“ interface.
|
||||
// These will fail at compilation time if the implementation is not satisfied.
|
||||
var _ resource.Resource = &SnapshotResource{}
|
||||
var _ resource.ResourceWithImportState = &SnapshotResource{}
|
||||
|
||||
func NewSnapshotResource() resource.Resource {
|
||||
return &SnapshotResource{}
|
||||
}
|
||||
|
||||
type SnapshotResource struct {
|
||||
client c.FileClient
|
||||
}
|
||||
|
||||
// SnapshotResourceModel describes the resource data model.
|
||||
type SnapshotResourceModel struct {
|
||||
Id types.String `tfsdk:"id"`
|
||||
Name types.String `tfsdk:"name"`
|
||||
Directory types.String `tfsdk:"directory"`
|
||||
Snapshot types.String `tfsdk:"snapshot"`
|
||||
UpdateTrigger types.String `tfsdk:"update_trigger"`
|
||||
Compress types.Bool `tfsdk:"compress"`
|
||||
}
|
||||
|
||||
func (r *SnapshotResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
|
||||
resp.TypeName = req.ProviderTypeName + "_snapshot" // file_snapshot
|
||||
}
|
||||
|
||||
func (r *SnapshotResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
|
||||
resp.Schema = schema.Schema{
|
||||
MarkdownDescription: "File Snapshot resource. \n" +
|
||||
"This resource saves some content in state and doesn't update it until the trigger argument changes. " +
|
||||
"The refresh phase doesn't update state, instead " +
|
||||
"the state can only change on create or update and only when the update_trigger argument changes.",
|
||||
|
||||
Attributes: map[string]schema.Attribute{
|
||||
"name": schema.StringAttribute{
|
||||
MarkdownDescription: "Name of the file to save. Changing this forces recreate, moving the file isn't supported.",
|
||||
Required: true,
|
||||
PlanModifiers: []planmodifier.String{
|
||||
stringplanmodifier.RequiresReplace(), // the location of the file shouldn't change
|
||||
},
|
||||
},
|
||||
"directory": schema.StringAttribute{
|
||||
MarkdownDescription: "Path of the file to save. Changing this forces recreate, moving the file isn't supported.",
|
||||
Optional: true,
|
||||
Computed: true, // whenever an argument has a default value it should have Computed: true
|
||||
Default: stringdefault.StaticString("."),
|
||||
PlanModifiers: []planmodifier.String{
|
||||
stringplanmodifier.RequiresReplace(), // the location of the file shouldn't change
|
||||
},
|
||||
},
|
||||
"update_trigger": schema.StringAttribute{
|
||||
MarkdownDescription: "When this argument changes the snapshot will be updated.",
|
||||
Required: true,
|
||||
},
|
||||
"compress": schema.BoolAttribute{
|
||||
MarkdownDescription: "Whether the provider should compress the contents and snapshot or not. Defaults to 'false'. " +
|
||||
"When set to 'true' the provider will compress the contents and snapshot attributes using the gzip compression algorithm. " +
|
||||
"Changing this attribute forces recreate, compressing snapshots which are already saved in state isn't supported. " +
|
||||
"Warning! To prevent memory errors the provider generates temporary files to facilitate encoding and compression.",
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
Default: booldefault.StaticBool(false),
|
||||
PlanModifiers: []planmodifier.Bool{
|
||||
boolplanmodifier.RequiresReplace(), // compressing files which were previously uncompressed isn't supported
|
||||
},
|
||||
},
|
||||
"snapshot": schema.StringAttribute{
|
||||
MarkdownDescription: "Base64 encoded contents of the file specified in the name and directory fields. " +
|
||||
"This data will be added on create and only updated when the update_trigger field changes. " +
|
||||
"Warning! To prevent memory errors the provider generates temporary files to facilitate encoding and compression.",
|
||||
Computed: true,
|
||||
Sensitive: true,
|
||||
},
|
||||
"id": schema.StringAttribute{
|
||||
MarkdownDescription: "Unique identifier for the resource. The SHA256 hash of the base64 encoded contents.",
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (r *SnapshotResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
|
||||
// Prevent panic if the provider has not been configured.
|
||||
// This only configures the provider, so anything here must be available in the provider package to configure.
|
||||
// If you want to configure a client, do that in the Create/Read/Update/Delete functions.
|
||||
if req.ProviderData == nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// We should:
|
||||
// - generate reality and state to match plan in the Create function
|
||||
// - update state to match reality in the Read function
|
||||
// - update reality and state to match plan in the Update function (don't compare old state, just override)
|
||||
// - destroy reality in the Destroy function (state is handled automatically)
|
||||
|
||||
func (r *SnapshotResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
|
||||
tflog.Debug(ctx, fmt.Sprintf("Create Request Object: %+v", req))
|
||||
|
||||
if r.client == nil {
|
||||
tflog.Debug(ctx, "Configuring client with default OsFileClient.")
|
||||
r.client = &c.OsFileClient{}
|
||||
}
|
||||
|
||||
var plan SnapshotResourceModel
|
||||
resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...)
|
||||
if resp.Diagnostics.HasError() {
|
||||
return
|
||||
}
|
||||
pName := plan.Name.ValueString()
|
||||
pDir := plan.Directory.ValueString()
|
||||
pCompress := plan.Compress.ValueBool()
|
||||
|
||||
name := pName
|
||||
if pCompress {
|
||||
err := r.client.Compress(pDir, pName, "compressed_"+pName)
|
||||
if err != nil {
|
||||
resp.Diagnostics.AddError("Error compressing file: ", err.Error())
|
||||
return
|
||||
}
|
||||
name = "compressed_" + pName
|
||||
}
|
||||
|
||||
err := r.client.Encode(pDir, name, "encoded_"+pName)
|
||||
if err != nil {
|
||||
resp.Diagnostics.AddError("Error encoding file: ", err.Error())
|
||||
return
|
||||
}
|
||||
_, encodedContents, err := r.client.Read(pDir, "encoded_"+pName)
|
||||
if err != nil {
|
||||
resp.Diagnostics.AddError("Error reading encoded file: ", err.Error())
|
||||
return
|
||||
}
|
||||
plan.Snapshot = types.StringValue(encodedContents)
|
||||
|
||||
hash, err := r.client.Hash(pDir, pName)
|
||||
if err != nil {
|
||||
resp.Diagnostics.AddError("Error hashing file: ", err.Error())
|
||||
return
|
||||
}
|
||||
plan.Id = types.StringValue(hash)
|
||||
|
||||
resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...)
|
||||
|
||||
if pCompress {
|
||||
err := r.client.Delete(pDir, "compressed_"+pName)
|
||||
if err != nil {
|
||||
resp.Diagnostics.AddError("Error cleaning up temporary compressed file: ", err.Error())
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
err = r.client.Delete(pDir, "encoded_"+pName)
|
||||
if err != nil {
|
||||
resp.Diagnostics.AddError("Error cleaning up temporary encoded file: ", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
tflog.Debug(ctx, fmt.Sprintf("Create Response Object: %+v", *resp))
|
||||
}
|
||||
|
||||
func (r *SnapshotResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
|
||||
tflog.Debug(ctx, fmt.Sprintf("Read Request Object: %+v", req))
|
||||
|
||||
var state SnapshotResourceModel
|
||||
resp.Diagnostics.Append(req.State.Get(ctx, &state)...)
|
||||
if resp.Diagnostics.HasError() {
|
||||
return
|
||||
}
|
||||
|
||||
// read is a no-op
|
||||
|
||||
resp.Diagnostics.Append(resp.State.Set(ctx, &state)...)
|
||||
tflog.Debug(ctx, fmt.Sprintf("Read Response Object: %+v", *resp))
|
||||
}
|
||||
|
||||
// a difference between the plan and the state has been found.
|
||||
// we want to update reality and state to match the plan.
|
||||
// our snapshot will only update if the update trigger has changed.
|
||||
func (r *SnapshotResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
|
||||
tflog.Debug(ctx, fmt.Sprintf("Update Request Object: %+v", req))
|
||||
|
||||
if r.client == nil {
|
||||
tflog.Debug(ctx, "Configuring client with default OsFileClient.")
|
||||
r.client = &c.OsFileClient{}
|
||||
}
|
||||
|
||||
var plan SnapshotResourceModel
|
||||
resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...)
|
||||
if resp.Diagnostics.HasError() {
|
||||
return
|
||||
}
|
||||
|
||||
pName := plan.Name.ValueString()
|
||||
pDir := plan.Directory.ValueString()
|
||||
pUpdateTrigger := plan.UpdateTrigger.ValueString()
|
||||
|
||||
var state SnapshotResourceModel
|
||||
resp.Diagnostics.Append(req.State.Get(ctx, &state)...)
|
||||
if resp.Diagnostics.HasError() {
|
||||
return
|
||||
}
|
||||
|
||||
sUpdateTrigger := state.UpdateTrigger.ValueString()
|
||||
sSnapshot := state.Snapshot.ValueString()
|
||||
sCompress := state.Compress.ValueBool()
|
||||
sId := state.Id.ValueString()
|
||||
|
||||
plan.Id = types.StringValue(sId)
|
||||
|
||||
if pUpdateTrigger != sUpdateTrigger {
|
||||
tflog.Debug(ctx, fmt.Sprintf("Update trigger has changed from %s to %s, updating snapshot.", sUpdateTrigger, pUpdateTrigger))
|
||||
|
||||
name := pName
|
||||
if sCompress {
|
||||
err := r.client.Compress(pDir, pName, "compressed_"+pName)
|
||||
if err != nil {
|
||||
resp.Diagnostics.AddError("Error compressing file: ", err.Error())
|
||||
return
|
||||
}
|
||||
name = "compressed_" + pName
|
||||
}
|
||||
|
||||
err := r.client.Encode(pDir, name, "encoded_"+pName)
|
||||
if err != nil {
|
||||
resp.Diagnostics.AddError("Error encoding file: ", err.Error())
|
||||
return
|
||||
}
|
||||
_, encodedContents, err := r.client.Read(pDir, "encoded_"+pName)
|
||||
if err != nil {
|
||||
resp.Diagnostics.AddError("Error reading encoded file: ", err.Error())
|
||||
return
|
||||
}
|
||||
plan.Snapshot = types.StringValue(encodedContents)
|
||||
} else {
|
||||
tflog.Debug(ctx, fmt.Sprintf("Update trigger hasn't changed, keeping previous snapshot (%s).", sSnapshot))
|
||||
plan.Snapshot = types.StringValue(sSnapshot)
|
||||
}
|
||||
|
||||
tflog.Debug(ctx, fmt.Sprintf("Setting state to this plan: \n%+v", &plan))
|
||||
resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...)
|
||||
|
||||
if sCompress {
|
||||
err := r.client.Delete(pDir, "compressed_"+pName)
|
||||
if err != nil {
|
||||
resp.Diagnostics.AddError("Error cleaning up temporary compressed file: ", err.Error())
|
||||
return
|
||||
}
|
||||
}
|
||||
err := r.client.Delete(pDir, "encoded_"+pName)
|
||||
if err != nil {
|
||||
resp.Diagnostics.AddError("Error cleaning up temporary encoded file: ", err.Error())
|
||||
return
|
||||
}
|
||||
tflog.Debug(ctx, fmt.Sprintf("Update Response Object: %+v", *resp))
|
||||
}
|
||||
|
||||
func (r *SnapshotResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
|
||||
tflog.Debug(ctx, fmt.Sprintf("Delete Request Object: %+v", req))
|
||||
|
||||
// delete is a no-op
|
||||
|
||||
tflog.Debug(ctx, fmt.Sprintf("Delete Response Object: %+v", *resp))
|
||||
}
|
||||
|
||||
func (r *SnapshotResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
|
||||
resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp)
|
||||
}
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
package snapshot
|
||||
package file_snapshot
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
|
@ -10,19 +10,29 @@ import (
|
|||
"github.com/hashicorp/terraform-plugin-framework/resource"
|
||||
"github.com/hashicorp/terraform-plugin-framework/tfsdk"
|
||||
"github.com/hashicorp/terraform-plugin-go/tftypes"
|
||||
c "github.com/rancher/terraform-provider-file/internal/provider/file_client"
|
||||
)
|
||||
|
||||
const (
|
||||
//
|
||||
// echo -n "these contents are the default for testing" | base64 -w 0 | sha256sum | awk '{print $1}' // .
|
||||
defaultId = "ba8cd27d74eb572956e09da49530c5ab2dd66ee946956e9d55a4cd09b76ab527"
|
||||
defaultContents = "these contents are the default for testing"
|
||||
// echo -n "these contents are the default for testing" | base64 -w 0 // .
|
||||
defaultTrigger = "dGhlc2UgY29udGVudHMgYXJlIHRoZSBkZWZhdWx0IGZvciB0ZXN0aW5n"
|
||||
testContents = "these contents are the default for testing"
|
||||
// echo -n "these contents are the default for testing" | base64 -w 0 #.
|
||||
testEncoded = "dGhlc2UgY29udGVudHMgYXJlIHRoZSBkZWZhdWx0IGZvciB0ZXN0aW5n"
|
||||
// echo -n "these contents are the default for testing" | gzip -c | base64 -w 0 #.
|
||||
// testCompressed = "H4sIAAAAAAAAAwXBAQoAIAgDwK/sa1KzglDQ9f/utNnEyBBDDStCm5h0e1fwLIitE+sDr6miHioAAAA="
|
||||
// echo -n "these contents are the default for testing" | base64 -w 0 | sha256sum | awk '{print $1}' #.
|
||||
testId = "ba8cd27d74eb572956e09da49530c5ab2dd66ee946956e9d55a4cd09b76ab527"
|
||||
// echo -n "these contents are the default for testing" | gzip -c | base64 -w 0 | sha256sum | awk '{print $1}' #.
|
||||
testCompressedId = "a358aafd3bebe1731735516b321d55bd8a58a64e0e2d92646a6a6fdb63751c5d"
|
||||
testName = "tmpTestFileName.txt"
|
||||
// You can use any arbitrary string to define the trigger, I chose to use the base64 encoded contents.
|
||||
testTrigger = "dGhlc2UgY29udGVudHMgYXJlIHRoZSBkZWZhdWx0IGZvciB0ZXN0aW5n"
|
||||
|
||||
defaultDirectory = "."
|
||||
defaultPermissions = "0600"
|
||||
defaultCompress = "false"
|
||||
)
|
||||
|
||||
var snapshotResourceBooleanFields = []string{}
|
||||
var snapshotResourceBooleanFields = []string{"compress"}
|
||||
|
||||
func TestSnapshotResourceMetadata(t *testing.T) {
|
||||
t.Run("Metadata function", func(t *testing.T) {
|
||||
|
|
@ -75,29 +85,49 @@ func TestSnapshotResourceCreate(t *testing.T) {
|
|||
fit SnapshotResource
|
||||
have resource.CreateRequest
|
||||
want resource.CreateResponse
|
||||
setup map[string]string
|
||||
}{
|
||||
{
|
||||
"Basic",
|
||||
SnapshotResource{},
|
||||
SnapshotResource{client: &c.MemoryFileClient{}},
|
||||
// have
|
||||
getSnapshotResourceCreateRequest(t, map[string]string{
|
||||
"id": "",
|
||||
"contents": defaultContents,
|
||||
"snapshot": "",
|
||||
"update_trigger": defaultTrigger,
|
||||
"name": testName,
|
||||
"update_trigger": testTrigger,
|
||||
"directory": defaultDirectory,
|
||||
"compress": defaultCompress,
|
||||
}),
|
||||
// want
|
||||
getSnapshotResourceCreateResponse(t, map[string]string{
|
||||
"id": defaultId,
|
||||
"contents": defaultContents,
|
||||
"snapshot": defaultContents,
|
||||
"update_trigger": defaultTrigger,
|
||||
"id": testId,
|
||||
"snapshot": testEncoded,
|
||||
"name": testName,
|
||||
"update_trigger": testTrigger,
|
||||
"directory": defaultDirectory,
|
||||
"compress": defaultCompress,
|
||||
}),
|
||||
// setup
|
||||
map[string]string{
|
||||
"name": testName,
|
||||
"directory": defaultDirectory,
|
||||
"contents": testContents,
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
r := getSnapshotResourceCreateResponseContainer()
|
||||
if err := tc.fit.client.Create(tc.setup["directory"], tc.setup["name"], tc.setup["contents"], defaultPermissions); err != nil {
|
||||
t.Errorf("Error setting up: %v", err)
|
||||
}
|
||||
defer func() {
|
||||
if err := tc.fit.client.Delete(tc.setup["directory"], tc.setup["name"]); err != nil {
|
||||
t.Errorf("Error tearing down: %v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
tc.fit.Create(context.Background(), tc.have, &r)
|
||||
got := r
|
||||
if diff := cmp.Diff(tc.want, got); diff != "" {
|
||||
|
|
@ -120,17 +150,21 @@ func TestSnapshotResourceRead(t *testing.T) {
|
|||
SnapshotResource{},
|
||||
// have
|
||||
getSnapshotResourceReadRequest(t, map[string]string{
|
||||
"id": defaultId,
|
||||
"contents": defaultContents,
|
||||
"snapshot": defaultContents,
|
||||
"update_trigger": defaultTrigger,
|
||||
"id": testId,
|
||||
"snapshot": testEncoded,
|
||||
"name": testName,
|
||||
"update_trigger": testTrigger,
|
||||
"directory": defaultDirectory,
|
||||
"compress": defaultCompress,
|
||||
}),
|
||||
// want
|
||||
getSnapshotResourceReadResponse(t, map[string]string{
|
||||
"id": defaultId,
|
||||
"contents": defaultContents,
|
||||
"snapshot": defaultContents,
|
||||
"update_trigger": defaultTrigger,
|
||||
"id": testId,
|
||||
"snapshot": testEncoded,
|
||||
"name": testName,
|
||||
"update_trigger": testTrigger,
|
||||
"directory": defaultDirectory,
|
||||
"compress": defaultCompress,
|
||||
}),
|
||||
},
|
||||
}
|
||||
|
|
@ -154,89 +188,135 @@ func TestSnapshotResourceUpdate(t *testing.T) {
|
|||
fit SnapshotResource
|
||||
have resource.UpdateRequest
|
||||
want resource.UpdateResponse
|
||||
setup map[string]string
|
||||
}{
|
||||
{
|
||||
"Basic test",
|
||||
SnapshotResource{},
|
||||
"Basic",
|
||||
SnapshotResource{client: &c.MemoryFileClient{}},
|
||||
// have
|
||||
getSnapshotResourceUpdateRequest(t, map[string]map[string]string{
|
||||
"priorState": {
|
||||
"id": defaultId,
|
||||
"contents": defaultContents,
|
||||
"snapshot": defaultContents,
|
||||
"update_trigger": defaultTrigger,
|
||||
"id": testId,
|
||||
"snapshot": testEncoded,
|
||||
"name": testName,
|
||||
"update_trigger": testTrigger,
|
||||
"directory": defaultDirectory,
|
||||
"compress": defaultCompress,
|
||||
},
|
||||
"plan": {
|
||||
"id": "",
|
||||
"contents": defaultContents,
|
||||
"snapshot": "",
|
||||
"update_trigger": defaultTrigger,
|
||||
"name": testName,
|
||||
"update_trigger": testTrigger,
|
||||
"directory": defaultDirectory,
|
||||
"compress": defaultCompress,
|
||||
},
|
||||
}),
|
||||
// want
|
||||
getSnapshotResourceUpdateResponse(t, map[string]string{
|
||||
"id": defaultId,
|
||||
"contents": defaultContents,
|
||||
"snapshot": defaultContents,
|
||||
"update_trigger": defaultTrigger,
|
||||
"id": testId,
|
||||
"snapshot": testEncoded,
|
||||
"name": testName,
|
||||
"update_trigger": testTrigger,
|
||||
"directory": defaultDirectory,
|
||||
"compress": defaultCompress,
|
||||
}),
|
||||
// setup
|
||||
map[string]string{
|
||||
"name": testName,
|
||||
"directory": defaultDirectory,
|
||||
"contents": testContents,
|
||||
},
|
||||
},
|
||||
{
|
||||
"Updates when trigger changes",
|
||||
SnapshotResource{},
|
||||
SnapshotResource{client: &c.MemoryFileClient{}},
|
||||
// have
|
||||
getSnapshotResourceUpdateRequest(t, map[string]map[string]string{
|
||||
"priorState": {
|
||||
"id": defaultId,
|
||||
"contents": defaultContents,
|
||||
"snapshot": defaultContents,
|
||||
"update_trigger": defaultTrigger,
|
||||
"id": testId,
|
||||
"snapshot": testEncoded,
|
||||
"name": testName,
|
||||
"update_trigger": testTrigger,
|
||||
"directory": defaultDirectory,
|
||||
"compress": defaultCompress,
|
||||
},
|
||||
"plan": {
|
||||
"id": "",
|
||||
"contents": "these contents are updated for testing",
|
||||
"snapshot": "",
|
||||
"update_trigger": "dGhlc2UgY29udGVudHMgYXJlIHVwZGF0ZWQgZm9yIHRlc3Rpbmc=",
|
||||
"name": testName,
|
||||
"update_trigger": "updated-trigger",
|
||||
"directory": defaultDirectory,
|
||||
"compress": defaultCompress,
|
||||
},
|
||||
}),
|
||||
// want
|
||||
getSnapshotResourceUpdateResponse(t, map[string]string{
|
||||
"id": "688722c152590a53e3297277d723453a476da8331d5de2478a36673da9cb1c09",
|
||||
"contents": "these contents are updated for testing",
|
||||
"snapshot": "these contents are updated for testing",
|
||||
"update_trigger": "dGhlc2UgY29udGVudHMgYXJlIHVwZGF0ZWQgZm9yIHRlc3Rpbmc=",
|
||||
"id": testId, // id shouldn't change
|
||||
"snapshot": "dGhlc2UgY29udGVudHMgYXJlIHVwZGF0ZWQgZm9yIHRlc3Rpbmc=", // echo -n "these contents are updated for testing" | base64 -w 0 #.
|
||||
"name": testName,
|
||||
"update_trigger": "updated-trigger",
|
||||
"directory": defaultDirectory,
|
||||
"compress": defaultCompress,
|
||||
}),
|
||||
// setup
|
||||
map[string]string{
|
||||
"name": testName,
|
||||
"directory": defaultDirectory,
|
||||
"contents": "these contents are updated for testing",
|
||||
},
|
||||
},
|
||||
{
|
||||
"Doesn't update when trigger stays the same",
|
||||
SnapshotResource{},
|
||||
SnapshotResource{client: &c.MemoryFileClient{}},
|
||||
// have
|
||||
getSnapshotResourceUpdateRequest(t, map[string]map[string]string{
|
||||
"priorState": {
|
||||
"id": defaultId,
|
||||
"contents": defaultContents,
|
||||
"snapshot": defaultContents,
|
||||
"update_trigger": defaultTrigger,
|
||||
"id": testId,
|
||||
"snapshot": testEncoded,
|
||||
"name": testName,
|
||||
"update_trigger": testTrigger,
|
||||
"directory": defaultDirectory,
|
||||
"compress": defaultCompress,
|
||||
},
|
||||
"plan": {
|
||||
"id": "",
|
||||
"contents": "these contents are updated for testing",
|
||||
"snapshot": "",
|
||||
"update_trigger": defaultTrigger,
|
||||
"name": testName,
|
||||
"update_trigger": testTrigger,
|
||||
"directory": defaultDirectory,
|
||||
"compress": defaultCompress,
|
||||
},
|
||||
}),
|
||||
// want
|
||||
getSnapshotResourceUpdateResponse(t, map[string]string{
|
||||
"id": defaultId,
|
||||
"contents": "these contents are updated for testing",
|
||||
"snapshot": defaultContents,
|
||||
"update_trigger": defaultTrigger,
|
||||
"id": testId,
|
||||
"snapshot": testEncoded,
|
||||
"name": testName,
|
||||
"update_trigger": testTrigger,
|
||||
"directory": defaultDirectory,
|
||||
"compress": defaultCompress,
|
||||
}),
|
||||
// setup
|
||||
map[string]string{
|
||||
"name": testName,
|
||||
"directory": defaultDirectory,
|
||||
"contents": "these contents are updated for testing",
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
r := getSnapshotResourceUpdateResponseContainer()
|
||||
if err := tc.fit.client.Create(tc.setup["directory"], tc.setup["name"], tc.setup["contents"], defaultPermissions); err != nil {
|
||||
t.Errorf("Error setting up: %v", err)
|
||||
}
|
||||
defer func() {
|
||||
if err := tc.fit.client.Delete(tc.setup["directory"], tc.setup["name"]); err != nil {
|
||||
t.Errorf("Error tearing down: %v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
tc.fit.Update(context.Background(), tc.have, &r)
|
||||
got := r
|
||||
if diff := cmp.Diff(tc.want, got); diff != "" {
|
||||
|
|
@ -257,13 +337,15 @@ func TestSnapshotResourceDelete(t *testing.T) {
|
|||
}{
|
||||
{
|
||||
"Basic test",
|
||||
SnapshotResource{},
|
||||
SnapshotResource{client: &c.MemoryFileClient{}},
|
||||
// have
|
||||
getSnapshotResourceDeleteRequest(t, map[string]string{
|
||||
"id": defaultId,
|
||||
"contents": defaultContents,
|
||||
"snapshot": defaultContents,
|
||||
"update_trigger": defaultTrigger,
|
||||
"id": testId,
|
||||
"name": testName,
|
||||
"directory": defaultDirectory,
|
||||
"snapshot": testContents,
|
||||
"update_trigger": testTrigger,
|
||||
"compress": defaultCompress,
|
||||
}),
|
||||
// want
|
||||
getSnapshotResourceDeleteResponse(),
|
||||
|
|
@ -513,9 +595,11 @@ func getSnapshotResourceAttributeTypes() tftypes.Object {
|
|||
return tftypes.Object{
|
||||
AttributeTypes: map[string]tftypes.Type{
|
||||
"id": tftypes.String,
|
||||
"contents": tftypes.String,
|
||||
"name": tftypes.String,
|
||||
"directory": tftypes.String,
|
||||
"snapshot": tftypes.String,
|
||||
"update_trigger": tftypes.String,
|
||||
"compress": tftypes.Bool,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
|
@ -1,41 +0,0 @@
|
|||
package local
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
type memoryFileClient struct {
|
||||
file map[string]string
|
||||
}
|
||||
|
||||
var _ fileClient = &memoryFileClient{} // make sure the memoryFileClient implements the fileClient
|
||||
|
||||
func (c *memoryFileClient) Create(directory string, name string, data string, permissions string) error {
|
||||
|
||||
c.file = make(map[string]string)
|
||||
c.file["directory"] = directory
|
||||
c.file["name"] = name
|
||||
c.file["contents"] = data
|
||||
c.file["permissions"] = permissions
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *memoryFileClient) Read(directory string, name string) (string, string, error) {
|
||||
if c.file["directory"] == "" || c.file["name"] == "" {
|
||||
return "", "", fmt.Errorf("file not found")
|
||||
}
|
||||
return c.file["permissions"], c.file["contents"], nil
|
||||
}
|
||||
|
||||
func (c *memoryFileClient) Update(currentDirectory string, currentName string, newDirectory string, newName string, data string, permissions string) error {
|
||||
c.file["directory"] = newDirectory
|
||||
c.file["name"] = newName
|
||||
c.file["contents"] = data
|
||||
c.file["permissions"] = permissions
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *memoryFileClient) Delete(directory string, name string) error {
|
||||
c.file = nil
|
||||
return nil
|
||||
}
|
||||
|
|
@ -1,63 +0,0 @@
|
|||
package local
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// The default fileClient, using the os package.
|
||||
type osFileClient struct{}
|
||||
|
||||
var _ fileClient = &osFileClient{} // make sure the osFileClient implements the fileClient
|
||||
|
||||
func (c *osFileClient) Create(directory string, name string, data string, permissions string) error {
|
||||
path := filepath.Join(directory, name)
|
||||
modeInt, err := strconv.ParseUint(permissions, 8, 32)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return os.WriteFile(path, []byte(data), os.FileMode(modeInt))
|
||||
}
|
||||
|
||||
func (c *osFileClient) Read(directory string, name string) (string, string, error) {
|
||||
path := filepath.Join(directory, name)
|
||||
info, err := os.Stat(path)
|
||||
if err != nil && os.IsNotExist(err) {
|
||||
return "", "", fmt.Errorf("file not found")
|
||||
}
|
||||
if err != nil {
|
||||
return "", "", err
|
||||
}
|
||||
mode := fmt.Sprintf("%#o", info.Mode().Perm())
|
||||
contents, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
return "", "", err
|
||||
}
|
||||
return mode, string(contents), nil
|
||||
}
|
||||
|
||||
func (c *osFileClient) Update(currentDirectory string, currentName string, newDirectory string, newName string, data string, permissions string) error {
|
||||
currentPath := filepath.Join(currentDirectory, currentName)
|
||||
newPath := filepath.Join(newDirectory, newName)
|
||||
if currentPath != newPath {
|
||||
err := os.Rename(currentPath, newPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
modeInt, err := strconv.ParseUint(permissions, 8, 32)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err = os.WriteFile(newPath, []byte(data), os.FileMode(modeInt)); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *osFileClient) Delete(directory string, name string) error {
|
||||
path := filepath.Join(directory, name)
|
||||
return os.Remove(path)
|
||||
}
|
||||
|
|
@ -9,8 +9,8 @@ import (
|
|||
"github.com/hashicorp/terraform-plugin-framework/provider"
|
||||
"github.com/hashicorp/terraform-plugin-framework/provider/schema"
|
||||
"github.com/hashicorp/terraform-plugin-framework/resource"
|
||||
"github.com/rancher/terraform-provider-file/internal/provider/local"
|
||||
"github.com/rancher/terraform-provider-file/internal/provider/snapshot"
|
||||
"github.com/rancher/terraform-provider-file/internal/provider/file_local"
|
||||
"github.com/rancher/terraform-provider-file/internal/provider/file_snapshot"
|
||||
)
|
||||
|
||||
// The `var _` is a special Go construct that results in an unusable variable.
|
||||
|
|
@ -50,14 +50,15 @@ func (p *FileProvider) Configure(ctx context.Context, req provider.ConfigureRequ
|
|||
|
||||
func (p *FileProvider) Resources(ctx context.Context) []func() resource.Resource {
|
||||
return []func() resource.Resource{
|
||||
local.NewLocalResource,
|
||||
snapshot.NewSnapshotResource,
|
||||
file_local.NewLocalResource,
|
||||
file_snapshot.NewSnapshotResource,
|
||||
}
|
||||
}
|
||||
|
||||
func (p *FileProvider) DataSources(ctx context.Context) []func() datasource.DataSource {
|
||||
return []func() datasource.DataSource{
|
||||
local.NewLocalDataSource,
|
||||
file_local.NewLocalDataSource,
|
||||
file_snapshot.NewSnapshotDataSource,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,176 +0,0 @@
|
|||
package snapshot
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
|
||||
"github.com/hashicorp/terraform-plugin-framework/path"
|
||||
"github.com/hashicorp/terraform-plugin-framework/resource"
|
||||
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
|
||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||
"github.com/hashicorp/terraform-plugin-log/tflog"
|
||||
)
|
||||
|
||||
// The `var _` is a special Go construct that results in an unusable variable.
|
||||
// The purpose of these lines is to make sure our LocalFileResource correctly implements the `resource.Resource“ interface.
|
||||
// These will fail at compilation time if the implementation is not satisfied.
|
||||
var _ resource.Resource = &SnapshotResource{}
|
||||
var _ resource.ResourceWithImportState = &SnapshotResource{}
|
||||
|
||||
func NewSnapshotResource() resource.Resource {
|
||||
return &SnapshotResource{}
|
||||
}
|
||||
|
||||
type SnapshotResource struct{}
|
||||
|
||||
// SnapshotResourceModel describes the resource data model.
|
||||
type SnapshotResourceModel struct {
|
||||
Id types.String `tfsdk:"id"`
|
||||
Contents types.String `tfsdk:"contents"`
|
||||
Snapshot types.String `tfsdk:"snapshot"`
|
||||
UpdateTrigger types.String `tfsdk:"update_trigger"`
|
||||
}
|
||||
|
||||
func (r *SnapshotResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
|
||||
resp.TypeName = req.ProviderTypeName + "_snapshot" // file_snapshot
|
||||
}
|
||||
|
||||
func (r *SnapshotResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
|
||||
resp.Schema = schema.Schema{
|
||||
MarkdownDescription: "File Snapshot resource. \n" +
|
||||
"This resource saves some content in state and doesn't update it until the trigger argument changes. " +
|
||||
"Importantly, this resource ignores changes in the configuration for the contents argument." +
|
||||
"The refresh phase doesn't update state, instead " +
|
||||
"the state can only change on create or update and only when the update_trigger argument changes.",
|
||||
|
||||
Attributes: map[string]schema.Attribute{
|
||||
"contents": schema.StringAttribute{
|
||||
MarkdownDescription: "Contents to save. While this argument is exposed, you shouldn't use its output. " +
|
||||
"Instead use the snapshot attribute to get the data saved in the snapshot.",
|
||||
Required: true,
|
||||
},
|
||||
"update_trigger": schema.StringAttribute{
|
||||
MarkdownDescription: "When this argument changes the snapshot will be updated to whatever is in the contents.",
|
||||
Required: true,
|
||||
},
|
||||
"snapshot": schema.StringAttribute{
|
||||
MarkdownDescription: "Saved contents. This will match the contents during create and whenever the update_trigger changes.",
|
||||
Computed: true,
|
||||
},
|
||||
"id": schema.StringAttribute{
|
||||
MarkdownDescription: "Unique identifier for the resource. The SHA256 hash of the base64 encoded contents.",
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (r *SnapshotResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
|
||||
// Prevent panic if the provider has not been configured.
|
||||
// This only configures the provider, so anything here must be available in the provider package to configure.
|
||||
// If you want to configure a client, do that in the Create/Read/Update/Delete functions.
|
||||
if req.ProviderData == nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// We should:
|
||||
// - generate reality and state to match plan in the Create function
|
||||
// - update state to match reality in the Read function
|
||||
// - update reality and state to match plan in the Update function (don't compare old state, just override)
|
||||
// - destroy reality in the Destroy function (state is handled automatically)
|
||||
|
||||
func (r *SnapshotResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
|
||||
tflog.Debug(ctx, fmt.Sprintf("Create Request Object: %+v", req))
|
||||
|
||||
var plan SnapshotResourceModel
|
||||
resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...)
|
||||
if resp.Diagnostics.HasError() {
|
||||
return
|
||||
}
|
||||
pContents := plan.Contents.ValueString()
|
||||
encodedContents := base64.StdEncoding.EncodeToString([]byte(pContents))
|
||||
h := sha256.New()
|
||||
h.Write([]byte(encodedContents))
|
||||
contentsHash := h.Sum(nil)
|
||||
hexContents := hex.EncodeToString(contentsHash)
|
||||
|
||||
plan.Id = types.StringValue(hexContents)
|
||||
plan.Snapshot = types.StringValue(pContents)
|
||||
|
||||
resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...)
|
||||
tflog.Debug(ctx, fmt.Sprintf("Create Response Object: %+v", *resp))
|
||||
}
|
||||
|
||||
func (r *SnapshotResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
|
||||
tflog.Debug(ctx, fmt.Sprintf("Read Request Object: %+v", req))
|
||||
|
||||
var state SnapshotResourceModel
|
||||
resp.Diagnostics.Append(req.State.Get(ctx, &state)...)
|
||||
if resp.Diagnostics.HasError() {
|
||||
return
|
||||
}
|
||||
|
||||
// read is a no-op
|
||||
|
||||
resp.Diagnostics.Append(resp.State.Set(ctx, &state)...)
|
||||
tflog.Debug(ctx, fmt.Sprintf("Read Response Object: %+v", *resp))
|
||||
}
|
||||
|
||||
func (r *SnapshotResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
|
||||
tflog.Debug(ctx, fmt.Sprintf("Update Request Object: %+v", req))
|
||||
|
||||
var plan SnapshotResourceModel
|
||||
resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...)
|
||||
if resp.Diagnostics.HasError() {
|
||||
return
|
||||
}
|
||||
pContents := plan.Contents.ValueString()
|
||||
pUpdateTrigger := plan.UpdateTrigger.ValueString()
|
||||
|
||||
var state SnapshotResourceModel
|
||||
resp.Diagnostics.Append(req.State.Get(ctx, &state)...)
|
||||
if resp.Diagnostics.HasError() {
|
||||
return
|
||||
}
|
||||
sUpdateTrigger := state.UpdateTrigger.ValueString()
|
||||
sSnapshot := state.Snapshot.ValueString()
|
||||
sId := state.Id.ValueString()
|
||||
|
||||
encodedContents := base64.StdEncoding.EncodeToString([]byte(pContents))
|
||||
h := sha256.New()
|
||||
h.Write([]byte(encodedContents))
|
||||
contentsHash := h.Sum(nil)
|
||||
hexContents := hex.EncodeToString(contentsHash)
|
||||
|
||||
if pUpdateTrigger != sUpdateTrigger {
|
||||
tflog.Debug(
|
||||
ctx,
|
||||
fmt.Sprintf("Update trigger has changed from %s to %s, updating snapshot to contents and id.", sUpdateTrigger, pUpdateTrigger),
|
||||
)
|
||||
plan.Snapshot = types.StringValue(pContents)
|
||||
plan.Id = types.StringValue(hexContents)
|
||||
} else {
|
||||
tflog.Debug(ctx, fmt.Sprintf("Update trigger hasn't changed, keeping previous snapshot (%s) and id (%s).", sSnapshot, sId))
|
||||
plan.Snapshot = types.StringValue(sSnapshot)
|
||||
plan.Id = types.StringValue(sId)
|
||||
}
|
||||
tflog.Debug(ctx, fmt.Sprintf("Setting state to this plan: \n%+v", &plan))
|
||||
resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...)
|
||||
tflog.Debug(ctx, fmt.Sprintf("Update Response Object: %+v", *resp))
|
||||
}
|
||||
|
||||
func (r *SnapshotResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
|
||||
tflog.Debug(ctx, fmt.Sprintf("Delete Request Object: %+v", req))
|
||||
|
||||
// delete is a no-op
|
||||
|
||||
tflog.Debug(ctx, fmt.Sprintf("Delete Response Object: %+v", *resp))
|
||||
}
|
||||
|
||||
func (r *SnapshotResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
|
||||
resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp)
|
||||
}
|
||||
|
|
@ -0,0 +1,94 @@
|
|||
package compressed
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/gruntwork-io/terratest/modules/terraform"
|
||||
util "github.com/rancher/terraform-provider-file/test"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestSnapshotCompressed(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
id := util.GetId()
|
||||
directory := "snapshot_compressed"
|
||||
repoRoot, err := util.GetRepoRoot(t)
|
||||
if err != nil {
|
||||
t.Fatalf("Error getting git root directory: %v", err)
|
||||
}
|
||||
exampleDir := filepath.Join(repoRoot, "examples", "use-cases", directory)
|
||||
testDir := filepath.Join(repoRoot, "test", "data", id)
|
||||
|
||||
err = util.Setup(t, id, "test/data")
|
||||
if err != nil {
|
||||
t.Log("Test failed, tearing down...")
|
||||
util.TearDown(t, testDir, &terraform.Options{})
|
||||
t.Fatalf("Error creating test data directories: %s", err)
|
||||
}
|
||||
statePath := filepath.Join(testDir, "tfstate")
|
||||
terraformOptions := terraform.WithDefaultRetryableErrors(t, &terraform.Options{
|
||||
TerraformDir: exampleDir,
|
||||
Vars: map[string]interface{}{
|
||||
"directory": testDir,
|
||||
"name": "basic_test.txt",
|
||||
},
|
||||
BackendConfig: map[string]interface{}{
|
||||
"path": statePath,
|
||||
},
|
||||
EnvVars: map[string]string{
|
||||
"TF_DATA_DIR": testDir,
|
||||
"TF_CLI_CONFIG_FILE": filepath.Join(repoRoot, "test", ".terraformrc"),
|
||||
"TF_IN_AUTOMATION": "1",
|
||||
"TF_CLI_ARGS_init": "-no-color",
|
||||
"TF_CLI_ARGS_plan": "-no-color",
|
||||
"TF_CLI_ARGS_apply": "-no-color",
|
||||
"TF_CLI_ARGS_destroy": "-no-color",
|
||||
"TF_CLI_ARGS_output": "-no-color",
|
||||
},
|
||||
RetryableTerraformErrors: util.GetRetryableTerraformErrors(),
|
||||
NoColor: true,
|
||||
Upgrade: true,
|
||||
// ExtraArgs: terraform.ExtraArgs{ Output: []string{"-json"} },
|
||||
})
|
||||
|
||||
_, err = terraform.InitAndApplyE(t, terraformOptions)
|
||||
if err != nil {
|
||||
t.Log("Test failed, tearing down...")
|
||||
util.TearDown(t, testDir, terraformOptions)
|
||||
t.Fatalf("Error creating file: %s", err)
|
||||
}
|
||||
outputs, err := terraform.OutputAllE(t, terraformOptions)
|
||||
if err != nil {
|
||||
t.Log("Output failed, moving along...")
|
||||
}
|
||||
|
||||
pesky_id := outputs["pesky_id"]
|
||||
snapshot := outputs["snapshot"]
|
||||
a := assert.New(t)
|
||||
a.Equal(pesky_id, snapshot, "On the first run the snapshot will match the id.")
|
||||
|
||||
_, err = terraform.InitAndApplyE(t, terraformOptions)
|
||||
if err != nil {
|
||||
t.Log("Test failed, tearing down...")
|
||||
util.TearDown(t, testDir, terraformOptions)
|
||||
t.Fatalf("Error creating file: %s", err)
|
||||
}
|
||||
outputs, err = terraform.OutputAllE(t, terraformOptions)
|
||||
if err != nil {
|
||||
t.Log("Output failed, moving along...")
|
||||
}
|
||||
|
||||
pesky_id = outputs["pesky_id"]
|
||||
snapshot = outputs["snapshot"]
|
||||
a.NotEqual(pesky_id, snapshot, "On subsequent runs the id will change, but the snapshot won't.")
|
||||
|
||||
if t.Failed() {
|
||||
t.Log("Test failed...")
|
||||
} else {
|
||||
t.Log("Test passed...")
|
||||
}
|
||||
t.Log("Test complete, tearing down...")
|
||||
util.TearDown(t, testDir, terraformOptions)
|
||||
}
|
||||
Loading…
Reference in New Issue