mirror of https://github.com/kubernetes/kops.git

commit 545931f9b7
Merge pull request #14680 from johngmyers/literal-refactor

Refactor terraformWriter.Literal
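Summary, inferred from the hunks below: Literal previously carried several parallel representations (Value holding a "${...}"-interpolated string, Tokens for reference traversals, Index for the count meta-argument, FnArgs for function-call arguments), and each writer re-derived the HCL form from whichever field was set. After this change, the constructors precompute the final Terraform syntax once, into the single String field. A minimal sketch of the new shape (illustrative, not the full file):

```go
// Literal represents a literal in terraform syntax.
type Literal struct {
	// String is the Terraform representation, e.g. an unquoted
	// reference such as aws_vpc.foo.id, or a pre-quoted string "bar".
	String string `cty:"string"`
}
```

Because quoting is baked into String, every writer can emit a literal the same way, and sorting and deduplication can compare plain strings.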
@@ -1,11 +1,11 @@
 locals {
   cluster_name = "complex.example.com"
   master_autoscaling_group_ids = [aws_autoscaling_group.master-us-test-1a-masters-complex-example-com.id]
-  master_security_group_ids = [aws_security_group.masters-complex-example-com.id, "sg-exampleid5", "sg-exampleid6"]
+  master_security_group_ids = ["sg-exampleid5", "sg-exampleid6", aws_security_group.masters-complex-example-com.id]
   masters_role_arn = aws_iam_role.masters-complex-example-com.arn
   masters_role_name = aws_iam_role.masters-complex-example-com.name
   node_autoscaling_group_ids = [aws_autoscaling_group.nodes-complex-example-com.id]
-  node_security_group_ids = [aws_security_group.nodes-complex-example-com.id, "sg-exampleid3", "sg-exampleid4"]
+  node_security_group_ids = ["sg-exampleid3", "sg-exampleid4", aws_security_group.nodes-complex-example-com.id]
   node_subnet_ids = [aws_subnet.us-test-1a-complex-example-com.id]
   nodes_role_arn = aws_iam_role.nodes-complex-example-com.arn
   nodes_role_name = aws_iam_role.nodes-complex-example-com.name
@@ -30,7 +30,7 @@ output "master_autoscaling_group_ids" {
 }
 
 output "master_security_group_ids" {
-  value = [aws_security_group.masters-complex-example-com.id, "sg-exampleid5", "sg-exampleid6"]
+  value = ["sg-exampleid5", "sg-exampleid6", aws_security_group.masters-complex-example-com.id]
 }
 
 output "masters_role_arn" {
@@ -46,7 +46,7 @@ output "node_autoscaling_group_ids" {
 }
 
 output "node_security_group_ids" {
-  value = [aws_security_group.nodes-complex-example-com.id, "sg-exampleid3", "sg-exampleid4"]
+  value = ["sg-exampleid3", "sg-exampleid4", aws_security_group.nodes-complex-example-com.id]
 }
 
 output "node_subnet_ids" {
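A note on why the expected lists reorder (the only behavioral change visible in these integration-test diffs): lists of literals are now sorted by their String field, and fixed security-group IDs are stored pre-quoted, so a value like `"sg-exampleid5"` begins with a double quote (ASCII 0x22), which sorts ahead of any identifier character. Quoted IDs therefore now precede resource references. A sketch of the comparison, assuming the SortLiterals shown later in this commit:

```go
// "\"sg-exampleid5\"" < "aws_security_group...": '"' (0x22) < 'a' (0x61)
sort.Slice(v, func(i, j int) bool { return v[i].String < v[j].String })
```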
@@ -1,7 +1,7 @@
 locals {
   cluster_name = "existingsg.example.com"
   master_autoscaling_group_ids = [aws_autoscaling_group.master-us-test-1a-masters-existingsg-example-com.id, aws_autoscaling_group.master-us-test-1b-masters-existingsg-example-com.id, aws_autoscaling_group.master-us-test-1c-masters-existingsg-example-com.id]
-  master_security_group_ids = [aws_security_group.masters-existingsg-example-com.id, "sg-master-1a", "sg-master-1b"]
+  master_security_group_ids = ["sg-master-1a", "sg-master-1b", aws_security_group.masters-existingsg-example-com.id]
   masters_role_arn = aws_iam_role.masters-existingsg-example-com.arn
   masters_role_name = aws_iam_role.masters-existingsg-example-com.name
   node_autoscaling_group_ids = [aws_autoscaling_group.nodes-existingsg-example-com.id]
@@ -29,7 +29,7 @@ output "master_autoscaling_group_ids" {
 }
 
 output "master_security_group_ids" {
-  value = [aws_security_group.masters-existingsg-example-com.id, "sg-master-1a", "sg-master-1b"]
+  value = ["sg-master-1a", "sg-master-1b", aws_security_group.masters-existingsg-example-com.id]
 }
 
 output "masters_role_arn" {
@@ -5,7 +5,7 @@ locals {
   masters_role_arn = aws_iam_role.masters-externalpolicies-example-com.arn
   masters_role_name = aws_iam_role.masters-externalpolicies-example-com.name
   node_autoscaling_group_ids = [aws_autoscaling_group.nodes-externalpolicies-example-com.id]
-  node_security_group_ids = [aws_security_group.nodes-externalpolicies-example-com.id, "sg-exampleid3", "sg-exampleid4"]
+  node_security_group_ids = ["sg-exampleid3", "sg-exampleid4", aws_security_group.nodes-externalpolicies-example-com.id]
   node_subnet_ids = [aws_subnet.us-test-1a-externalpolicies-example-com.id]
   nodes_role_arn = aws_iam_role.nodes-externalpolicies-example-com.arn
   nodes_role_name = aws_iam_role.nodes-externalpolicies-example-com.name
@@ -43,7 +43,7 @@ output "node_autoscaling_group_ids" {
 }
 
 output "node_security_group_ids" {
-  value = [aws_security_group.nodes-externalpolicies-example-com.id, "sg-exampleid3", "sg-exampleid4"]
+  value = ["sg-exampleid3", "sg-exampleid4", aws_security_group.nodes-externalpolicies-example-com.id]
 }
 
 output "node_subnet_ids" {
@@ -294,7 +294,7 @@ resource "aws_elb" "api-externalpolicies-example-com" {
     lb_protocol = "TCP"
   }
   name = "api-externalpolicies-exam-5cse45"
-  security_groups = [aws_security_group.api-elb-externalpolicies-example-com.id, "sg-exampleid3", "sg-exampleid4"]
+  security_groups = ["sg-exampleid3", "sg-exampleid4", aws_security_group.api-elb-externalpolicies-example-com.id]
   subnets = [aws_subnet.us-test-1a-externalpolicies-example-com.id]
   tags = {
     "KubernetesCluster" = "externalpolicies.example.com"
@@ -20,7 +20,6 @@ import (
 	"reflect"
 	"sort"
 
-	"github.com/hashicorp/hcl/v2"
 	"github.com/hashicorp/hcl/v2/hclsyntax"
 	"github.com/hashicorp/hcl/v2/hclwrite"
 	"github.com/zclconf/go-cty/cty"
@@ -102,61 +101,12 @@ func writeValue(body *hclwrite.Body, key string, value cty.Value) {
 // key = res_type.res_name.res_prop
 // key = file("${module.path}/foo")
 func writeLiteral(body *hclwrite.Body, key string, literal *terraformWriter.Literal) {
-	if literal.String != "" {
-		body.SetAttributeRaw(key, hclwrite.Tokens{
-			{
-				Type:  hclsyntax.TokenIdent,
-				Bytes: []byte(literal.String),
-			},
-		})
-	} else if literal.Index {
-		tokens := hclwrite.Tokens{
-			{
-				Type:  hclsyntax.TokenOQuote,
-				Bytes: []byte(`"`),
-			},
-			{
-				Type:  hclsyntax.TokenQuotedLit,
-				Bytes: []byte(literal.Value),
-			},
-			{
-				Type:  hclsyntax.TokenQuotedLit,
-				Bytes: []byte(`-`),
-			},
-			{
-				Type:  hclsyntax.TokenTemplateInterp,
-				Bytes: []byte(`${`),
-			},
-			{
-				Type:  hclsyntax.TokenQuotedLit,
-				Bytes: []byte(`count.index`),
-			},
-			{
-				Type:  hclsyntax.TokenTemplateSeqEnd,
-				Bytes: []byte(`}`),
-			},
-			{
-				Type:  hclsyntax.TokenCQuote,
-				Bytes: []byte(`"`),
-			},
-			{
-				Type:  hclsyntax.TokenEOF,
-				Bytes: []byte{},
-			},
-		}
-		body.SetAttributeRaw(key, tokens)
-	} else if len(literal.Tokens) == 0 {
-		body.SetAttributeValue(key, cty.StringVal(literal.Value))
-	} else {
-		traversal := hcl.Traversal{
-			hcl.TraverseRoot{Name: literal.Tokens[0]},
-		}
-		for i := 1; i < len(literal.Tokens); i++ {
-			token := literal.Tokens[i]
-			traversal = append(traversal, hcl.TraverseAttr{Name: token})
-		}
-		body.SetAttributeTraversal(key, traversal)
-	}
+	body.SetAttributeRaw(key, hclwrite.Tokens{
+		{
+			Type:  hclsyntax.TokenIdent,
+			Bytes: []byte(literal.String),
+		},
+	})
 }
 
 // literalListTokens returns the tokens of a list of literals
@@ -167,22 +117,7 @@ func literalListTokens(literals []*terraformWriter.Literal) hclwrite.Tokens {
 		{Type: hclsyntax.TokenOBrack, Bytes: []byte("["), SpacesBefore: 1},
 	}
 	for i, literal := range literals {
-		if len(literal.Tokens) == 0 {
-			tokens = append(tokens, []*hclwrite.Token{
-				{Type: hclsyntax.TokenOQuote, Bytes: []byte{'"'}, SpacesBefore: 1},
-				{Type: hclsyntax.TokenQuotedLit, Bytes: []byte(literal.Value)},
-				{Type: hclsyntax.TokenCQuote, Bytes: []byte{'"'}, SpacesBefore: 1},
-			}...)
-		} else {
-			tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenStringLit, Bytes: []byte(literal.Tokens[0]), SpacesBefore: 1})
-			for i := 1; i < len(literal.Tokens); i++ {
-				token := literal.Tokens[i]
-				tokens = append(tokens, []*hclwrite.Token{
-					{Type: hclsyntax.TokenDot, Bytes: []byte(".")},
-					{Type: hclsyntax.TokenStringLit, Bytes: []byte(token)},
-				}...)
-			}
-		}
+		tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenIdent, Bytes: []byte(literal.String)})
 		if i < len(literals)-1 {
 			tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenComma, Bytes: []byte(",")})
 		}
@@ -239,18 +174,10 @@ func writeMap(body *hclwrite.Body, key string, values map[string]cty.Value) {
 		errLiteralSlice := gocty.FromCtyValue(v, refLiteralSlice.Interface())
 		// If this is a map of literals then do not surround the value with quotes
 		if literal, ok := refLiteral.Interface().(*terraformWriter.Literal); errLiteral == nil && ok {
-			if literal.String != "" {
-				tokens = append(tokens, &hclwrite.Token{
-					Type:  hclsyntax.TokenIdent,
-					Bytes: []byte(literal.String),
-				})
-			} else if literal.Value != "" {
-				tokens = append(tokens, []*hclwrite.Token{
-					{Type: hclsyntax.TokenOQuote, Bytes: []byte{'"'}, SpacesBefore: 1},
-					{Type: hclsyntax.TokenQuotedLit, Bytes: []byte(literal.Value)},
-					{Type: hclsyntax.TokenOQuote, Bytes: []byte{'"'}, SpacesBefore: 1},
-				}...)
-			}
+			tokens = append(tokens, &hclwrite.Token{
+				Type:  hclsyntax.TokenIdent,
+				Bytes: []byte(literal.String),
+			})
 		} else if literals, ok := refLiteralSlice.Interface().(*[]*terraformWriter.Literal); errLiteralSlice == nil && ok {
 			tokens = append(tokens, literalListTokens(*literals)...)
 		} else {
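With quoting and interpolation baked into String, the three writers above collapse to a single raw-token path: a traversal and a pre-quoted string are emitted identically. A self-contained sketch of that technique against the hclwrite API (illustrative code, not part of the commit):

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl/v2/hclsyntax"
	"github.com/hashicorp/hcl/v2/hclwrite"
)

func main() {
	f := hclwrite.NewEmptyFile()
	// The refactored writer trusts Literal.String to already be valid HCL
	// source text, so references and strings take the same code path.
	for _, kv := range []struct{ key, hcl string }{
		{"ref", "aws_vpc.foo.id"}, // reference: emitted unquoted
		{"str", `"bar"`},          // string: quotes already baked in
	} {
		f.Body().SetAttributeRaw(kv.key, hclwrite.Tokens{
			{Type: hclsyntax.TokenIdent, Bytes: []byte(kv.hcl)},
		})
	}
	fmt.Printf("%s", f.Bytes())
	// Output:
	// ref = aws_vpc.foo.id
	// str = "bar"
}
```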
@@ -167,7 +167,7 @@ func TestWriteLiteralList(t *testing.T) {
 			name: "one literal",
 			literals: []*terraformWriter.Literal{
 				{
-					Tokens: []string{"type", "name", "prop"},
+					String: "type.name.prop",
 				},
 			},
 			expected: "foo = [type.name.prop]",
@@ -176,10 +176,10 @@ func TestWriteLiteralList(t *testing.T) {
 			name: "two literals",
 			literals: []*terraformWriter.Literal{
 				{
-					Tokens: []string{"type1", "name1", "prop1"},
+					String: "type1.name1.prop1",
 				},
 				{
-					Tokens: []string{"type2", "name2", "prop2"},
+					String: "type2.name2.prop2",
 				},
 			},
 			expected: "foo = [type1.name1.prop1, type2.name2.prop2]",
@@ -188,10 +188,10 @@ func TestWriteLiteralList(t *testing.T) {
 			name: "one traversal literal, one string literal",
 			literals: []*terraformWriter.Literal{
 				{
-					Tokens: []string{"type", "name", "prop"},
+					String: "type.name.prop",
 				},
 				{
-					Value: "foobar",
+					String: "\"foobar\"",
 				},
 			},
 			expected: `foo = [type.name.prop, "foobar"]`,
@@ -21,39 +21,23 @@ import (
 	"fmt"
 	"sort"
 	"strings"
-
-	"k8s.io/klog/v2"
 )
 
 // Literal represents a literal in terraform syntax
 type Literal struct {
 	// String is the Terraform representation.
 	String string `cty:"string"`
-	// Value is used to support Terraform's "${}" interpolation.
-	Value string `cty:"value"`
-	// Index to support the index of the count meta-argument.
-	Index bool `cty:"index"`
-
-	// Tokens are portions of a literal reference joined by periods.
-	// example: {"aws_vpc", "foo", "id"}
-	Tokens []string `cty:"tokens"`
-
-	// FnArgs contains string representations of arguments to the function call.
-	// Any string arguments must be quoted.
-	FnArgs []string `cty:"fn_arg"`
 }
 
 var _ json.Marshaler = &Literal{}
 
 func (l *Literal) MarshalJSON() ([]byte, error) {
-	return json.Marshal(&l.Value)
+	return json.Marshal(&l.String)
 }
 
 func LiteralFunctionExpression(functionName string, args ...string) *Literal {
 	return &Literal{
 		String: fmt.Sprintf("%v(%v)", functionName, strings.Join(args, ", ")),
-		Value:  fmt.Sprintf("${%v(%v)}", functionName, strings.Join(args, ", ")),
-		FnArgs: args,
 	}
 }
@@ -63,81 +47,41 @@ func LiteralSelfLink(resourceType, resourceName string) *Literal {
 
 func LiteralData(dataSourceType, dataSourceName, prop string) *Literal {
 	tfName := sanitizeName(dataSourceName)
-	expr := "${data." + dataSourceType + "." + tfName + "." + prop + "}"
 	return &Literal{
-		Value:  expr,
-		Tokens: []string{"data", dataSourceType, tfName, prop},
+		String: "data." + dataSourceType + "." + tfName + "." + prop,
 	}
 }
 
 func LiteralProperty(resourceType, resourceName, prop string) *Literal {
 	tfName := sanitizeName(resourceName)
-	expr := "${" + resourceType + "." + tfName + "." + prop + "}"
 	return &Literal{
-		Value:  expr,
-		Tokens: []string{resourceType, tfName, prop},
+		String: resourceType + "." + tfName + "." + prop,
 	}
 }
 
 func LiteralTokens(tokens ...string) *Literal {
-	expr := "${" + strings.Join(tokens, ".") + "}"
 	return &Literal{
-		Value:  expr,
-		Tokens: tokens,
+		String: strings.Join(tokens, "."),
 	}
 }
 
 func LiteralFromStringValue(s string) *Literal {
-	return &Literal{Value: s}
+	return &Literal{
+		String: "\"" + s + "\"",
+	}
 }
 
 func LiteralWithIndex(s string) *Literal {
-	return &Literal{Value: s, Index: true}
-}
-
-type literalWithJSON struct {
-	literal *Literal
-	key     string
-}
-
-type byKey []*literalWithJSON
-
-func (a byKey) Len() int      { return len(a) }
-func (a byKey) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
-func (a byKey) Less(i, j int) bool {
-	return a[i].key < a[j].key
-}
-
-// buildSortProxies maps a list of Literals to a list of literalWithJSON
-func buildSortProxies(v []*Literal) ([]*literalWithJSON, error) {
-	var proxies []*literalWithJSON
-	for _, l := range v {
-		k, err := json.Marshal(l)
-		if err != nil {
-			return nil, err
-		}
-		proxies = append(proxies, &literalWithJSON{
-			literal: l,
-			key:     string(k),
-		})
-	}
-	return proxies, nil
+	return &Literal{
+		String: fmt.Sprintf("\"%s-${count.index}\"", s),
+	}
 }
 
 // SortLiterals sorts a list of Literal, by key. It does so in-place
 func SortLiterals(v []*Literal) {
-	proxies, err := buildSortProxies(v)
-	if err != nil {
-		// Very unexpected
-		klog.Fatalf("error processing terraform Literal: %v", err)
-	}
-	sort.Sort(byKey(proxies))
-	for i := range proxies {
-		v[i] = proxies[i].literal
-	}
+	sort.Slice(v, func(i, j int) bool {
+		return v[i].String < v[j].String
+	})
 }
 
 // dedupLiterals removes any duplicate Literals before returning the slice.
@@ -147,19 +91,14 @@ func dedupLiterals(v []*Literal) ([]*Literal, error) {
 		return nil, nil
 	}
 
-	proxies, err := buildSortProxies(v)
-	if err != nil {
-		return nil, err
-	}
-	sort.Sort(byKey(proxies))
+	SortLiterals(v)
 
 	var deduped []*Literal
-	for i, p := range proxies {
-		if i != 0 && proxies[i-1].key == proxies[i].key {
+	for i, p := range v {
+		if i != 0 && v[i-1].String == v[i].String {
 			continue
 		}
-		deduped = append(deduped, p.literal)
+		deduped = append(deduped, p)
 	}
 	return deduped, nil
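Taken together, the constructors above now bake the final HCL syntax into String at construction time. A quick reference of the resulting values (derived from the hunks above; the argument values are made up for illustration):

```go
terraformWriter.LiteralProperty("aws_vpc", "foo", "id")  // String: aws_vpc.foo.id
terraformWriter.LiteralTokens("google", "files")         // String: google.files
terraformWriter.LiteralFromStringValue("bar")            // String: "bar"
terraformWriter.LiteralWithIndex("x")                    // String: "x-${count.index}"
terraformWriter.LiteralFunctionExpression("file", `"p"`) // String: file("p")
```

This is also why MarshalJSON switches from Value to String: sorting and deduplication can then compare Literals as plain strings (dropping the JSON-proxy machinery above), and the "${...}" wrappers disappear from the expected JSON in the tests below.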
@@ -75,6 +75,20 @@ func (t *TerraformWriter) InitTerraformWriter() {
 }
 
 func (t *TerraformWriter) AddFileBytes(resourceType string, resourceName string, key string, data []byte, base64 bool) (*Literal, error) {
+	path, err := t.AddFilePath(resourceType, resourceName, key, data, base64)
+	if err != nil {
+		return nil, err
+	}
+
+	fn := "file"
+	if base64 {
+		fn = "filebase64"
+	}
+	return LiteralFunctionExpression(fn, path.String), nil
+}
+
+func (t *TerraformWriter) AddFilePath(resourceType string, resourceName string, key string, data []byte, base64 bool) (*Literal, error) {
 	id := resourceType + "_" + resourceName + "_" + key
 
 	t.mutex.Lock()
@@ -84,11 +98,8 @@ func (t *TerraformWriter) AddFileBytes(resourceType string, resourceName string,
 	t.Files[p] = data
 
 	modulePath := fmt.Sprintf("%q", path.Join("${path.module}", p))
-	fn := "file"
-	if base64 {
-		fn = "filebase64"
-	}
-	return LiteralFunctionExpression(fn, modulePath), nil
+	return LiteralTokens(modulePath), nil
 }
 
 func (t *TerraformWriter) RenderDataSource(dataType string, dataName string, e interface{}) error {
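The AddFileBytes split separates two concerns: AddFilePath registers the file contents and returns the module-relative path as a pre-quoted Literal, while AddFileBytes composes that path into a file()/filebase64() expression. A hypothetical pair of calls, with the path value matching the S3 test expectation further down (the data/ prefix comes from that expectation, not from this hunk):

```go
pathLit, _ := t.AddFilePath("aws_s3_object", "bar", "content", data, false)
// pathLit.String == `"${path.module}/data/aws_s3_object_bar_content"`

content, _ := t.AddFileBytes("aws_s3_object", "bar", "content", data, false)
// content.String == `file("${path.module}/data/aws_s3_object_bar_content")`
```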
@@ -389,7 +389,7 @@ func (p *GSPath) Hash(a hashing.HashAlgorithm) (*hashing.Hash, error) {
 type terraformGSObject struct {
 	Bucket   string                   `json:"bucket" cty:"bucket"`
 	Name     string                   `json:"name" cty:"name"`
-	Source   string                   `json:"source" cty:"source"`
+	Source   *terraformWriter.Literal `json:"source" cty:"source"`
 	Provider *terraformWriter.Literal `json:"provider,omitempty" cty:"provider"`
 }
@@ -406,7 +406,7 @@ func (p *GSPath) RenderTerraform(w *terraformWriter.TerraformWriter, name string
 		return fmt.Errorf("reading data: %v", err)
 	}
 
-	content, err := w.AddFileBytes("google_storage_bucket_object", name, "content", bytes, false)
+	content, err := w.AddFilePath("google_storage_bucket_object", name, "content", bytes, false)
 	if err != nil {
 		return fmt.Errorf("rendering GCS file: %v", err)
 	}
@@ -414,7 +414,7 @@ func (p *GSPath) RenderTerraform(w *terraformWriter.TerraformWriter, name string
 	tf := &terraformGSObject{
 		Bucket:   p.Bucket(),
 		Name:     p.Object(),
-		Source:   content.FnArgs[0],
+		Source:   content,
 		Provider: terraformWriter.LiteralTokens("google", "files"),
 	}
 	err = w.RenderResource("google_storage_bucket_object", name, tf)
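Why GSPath switches callers: the google_storage_bucket_object resource's source argument expects a path to a file on disk, not the file contents, so the old code built a file(...) expression via AddFileBytes and then reached into content.FnArgs[0] to recover the bare quoted path. With FnArgs gone, it requests the path Literal directly and assigns it to Source, which is now a *terraformWriter.Literal and therefore marshals as its String. A before/after sketch in comments:

```go
// Before: recover the quoted path from the function call's arguments.
//	content, _ := w.AddFileBytes(...) // String: file("${path.module}/...")
//	Source: content.FnArgs[0]
// After: ask for the path literal directly.
//	content, _ := w.AddFilePath(...)  // String: "${path.module}/..." (pre-quoted)
//	Source: content                   // marshals as its String field
```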
@@ -56,15 +56,15 @@ func TestGSRenderTerraform(t *testing.T) {
 {
   "bucket": "foo",
   "name": "bar",
-  "provider": "${google.files}",
+  "provider": "google.files",
   "source": "\"${path.module}/data/google_storage_bucket_object_bar_content\""
 }
 `,
 			expectedACLJSON: `
 {
   "bucket": "foo",
-  "object": "${google_storage_bucket_object.bar.output_name}",
-  "provider": "${google.files}",
+  "object": "google_storage_bucket_object.bar.output_name",
+  "provider": "google.files",
   "role_entity": [
     "READER:user-foo-123@project.iam.gserviceaccount.com"
   ]
@@ -43,9 +43,9 @@ func TestS3RenderTerraform(t *testing.T) {
 {
   "acl": "bucket-owner-full-control",
   "bucket": "foo",
-  "content": "${file(\"${path.module}/data/aws_s3_object_bar_content\")}",
+  "content": "file(\"${path.module}/data/aws_s3_object_bar_content\")",
   "key": "bar",
-  "provider": "${aws.files}",
+  "provider": "aws.files",
   "server_side_encryption": "AES256"
 }
 `,