vendor: Add github.com/hashicorp/terraform-config-inspect

Martin Atkins 2019-01-08 17:42:48 -08:00
parent e27e0ddc9e
commit 047239e68c
25 changed files with 1756 additions and 138 deletions

go.mod

@@ -71,12 +71,13 @@ require (
github.com/hashicorp/go-uuid v1.0.0
github.com/hashicorp/go-version v1.0.0
github.com/hashicorp/golang-lru v0.5.0 // indirect
github.com/hashicorp/hcl v0.0.0-20170504190234-a4b07c25de5f
github.com/hashicorp/hcl v1.0.0
github.com/hashicorp/hcl2 v0.0.0-20181220012050-6631d7cd0a68
github.com/hashicorp/hil v0.0.0-20170627220502-fa9f258a9250
github.com/hashicorp/logutils v0.0.0-20150609070431-0dc08b1671f3
github.com/hashicorp/memberlist v0.1.0 // indirect
github.com/hashicorp/serf v0.0.0-20160124182025-e4ec8cc423bb // indirect
github.com/hashicorp/terraform-config-inspect v0.0.0-20181213005350-314d8affa1db
github.com/hashicorp/vault v0.0.0-20161029210149-9a60bf2a50e4
github.com/jen20/awspolicyequivalence v0.0.0-20170831201602-3d48364a137a // indirect
github.com/jonboulle/clockwork v0.1.0 // indirect

go.sum

@@ -9,6 +9,7 @@ github.com/Azure/go-ntlmssp v0.0.0-20170803034930-c92175d54006 h1:dVyNL14dq1500J
github.com/Azure/go-ntlmssp v0.0.0-20170803034930-c92175d54006/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU=
github.com/ChrisTrenkamp/goxpath v0.0.0-20170625215350-4fe035839290 h1:K9I21XUHNbYD3GNMmJBN0UKJCpdP+glftwNZ7Bo8kqY=
github.com/ChrisTrenkamp/goxpath v0.0.0-20170625215350-4fe035839290/go.mod h1:nuWgzSkT5PnyOd+272uUmV0dnAnAn42Mk7PiQC5VzN4=
github.com/DHowett/go-plist v0.0.0-20180609054337-500bd5b9081b/go.mod h1:5paT5ZDrOm8eAJPem2Bd+q3FTi3Gxm/U4tb2tH8YIUQ=
github.com/Unknwon/com v0.0.0-20151008135407-28b053d5a292 h1:tuQ7w+my8a8mkwN7x2TSd7OzTjkZ7rAeSyH4xncuAMI=
github.com/Unknwon/com v0.0.0-20151008135407-28b053d5a292/go.mod h1:KYCjqMOeHpNuTOiFQU6WEcTG7poCJrUs0YgyHNtn1no=
github.com/abdullin/seq v0.0.0-20160510034733-d5467c17e7af h1:DBNMBMuMiWYu0b+8KMJuWmfCkcxl09JwdlqwDZZ6U14=
@@ -159,6 +160,8 @@ github.com/hashicorp/golang-lru v0.5.0 h1:CL2msUPvZTLb5O648aiLNJw3hnBxN2+1Jq8rCO
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/hcl v0.0.0-20170504190234-a4b07c25de5f h1:UdxlrJz4JOnY8W+DbLISwf2B8WXEolNRA8BGCwI9jws=
github.com/hashicorp/hcl v0.0.0-20170504190234-a4b07c25de5f/go.mod h1:oZtUIOe8dh44I2q6ScRibXws4Ajl+d+nod3AaR9vL5w=
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/hashicorp/hcl2 v0.0.0-20181208003705-670926858200 h1:F/nGtDwtQsuw7ZHmiLpHsPWNljDC24kiSHSGUnou9sw=
github.com/hashicorp/hcl2 v0.0.0-20181208003705-670926858200/go.mod h1:ShfpTh661oAaxo7VcNxg0zcZW6jvMa7Moy2oFx7e5dE=
github.com/hashicorp/hcl2 v0.0.0-20181214224644-4c4fdbdcc016 h1:rWpgzURetj/et0J6RoRw1+CBaiyrAQamwNCuYrHayU0=
@@ -181,6 +184,8 @@ github.com/hashicorp/memberlist v0.1.0 h1:qSsCiC0WYD39lbSitKNt40e30uorm2Ss/d4JGU
github.com/hashicorp/memberlist v0.1.0/go.mod h1:ncdBp14cuox2iFOq3kDiquKU6fqsTBc3W6JvZwjxxsE=
github.com/hashicorp/serf v0.0.0-20160124182025-e4ec8cc423bb h1:ZbgmOQt8DOg796figP87/EFCVx2v2h9yRvwHF/zceX4=
github.com/hashicorp/serf v0.0.0-20160124182025-e4ec8cc423bb/go.mod h1:h/Ru6tmZazX7WO/GDmwdpS975F019L4t5ng5IgwbNrE=
github.com/hashicorp/terraform-config-inspect v0.0.0-20181213005350-314d8affa1db h1:AcZb6ClKGJoY9vDFvxw+t5s8EmtWP8fMerxP6j+veKc=
github.com/hashicorp/terraform-config-inspect v0.0.0-20181213005350-314d8affa1db/go.mod h1:2zR/i7/tO81bLaVoMEJlSLRJ36/TvPLapEmy682Zizo=
github.com/hashicorp/vault v0.0.0-20161029210149-9a60bf2a50e4 h1:SGDekHLK2IRoVS7Fb4olLyWvc2VmwKgyFC05j6X3NII=
github.com/hashicorp/vault v0.0.0-20161029210149-9a60bf2a50e4/go.mod h1:KfSyffbKxoVyspOdlaGVjIuwLobi07qD1bAbosPMpP0=
github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb h1:b5rjCoWHc7eqmAS4/qyk21ZsHyb6Mxv/jykxvNTkU4M=


@@ -3,7 +3,8 @@ sudo: false
language: go
go:
- 1.8
- 1.x
- tip
branches:
only:


@@ -89,7 +89,7 @@ func (d *decoder) decode(name string, node ast.Node, result reflect.Value) error
switch k.Kind() {
case reflect.Bool:
return d.decodeBool(name, node, result)
case reflect.Float64:
case reflect.Float32, reflect.Float64:
return d.decodeFloat(name, node, result)
case reflect.Int, reflect.Int32, reflect.Int64:
return d.decodeInt(name, node, result)
@@ -137,13 +137,13 @@ func (d *decoder) decodeBool(name string, node ast.Node, result reflect.Value) e
func (d *decoder) decodeFloat(name string, node ast.Node, result reflect.Value) error {
switch n := node.(type) {
case *ast.LiteralType:
if n.Token.Type == token.FLOAT {
if n.Token.Type == token.FLOAT || n.Token.Type == token.NUMBER {
v, err := strconv.ParseFloat(n.Token.Text, 64)
if err != nil {
return err
}
result.Set(reflect.ValueOf(v))
result.Set(reflect.ValueOf(v).Convert(result.Type()))
return nil
}
}
@@ -573,7 +573,11 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value)
// Compile the list of all the fields that we're going to be decoding
// from all the structs.
fields := make(map[*reflect.StructField]reflect.Value)
type field struct {
field reflect.StructField
val reflect.Value
}
fields := []field{}
for len(structs) > 0 {
structVal := structs[0]
structs = structs[1:]
@@ -616,7 +620,7 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value)
}
// Normal struct field, store it away
fields[&fieldType] = structVal.Field(i)
fields = append(fields, field{fieldType, structVal.Field(i)})
}
}
@@ -624,26 +628,27 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value)
decodedFields := make([]string, 0, len(fields))
decodedFieldsVal := make([]reflect.Value, 0)
unusedKeysVal := make([]reflect.Value, 0)
for fieldType, field := range fields {
if !field.IsValid() {
for _, f := range fields {
field, fieldValue := f.field, f.val
if !fieldValue.IsValid() {
// This should never happen
panic("field is not valid")
}
// If we can't set the field, then it is unexported or something,
// and we just continue onwards.
if !field.CanSet() {
if !fieldValue.CanSet() {
continue
}
fieldName := fieldType.Name
fieldName := field.Name
tagValue := fieldType.Tag.Get(tagName)
tagValue := field.Tag.Get(tagName)
tagParts := strings.SplitN(tagValue, ",", 2)
if len(tagParts) >= 2 {
switch tagParts[1] {
case "decodedFields":
decodedFieldsVal = append(decodedFieldsVal, field)
decodedFieldsVal = append(decodedFieldsVal, fieldValue)
continue
case "key":
if item == nil {
@@ -654,10 +659,10 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value)
}
}
field.SetString(item.Keys[0].Token.Value().(string))
fieldValue.SetString(item.Keys[0].Token.Value().(string))
continue
case "unusedKeys":
unusedKeysVal = append(unusedKeysVal, field)
unusedKeysVal = append(unusedKeysVal, fieldValue)
continue
}
}
@@ -684,7 +689,7 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value)
// because we actually want the value.
fieldName = fmt.Sprintf("%s.%s", name, fieldName)
if len(prefixMatches.Items) > 0 {
if err := d.decode(fieldName, prefixMatches, field); err != nil {
if err := d.decode(fieldName, prefixMatches, fieldValue); err != nil {
return err
}
}
@@ -694,12 +699,12 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value)
decodeNode = &ast.ObjectList{Items: ot.List.Items}
}
if err := d.decode(fieldName, decodeNode, field); err != nil {
if err := d.decode(fieldName, decodeNode, fieldValue); err != nil {
return err
}
}
decodedFields = append(decodedFields, fieldType.Name)
decodedFields = append(decodedFields, field.Name)
}
if len(decodedFieldsVal) > 0 {

vendor/github.com/hashicorp/hcl/go.mod generated vendored Normal file

@@ -0,0 +1,3 @@
module github.com/hashicorp/hcl
require github.com/davecgh/go-spew v1.1.1

vendor/github.com/hashicorp/hcl/go.sum generated vendored Normal file

@@ -0,0 +1,2 @@
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=


@@ -197,9 +197,18 @@ func (p *Parser) objectItem() (*ast.ObjectItem, error) {
keyStr = append(keyStr, k.Token.Text)
}
return nil, fmt.Errorf(
"key '%s' expected start of object ('{') or assignment ('=')",
strings.Join(keyStr, " "))
return nil, &PosError{
Pos: p.tok.Pos,
Err: fmt.Errorf(
"key '%s' expected start of object ('{') or assignment ('=')",
strings.Join(keyStr, " ")),
}
}
// key=#comment
// val
if p.lineComment != nil {
o.LineComment, p.lineComment = p.lineComment, nil
}
// do a look-ahead for line comment
@@ -319,7 +328,10 @@ func (p *Parser) objectType() (*ast.ObjectType, error) {
// No error, scan and expect the ending to be a brace
if tok := p.scan(); tok.Type != token.RBRACE {
return nil, fmt.Errorf("object expected closing RBRACE got: %s", tok.Type)
return nil, &PosError{
Pos: tok.Pos,
Err: fmt.Errorf("object expected closing RBRACE got: %s", tok.Type),
}
}
o.List = l


@@ -252,6 +252,14 @@ func (p *printer) objectItem(o *ast.ObjectItem) []byte {
}
}
// If key and val are on different lines, treat line comments like lead comments.
if o.LineComment != nil && o.Val.Pos().Line != o.Keys[0].Pos().Line {
for _, comment := range o.LineComment.List {
buf.WriteString(comment.Text)
buf.WriteByte(newline)
}
}
for i, k := range o.Keys {
buf.WriteString(k.Token.Text)
buf.WriteByte(blank)
@@ -265,7 +273,7 @@ func (p *printer) objectItem(o *ast.ObjectItem) []byte {
buf.Write(p.output(o.Val))
if o.Val.Pos().Line == o.Keys[0].Pos().Line && o.LineComment != nil {
if o.LineComment != nil && o.Val.Pos().Line == o.Keys[0].Pos().Line {
buf.WriteByte(blank)
for _, comment := range o.LineComment.List {
buf.WriteString(comment.Text)
@@ -509,8 +517,13 @@ func (p *printer) alignedItems(items []*ast.ObjectItem) []byte {
// list returns the printable HCL form of an list type.
func (p *printer) list(l *ast.ListType) []byte {
if p.isSingleLineList(l) {
return p.singleLineList(l)
}
var buf bytes.Buffer
buf.WriteString("[")
buf.WriteByte(newline)
var longestLine int
for _, item := range l.List {
@@ -523,115 +536,112 @@
}
}
insertSpaceBeforeItem := false
lastHadLeadComment := false
haveEmptyLine := false
for i, item := range l.List {
// Keep track of whether this item is a heredoc since that has
// unique behavior.
heredoc := false
// If we have a lead comment, then we want to write that first
leadComment := false
if lit, ok := item.(*ast.LiteralType); ok && lit.LeadComment != nil {
leadComment = true
// Ensure an empty line before every element with a
// lead comment (except the first item in a list).
if !haveEmptyLine && i != 0 {
buf.WriteByte(newline)
}
for _, comment := range lit.LeadComment.List {
buf.Write(p.indent([]byte(comment.Text)))
buf.WriteByte(newline)
}
}
// also indent each line
val := p.output(item)
curLen := len(val)
buf.Write(p.indent(val))
// if this item is a heredoc, then we output the comma on
// the next line. This is the only case this happens.
comma := []byte{','}
if lit, ok := item.(*ast.LiteralType); ok && lit.Token.Type == token.HEREDOC {
heredoc = true
}
if item.Pos().Line != l.Lbrack.Line {
// multiline list, add newline before we add each item
buf.WriteByte(newline)
insertSpaceBeforeItem = false
comma = p.indent(comma)
}
// If we have a lead comment, then we want to write that first
leadComment := false
if lit, ok := item.(*ast.LiteralType); ok && lit.LeadComment != nil {
leadComment = true
buf.Write(comma)
// If this isn't the first item and the previous element
// didn't have a lead comment, then we need to add an extra
// newline to properly space things out. If it did have a
// lead comment previously then this would be done
// automatically.
if i > 0 && !lastHadLeadComment {
buf.WriteByte(newline)
}
for _, comment := range lit.LeadComment.List {
buf.Write(p.indent([]byte(comment.Text)))
buf.WriteByte(newline)
}
}
// also indent each line
val := p.output(item)
curLen := len(val)
buf.Write(p.indent(val))
// if this item is a heredoc, then we output the comma on
// the next line. This is the only case this happens.
comma := []byte{','}
if heredoc {
buf.WriteByte(newline)
comma = p.indent(comma)
}
buf.Write(comma)
if lit, ok := item.(*ast.LiteralType); ok && lit.LineComment != nil {
// if the next item doesn't have any comments, do not align
buf.WriteByte(blank) // align one space
for i := 0; i < longestLine-curLen; i++ {
buf.WriteByte(blank)
}
for _, comment := range lit.LineComment.List {
buf.WriteString(comment.Text)
}
}
lastItem := i == len(l.List)-1
if lastItem {
buf.WriteByte(newline)
}
if leadComment && !lastItem {
buf.WriteByte(newline)
}
lastHadLeadComment = leadComment
} else {
if insertSpaceBeforeItem {
if lit, ok := item.(*ast.LiteralType); ok && lit.LineComment != nil {
// if the next item doesn't have any comments, do not align
buf.WriteByte(blank) // align one space
for i := 0; i < longestLine-curLen; i++ {
buf.WriteByte(blank)
insertSpaceBeforeItem = false
}
// Output the item itself
// also indent each line
val := p.output(item)
curLen := len(val)
buf.Write(val)
// If this is a heredoc item we always have to output a newline
// so that it parses properly.
if heredoc {
buf.WriteByte(newline)
}
// If this isn't the last element, write a comma.
if i != len(l.List)-1 {
buf.WriteString(",")
insertSpaceBeforeItem = true
}
if lit, ok := item.(*ast.LiteralType); ok && lit.LineComment != nil {
// if the next item doesn't have any comments, do not align
buf.WriteByte(blank) // align one space
for i := 0; i < longestLine-curLen; i++ {
buf.WriteByte(blank)
}
for _, comment := range lit.LineComment.List {
buf.WriteString(comment.Text)
}
for _, comment := range lit.LineComment.List {
buf.WriteString(comment.Text)
}
}
buf.WriteByte(newline)
// Ensure an empty line after every element with a
// lead comment (except the first item in a list).
haveEmptyLine = leadComment && i != len(l.List)-1
if haveEmptyLine {
buf.WriteByte(newline)
}
}
buf.WriteString("]")
return buf.Bytes()
}
// isSingleLineList returns true if:
// * they were previously formatted entirely on one line
// * they consist entirely of literals
// * there are either no heredoc strings or the list has exactly one element
// * there are no line comments
func (printer) isSingleLineList(l *ast.ListType) bool {
for _, item := range l.List {
if item.Pos().Line != l.Lbrack.Line {
return false
}
lit, ok := item.(*ast.LiteralType)
if !ok {
return false
}
if lit.Token.Type == token.HEREDOC && len(l.List) != 1 {
return false
}
if lit.LineComment != nil {
return false
}
}
return true
}
// singleLineList prints a simple single line list.
// For a definition of "simple", see isSingleLineList above.
func (p *printer) singleLineList(l *ast.ListType) []byte {
buf := &bytes.Buffer{}
buf.WriteString("[")
for i, item := range l.List {
if i != 0 {
buf.WriteString(", ")
}
// Output the item itself
buf.Write(p.output(item))
// The heredoc marker needs to be at the end of line.
if lit, ok := item.(*ast.LiteralType); ok && lit.Token.Type == token.HEREDOC {
buf.WriteByte(newline)
}
}
buf.WriteString("]")


@@ -74,14 +74,6 @@ func (s *Scanner) next() rune {
return eof
}
if ch == utf8.RuneError && size == 1 {
s.srcPos.Column++
s.srcPos.Offset += size
s.lastCharLen = size
s.err("illegal UTF-8 encoding")
return ch
}
// remember last position
s.prevPos = s.srcPos
@@ -89,18 +81,27 @@
s.lastCharLen = size
s.srcPos.Offset += size
if ch == utf8.RuneError && size == 1 {
s.err("illegal UTF-8 encoding")
return ch
}
if ch == '\n' {
s.srcPos.Line++
s.lastLineLen = s.srcPos.Column
s.srcPos.Column = 0
}
// If we see a null character with data left, then that is an error
if ch == '\x00' && s.buf.Len() > 0 {
if ch == '\x00' {
s.err("unexpected null character (0x00)")
return eof
}
if ch == '\uE123' {
s.err("unicode code point U+E123 reserved for internal use")
return utf8.RuneError
}
// debug
// fmt.Printf("ch: %q, offset:column: %d:%d\n", ch, s.srcPos.Offset, s.srcPos.Column)
return ch
@@ -351,7 +352,7 @@ func (s *Scanner) scanNumber(ch rune) token.Type {
return token.NUMBER
}
// scanMantissa scans the mantissa begining from the rune. It returns the next
// scanMantissa scans the mantissa beginning from the rune. It returns the next
// non decimal rune. It's used to determine wheter it's a fraction or exponent.
func (s *Scanner) scanMantissa(ch rune) rune {
scanned := false
@@ -432,16 +433,16 @@ func (s *Scanner) scanHeredoc() {
// Read the identifier
identBytes := s.src[offs : s.srcPos.Offset-s.lastCharLen]
if len(identBytes) == 0 {
if len(identBytes) == 0 || (len(identBytes) == 1 && identBytes[0] == '-') {
s.err("zero-length heredoc anchor")
return
}
var identRegexp *regexp.Regexp
if identBytes[0] == '-' {
identRegexp = regexp.MustCompile(fmt.Sprintf(`[[:space:]]*%s\z`, identBytes[1:]))
identRegexp = regexp.MustCompile(fmt.Sprintf(`^[[:space:]]*%s\r*\z`, identBytes[1:]))
} else {
identRegexp = regexp.MustCompile(fmt.Sprintf(`[[:space:]]*%s\z`, identBytes))
identRegexp = regexp.MustCompile(fmt.Sprintf(`^[[:space:]]*%s\r*\z`, identBytes))
}
// Read the actual string value
@@ -551,7 +552,7 @@ func (s *Scanner) scanDigits(ch rune, base, n int) rune {
s.err("illegal char escape")
}
if n != start {
if n != start && ch != eof {
// we scanned all digits, put the last non digit char back,
// only if we read anything at all
s.unread()


@@ -246,7 +246,7 @@ func (s *Scanner) scanNumber(ch rune) token.Type {
return token.NUMBER
}
// scanMantissa scans the mantissa begining from the rune. It returns the next
// scanMantissa scans the mantissa beginning from the rune. It returns the next
// non decimal rune. It's used to determine wheter it's a fraction or exponent.
func (s *Scanner) scanMantissa(ch rune) rune {
scanned := false


@@ -0,0 +1,353 @@
Mozilla Public License, version 2.0
1. Definitions
1.1. “Contributor”
means each individual or legal entity that creates, contributes to the
creation of, or owns Covered Software.
1.2. “Contributor Version”
means the combination of the Contributions of others (if any) used by a
Contributor and that particular Contributor's Contribution.
1.3. “Contribution”
means Covered Software of a particular Contributor.
1.4. “Covered Software”
means Source Code Form to which the initial Contributor has attached the
notice in Exhibit A, the Executable Form of such Source Code Form, and
Modifications of such Source Code Form, in each case including portions
thereof.
1.5. “Incompatible With Secondary Licenses”
means
a. that the initial Contributor has attached the notice described in
Exhibit B to the Covered Software; or
b. that the Covered Software was made available under the terms of version
1.1 or earlier of the License, but not also under the terms of a
Secondary License.
1.6. “Executable Form”
means any form of the work other than Source Code Form.
1.7. “Larger Work”
means a work that combines Covered Software with other material, in a separate
file or files, that is not Covered Software.
1.8. “License”
means this document.
1.9. “Licensable”
means having the right to grant, to the maximum extent possible, whether at the
time of the initial grant or subsequently, any and all of the rights conveyed by
this License.
1.10. “Modifications”
means any of the following:
a. any file in Source Code Form that results from an addition to, deletion
from, or modification of the contents of Covered Software; or
b. any new file in Source Code Form that contains any Covered Software.
1.11. “Patent Claims” of a Contributor
means any patent claim(s), including without limitation, method, process,
and apparatus claims, in any patent Licensable by such Contributor that
would be infringed, but for the grant of the License, by the making,
using, selling, offering for sale, having made, import, or transfer of
either its Contributions or its Contributor Version.
1.12. “Secondary License”
means either the GNU General Public License, Version 2.0, the GNU Lesser
General Public License, Version 2.1, the GNU Affero General Public
License, Version 3.0, or any later versions of those licenses.
1.13. “Source Code Form”
means the form of the work preferred for making modifications.
1.14. “You” (or “Your”)
means an individual or a legal entity exercising rights under this
License. For legal entities, “You” includes any entity that controls, is
controlled by, or is under common control with You. For purposes of this
definition, “control” means (a) the power, direct or indirect, to cause
the direction or management of such entity, whether by contract or
otherwise, or (b) ownership of more than fifty percent (50%) of the
outstanding shares or beneficial ownership of such entity.
2. License Grants and Conditions
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
a. under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or as
part of a Larger Work; and
b. under Patent Claims of such Contributor to make, use, sell, offer for
sale, have made, import, and otherwise transfer either its Contributions
or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution become
effective for each Contribution on the date the Contributor first distributes
such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under this
License. No additional rights or licenses will be implied from the distribution
or licensing of Covered Software under this License. Notwithstanding Section
2.1(b) above, no patent license is granted by a Contributor:
a. for any code that a Contributor has removed from Covered Software; or
b. for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
c. under Patent Claims infringed by Covered Software in the absence of its
Contributions.
This License does not grant any rights in the trademarks, service marks, or
logos of any Contributor (except as may be necessary to comply with the
notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this License
(see Section 10.2) or under the terms of a Secondary License (if permitted
under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its Contributions
are its original creation(s) or it has sufficient rights to grant the
rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under applicable
copyright doctrines of fair use, fair dealing, or other equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
Section 2.1.
3. Responsibilities
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under the
terms of this License. You must inform recipients that the Source Code Form
of the Covered Software is governed by the terms of this License, and how
they can obtain a copy of this License. You may not attempt to alter or
restrict the recipients' rights in the Source Code Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
a. such Covered Software must also be made available in Source Code Form,
as described in Section 3.1, and You must inform recipients of the
Executable Form how they can obtain a copy of such Source Code Form by
reasonable means in a timely manner, at a charge no more than the cost
of distribution to the recipient; and
b. You may distribute such Executable Form under the terms of this License,
or sublicense it under different terms, provided that the license for
the Executable Form does not attempt to limit or alter the recipients'
rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for the
Covered Software. If the Larger Work is a combination of Covered Software
with a work governed by one or more Secondary Licenses, and the Covered
Software is not Incompatible With Secondary Licenses, this License permits
You to additionally distribute such Covered Software under the terms of
such Secondary License(s), so that the recipient of the Larger Work may, at
their option, further distribute the Covered Software under the terms of
either this License or such Secondary License(s).
3.4. Notices
You may not remove or alter the substance of any license notices (including
copyright notices, patent notices, disclaimers of warranty, or limitations
of liability) contained within the Source Code Form of the Covered
Software, except that You may alter any license notices to the extent
required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on behalf
of any Contributor. You must make it absolutely clear that any such
warranty, support, indemnity, or liability obligation is offered by You
alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
If it is impossible for You to comply with any of the terms of this License
with respect to some or all of the Covered Software due to statute, judicial
order, or regulation then You must: (a) comply with the terms of this License
to the maximum extent possible; and (b) describe the limitations and the code
they affect. Such description must be placed in a text file included with all
distributions of the Covered Software under this License. Except to the
extent prohibited by statute or regulation, such description must be
sufficiently detailed for a recipient of ordinary skill to be able to
understand it.
5. Termination
5.1. The rights granted under this License will terminate automatically if You
fail to comply with any of its terms. However, if You become compliant,
then the rights granted under this License from a particular Contributor
are reinstated (a) provisionally, unless and until such Contributor
explicitly and finally terminates Your grants, and (b) on an ongoing basis,
if such Contributor fails to notify You of the non-compliance by some
reasonable means prior to 60 days after You have come back into compliance.
Moreover, Your grants from a particular Contributor are reinstated on an
ongoing basis if such Contributor notifies You of the non-compliance by
some reasonable means, this is the first time You have received notice of
non-compliance with this License from such Contributor, and You become
compliant prior to 30 days after Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions, counter-claims,
and cross-claims) alleging that a Contributor Version directly or
indirectly infringes any patent, then the rights granted to You by any and
all Contributors for the Covered Software under Section 2.1 of this License
shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
license agreements (excluding distributors and resellers) which have been
validly granted by You or Your distributors under this License prior to
termination shall survive termination.
6. Disclaimer of Warranty
Covered Software is provided under this License on an “as is” basis, without
warranty of any kind, either expressed, implied, or statutory, including,
without limitation, warranties that the Covered Software is free of defects,
merchantable, fit for a particular purpose or non-infringing. The entire
risk as to the quality and performance of the Covered Software is with You.
Should any Covered Software prove defective in any respect, You (not any
Contributor) assume the cost of any necessary servicing, repair, or
correction. This disclaimer of warranty constitutes an essential part of this
License. No use of any Covered Software is authorized under this License
except under this disclaimer.
7. Limitation of Liability
Under no circumstances and under no legal theory, whether tort (including
negligence), contract, or otherwise, shall any Contributor, or anyone who
distributes Covered Software as permitted above, be liable to You for any
direct, indirect, special, incidental, or consequential damages of any
character including, without limitation, damages for lost profits, loss of
goodwill, work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses, even if such party shall have been
informed of the possibility of such damages. This limitation of liability
shall not apply to liability for death or personal injury resulting from such
party's negligence to the extent applicable law prohibits such limitation.
Some jurisdictions do not allow the exclusion or limitation of incidental or
consequential damages, so this exclusion and limitation may not apply to You.
8. Litigation
Any litigation relating to this License may be brought only in the courts of
a jurisdiction where the defendant maintains its principal place of business
and such litigation shall be governed by laws of that jurisdiction, without
reference to its conflict-of-law provisions. Nothing in this Section shall
prevent a party's ability to bring cross-claims or counter-claims.
9. Miscellaneous
This License represents the complete agreement concerning the subject matter
hereof. If any provision of this License is held to be unenforceable, such
provision shall be reformed only to the extent necessary to make it
enforceable. Any law or regulation which provides that the language of a
contract shall be construed against the drafter shall not be used to construe
this License against a Contributor.
10. Versions of the License
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version of
the License under which You originally received the Covered Software, or
under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a modified
version of this License if you rename the license and remove any
references to the name of the license steward (except to note that such
modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
This Source Code Form is subject to the
terms of the Mozilla Public License, v.
2.0. If a copy of the MPL was not
distributed with this file, You can
obtain one at
http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular file, then
You may include the notice in a location (such as a LICENSE file in a relevant
directory) where a recipient would be likely to look for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - “Incompatible With Secondary Licenses” Notice
This Source Code Form is “Incompatible
With Secondary Licenses”, as defined by
the Mozilla Public License, v. 2.0.


@@ -0,0 +1,138 @@
package tfconfig
import (
"fmt"
legacyhclparser "github.com/hashicorp/hcl/hcl/parser"
"github.com/hashicorp/hcl2/hcl"
)
// Diagnostic describes a problem (error or warning) encountered during
// configuration loading.
type Diagnostic struct {
Severity DiagSeverity `json:"severity"`
Summary string `json:"summary"`
Detail string `json:"detail,omitempty"`
// Pos is not populated for all diagnostics, but when populated should
// indicate a particular line that the described problem relates to.
Pos *SourcePos `json:"pos,omitempty"`
}
// Diagnostics represents a sequence of diagnostics. This is the type that
// should be returned from a function that might generate diagnostics.
type Diagnostics []Diagnostic
// HasErrors returns true if there is at least one Diagnostic of severity
// DiagError in the receiver.
//
// If a function returns a Diagnostics without errors then the result can
// be assumed to be complete within the "best effort" constraints of this
// library. If errors are present then the caller may wish to employ more
// caution in relying on the result.
func (diags Diagnostics) HasErrors() bool {
for _, diag := range diags {
if diag.Severity == DiagError {
return true
}
}
return false
}
func (diags Diagnostics) Error() string {
switch len(diags) {
case 0:
return "no problems"
case 1:
return fmt.Sprintf("%s: %s", diags[0].Summary, diags[0].Detail)
default:
return fmt.Sprintf("%s: %s (and %d other messages)", diags[0].Summary, diags[0].Detail, len(diags)-1)
}
}
// Err returns an error representing the receiver if the receiver HasErrors, or
// nil otherwise.
//
// The returned error can be type-asserted back to a Diagnostics if needed.
func (diags Diagnostics) Err() error {
if diags.HasErrors() {
return diags
}
return nil
}
// DiagSeverity describes the severity of a Diagnostic.
type DiagSeverity rune
// DiagError indicates a problem that prevented proper processing of the
// configuration. In the presence of DiagError diagnostics the result is
// likely to be incomplete.
const DiagError DiagSeverity = 'E'
// DiagWarning indicates a problem that the user may wish to consider but
// that did not prevent proper processing of the configuration.
const DiagWarning DiagSeverity = 'W'
// MarshalJSON is an implementation of encoding/json.Marshaler
func (s DiagSeverity) MarshalJSON() ([]byte, error) {
switch s {
case DiagError:
return []byte(`"error"`), nil
case DiagWarning:
return []byte(`"warning"`), nil
default:
return []byte(`"invalid"`), nil
}
}
func diagnosticsHCL(diags hcl.Diagnostics) Diagnostics {
if len(diags) == 0 {
return nil
}
ret := make(Diagnostics, len(diags))
for i, diag := range diags {
ret[i] = Diagnostic{
Summary: diag.Summary,
Detail: diag.Detail,
}
switch diag.Severity {
case hcl.DiagError:
ret[i].Severity = DiagError
case hcl.DiagWarning:
ret[i].Severity = DiagWarning
}
if diag.Subject != nil {
pos := sourcePosHCL(*diag.Subject)
ret[i].Pos = &pos
}
}
return ret
}
func diagnosticsError(err error) Diagnostics {
if err == nil {
return nil
}
if posErr, ok := err.(*legacyhclparser.PosError); ok {
pos := sourcePosLegacyHCL(posErr.Pos, "")
return Diagnostics{
Diagnostic{
Severity: DiagError,
Summary: posErr.Err.Error(),
Pos: &pos,
},
}
}
return Diagnostics{
Diagnostic{
Severity: DiagError,
Summary: err.Error(),
},
}
}
func diagnosticsErrorf(format string, args ...interface{}) Diagnostics {
return diagnosticsError(fmt.Errorf(format, args...))
}
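
As a hedged illustration of the Err and HasErrors behaviour documented above: a caller can treat Diagnostics as an ordinary error and, if needed, type-assert it back to recover the structured entries. The import path and module directory below are assumptions for the sake of the sketch, not part of this commit.

package main

import (
	"fmt"

	"github.com/hashicorp/terraform-config-inspect/tfconfig"
)

func main() {
	// LoadModule is best-effort; it returns a module plus any diagnostics.
	_, diags := tfconfig.LoadModule("./example-module")

	// Err returns nil unless at least one DiagError is present.
	if err := diags.Err(); err != nil {
		// The error can be type-asserted back to Diagnostics for details.
		if ds, ok := err.(tfconfig.Diagnostics); ok {
			for _, d := range ds {
				fmt.Printf("%s: %s\n", d.Summary, d.Detail)
			}
		}
	}
}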


@@ -0,0 +1,21 @@
// Package tfconfig is a helper library that does careful, shallow parsing of
// Terraform modules to provide access to high-level metadata while
// remaining broadly compatible with configurations targeting various
// different Terraform versions.
//
// This package focuses on describing top-level objects only, and in particular
// does not attempt any sort of processing that would require access to plugins.
// Currently it allows callers to extract high-level information about
// variables, outputs, resource blocks, provider dependencies, and Terraform
// Core dependencies.
//
// This package only works at the level of single modules. A full configuration
// is a tree of potentially several modules, some of which may be references
// to remote packages. There are some basic helpers for traversing calls to
// modules at relative local paths, however.
//
// This package employs a "best effort" parsing strategy, producing as complete
// a result as possible even though the input may not be entirely valid. The
// intended use-case is high-level analysis and indexing of externally-facing
// module characteristics, as opposed to validating or even applying the module.
package tfconfig
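
The package comment above describes the intended "best effort" use-case; the following is a minimal, hypothetical sketch of how a caller might load a module and read the high-level metadata it exposes. The import path is inferred from the vendor directory added in this commit, and the module directory name is made up.

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/terraform-config-inspect/tfconfig"
)

func main() {
	module, diags := tfconfig.LoadModule("./modules/network")
	if diags.HasErrors() {
		// Best-effort result: still usable, but possibly incomplete.
		log.Printf("module loaded with problems: %s", diags)
	}

	for name, v := range module.Variables {
		fmt.Printf("variable %q: type=%q default=%v\n", name, v.Type, v.Default)
	}
	for key, r := range module.ManagedResources {
		fmt.Printf("resource %s (provider %q)\n", key, r.Provider.Name)
	}
	for name, constraints := range module.RequiredProviders {
		fmt.Printf("provider %q constraints: %v\n", name, constraints)
	}
}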


@@ -0,0 +1,130 @@
package tfconfig
import (
"fmt"
"io/ioutil"
"path/filepath"
"strings"
"github.com/hashicorp/hcl2/hcl"
)
// LoadModule reads the directory at the given path and attempts to interpret
// it as a Terraform module.
func LoadModule(dir string) (*Module, Diagnostics) {
// For broad compatibility here we actually have two separate loader
// codepaths. The main one uses the new HCL parser and API and is intended
// for configurations from Terraform 0.12 onwards (though will work for
// many older configurations too), but we'll also fall back on one that
// uses the _old_ HCL implementation so we can deal with some edge-cases
// that are not valid in new HCL.
module, diags := loadModule(dir)
if diags.HasErrors() {
// Try using the legacy HCL parser and see if we fare better.
legacyModule, legacyDiags := loadModuleLegacyHCL(dir)
if !legacyDiags.HasErrors() {
legacyModule.init(legacyDiags)
return legacyModule, legacyDiags
}
}
module.init(diags)
return module, diags
}
// IsModuleDir checks if the given path contains terraform configuration files.
// This allows the caller to decide how to handle directories that do not have tf files.
func IsModuleDir(dir string) bool {
primaryPaths, _ := dirFiles(dir)
if len(primaryPaths) == 0 {
return false
}
return true
}
func (m *Module) init(diags Diagnostics) {
// Fill in any additional provider requirements that are implied by
// resource configurations, to avoid the caller from needing to apply
// this logic itself. Implied requirements don't have version constraints,
// but we'll make sure the requirement value is still non-nil in this
// case so callers can easily recognize it.
for _, r := range m.ManagedResources {
if _, exists := m.RequiredProviders[r.Provider.Name]; !exists {
m.RequiredProviders[r.Provider.Name] = []string{}
}
}
for _, r := range m.DataResources {
if _, exists := m.RequiredProviders[r.Provider.Name]; !exists {
m.RequiredProviders[r.Provider.Name] = []string{}
}
}
// We redundantly also reference the diagnostics from inside the module
// object, primarily so that they can easily be included in JSON-serialized
// versions of the module object.
m.Diagnostics = diags
}
func dirFiles(dir string) (primary []string, diags hcl.Diagnostics) {
infos, err := ioutil.ReadDir(dir)
if err != nil {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Failed to read module directory",
Detail: fmt.Sprintf("Module directory %s does not exist or cannot be read.", dir),
})
return
}
var override []string
for _, info := range infos {
if info.IsDir() {
// We only care about files
continue
}
name := info.Name()
ext := fileExt(name)
if ext == "" || isIgnoredFile(name) {
continue
}
baseName := name[:len(name)-len(ext)] // strip extension
isOverride := baseName == "override" || strings.HasSuffix(baseName, "_override")
fullPath := filepath.Join(dir, name)
if isOverride {
override = append(override, fullPath)
} else {
primary = append(primary, fullPath)
}
}
// We are assuming that any _override files will be logically named,
// and processing the files in alphabetical order. Primaries first, then overrides.
primary = append(primary, override...)
return
}
// fileExt returns the Terraform configuration extension of the given
// path, or a blank string if it is not a recognized extension.
func fileExt(path string) string {
if strings.HasSuffix(path, ".tf") {
return ".tf"
} else if strings.HasSuffix(path, ".tf.json") {
return ".tf.json"
} else {
return ""
}
}
// isIgnoredFile returns true if the given filename (which must not have a
// directory path ahead of it) should be ignored as e.g. an editor swap file.
func isIgnoredFile(name string) bool {
return strings.HasPrefix(name, ".") || // Unix-like hidden files
strings.HasSuffix(name, "~") || // vim
strings.HasPrefix(name, "#") && strings.HasSuffix(name, "#") // emacs
}
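
IsModuleDir above only answers the question for a single directory, so a caller scanning a whole tree still does its own traversal. The sketch below shows one assumed way to combine it with filepath.Walk; the root path is illustrative.

package main

import (
	"fmt"
	"os"
	"path/filepath"

	"github.com/hashicorp/terraform-config-inspect/tfconfig"
)

func main() {
	// Report every directory in the tree that contains .tf or .tf.json files.
	err := filepath.Walk("./infrastructure", func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if info.IsDir() && tfconfig.IsModuleDir(path) {
			fmt.Println("found module at", path)
		}
		return nil
	})
	if err != nil {
		fmt.Fprintln(os.Stderr, "walk failed:", err)
	}
}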


@@ -0,0 +1,312 @@
package tfconfig
import (
"encoding/json"
"fmt"
"strings"
"github.com/hashicorp/hcl2/hcl/hclsyntax"
"github.com/hashicorp/hcl2/gohcl"
"github.com/hashicorp/hcl2/hcl"
"github.com/hashicorp/hcl2/hclparse"
ctyjson "github.com/zclconf/go-cty/cty/json"
)
func loadModule(dir string) (*Module, Diagnostics) {
mod := newModule(dir)
primaryPaths, diags := dirFiles(dir)
parser := hclparse.NewParser()
for _, filename := range primaryPaths {
var file *hcl.File
var fileDiags hcl.Diagnostics
if strings.HasSuffix(filename, ".json") {
file, fileDiags = parser.ParseJSONFile(filename)
} else {
file, fileDiags = parser.ParseHCLFile(filename)
}
diags = append(diags, fileDiags...)
if file == nil {
continue
}
content, _, contentDiags := file.Body.PartialContent(rootSchema)
diags = append(diags, contentDiags...)
for _, block := range content.Blocks {
switch block.Type {
case "terraform":
content, _, contentDiags := block.Body.PartialContent(terraformBlockSchema)
diags = append(diags, contentDiags...)
if attr, defined := content.Attributes["required_version"]; defined {
var version string
valDiags := gohcl.DecodeExpression(attr.Expr, nil, &version)
diags = append(diags, valDiags...)
if !valDiags.HasErrors() {
mod.RequiredCore = append(mod.RequiredCore, version)
}
}
for _, block := range content.Blocks {
// Our schema only allows required_providers here, so we
// assume that we'll only get that block type.
attrs, attrDiags := block.Body.JustAttributes()
diags = append(diags, attrDiags...)
for name, attr := range attrs {
var version string
valDiags := gohcl.DecodeExpression(attr.Expr, nil, &version)
diags = append(diags, valDiags...)
if !valDiags.HasErrors() {
mod.RequiredProviders[name] = append(mod.RequiredProviders[name], version)
}
}
}
case "variable":
content, _, contentDiags := block.Body.PartialContent(variableSchema)
diags = append(diags, contentDiags...)
name := block.Labels[0]
v := &Variable{
Name: name,
Pos: sourcePosHCL(block.DefRange),
}
mod.Variables[name] = v
if attr, defined := content.Attributes["type"]; defined {
// We handle this particular attribute in a somewhat-tricky way:
// since Terraform may evolve its type expression syntax in
// future versions, we don't want to be overly-strict in how
// we handle it here, and so we'll instead just take the raw
// source provided by the user, using the source location
// information in the expression object.
//
// However, older versions of Terraform expected the type
// to be a string containing a keyword, so we'll need to
// handle that as a special case first for backward compatibility.
var typeExpr string
var typeExprAsStr string
valDiags := gohcl.DecodeExpression(attr.Expr, nil, &typeExprAsStr)
if !valDiags.HasErrors() {
typeExpr = typeExprAsStr
} else {
rng := attr.Expr.Range()
sourceFilename := rng.Filename
source, exists := parser.Sources()[sourceFilename]
if exists {
typeExpr = string(rng.SliceBytes(source))
} else {
// This should never happen, so we'll just warn about it and leave the type unspecified.
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Source code not available",
Detail: fmt.Sprintf("Source code is not available for the file %q, which declares the variable %q.", sourceFilename, name),
Subject: &block.DefRange,
})
typeExpr = ""
}
}
v.Type = typeExpr
}
if attr, defined := content.Attributes["description"]; defined {
var description string
valDiags := gohcl.DecodeExpression(attr.Expr, nil, &description)
diags = append(diags, valDiags...)
v.Description = description
}
if attr, defined := content.Attributes["default"]; defined {
// To avoid the caller needing to deal with cty here, we'll
// use its JSON encoding to convert into an
// approximately-equivalent plain Go interface{} value
// to return.
val, valDiags := attr.Expr.Value(nil)
diags = append(diags, valDiags...)
if val.IsWhollyKnown() { // should only be false if there are errors in the input
valJSON, err := ctyjson.Marshal(val, val.Type())
if err != nil {
// Should never happen, since all possible known
// values have a JSON mapping.
panic(fmt.Errorf("failed to serialize default value as JSON: %s", err))
}
var def interface{}
err = json.Unmarshal(valJSON, &def)
if err != nil {
// Again should never happen, because valJSON is
// guaranteed valid by ctyjson.Marshal.
panic(fmt.Errorf("failed to re-parse default value from JSON: %s", err))
}
v.Default = def
}
}
case "output":
content, _, contentDiags := block.Body.PartialContent(outputSchema)
diags = append(diags, contentDiags...)
name := block.Labels[0]
o := &Output{
Name: name,
Pos: sourcePosHCL(block.DefRange),
}
mod.Outputs[name] = o
if attr, defined := content.Attributes["description"]; defined {
var description string
valDiags := gohcl.DecodeExpression(attr.Expr, nil, &description)
diags = append(diags, valDiags...)
o.Description = description
}
case "provider":
content, _, contentDiags := block.Body.PartialContent(providerConfigSchema)
diags = append(diags, contentDiags...)
name := block.Labels[0]
if attr, defined := content.Attributes["version"]; defined {
var version string
valDiags := gohcl.DecodeExpression(attr.Expr, nil, &version)
diags = append(diags, valDiags...)
if !valDiags.HasErrors() {
mod.RequiredProviders[name] = append(mod.RequiredProviders[name], version)
}
}
// Even if there wasn't an explicit version required, we still
// need an entry in our map to signal the unversioned dependency.
if _, exists := mod.RequiredProviders[name]; !exists {
mod.RequiredProviders[name] = []string{}
}
case "resource", "data":
content, _, contentDiags := block.Body.PartialContent(resourceSchema)
diags = append(diags, contentDiags...)
typeName := block.Labels[0]
name := block.Labels[1]
r := &Resource{
Type: typeName,
Name: name,
Pos: sourcePosHCL(block.DefRange),
}
var resourcesMap map[string]*Resource
switch block.Type {
case "resource":
r.Mode = ManagedResourceMode
resourcesMap = mod.ManagedResources
case "data":
r.Mode = DataResourceMode
resourcesMap = mod.DataResources
}
key := r.MapKey()
resourcesMap[key] = r
if attr, defined := content.Attributes["provider"]; defined {
// New style here is to provide this as a naked traversal
// expression, but we also support quoted references for
// older configurations that predated this convention.
traversal, travDiags := hcl.AbsTraversalForExpr(attr.Expr)
if travDiags.HasErrors() {
traversal = nil // in case we got any partial results
// Fall back on trying to parse as a string
var travStr string
valDiags := gohcl.DecodeExpression(attr.Expr, nil, &travStr)
if !valDiags.HasErrors() {
var strDiags hcl.Diagnostics
traversal, strDiags = hclsyntax.ParseTraversalAbs([]byte(travStr), "", hcl.Pos{})
if strDiags.HasErrors() {
traversal = nil
}
}
}
// If we get out here with a nil traversal then we didn't
// succeed in processing the input.
if len(traversal) > 0 {
providerName := traversal.RootName()
alias := ""
if len(traversal) > 1 {
if getAttr, ok := traversal[1].(hcl.TraverseAttr); ok {
alias = getAttr.Name
}
}
r.Provider = ProviderRef{
Name: providerName,
Alias: alias,
}
} else {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Invalid provider reference",
Detail: "Provider argument requires a provider name followed by an optional alias, like \"aws.foo\".",
Subject: attr.Expr.Range().Ptr(),
})
}
} else {
// If provider _isn't_ set then we'll infer it from the
// resource type.
r.Provider = ProviderRef{
Name: resourceTypeDefaultProviderName(r.Type),
}
}
case "module":
content, _, contentDiags := block.Body.PartialContent(moduleCallSchema)
diags = append(diags, contentDiags...)
name := block.Labels[0]
mc := &ModuleCall{
Name: block.Labels[0],
Pos: sourcePosHCL(block.DefRange),
}
mod.ModuleCalls[name] = mc
if attr, defined := content.Attributes["source"]; defined {
var source string
valDiags := gohcl.DecodeExpression(attr.Expr, nil, &source)
diags = append(diags, valDiags...)
mc.Source = source
}
if attr, defined := content.Attributes["version"]; defined {
var version string
valDiags := gohcl.DecodeExpression(attr.Expr, nil, &version)
diags = append(diags, valDiags...)
mc.Version = version
}
default:
// Should never happen because our cases above should be
// exhaustive for our schema.
panic(fmt.Errorf("unhandled block type %q", block.Type))
}
}
}
return mod, diagnosticsHCL(diags)
}


@@ -0,0 +1,322 @@
package tfconfig
import (
"io/ioutil"
"strings"
legacyhcl "github.com/hashicorp/hcl"
legacyast "github.com/hashicorp/hcl/hcl/ast"
)
func loadModuleLegacyHCL(dir string) (*Module, Diagnostics) {
// This implementation is intentionally more quick-and-dirty than the
// main loader. In particular, it doesn't bother to keep careful track
// of multiple error messages because we always fall back on returning
// the main parser's error message if our fallback parsing produces
// an error, and thus the errors here are not seen by the end-caller.
mod := newModule(dir)
primaryPaths, diags := dirFiles(dir)
if diags.HasErrors() {
return mod, diagnosticsHCL(diags)
}
for _, filename := range primaryPaths {
src, err := ioutil.ReadFile(filename)
if err != nil {
return mod, diagnosticsErrorf("Error reading %s: %s", filename, err)
}
hclRoot, err := legacyhcl.Parse(string(src))
if err != nil {
return mod, diagnosticsErrorf("Error parsing %s: %s", filename, err)
}
list, ok := hclRoot.Node.(*legacyast.ObjectList)
if !ok {
return mod, diagnosticsErrorf("Error parsing %s: no root object", filename)
}
for _, item := range list.Filter("terraform").Items {
if len(item.Keys) > 0 {
item = &legacyast.ObjectItem{
Val: &legacyast.ObjectType{
List: &legacyast.ObjectList{
Items: []*legacyast.ObjectItem{item},
},
},
}
}
type TerraformBlock struct {
RequiredVersion string `hcl:"required_version"`
}
var block TerraformBlock
err = legacyhcl.DecodeObject(&block, item.Val)
if err != nil {
return nil, diagnosticsErrorf("terraform block: %s", err)
}
if block.RequiredVersion != "" {
mod.RequiredCore = append(mod.RequiredCore, block.RequiredVersion)
}
}
if vars := list.Filter("variable"); len(vars.Items) > 0 {
vars = vars.Children()
type VariableBlock struct {
Type string `hcl:"type"`
Default interface{}
Description string
Fields []string `hcl:",decodedFields"`
}
for _, item := range vars.Items {
unwrapLegacyHCLObjectKeysFromJSON(item, 1)
if len(item.Keys) != 1 {
return nil, diagnosticsErrorf("variable block at %s has no label", item.Pos())
}
name := item.Keys[0].Token.Value().(string)
var block VariableBlock
err := legacyhcl.DecodeObject(&block, item.Val)
if err != nil {
return nil, diagnosticsErrorf("invalid variable block at %s: %s", item.Pos(), err)
}
// Clean up legacy HCL decoding ambiguity by unwrapping list of maps
if ms, ok := block.Default.([]map[string]interface{}); ok {
def := make(map[string]interface{})
for _, m := range ms {
for k, v := range m {
def[k] = v
}
}
block.Default = def
}
v := &Variable{
Name: name,
Type: block.Type,
Description: block.Description,
Default: block.Default,
Pos: sourcePosLegacyHCL(item.Pos(), filename),
}
if _, exists := mod.Variables[name]; exists {
return nil, diagnosticsErrorf("duplicate variable block for %q", name)
}
mod.Variables[name] = v
}
}
if outputs := list.Filter("output"); len(outputs.Items) > 0 {
outputs = outputs.Children()
type OutputBlock struct {
Description string
}
for _, item := range outputs.Items {
unwrapLegacyHCLObjectKeysFromJSON(item, 1)
if len(item.Keys) != 1 {
return nil, diagnosticsErrorf("output block at %s has no label", item.Pos())
}
name := item.Keys[0].Token.Value().(string)
var block OutputBlock
err := legacyhcl.DecodeObject(&block, item.Val)
if err != nil {
return nil, diagnosticsErrorf("invalid output block at %s: %s", item.Pos(), err)
}
o := &Output{
Name: name,
Description: block.Description,
Pos: sourcePosLegacyHCL(item.Pos(), filename),
}
if _, exists := mod.Outputs[name]; exists {
return nil, diagnosticsErrorf("duplicate output block for %q", name)
}
mod.Outputs[name] = o
}
}
for _, blockType := range []string{"resource", "data"} {
if resources := list.Filter(blockType); len(resources.Items) > 0 {
resources = resources.Children()
type ResourceBlock struct {
Provider string
}
for _, item := range resources.Items {
unwrapLegacyHCLObjectKeysFromJSON(item, 2)
if len(item.Keys) != 2 {
return nil, diagnosticsErrorf("resource block at %s has wrong label count", item.Pos())
}
typeName := item.Keys[0].Token.Value().(string)
name := item.Keys[1].Token.Value().(string)
var mode ResourceMode
var rMap map[string]*Resource
switch blockType {
case "resource":
mode = ManagedResourceMode
rMap = mod.ManagedResources
case "data":
mode = DataResourceMode
rMap = mod.DataResources
}
var block ResourceBlock
err := legacyhcl.DecodeObject(&block, item.Val)
if err != nil {
return nil, diagnosticsErrorf("invalid resource block at %s: %s", item.Pos(), err)
}
var providerName, providerAlias string
if dotPos := strings.IndexByte(block.Provider, '.'); dotPos != -1 {
providerName = block.Provider[:dotPos]
providerAlias = block.Provider[dotPos+1:]
} else {
providerName = block.Provider
}
if providerName == "" {
providerName = resourceTypeDefaultProviderName(typeName)
}
r := &Resource{
Mode: mode,
Type: typeName,
Name: name,
Provider: ProviderRef{
Name: providerName,
Alias: providerAlias,
},
Pos: sourcePosLegacyHCL(item.Pos(), filename),
}
key := r.MapKey()
if _, exists := rMap[key]; exists {
return nil, diagnosticsErrorf("duplicate resource block for %q", key)
}
rMap[key] = r
}
}
}
if moduleCalls := list.Filter("module"); len(moduleCalls.Items) > 0 {
moduleCalls = moduleCalls.Children()
type ModuleBlock struct {
Source string
Version string
}
for _, item := range moduleCalls.Items {
unwrapLegacyHCLObjectKeysFromJSON(item, 1)
if len(item.Keys) != 1 {
return nil, diagnosticsErrorf("module block at %s has no label", item.Pos())
}
name := item.Keys[0].Token.Value().(string)
var block ModuleBlock
err := legacyhcl.DecodeObject(&block, item.Val)
if err != nil {
return nil, diagnosticsErrorf("module block at %s: %s", item.Pos(), err)
}
mc := &ModuleCall{
Name: name,
Source: block.Source,
Version: block.Version,
Pos: sourcePosLegacyHCL(item.Pos(), filename),
}
if _, exists := mod.ModuleCalls[name]; exists {
return nil, diagnosticsErrorf("duplicate module block for %q", name)
}
mod.ModuleCalls[name] = mc
}
}
if providerConfigs := list.Filter("provider"); len(providerConfigs.Items) > 0 {
providerConfigs = providerConfigs.Children()
type ProviderBlock struct {
Version string
}
for _, item := range providerConfigs.Items {
unwrapLegacyHCLObjectKeysFromJSON(item, 1)
if len(item.Keys) != 1 {
return nil, diagnosticsErrorf("provider block at %s has no label", item.Pos())
}
name := item.Keys[0].Token.Value().(string)
var block ProviderBlock
err := legacyhcl.DecodeObject(&block, item.Val)
if err != nil {
return nil, diagnosticsErrorf("invalid provider block at %s: %s", item.Pos(), err)
}
if block.Version != "" {
mod.RequiredProviders[name] = append(mod.RequiredProviders[name], block.Version)
}
// Even if there wasn't an explicit version required, we still
// need an entry in our map to signal the unversioned dependency.
if _, exists := mod.RequiredProviders[name]; !exists {
mod.RequiredProviders[name] = []string{}
}
}
}
}
return mod, nil
}

// unwrapLegacyHCLObjectKeysFromJSON cleans up an edge case that can occur when
// parsing JSON as input: if we're parsing JSON then directly nested
// items will show up as additional "keys".
//
// For objects that expect a fixed number of keys, this breaks the
// decoding process. This function unwraps the object into what it would've
// looked like if it came directly from HCL by specifying the number of keys
// you expect.
//
// Example:
//
//	{ "foo": { "baz": {} } }
//
// Will show up with Keys being: []string{"foo", "baz"} even though a block
// that expects a single label really only wants the first key. This function
// fixes that by popping the extra keys and re-wrapping them as nested objects
// under the item's value.
func unwrapLegacyHCLObjectKeysFromJSON(item *legacyast.ObjectItem, depth int) {
	if len(item.Keys) > depth && item.Keys[0].Token.JSON {
		for len(item.Keys) > depth {
			// Pop off the last key
			n := len(item.Keys)
			key := item.Keys[n-1]
			item.Keys[n-1] = nil
			item.Keys = item.Keys[:n-1]

			// Wrap our value in a list
			item.Val = &legacyast.ObjectType{
				List: &legacyast.ObjectList{
					Items: []*legacyast.ObjectItem{
						&legacyast.ObjectItem{
							Keys: []*legacyast.ObjectKey{key},
							Val:  item.Val,
						},
					},
				},
			}
		}
	}
}
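To make the unwrapping concrete, here is a small in-package sketch (not part of the vendored file; the helper name is hypothetical). It hand-builds the kind of doubly-keyed item the legacy JSON parser can leave behind for a block that expects a single label, and it assumes the exported types of the legacy hcl ast and token packages behave as shown.

package tfconfig

import (
	legacyast "github.com/hashicorp/hcl/hcl/ast"
	legacyhcltoken "github.com/hashicorp/hcl/hcl/token"
)

// exampleUnwrap is illustration only: it constructs an item carrying an
// extra key, as JSON parsing can produce for a single-label block, and
// shows the effect of unwrapping it back to native HCL shape.
func exampleUnwrap() *legacyast.ObjectItem {
	item := &legacyast.ObjectItem{
		Keys: []*legacyast.ObjectKey{
			{Token: legacyhcltoken.Token{Type: legacyhcltoken.STRING, Text: `"foo"`, JSON: true}},
			{Token: legacyhcltoken.Token{Type: legacyhcltoken.STRING, Text: `"baz"`, JSON: true}},
		},
		Val: &legacyast.ObjectType{},
	}

	unwrapLegacyHCLObjectKeysFromJSON(item, 1)

	// item.Keys is now just ["foo"]; the popped "baz" key has been pushed
	// down into item.Val as a nested ObjectType, which is the shape native
	// HCL input would have produced.
	return item
}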

View File

@ -0,0 +1,35 @@
package tfconfig

// Module is the top-level type representing a parsed and processed Terraform
// module.
type Module struct {
	// Path is the local filesystem directory where the module was loaded from.
	Path string `json:"path"`

	Variables map[string]*Variable `json:"variables"`
	Outputs   map[string]*Output   `json:"outputs"`

	RequiredCore      []string            `json:"required_core,omitempty"`
	RequiredProviders map[string][]string `json:"required_providers"`

	ManagedResources map[string]*Resource   `json:"managed_resources"`
	DataResources    map[string]*Resource   `json:"data_resources"`
	ModuleCalls      map[string]*ModuleCall `json:"module_calls"`

	// Diagnostics records any errors and warnings that were detected during
	// loading, primarily for inclusion in serialized forms of the module
	// since this slice is also returned as a second argument from LoadModule.
	Diagnostics Diagnostics `json:"diagnostics,omitempty"`
}

func newModule(path string) *Module {
	return &Module{
		Path:              path,
		Variables:         make(map[string]*Variable),
		Outputs:           make(map[string]*Output),
		RequiredProviders: make(map[string][]string),
		ManagedResources:  make(map[string]*Resource),
		DataResources:     make(map[string]*Resource),
		ModuleCalls:       make(map[string]*ModuleCall),
	}
}
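As a usage sketch (not part of the vendored file): a caller would normally obtain a populated Module through the package's LoadModule entry point, which is not included in this excerpt, and then serialize it using the JSON tags above. LoadModule, the HasErrors helper on Diagnostics, and the example path are assumptions here rather than things shown in this diff.

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/hashicorp/terraform-config-inspect/tfconfig"
)

func main() {
	// Assumed entry point: returns the populated *Module plus any
	// diagnostics gathered while loading the directory.
	module, diags := tfconfig.LoadModule("./examples/simple")
	if diags.HasErrors() {
		log.Fatalf("failed to load module: %v", diags)
	}

	// The struct tags on Module make the machine-readable summary easy
	// to emit as JSON, with any Diagnostics carried along inline.
	out, err := json.MarshalIndent(module, "", "  ")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(out))
}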

View File

@ -0,0 +1,11 @@
package tfconfig

// ModuleCall represents a "module" block within a module. That is, a
// declaration of a child module from inside its parent.
type ModuleCall struct {
	Name    string `json:"name"`
	Source  string `json:"source"`
	Version string `json:"version,omitempty"`

	Pos SourcePos `json:"pos"`
}

View File

@ -0,0 +1,9 @@
package tfconfig

// Output represents a single output from a Terraform module.
type Output struct {
	Name        string `json:"name"`
	Description string `json:"description,omitempty"`

	Pos SourcePos `json:"pos"`
}

View File

@ -0,0 +1,9 @@
package tfconfig

// ProviderRef is a reference to a provider configuration within a module.
// It represents the contents of a "provider" argument in a resource, or
// a value in the "providers" map for a module call.
type ProviderRef struct {
	Name  string `json:"name"`
	Alias string `json:"alias,omitempty"` // Empty if the default provider configuration is referenced
}

View File

@ -0,0 +1,64 @@
package tfconfig

import (
	"fmt"
	"strconv"
	"strings"
)

// Resource represents a single "resource" or "data" block within a module.
type Resource struct {
	Mode ResourceMode `json:"mode"`
	Type string       `json:"type"`
	Name string       `json:"name"`

	Provider ProviderRef `json:"provider"`

	Pos SourcePos `json:"pos"`
}

// MapKey returns a string that can be used to uniquely identify the receiver
// in a map[string]*Resource.
func (r *Resource) MapKey() string {
	switch r.Mode {
	case ManagedResourceMode:
		return fmt.Sprintf("%s.%s", r.Type, r.Name)
	case DataResourceMode:
		return fmt.Sprintf("data.%s.%s", r.Type, r.Name)
	default:
		// should never happen
		return fmt.Sprintf("[invalid_mode!].%s.%s", r.Type, r.Name)
	}
}

// ResourceMode represents the "mode" of a resource, which is used to
// distinguish between managed resources ("resource" blocks in config) and
// data resources ("data" blocks in config).
type ResourceMode rune

const InvalidResourceMode ResourceMode = 0
const ManagedResourceMode ResourceMode = 'M'
const DataResourceMode ResourceMode = 'D'

func (m ResourceMode) String() string {
	switch m {
	case ManagedResourceMode:
		return "managed"
	case DataResourceMode:
		return "data"
	default:
		return ""
	}
}

// MarshalJSON implements encoding/json.Marshaler.
func (m ResourceMode) MarshalJSON() ([]byte, error) {
	return []byte(strconv.Quote(m.String())), nil
}

func resourceTypeDefaultProviderName(typeName string) string {
	if underPos := strings.IndexByte(typeName, '_'); underPos != -1 {
		return typeName[:underPos]
	}
	return typeName
}
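For illustration (not part of the vendored file), this sketch shows roughly how the loader in this change records a managed and a data resource, and what MapKey yields for each; the concrete resource values are hypothetical.

package main

import (
	"fmt"

	"github.com/hashicorp/terraform-config-inspect/tfconfig"
)

func main() {
	// Roughly what the loader records for:
	//   resource "aws_instance" "web" { provider = "aws.west" }
	web := &tfconfig.Resource{
		Mode: tfconfig.ManagedResourceMode,
		Type: "aws_instance",
		Name: "web",
		Provider: tfconfig.ProviderRef{
			Name:  "aws",  // inferred from the "aws_" type prefix when no provider argument is set
			Alias: "west", // from the provider argument, when present
		},
	}

	// And for: data "aws_ami" "ubuntu" {}
	ami := &tfconfig.Resource{
		Mode:     tfconfig.DataResourceMode,
		Type:     "aws_ami",
		Name:     "ubuntu",
		Provider: tfconfig.ProviderRef{Name: "aws"},
	}

	fmt.Println(web.MapKey())      // aws_instance.web
	fmt.Println(ami.MapKey())      // data.aws_ami.ubuntu
	fmt.Println(web.Mode.String()) // managed
}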

View File

@ -0,0 +1,106 @@
package tfconfig

import (
	"github.com/hashicorp/hcl2/hcl"
)

var rootSchema = &hcl.BodySchema{
	Blocks: []hcl.BlockHeaderSchema{
		{
			Type:       "terraform",
			LabelNames: nil,
		},
		{
			Type:       "variable",
			LabelNames: []string{"name"},
		},
		{
			Type:       "output",
			LabelNames: []string{"name"},
		},
		{
			Type:       "provider",
			LabelNames: []string{"name"},
		},
		{
			Type:       "resource",
			LabelNames: []string{"type", "name"},
		},
		{
			Type:       "data",
			LabelNames: []string{"type", "name"},
		},
		{
			Type:       "module",
			LabelNames: []string{"name"},
		},
	},
}

var terraformBlockSchema = &hcl.BodySchema{
	Attributes: []hcl.AttributeSchema{
		{
			Name: "required_version",
		},
	},
	Blocks: []hcl.BlockHeaderSchema{
		{
			Type: "required_providers",
		},
	},
}

var providerConfigSchema = &hcl.BodySchema{
	Attributes: []hcl.AttributeSchema{
		{
			Name: "version",
		},
		{
			Name: "alias",
		},
	},
}

var variableSchema = &hcl.BodySchema{
	Attributes: []hcl.AttributeSchema{
		{
			Name: "type",
		},
		{
			Name: "description",
		},
		{
			Name: "default",
		},
	},
}

var outputSchema = &hcl.BodySchema{
	Attributes: []hcl.AttributeSchema{
		{
			Name: "description",
		},
	},
}

var moduleCallSchema = &hcl.BodySchema{
	Attributes: []hcl.AttributeSchema{
		{
			Name: "source",
		},
		{
			Name: "version",
		},
		{
			Name: "providers",
		},
	},
}

var resourceSchema = &hcl.BodySchema{
	Attributes: []hcl.AttributeSchema{
		{
			Name: "provider",
		},
	},
}
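A minimal sketch (not part of the vendored file) of how rootSchema could be applied: since the schemas are unexported this would live inside the tfconfig package, and it assumes the hcl2 hclparse package and the PartialContent method behave as documented upstream. The helper name below is hypothetical.

package tfconfig

import (
	"fmt"

	"github.com/hashicorp/hcl2/hclparse"
)

// listTopLevelBlocks is illustration only: it enumerates the top-level
// blocks of one .tf file using rootSchema. PartialContent is used because
// the schema deliberately ignores anything a shallow inspection pass does
// not need.
func listTopLevelBlocks(path string) error {
	parser := hclparse.NewParser()
	file, diags := parser.ParseHCLFile(path)
	if diags.HasErrors() {
		return diags
	}

	content, _, diags := file.Body.PartialContent(rootSchema)
	if diags.HasErrors() {
		return diags
	}

	for _, block := range content.Blocks {
		fmt.Printf("%s %v at %s\n", block.Type, block.Labels, block.DefRange)
	}
	return nil
}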

View File

@ -0,0 +1,50 @@
package tfconfig

import (
	legacyhcltoken "github.com/hashicorp/hcl/hcl/token"
	"github.com/hashicorp/hcl2/hcl"
)

// SourcePos is a pointer to a particular location in a source file.
//
// This type is embedded into other structs to allow callers to locate the
// definition of each described module element. The SourcePos of an element
// is usually the first line of its definition, although the definition can
// be a little "fuzzy" with JSON-based config files.
type SourcePos struct {
	Filename string `json:"filename"`
	Line     int    `json:"line"`
}

func sourcePos(filename string, line int) SourcePos {
	return SourcePos{
		Filename: filename,
		Line:     line,
	}
}

func sourcePosHCL(rng hcl.Range) SourcePos {
	// We intentionally throw away the column information here because
	// current and legacy HCL both disagree on the definition of a column
	// and so a line-only reference is the best granularity we can do
	// such that the result is consistent between both parsers.
	return SourcePos{
		Filename: rng.Filename,
		Line:     rng.Start.Line,
	}
}

func sourcePosLegacyHCL(pos legacyhcltoken.Pos, filename string) SourcePos {
	useFilename := pos.Filename
	// We'll try to use the filename given in legacy HCL position, but
	// in practice there's no way to actually get this populated via
	// the HCL API so it's usually empty except in some specialized
	// situations, such as positions in error objects.
	if useFilename == "" {
		useFilename = filename
	}

	return SourcePos{
		Filename: useFilename,
		Line:     pos.Line,
	}
}
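As an in-package sketch (not part of the vendored file; the test name is hypothetical), the two constructors should agree on a line-only position regardless of how each parser reports columns, which is the point of discarding the column information.

package tfconfig

import (
	"testing"

	legacyhcltoken "github.com/hashicorp/hcl/hcl/token"
	"github.com/hashicorp/hcl2/hcl"
)

func TestSourcePosConstructorsAgree(t *testing.T) {
	fromHCL2 := sourcePosHCL(hcl.Range{
		Filename: "main.tf",
		Start:    hcl.Pos{Line: 7, Column: 12, Byte: 120},
	})

	// The legacy position carries no filename in practice, so the caller's
	// filename argument is used as the fallback.
	fromLegacy := sourcePosLegacyHCL(legacyhcltoken.Pos{Line: 7, Column: 3}, "main.tf")

	if fromHCL2 != fromLegacy {
		t.Errorf("positions disagree: %#v vs %#v", fromHCL2, fromLegacy)
	}
}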

View File

@ -0,0 +1,16 @@
package tfconfig

// Variable represents a single variable from a Terraform module.
type Variable struct {
	Name        string `json:"name"`
	Type        string `json:"type,omitempty"`
	Description string `json:"description,omitempty"`

	// Default is an approximate representation of the default value in
	// the native Go type system. The conversion from the value given in
	// configuration may be slightly lossy. Only values that can be
	// serialized by json.Marshal will be included here.
	Default interface{} `json:"default,omitempty"`

	Pos SourcePos `json:"pos"`
}

4
vendor/modules.txt vendored
View File

@ -340,7 +340,7 @@ github.com/hashicorp/go-tfe
github.com/hashicorp/go-uuid
# github.com/hashicorp/go-version v1.0.0
github.com/hashicorp/go-version
# github.com/hashicorp/hcl v0.0.0-20170504190234-a4b07c25de5f
# github.com/hashicorp/hcl v1.0.0
github.com/hashicorp/hcl
github.com/hashicorp/hcl/hcl/ast
github.com/hashicorp/hcl/hcl/parser
@ -372,6 +372,8 @@ github.com/hashicorp/hil/scanner
github.com/hashicorp/logutils
# github.com/hashicorp/serf v0.0.0-20160124182025-e4ec8cc423bb
github.com/hashicorp/serf/coordinate
# github.com/hashicorp/terraform-config-inspect v0.0.0-20181213005350-314d8affa1db
github.com/hashicorp/terraform-config-inspect/tfconfig
# github.com/hashicorp/vault v0.0.0-20161029210149-9a60bf2a50e4
github.com/hashicorp/vault/helper/pgpkeys
github.com/hashicorp/vault/helper/jsonutil