Merge #17533: Update "HCL2" and cty dependencies

Martin Atkins 2018-03-08 14:49:25 -08:00 committed by GitHub
commit 3cec7dc595
46 changed files with 6988 additions and 3625 deletions


@ -60,7 +60,7 @@ func TestParserLoadValuesFile(t *testing.T) {
"invalid-syntax.tfvars": {
"foo bar baz\n",
map[string]cty.Value{},
1, // attribute or block definition required
2, // invalid block definition, and unexpected foo block (the latter due to parser recovery behavior)
},
"block.tfvars": {
"foo = true\ninvalid {\n}\n",


@ -0,0 +1,67 @@
# HCL Type Expressions Extension
This HCL extension defines a convention for describing HCL types using function
call and variable reference syntax, allowing configuration formats to include
type information provided by users.
The type syntax is processed statically from an hcl.Expression, so it cannot
use any of the usual language operators. This is similar to type expressions
in statically-typed programming languages.
```hcl
variable "example" {
type = list(string)
}
```
The extension is built using the `hcl.ExprAsKeyword` and `hcl.ExprCall`
functions, and so it relies on the underlying syntax to define how "keyword"
and "call" are interpreted. The above shows how they are interpreted in
the HCL native syntax, while the following shows the same information
expressed in JSON:
```json
{
"variable": {
"example": {
"type": "list(string)"
}
}
}
```
Notice that since we have the additional contextual information that only
calls and keywords are allowed here, the JSON syntax is able to parse the
given string directly as an expression, rather than as a template as would be
the case for normal expression evaluation.
For more information, see [the godoc reference](http://godoc.org/github.com/hashicorp/hcl2/ext/typeexpr).
## Type Expression Syntax
When expressed in the native syntax, the following expressions are permitted
in a type expression:
* `string` - string
* `bool` - boolean
* `number` - number
* `any` - `cty.DynamicPseudoType` (in function `TypeConstraint` only)
* `list(<type_expr>)` - list of the type given as an argument
* `set(<type_expr>)` - set of the type given as an argument
* `map(<type_expr>)` - map of the type given as an argument
* `tuple([<type_exprs...>])` - tuple with the element types given in the single list argument
* `object({<attr_name>=<type_expr>, ...})` - object with the attributes and corresponding types given in the single map argument
For example:
* `list(string)`
* `object({"name":string,"age":number})`
* `map(object({"name":string,"age":number}))`
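A minimal usage sketch combining this extension with the native syntax parser
(the file name and start position here are arbitrary; `ParseExpression` and the
`typeexpr` functions appear elsewhere in this diff):

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/ext/typeexpr"
	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	// Parse the type expression text using the native syntax parser.
	expr, diags := hclsyntax.ParseExpression(
		[]byte(`map(object({name=string,age=number}))`),
		"types.hcl", hcl.Pos{Line: 1, Column: 1},
	)
	if diags.HasErrors() {
		panic(diags.Error())
	}

	// Statically interpret the expression as a type constraint.
	ty, moreDiags := typeexpr.TypeConstraint(expr)
	if moreDiags.HasErrors() {
		panic(moreDiags.Error())
	}

	// Render the type back in type expression notation; object
	// attributes are printed sorted by name.
	fmt.Println(typeexpr.TypeString(ty))
}
```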
Note that the object constructor syntax is not fully general for all possible
object types because it requires the attribute names to be valid identifiers.
In practice it is expected that any time an object type is being fixed for
type checking it will be one that has identifiers as its attributes; object
types with weird attribute names generally show up only from arbitrary object
constructors in configuration files, which are usually treated either as maps
or as the dynamic pseudo-type.

vendor/github.com/hashicorp/hcl2/ext/typeexpr/doc.go generated vendored Normal file

@ -0,0 +1,11 @@
// Package typeexpr extends HCL with a convention for describing HCL types
// within configuration files.
//
// The type syntax is processed statically from an hcl.Expression, so it cannot
// use any of the usual language operators. This is similar to type expressions
// in statically-typed programming languages.
//
// variable "example" {
// type = list(string)
// }
package typeexpr


@ -0,0 +1,196 @@
package typeexpr
import (
"fmt"
"github.com/hashicorp/hcl2/hcl"
"github.com/zclconf/go-cty/cty"
)
const invalidTypeSummary = "Invalid type specification"
// getType is the internal implementation of both Type and TypeConstraint,
// using the passed flag to distinguish. When constraint is false, the "any"
// keyword will produce an error.
func getType(expr hcl.Expression, constraint bool) (cty.Type, hcl.Diagnostics) {
// First we'll try for one of our keywords
kw := hcl.ExprAsKeyword(expr)
switch kw {
case "bool":
return cty.Bool, nil
case "string":
return cty.String, nil
case "number":
return cty.Number, nil
case "any":
if constraint {
return cty.DynamicPseudoType, nil
}
return cty.DynamicPseudoType, hcl.Diagnostics{{
Severity: hcl.DiagError,
Summary: invalidTypeSummary,
Detail: fmt.Sprintf("The keyword %q cannot be used in this type specification: an exact type is required.", kw),
Subject: expr.Range().Ptr(),
}}
case "list", "map", "set":
return cty.DynamicPseudoType, hcl.Diagnostics{{
Severity: hcl.DiagError,
Summary: invalidTypeSummary,
Detail: fmt.Sprintf("The %s type constructor requires one argument specifying the element type.", kw),
Subject: expr.Range().Ptr(),
}}
case "object":
return cty.DynamicPseudoType, hcl.Diagnostics{{
Severity: hcl.DiagError,
Summary: invalidTypeSummary,
Detail: "The object type constructor requires one argument specifying the attribute types and values as a map.",
Subject: expr.Range().Ptr(),
}}
case "tuple":
return cty.DynamicPseudoType, hcl.Diagnostics{{
Severity: hcl.DiagError,
Summary: invalidTypeSummary,
Detail: "The tuple type constructor requires one argument specifying the element types as a list.",
Subject: expr.Range().Ptr(),
}}
case "":
// okay! we'll fall through and try processing as a call, then.
default:
return cty.DynamicPseudoType, hcl.Diagnostics{{
Severity: hcl.DiagError,
Summary: invalidTypeSummary,
Detail: fmt.Sprintf("The keyword %q is not a valid type specification.", kw),
Subject: expr.Range().Ptr(),
}}
}
// If we get down here then our expression isn't just a keyword, so we'll
// try to process it as a call instead.
call, diags := hcl.ExprCall(expr)
if diags.HasErrors() {
return cty.DynamicPseudoType, hcl.Diagnostics{{
Severity: hcl.DiagError,
Summary: invalidTypeSummary,
Detail: "A type specification is either a primitive type keyword (bool, number, string) or a complex type constructor call, like list(string).",
Subject: expr.Range().Ptr(),
}}
}
switch call.Name {
case "bool", "string", "number", "any":
return cty.DynamicPseudoType, hcl.Diagnostics{{
Severity: hcl.DiagError,
Summary: invalidTypeSummary,
Detail: fmt.Sprintf("Primitive type keyword %q does not expect arguments.", call.Name),
Subject: &call.ArgsRange,
}}
}
if len(call.Arguments) != 1 {
contextRange := call.ArgsRange
subjectRange := call.ArgsRange
if len(call.Arguments) > 1 {
// If we have too many arguments (as opposed to too _few_) then
// we'll highlight the extraneous arguments as the diagnostic
// subject.
subjectRange = hcl.RangeBetween(call.Arguments[1].Range(), call.Arguments[len(call.Arguments)-1].Range())
}
switch call.Name {
case "list", "set", "map":
return cty.DynamicPseudoType, hcl.Diagnostics{{
Severity: hcl.DiagError,
Summary: invalidTypeSummary,
Detail: fmt.Sprintf("The %s type constructor requires one argument specifying the element type.", call.Name),
Subject: &subjectRange,
Context: &contextRange,
}}
case "object":
return cty.DynamicPseudoType, hcl.Diagnostics{{
Severity: hcl.DiagError,
Summary: invalidTypeSummary,
Detail: "The object type constructor requires one argument specifying the attribute types and values as a map.",
Subject: &subjectRange,
Context: &contextRange,
}}
case "tuple":
return cty.DynamicPseudoType, hcl.Diagnostics{{
Severity: hcl.DiagError,
Summary: invalidTypeSummary,
Detail: "The tuple type constructor requires one argument specifying the element types as a list.",
Subject: &subjectRange,
Context: &contextRange,
}}
}
}
switch call.Name {
case "list":
ety, diags := getType(call.Arguments[0], constraint)
return cty.List(ety), diags
case "set":
ety, diags := getType(call.Arguments[0], constraint)
return cty.Set(ety), diags
case "map":
ety, diags := getType(call.Arguments[0], constraint)
return cty.Map(ety), diags
case "object":
attrDefs, diags := hcl.ExprMap(call.Arguments[0])
if diags.HasErrors() {
return cty.DynamicPseudoType, hcl.Diagnostics{{
Severity: hcl.DiagError,
Summary: invalidTypeSummary,
Detail: "Object type constructor requires a map whose keys are attribute names and whose values are the corresponding attribute types.",
Subject: call.Arguments[0].Range().Ptr(),
Context: expr.Range().Ptr(),
}}
}
atys := make(map[string]cty.Type)
for _, attrDef := range attrDefs {
attrName := hcl.ExprAsKeyword(attrDef.Key)
if attrName == "" {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: invalidTypeSummary,
Detail: "Object constructor map keys must be attribute names.",
Subject: attrDef.Key.Range().Ptr(),
Context: expr.Range().Ptr(),
})
continue
}
aty, attrDiags := getType(attrDef.Value, constraint)
diags = append(diags, attrDiags...)
atys[attrName] = aty
}
return cty.Object(atys), diags
case "tuple":
elemDefs, diags := hcl.ExprList(call.Arguments[0])
if diags.HasErrors() {
return cty.DynamicPseudoType, hcl.Diagnostics{{
Severity: hcl.DiagError,
Summary: invalidTypeSummary,
Detail: "Tuple type constructor requires a list of element types.",
Subject: call.Arguments[0].Range().Ptr(),
Context: expr.Range().Ptr(),
}}
}
etys := make([]cty.Type, len(elemDefs))
for i, defExpr := range elemDefs {
ety, elemDiags := getType(defExpr, constraint)
diags = append(diags, elemDiags...)
etys[i] = ety
}
return cty.Tuple(etys), diags
default:
// Can't access call.Arguments in this path because we've not validated
// that it contains exactly one expression here.
return cty.DynamicPseudoType, hcl.Diagnostics{{
Severity: hcl.DiagError,
Summary: invalidTypeSummary,
Detail: fmt.Sprintf("Keyword %q is not a valid type constructor.", call.Name),
Subject: expr.Range().Ptr(),
}}
}
}
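To illustrate the diagnostics path above, a hedged sketch: calling `Type` with
a constructor that is missing its argument yields the corresponding error
diagnostic rather than a type (the file name and position are arbitrary):

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/ext/typeexpr"
	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	expr, _ := hclsyntax.ParseExpression(
		[]byte(`list()`), "types.hcl", hcl.Pos{Line: 1, Column: 1},
	)
	_, diags := typeexpr.Type(expr)
	for _, diag := range diags {
		// Expected: "Invalid type specification: The list type
		// constructor requires one argument specifying the element type."
		fmt.Printf("%s: %s\n", diag.Summary, diag.Detail)
	}
}
```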

vendor/github.com/hashicorp/hcl2/ext/typeexpr/public.go generated vendored Normal file

@ -0,0 +1,129 @@
package typeexpr
import (
"bytes"
"fmt"
"sort"
"github.com/hashicorp/hcl2/hcl/hclsyntax"
"github.com/hashicorp/hcl2/hcl"
"github.com/zclconf/go-cty/cty"
)
// Type attempts to process the given expression as a type expression and, if
// successful, returns the resulting type. If unsuccessful, error diagnostics
// are returned.
func Type(expr hcl.Expression) (cty.Type, hcl.Diagnostics) {
return getType(expr, false)
}
// TypeConstraint attempts to parse the given expression as a type constraint
// and, if successful, returns the resulting type. If unsuccessful, error
// diagnostics are returned.
//
// A type constraint has the same structure as a type, but it additionally
// allows the keyword "any" to represent cty.DynamicPseudoType, which is often
// used as a wildcard in type checking and type conversion operations.
func TypeConstraint(expr hcl.Expression) (cty.Type, hcl.Diagnostics) {
return getType(expr, true)
}
// TypeString returns a string rendering of the given type as it would be
// expected to appear in the HCL native syntax.
//
// This is primarily intended for showing types to the user in an application
// that uses typeexpr, where the user can be assumed to be familiar with the
// type expression syntax. In applications that do not use typeexpr these
// results may be confusing to the user and so type.FriendlyName may be
// preferable, even though it's less precise.
//
// TypeString produces reasonable results only for types like what would be
// produced by the Type and TypeConstraint functions. In particular, it cannot
// support capsule types.
func TypeString(ty cty.Type) string {
// Easy cases first
switch ty {
case cty.String:
return "string"
case cty.Bool:
return "bool"
case cty.Number:
return "number"
case cty.DynamicPseudoType:
return "any"
}
if ty.IsCapsuleType() {
panic("TypeString does not support capsule types")
}
if ty.IsCollectionType() {
ety := ty.ElementType()
etyString := TypeString(ety)
switch {
case ty.IsListType():
return fmt.Sprintf("list(%s)", etyString)
case ty.IsSetType():
return fmt.Sprintf("set(%s)", etyString)
case ty.IsMapType():
return fmt.Sprintf("map(%s)", etyString)
default:
// Should never happen because the above is exhaustive
panic("unsupported collection type")
}
}
if ty.IsObjectType() {
var buf bytes.Buffer
buf.WriteString("object({")
atys := ty.AttributeTypes()
names := make([]string, 0, len(atys))
for name := range atys {
names = append(names, name)
}
sort.Strings(names)
first := true
for _, name := range names {
aty := atys[name]
if !first {
buf.WriteByte(',')
}
if !hclsyntax.ValidIdentifier(name) {
// Should never happen for any type produced by this package,
// but we'll do something reasonable here just so we don't
// produce garbage if someone gives us a hand-assembled object
// type that has weird attribute names.
// Using Go-style quoting here isn't perfect, since it doesn't
// exactly match HCL syntax, but it's fine for an edge-case.
buf.WriteString(fmt.Sprintf("%q", name))
} else {
buf.WriteString(name)
}
buf.WriteByte('=')
buf.WriteString(TypeString(aty))
first = false
}
buf.WriteString("})")
return buf.String()
}
if ty.IsTupleType() {
var buf bytes.Buffer
buf.WriteString("tuple([")
etys := ty.TupleElementTypes()
first := true
for _, ety := range etys {
if !first {
buf.WriteByte(',')
}
buf.WriteString(TypeString(ety))
first = false
}
buf.WriteString("])")
return buf.String()
}
// Should never happen because we covered all cases above.
panic(fmt.Errorf("unsupported type %#v", ty))
}


@ -42,7 +42,9 @@ func ImpliedBodySchema(val interface{}) (schema *hcl.BodySchema, partial bool) {
sort.Strings(attrNames)
for _, n := range attrNames {
idx := tags.Attributes[n]
optional := tags.Optional[n]
field := ty.Field(idx)
var required bool
switch {
@ -51,7 +53,7 @@ func ImpliedBodySchema(val interface{}) (schema *hcl.BodySchema, partial bool) {
// indicated via a null value, so we don't specify that
// the field is required during decoding.
required = false
case field.Type.Kind() != reflect.Ptr:
case field.Type.Kind() != reflect.Ptr && !optional:
required = true
default:
required = false
@ -111,6 +113,7 @@ type fieldTags struct {
Blocks map[string]int
Labels []labelField
Remain *int
Optional map[string]bool
}
type labelField struct {
@ -122,6 +125,7 @@ func getFieldTags(ty reflect.Type) *fieldTags {
ret := &fieldTags{
Attributes: map[string]int{},
Blocks: map[string]int{},
Optional: map[string]bool{},
}
ct := ty.NumField()
@ -158,6 +162,9 @@ func getFieldTags(ty reflect.Type) *fieldTags {
}
idx := i // copy, because this loop will continue assigning to i
ret.Remain = &idx
case "optional":
ret.Attributes[name] = i
ret.Optional[name] = true
default:
panic(fmt.Sprintf("invalid hcl field tag kind %q on %s %q", kind, field.Type.String(), field.Name))
}
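A hedged sketch of what the new "optional" tag kind enables (the struct and
its field names here are hypothetical, and the `hclparse` package is assumed
for parsing):

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/gohcl"
	"github.com/hashicorp/hcl2/hclparse"
)

type serviceConfig struct {
	Name string `hcl:"name"`          // non-pointer without a tag kind: required
	Port int    `hcl:"port,optional"` // non-pointer, but no longer required
}

func main() {
	parser := hclparse.NewParser()
	f, diags := parser.ParseHCL([]byte("name = \"web\"\n"), "config.hcl")
	if diags.HasErrors() {
		panic(diags.Error())
	}

	var cfg serviceConfig
	diags = gohcl.DecodeBody(f.Body, nil, &cfg)
	// Omitting "port" no longer produces a "missing required attribute"
	// error; the field is simply left at its zero value.
	fmt.Println(cfg.Name, cfg.Port, diags.HasErrors())
}
```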

vendor/github.com/hashicorp/hcl2/hcl/expr_call.go generated vendored Normal file

@ -0,0 +1,46 @@
package hcl
// ExprCall tests if the given expression is a function call and,
// if so, extracts the function name and the expressions that represent
// the arguments. If the given expression is not statically a function call,
// error diagnostics are returned.
//
// A particular Expression implementation can support this function by
// offering a method called ExprCall that takes no arguments and returns
// *StaticCall. This method should return nil if a static call cannot
// be extracted. Alternatively, an implementation can support
// UnwrapExpression to delegate handling of this function to a wrapped
// Expression object.
func ExprCall(expr Expression) (*StaticCall, Diagnostics) {
type exprCall interface {
ExprCall() *StaticCall
}
physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool {
_, supported := expr.(exprCall)
return supported
})
if exC, supported := physExpr.(exprCall); supported {
if call := exC.ExprCall(); call != nil {
return call, nil
}
}
return nil, Diagnostics{
&Diagnostic{
Severity: DiagError,
Summary: "Invalid expression",
Detail: "A static function call is required.",
Subject: expr.StartRange().Ptr(),
},
}
}
// StaticCall represents a function call that was extracted statically from
// an expression using ExprCall.
type StaticCall struct {
Name string
NameRange Range
Arguments []Expression
ArgsRange Range
}
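A hedged usage sketch (the expression text is arbitrary; note that the call is
analyzed statically, so no function named `concat` needs to actually exist):

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	expr, _ := hclsyntax.ParseExpression(
		[]byte(`concat(a, b)`), "example.hcl", hcl.Pos{Line: 1, Column: 1},
	)

	call, diags := hcl.ExprCall(expr)
	if diags.HasErrors() {
		panic(diags.Error())
	}
	// The arguments remain unevaluated hcl.Expression values.
	fmt.Println(call.Name, len(call.Arguments)) // concat 2
}
```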

vendor/github.com/hashicorp/hcl2/hcl/expr_map.go generated vendored Normal file

@ -0,0 +1,44 @@
package hcl
// ExprMap tests if the given expression is a static map construct and,
// if so, extracts the expressions that represent the map elements.
// If the given expression is not a static map, error diagnostics are
// returned.
//
// A particular Expression implementation can support this function by
// offering a method called ExprMap that takes no arguments and returns
// []KeyValuePair. This method should return nil if a static map cannot
// be extracted. Alternatively, an implementation can support
// UnwrapExpression to delegate handling of this function to a wrapped
// Expression object.
func ExprMap(expr Expression) ([]KeyValuePair, Diagnostics) {
type exprMap interface {
ExprMap() []KeyValuePair
}
physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool {
_, supported := expr.(exprMap)
return supported
})
if exM, supported := physExpr.(exprMap); supported {
if pairs := exM.ExprMap(); pairs != nil {
return pairs, nil
}
}
return nil, Diagnostics{
&Diagnostic{
Severity: DiagError,
Summary: "Invalid expression",
Detail: "A static map expression is required.",
Subject: expr.StartRange().Ptr(),
},
}
}
// KeyValuePair represents a pair of expressions that serve as a single item
// within a map or object definition construct.
type KeyValuePair struct {
Key Expression
Value Expression
}
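A hedged sketch along the same lines, using an object constructor from the
native syntax (which implements `ExprMap`, as shown later in this diff):

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	expr, _ := hclsyntax.ParseExpression(
		[]byte(`{name = string, age = number}`), "example.hcl", hcl.Pos{Line: 1, Column: 1},
	)

	pairs, diags := hcl.ExprMap(expr)
	if diags.HasErrors() {
		panic(diags.Error())
	}
	for _, pair := range pairs {
		// Keys are unevaluated too; ExprAsKeyword recovers the bare
		// identifier names "name" and "age".
		fmt.Println(hcl.ExprAsKeyword(pair.Key))
	}
}
```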


@ -47,6 +47,51 @@ func (e *LiteralValueExpr) StartRange() hcl.Range {
return e.SrcRange
}
// Implementation for hcl.AbsTraversalForExpr.
func (e *LiteralValueExpr) AsTraversal() hcl.Traversal {
// This one's a little weird: the contract for AsTraversal is to interpret
// an expression as if it were traversal syntax, and traversal syntax
// doesn't have the special keywords "null", "true", and "false" so these
// are expected to be treated like variables in that case.
// Since our parser already turned them into LiteralValueExpr by the time
// we get here, we need to undo this and infer the name that would've
// originally led to our value.
// We don't do anything for any other values, since they don't overlap
// with traversal roots.
if e.Val.IsNull() {
// In practice the parser only generates null values of the dynamic
// pseudo-type for literals, so we can safely assume that any null
// was originally the keyword "null".
return hcl.Traversal{
hcl.TraverseRoot{
Name: "null",
SrcRange: e.SrcRange,
},
}
}
switch e.Val {
case cty.True:
return hcl.Traversal{
hcl.TraverseRoot{
Name: "true",
SrcRange: e.SrcRange,
},
}
case cty.False:
return hcl.Traversal{
hcl.TraverseRoot{
Name: "false",
SrcRange: e.SrcRange,
},
}
default:
// No traversal is possible for any other value.
return nil
}
}
// ScopeTraversalExpr is an Expression that retrieves a value from the scope
// using a traversal.
type ScopeTraversalExpr struct {
@ -102,6 +147,20 @@ func (e *RelativeTraversalExpr) StartRange() hcl.Range {
return e.SrcRange
}
// Implementation for hcl.AbsTraversalForExpr.
func (e *RelativeTraversalExpr) AsTraversal() hcl.Traversal {
// We can produce a traversal only if our source can.
st, diags := hcl.AbsTraversalForExpr(e.Source)
if diags.HasErrors() {
return nil
}
ret := make(hcl.Traversal, len(st)+len(e.Traversal))
copy(ret, st)
copy(ret[len(st):], e.Traversal)
return ret
}
// FunctionCallExpr is an Expression that calls a function from the EvalContext
// and returns its result.
type FunctionCallExpr struct {
@ -358,6 +417,21 @@ func (e *FunctionCallExpr) StartRange() hcl.Range {
return hcl.RangeBetween(e.NameRange, e.OpenParenRange)
}
// Implementation for hcl.ExprCall.
func (e *FunctionCallExpr) ExprCall() *hcl.StaticCall {
ret := &hcl.StaticCall{
Name: e.Name,
NameRange: e.NameRange,
Arguments: make([]hcl.Expression, len(e.Args)),
ArgsRange: hcl.RangeBetween(e.OpenParenRange, e.CloseParenRange),
}
// Need to convert our own Expression objects into hcl.Expression.
for i, arg := range e.Args {
ret.Arguments[i] = arg
}
return ret
}
type ConditionalExpr struct {
Condition Expression
TrueResult Expression
@ -648,6 +722,72 @@ func (e *ObjectConsExpr) StartRange() hcl.Range {
return e.OpenRange
}
// Implementation for hcl.ExprMap
func (e *ObjectConsExpr) ExprMap() []hcl.KeyValuePair {
ret := make([]hcl.KeyValuePair, len(e.Items))
for i, item := range e.Items {
ret[i] = hcl.KeyValuePair{
Key: item.KeyExpr,
Value: item.ValueExpr,
}
}
return ret
}
// ObjectConsKeyExpr is a special wrapper used only for ObjectConsExpr keys,
// which deals with the special case that a naked identifier in that position
// must be interpreted as a literal string rather than evaluated directly.
type ObjectConsKeyExpr struct {
Wrapped Expression
}
func (e *ObjectConsKeyExpr) literalName() string {
// This is our logic for deciding whether to behave like a literal string.
// We lean on our AbsTraversalForExpr implementation here, which already
// deals with some awkward cases like the expression being the result
// of the keywords "null", "true" and "false" which we'd want to interpret
// as keys here too.
return hcl.ExprAsKeyword(e.Wrapped)
}
func (e *ObjectConsKeyExpr) walkChildNodes(w internalWalkFunc) {
// We only treat our wrapped expression as a real expression if we're
// not going to interpret it as a literal.
if e.literalName() == "" {
e.Wrapped = w(e.Wrapped).(Expression)
}
}
func (e *ObjectConsKeyExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
if ln := e.literalName(); ln != "" {
return cty.StringVal(ln), nil
}
return e.Wrapped.Value(ctx)
}
func (e *ObjectConsKeyExpr) Range() hcl.Range {
return e.Wrapped.Range()
}
func (e *ObjectConsKeyExpr) StartRange() hcl.Range {
return e.Wrapped.StartRange()
}
// Implementation for hcl.AbsTraversalForExpr.
func (e *ObjectConsKeyExpr) AsTraversal() hcl.Traversal {
// We can produce a traversal only if our wrappee can.
st, diags := hcl.AbsTraversalForExpr(e.Wrapped)
if diags.HasErrors() {
return nil
}
return st
}
func (e *ObjectConsKeyExpr) UnwrapExpression() Expression {
return e.Wrapped
}
// ForExpr represents iteration constructs:
//
// tuple = [for i, v in list: upper(v) if i > 2]
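The `AsTraversal` implementations added above feed `hcl.AbsTraversalForExpr`;
a hedged sketch of the keyword special case (expression text arbitrary):

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	// "null" parses as a LiteralValueExpr and ".foo" wraps it in a
	// RelativeTraversalExpr, so this exercises both new AsTraversal
	// implementations.
	expr, _ := hclsyntax.ParseExpression(
		[]byte(`null.foo`), "example.hcl", hcl.Pos{Line: 1, Column: 1},
	)

	trav, diags := hcl.AbsTraversalForExpr(expr)
	if diags.HasErrors() {
		panic(diags.Error())
	}
	fmt.Println(trav.RootName(), len(trav)) // null 2
}
```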


@ -39,6 +39,10 @@ func (e *ObjectConsExpr) Variables() []hcl.Traversal {
return Variables(e)
}
func (e *ObjectConsKeyExpr) Variables() []hcl.Traversal {
return Variables(e)
}
func (e *RelativeTraversalExpr) Variables() []hcl.Traversal {
return Variables(e)
}


@ -132,7 +132,7 @@ func (p *parser) ParseBodyItem() (Node, hcl.Diagnostics) {
switch next.Type {
case TokenEqual:
return p.finishParsingBodyAttribute(ident)
case TokenOQuote, TokenOBrace:
case TokenOQuote, TokenOBrace, TokenIdent:
return p.finishParsingBodyBlock(ident)
default:
p.recoverAfterBodyItem()
@ -167,25 +167,15 @@ func (p *parser) finishParsingBodyAttribute(ident Token) (Node, hcl.Diagnostics)
p.recoverAfterBodyItem()
} else {
end := p.Peek()
if end.Type != TokenNewline {
if end.Type != TokenNewline && end.Type != TokenEOF {
if !p.recovery {
if end.Type == TokenEOF {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Missing newline after attribute definition",
Detail: "A newline is required after an attribute definition at the end of a file.",
Subject: &end.Range,
Context: hcl.RangeBetween(ident.Range, end.Range).Ptr(),
})
} else {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Missing newline after attribute definition",
Detail: "An attribute definition must end with a newline.",
Subject: &end.Range,
Context: hcl.RangeBetween(ident.Range, end.Range).Ptr(),
})
}
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Missing newline after attribute definition",
Detail: "An attribute definition must end with a newline.",
Subject: &end.Range,
Context: hcl.RangeBetween(ident.Range, end.Range).Ptr(),
})
}
endRange = p.PrevRange()
p.recoverAfterBodyItem()
@ -242,6 +232,12 @@ Token:
}, diags
}
case TokenIdent:
tok = p.Read() // eat token
label, labelRange := string(tok.Bytes), tok.Range
labels = append(labels, label)
labelRanges = append(labelRanges, labelRange)
default:
switch tok.Type {
case TokenEqual:
@ -294,27 +290,17 @@ Token:
cBraceRange := p.PrevRange()
eol := p.Peek()
if eol.Type == TokenNewline {
if eol.Type == TokenNewline || eol.Type == TokenEOF {
p.Read() // eat newline
} else {
if !p.recovery {
if eol.Type == TokenEOF {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Missing newline after block definition",
Detail: "A newline is required after a block definition at the end of a file.",
Subject: &eol.Range,
Context: hcl.RangeBetween(ident.Range, eol.Range).Ptr(),
})
} else {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Missing newline after block definition",
Detail: "A block definition must end with a newline.",
Subject: &eol.Range,
Context: hcl.RangeBetween(ident.Range, eol.Range).Ptr(),
})
}
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Missing newline after block definition",
Detail: "A block definition must end with a newline.",
Subject: &eol.Range,
Context: hcl.RangeBetween(ident.Range, eol.Range).Ptr(),
})
}
p.recoverAfterBodyItem()
}
@ -497,6 +483,53 @@ Traversal:
ret = makeRelativeTraversal(ret, step, rng)
case TokenNumberLit:
// This is a weird form we inherited from HIL, allowing numbers
// to be used as attributes as a weird way of writing [n].
// This was never actually a first-class thing in HIL, but
// HIL tolerated sequences like .0. in its variable names and
// calling applications like Terraform exploited that to
// introduce indexing syntax where none existed.
numTok := p.Read() // eat token
attrTok = numTok
// This syntax is ambiguous if multiple indices are used in
// succession, like foo.0.1.baz: that actually parses as
// a fractional number 0.1. Since we're only supporting this
// syntax for compatibility with legacy Terraform
// configurations, and Terraform does not tend to have lists
// of lists, we'll choose to reject that here with a helpful
// error message, rather than failing later because the index
// isn't a whole number.
if dotIdx := bytes.IndexByte(numTok.Bytes, '.'); dotIdx >= 0 {
first := numTok.Bytes[:dotIdx]
second := numTok.Bytes[dotIdx+1:]
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Invalid legacy index syntax",
Detail: fmt.Sprintf("When using the legacy index syntax, chaining two indexes together is not permitted. Use the proper index syntax instead, like [%s][%s].", first, second),
Subject: &attrTok.Range,
})
rng := hcl.RangeBetween(dot.Range, numTok.Range)
step := hcl.TraverseIndex{
Key: cty.DynamicVal,
SrcRange: rng,
}
ret = makeRelativeTraversal(ret, step, rng)
break
}
numVal, numDiags := p.numberLitValue(numTok)
diags = append(diags, numDiags...)
rng := hcl.RangeBetween(dot.Range, numTok.Range)
step := hcl.TraverseIndex{
Key: numVal,
SrcRange: rng,
}
ret = makeRelativeTraversal(ret, step, rng)
case TokenStar:
// "Attribute-only" splat expression.
// (This is a kinda weird construct inherited from HIL, which
@ -517,6 +550,27 @@ Traversal:
// into a list, for expressions like:
// foo.bar.*.baz.0.foo
numTok := p.Read()
// Weird special case if the user writes something
// like foo.bar.*.baz.0.0.foo, where 0.0 parses
// as a number.
if dotIdx := bytes.IndexByte(numTok.Bytes, '.'); dotIdx >= 0 {
first := numTok.Bytes[:dotIdx]
second := numTok.Bytes[dotIdx+1:]
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Invalid legacy index syntax",
Detail: fmt.Sprintf("When using the legacy index syntax, chaining two indexes together is not permitted. Use the proper index syntax with a full splat expression [*] instead, like [%s][%s].", first, second),
Subject: &attrTok.Range,
})
trav = append(trav, hcl.TraverseIndex{
Key: cty.DynamicVal,
SrcRange: hcl.RangeBetween(dot.Range, numTok.Range),
})
lastRange = numTok.Range
continue
}
numVal, numDiags := p.numberLitValue(numTok)
diags = append(diags, numDiags...)
trav = append(trav, hcl.TraverseIndex{
@ -623,7 +677,7 @@ Traversal:
close = p.recover(TokenCBrack)
}
}
p.PushIncludeNewlines(true)
p.PopIncludeNewlines()
if lit, isLit := keyExpr.(*LiteralValueExpr); isLit {
litKey, _ := lit.Value(nil)
@ -1057,23 +1111,9 @@ func (p *parser) parseObjectCons() (Expression, hcl.Diagnostics) {
break
}
// As a special case, we allow the key to be a literal identifier.
// This means that a variable reference or function call can't appear
// directly as key expression, and must instead be wrapped in some
// disambiguation punctuation, like (var.a) = "b" or "${var.a}" = "b".
var key Expression
var keyDiags hcl.Diagnostics
if p.Peek().Type == TokenIdent {
nameTok := p.Read()
key = &LiteralValueExpr{
Val: cty.StringVal(string(nameTok.Bytes)),
SrcRange: nameTok.Range,
}
} else {
key, keyDiags = p.ParseExpression()
}
key, keyDiags = p.ParseExpression()
diags = append(diags, keyDiags...)
if p.recovery && keyDiags.HasErrors() {
@ -1084,6 +1124,11 @@ func (p *parser) parseObjectCons() (Expression, hcl.Diagnostics) {
break
}
// We wrap up the key expression in a special wrapper that deals
// with our special case that naked identifiers as object keys
// are interpreted as literal strings.
key = &ObjectConsKeyExpr{Wrapped: key}
next = p.Peek()
if next.Type != TokenEqual && next.Type != TokenColon {
if !p.recovery {


@ -1,15 +1,38 @@
package hclsyntax
import (
"bytes"
"fmt"
"path/filepath"
"runtime"
"strings"
"github.com/hashicorp/hcl2/hcl"
)
// This is set to true at init() time in tests, to enable more useful output
// if a stack discipline error is detected. It should not be enabled in
// normal mode since there is a performance penalty from accessing the
// runtime stack to produce the traces, but could be temporarily set to
// true for debugging if desired.
var tracePeekerNewlinesStack = false
type peeker struct {
Tokens Tokens
NextIndex int
IncludeComments bool
IncludeNewlinesStack []bool
// used only when tracePeekerNewlinesStack is set
newlineStackChanges []peekerNewlineStackChange
}
// for use in debugging the stack usage only
type peekerNewlineStackChange struct {
Pushing bool // if false, then popping
Frame runtime.Frame
Include bool
}
func newPeeker(tokens Tokens, includeComments bool) *peeker {
@ -97,6 +120,18 @@ func (p *peeker) includingNewlines() bool {
}
func (p *peeker) PushIncludeNewlines(include bool) {
if tracePeekerNewlinesStack {
// Record who called us so that we can more easily track down any
// mismanagement of the stack in the parser.
callers := []uintptr{0}
runtime.Callers(2, callers)
frames := runtime.CallersFrames(callers)
frame, _ := frames.Next()
p.newlineStackChanges = append(p.newlineStackChanges, peekerNewlineStackChange{
true, frame, include,
})
}
p.IncludeNewlinesStack = append(p.IncludeNewlinesStack, include)
}
@ -104,5 +139,74 @@ func (p *peeker) PopIncludeNewlines() bool {
stack := p.IncludeNewlinesStack
remain, ret := stack[:len(stack)-1], stack[len(stack)-1]
p.IncludeNewlinesStack = remain
if tracePeekerNewlinesStack {
// Record who called us so that we can more easily track down any
// mismanagement of the stack in the parser.
callers := []uintptr{0}
runtime.Callers(2, callers)
frames := runtime.CallersFrames(callers)
frame, _ := frames.Next()
p.newlineStackChanges = append(p.newlineStackChanges, peekerNewlineStackChange{
false, frame, ret,
})
}
return ret
}
// AssertEmptyIncludeNewlinesStack checks whether the IncludeNewlinesStack is
// empty (back to only its initial entry), panicking if it is not. This can be
// used to catch stack mismanagement that might otherwise just cause confusing
// downstream errors.
//
// This function is a no-op if the stack is empty when called.
//
// If newlines stack tracing is enabled by setting the global variable
// tracePeekerNewlinesStack at init time, a full log of all of the push/pop
// calls will be produced to help identify which caller in the parser is
// misbehaving.
func (p *peeker) AssertEmptyIncludeNewlinesStack() {
if len(p.IncludeNewlinesStack) != 1 {
// Should never happen; indicates mismanagement of the stack inside
// the parser.
if p.newlineStackChanges != nil { // only if tracePeekerNewlinesStack is enabled above
panic(fmt.Errorf(
"non-empty IncludeNewlinesStack after parse with %d calls unaccounted for:\n%s",
len(p.IncludeNewlinesStack)-1,
formatPeekerNewlineStackChanges(p.newlineStackChanges),
))
} else {
panic(fmt.Errorf("non-empty IncludeNewlinesStack after parse: %#v", p.IncludeNewlinesStack))
}
}
}
func formatPeekerNewlineStackChanges(changes []peekerNewlineStackChange) string {
indent := 0
var buf bytes.Buffer
for _, change := range changes {
funcName := change.Frame.Function
if idx := strings.LastIndexByte(funcName, '.'); idx != -1 {
funcName = funcName[idx+1:]
}
filename := change.Frame.File
if idx := strings.LastIndexByte(filename, filepath.Separator); idx != -1 {
filename = filename[idx+1:]
}
switch change.Pushing {
case true:
buf.WriteString(strings.Repeat(" ", indent))
fmt.Fprintf(&buf, "PUSH %#v (%s at %s:%d)\n", change.Include, funcName, filename, change.Frame.Line)
indent++
case false:
indent--
buf.WriteString(strings.Repeat(" ", indent))
fmt.Fprintf(&buf, "POP %#v (%s at %s:%d)\n", change.Include, funcName, filename, change.Frame.Line)
}
}
return buf.String()
}
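Since the variable is unexported, enabling the trace has to happen inside the
package, presumably from a test file's `init` along these lines (a sketch, not
part of this diff):

```go
// In a _test.go file within the hclsyntax package:
package hclsyntax

func init() {
	// Accept the runtime-stack cost in tests in exchange for a full
	// push/pop log when AssertEmptyIncludeNewlinesStack fails.
	tracePeekerNewlinesStack = true
}
```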


@ -20,6 +20,12 @@ func ParseConfig(src []byte, filename string, start hcl.Pos) (*hcl.File, hcl.Dia
parser := &parser{peeker: peeker}
body, parseDiags := parser.ParseBody(TokenEOF)
diags = append(diags, parseDiags...)
// Panic if the parser uses incorrect stack discipline with the peeker's
// newlines stack, since otherwise it will produce confusing downstream
// errors.
peeker.AssertEmptyIncludeNewlinesStack()
return &hcl.File{
Body: body,
Bytes: src,
@ -54,6 +60,13 @@ func ParseExpression(src []byte, filename string, start hcl.Pos) (Expression, hc
})
}
parser.PopIncludeNewlines()
// Panic if the parser uses incorrect stack discipline with the peeker's
// newlines stack, since otherwise it will produce confusing downstream
// errors.
peeker.AssertEmptyIncludeNewlinesStack()
return expr, diags
}
@ -65,6 +78,12 @@ func ParseTemplate(src []byte, filename string, start hcl.Pos) (Expression, hcl.
parser := &parser{peeker: peeker}
expr, parseDiags := parser.ParseTemplate()
diags = append(diags, parseDiags...)
// Panic if the parser uses incorrect stack discipline with the peeker's
// newlines stack, since otherwise it will produce confusing downstream
// errors.
peeker.AssertEmptyIncludeNewlinesStack()
return expr, diags
}
@ -85,6 +104,14 @@ func ParseTraversalAbs(src []byte, filename string, start hcl.Pos) (hcl.Traversa
expr, parseDiags := parser.ParseTraversalAbs()
diags = append(diags, parseDiags...)
parser.PopIncludeNewlines()
// Panic if the parser uses incorrect stack discipline with the peeker's
// newlines stack, since otherwise it will produce confusing downstream
// errors.
peeker.AssertEmptyIncludeNewlinesStack()
return expr, diags
}
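Tying together the parser changes above, a hedged sketch showing that block
labels may now be naked identifiers as well as quoted strings (the block type
and label names here are arbitrary):

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	// Before this change, both labels would have required quotes.
	src := []byte("resource aws_instance example {\n}\n")
	f, diags := hclsyntax.ParseConfig(src, "main.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}

	content, _ := f.Body.Content(&hcl.BodySchema{
		Blocks: []hcl.BlockHeaderSchema{
			{Type: "resource", LabelNames: []string{"type", "name"}},
		},
	})
	fmt.Println(content.Blocks[0].Labels) // [aws_instance example]
}
```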

File diff suppressed because it is too large


@ -36,10 +36,10 @@ func scanTokens(data []byte, filename string, start hcl.Pos, mode scanMode) []To
NumberLitContinue = (digit|'.'|('e'|'E') ('+'|'-')? digit);
NumberLit = digit ("" | (NumberLitContinue - '.') | (NumberLitContinue* (NumberLitContinue - '.')));
Ident = ID_Start (ID_Continue | '-')*;
Ident = (ID_Start | '_') (ID_Continue | '-')*;
# Symbols that just represent themselves are handled as a single rule.
SelfToken = "[" | "]" | "(" | ")" | "." | "," | "*" | "/" | "+" | "-" | "=" | "<" | ">" | "!" | "?" | ":" | "\n" | "&" | "|" | "~" | "^" | ";" | "`";
SelfToken = "[" | "]" | "(" | ")" | "." | "," | "*" | "/" | "%" | "+" | "-" | "=" | "<" | ">" | "!" | "?" | ":" | "\n" | "&" | "|" | "~" | "^" | ";" | "`";
EqualOp = "==";
NotEqual = "!=";
@ -112,7 +112,25 @@ func scanTokens(data []byte, filename string, start hcl.Pos, mode scanMode) []To
if topdoc.StartOfLine {
maybeMarker := bytes.TrimSpace(data[ts:te])
if bytes.Equal(maybeMarker, topdoc.Marker) {
// We actually emit two tokens here: the end-of-heredoc
// marker first, and then separately the newline that
// follows it. This then avoids issues with the closing
// marker consuming a newline that would normally be used
// to mark the end of an attribute definition.
// We might have either a \n sequence or an \r\n sequence
// here, so we must handle both.
nls := te-1
nle := te
te--
if data[te-1] == '\r' {
// back up one more byte
nls--
te--
}
token(TokenCHeredoc);
ts = nls
te = nle
token(TokenNewline);
heredocs = heredocs[:len(heredocs)-1]
fret;
}
@ -197,14 +215,14 @@ func scanTokens(data []byte, filename string, start hcl.Pos, mode scanMode) []To
EndStringTmpl = '"';
StringLiteralChars = (AnyUTF8 - ("\r"|"\n"));
TemplateStringLiteral = (
('$' ^'{') |
('%' ^'{') |
('$' ^'{' %{ fhold; }) |
('%' ^'{' %{ fhold; }) |
('\\' StringLiteralChars) |
(StringLiteralChars - ("$" | '%' | '"'))
)+;
HeredocStringLiteral = (
('$' ^'{') |
('%' ^'{') |
('$' ^'{' %{ fhold; }) |
('%' ^'{' %{ fhold; }) |
(StringLiteralChars - ("$" | '%'))
)*;
BareStringLiteral = (
@ -337,7 +355,17 @@ func scanTokens(data []byte, filename string, start hcl.Pos, mode scanMode) []To
// encountered something that the scanner can't match, which we'll
// deal with as an invalid.
if cs < hcltok_first_final {
f.emitToken(TokenInvalid, p, len(data))
if mode == scanTemplate && len(stack) == 0 {
// If we're scanning a bare template then any straggling
// top-level stuff is actually literal string, rather than
// invalid. This handles the case where the template ends
// with a single "$" or "%", which trips us up because we
// want to see another character to decide if it's a sequence
// or an escape.
f.emitToken(TokenStringLit, ts, len(data))
} else {
f.emitToken(TokenInvalid, ts, len(data))
}
}
// We always emit a synthetic EOF token at the end, since it gives the


@ -161,7 +161,7 @@ language-agnostic HCL information model.
ConfigFile = Body;
Body = (Attribute | Block)*;
Attribute = Identifier "=" Expression Newline;
Block = Identifier (StringLit)* "{" Newline Body "}" Newline;
Block = Identifier (StringLit|Identifier)* "{" Newline Body "}" Newline;
```
### Configuration Files
@ -186,8 +186,10 @@ for later evaluation by the calling application.
### Blocks
A _block_ creates a child body that is annotated with a block _type_ and
zero or more optional block _labels_. Blocks create a structural hierarchy
which can be interpreted by the calling application.
zero or more block _labels_. Blocks create a structural hierarchy which can be
interpreted by the calling application.
Block labels can either be quoted literal strings or naked identifiers.
## Expressions
@ -877,3 +879,45 @@ application, by converting the final template result to string. This is
necessary, for example, if a standalone template is being used to produce
the direct contents of a file, since the result in that case must always be a
string.
## Static Analysis
The HCL static analysis operations are implemented for some expression types
in the native syntax, as described in the following sections.
A goal for static analysis of the native syntax is for the interpretation to
be as consistent as possible with the dynamic evaluation interpretation of
the given expression, though some deviations are intentionally made in order
to maximize the potential for analysis.
### Static List
The tuple construction syntax can be interpreted as a static list. All of
the expression elements given are returned as the static list elements,
with no further interpretation.
### Static Map
The object construction syntax can be interpreted as a static map. All of the
key/value pairs given are returned as the static pairs, with no further
interpretation.
The usual requirement that an attribute name be interpretable as a string
does not apply to this static analysis, allowing callers to provide map-like
constructs with different key types by building on the map syntax.
### Static Call
The function call syntax can be interpreted as a static call. The called
function name is returned verbatim and the given argument expressions are
returned as the static arguments, with no further interpretation.
### Static Traversal
A variable expression and any attached attribute access operations and
constant index operations can be interpreted as a static traversal.
The keywords `true`, `false` and `null` can also be interpreted as
static traversals, behaving as if they were references to variables of those
names, to allow callers to redefine the meaning of those keywords in certain
contexts.


@ -133,7 +133,7 @@ func (f *tokenAccum) emitToken(ty TokenType, startOfs, endOfs int) {
b := f.Bytes[startOfs:endOfs]
for len(b) > 0 {
advance, seq, _ := textseg.ScanGraphemeClusters(b, true)
if len(seq) == 1 && seq[0] == '\n' {
if (len(seq) == 1 && seq[0] == '\n') || (len(seq) == 2 && seq[0] == '\r' && seq[1] == '\n') {
end.Line++
end.Column = 1
} else {


@ -12,7 +12,7 @@ type node interface {
}
type objectVal struct {
Attrs map[string]*objectAttr
Attrs []*objectAttr
SrcRange hcl.Range // range of the entire object, brace-to-brace
OpenRange hcl.Range // range of the opening brace
CloseRange hcl.Range // range of the closing brace


@ -1,11 +1,12 @@
package json
import (
"fmt"
"strings"
)
type navigation struct {
root *objectVal
root node
}
// Implementation of hcled.ContextString
@ -21,21 +22,49 @@ func (n navigation) ContextString(offset int) string {
steps[i], steps[len(steps)-1-i] = steps[len(steps)-1-i], steps[i]
}
return strings.Join(steps, ".")
ret := strings.Join(steps, "")
if len(ret) > 0 && ret[0] == '.' {
ret = ret[1:]
}
return ret
}
func navigationStepsRev(obj *objectVal, offset int) []string {
// Do any of our properties have an object that contains the target
// offset?
for k, attr := range obj.Attrs {
ov, ok := attr.Value.(*objectVal)
if !ok {
continue
}
func navigationStepsRev(v node, offset int) []string {
switch tv := v.(type) {
case *objectVal:
// Do any of our properties have an object that contains the target
// offset?
for _, attr := range tv.Attrs {
k := attr.Name
av := attr.Value
if ov.SrcRange.ContainsOffset(offset) {
return append(navigationStepsRev(ov, offset), k)
switch av.(type) {
case *objectVal, *arrayVal:
// okay
default:
continue
}
if av.Range().ContainsOffset(offset) {
return append(navigationStepsRev(av, offset), "."+k)
}
}
case *arrayVal:
// Do any of our elements contain the target offset?
for i, elem := range tv.Values {
switch elem.(type) {
case *objectVal, *arrayVal:
// okay
default:
continue
}
if elem.Range().ContainsOffset(offset) {
return append(navigationStepsRev(elem, offset), fmt.Sprintf("[%d]", i))
}
}
}
return nil
}


@ -103,7 +103,7 @@ func parseObject(p *peeker) (node, hcl.Diagnostics) {
var diags hcl.Diagnostics
open := p.Read()
attrs := map[string]*objectAttr{}
attrs := []*objectAttr{}
// recover is used to shift the peeker to what seems to be the end of
// our object, so that when we encounter an error we leave the peeker
@ -191,24 +191,11 @@ Token:
return nil, diags
}
if existing := attrs[key]; existing != nil {
// Generate a diagnostic for the duplicate key, but continue parsing
// anyway since this is a semantic error we can recover from.
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Duplicate JSON object property",
Detail: fmt.Sprintf(
"An property named %q was previously introduced at %s",
key, existing.NameRange.String(),
),
Subject: &keyStrNode.SrcRange,
})
}
attrs[key] = &objectAttr{
attrs = append(attrs, &objectAttr{
Name: key,
Value: valNode,
NameRange: keyStrNode.SrcRange,
}
})
switch p.Peek().Type {
case tokenComma:
@ -383,7 +370,7 @@ func parseNumber(p *peeker) (node, hcl.Diagnostics) {
}
}
f, _, err := (&big.Float{}).Parse(string(num), 10)
f, _, err := big.ParseFloat(string(num), 10, 512, big.ToNearestEven)
if err != nil {
// Should never happen if above passed, since JSON numbers are a subset
// of what big.Float can parse...
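For context on this change (my gloss, not from the diff): `(&big.Float{}).Parse`
rounds to a default 64-bit mantissa when the receiver's precision is zero,
while `big.ParseFloat` lets the caller request 512 bits explicitly. A small
comparison sketch:

```go
package main

import (
	"fmt"
	"math/big"
)

func main() {
	const num = "3.14159265358979323846264338327950288419716939937510582097494"

	// Old approach: a zero big.Float defaults to 64 bits of precision
	// when parsing, rounding away the later digits.
	f64, _, _ := (&big.Float{}).Parse(num, 10)

	// New approach: an explicit 512-bit mantissa preserves far more of
	// the decimal input.
	f512, _, _ := big.ParseFloat(num, 10, 512, big.ToNearestEven)

	fmt.Println(f64.Text('g', 62))
	fmt.Println(f512.Text('g', 62))
}
```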


@ -19,19 +19,22 @@ import (
// the subset of data that was able to be parsed, which may be none.
func Parse(src []byte, filename string) (*hcl.File, hcl.Diagnostics) {
rootNode, diags := parseFileContent(src, filename)
if _, ok := rootNode.(*objectVal); !ok {
switch rootNode.(type) {
case *objectVal, *arrayVal:
// okay
default:
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Root value must be object",
Detail: "The root value in a JSON-based configuration must be a JSON object.",
Detail: "The root value in a JSON-based configuration must be either a JSON object or a JSON array of objects.",
Subject: rootNode.StartRange().Ptr(),
})
// Put in a placeholder objectVal just so the caller always gets
// a valid file, even if it appears empty. This is useful for callers
// that are doing static analysis of possibly-erroneous source code,
// which will try to process the returned file even if we return
// diagnostics of severity error. This way, they'll get a file that
// has an empty body rather than a body that panics when probed.
// Since we've already produced an error message for this being
// invalid, we'll return an empty placeholder here so that trying to
// extract content from our root body won't produce a redundant
// error saying the same thing again in more general terms.
fakePos := hcl.Pos{
Byte: 0,
Line: 1,
@ -43,17 +46,18 @@ func Parse(src []byte, filename string) (*hcl.File, hcl.Diagnostics) {
End: fakePos,
}
rootNode = &objectVal{
Attrs: map[string]*objectAttr{},
Attrs: []*objectAttr{},
SrcRange: fakeRange,
OpenRange: fakeRange,
}
}
file := &hcl.File{
Body: &body{
obj: rootNode.(*objectVal),
val: rootNode,
},
Bytes: src,
Nav: navigation{rootNode.(*objectVal)},
Nav: navigation{rootNode},
}
return file, diags
}


@ -13,19 +13,36 @@ grammar as-is, and merely defines a specific methodology for interpreting
JSON constructs into HCL structural elements and expressions.
This mapping is defined such that valid JSON-serialized HCL input can be
produced using standard JSON implementations in various programming languages.
_produced_ using standard JSON implementations in various programming languages.
_Parsing_ such JSON has some additional constraints beyond what is normally
supported by JSON parsers, though adaptations are defined to allow processing
with an off-the-shelf JSON parser with certain caveats, described in later
sections.
supported by JSON parsers, so a specialized parser may be required that
is able to:
* Preserve the relative ordering of properties defined in an object.
* Preserve multiple definitions of the same property name.
* Preserve numeric values to the precision required by the number type
in [the HCL syntax-agnostic information model](../spec.md).
* Retain source location information for parsed tokens/constructs in order
to produce good error messages.
## Structural Elements
The HCL language-agnostic information model defines a _body_ as an abstract
container for attribute definitions and child blocks. A body is represented
in JSON as a JSON _object_.
[The HCL syntax-agnostic information model](../spec.md) defines a _body_ as an
abstract container for attribute definitions and child blocks. A body is
represented in JSON as either a single JSON object or a JSON array of objects.
As defined in the language-agnostic model, body processing is done in terms
Body processing is in terms of JSON object properties, visited in the order
they appear in the input. Where a body is represented by a single JSON object,
the properties of that object are visited in order. Where a body is
represented by a JSON array, each of its elements are visited in order and
each element has its properties visited in order. If any element of the array
is not a JSON object then the input is erroneous.
When a body is being processed in the _dynamic attributes_ mode, the allowance
of a JSON array in the previous paragraph does not apply and instead a single
JSON object is always required.
As defined in the language-agnostic model, body processing is in terms
of a schema which provides context for interpreting the body's content. For
JSON bodies, the schema is crucial to allow differentiation of attribute
definitions and block definitions, both of which are represented via object
@ -61,14 +78,16 @@ the following provides a definition for that attribute:
### Blocks
Where the given schema describes a block with a given type name, the object
property with the matching name — if present — serves as a definition of
zero or more blocks of that type.
Where the given schema describes a block with a given type name, each object
property with the matching name serves as a definition of zero or more blocks
of that type.
Processing of child blocks is in terms of nested JSON objects and arrays.
If the schema defines one or more _labels_ for the block type, a nested
object is required for each labelling level, with the object keys serving as
the label values at that level.
If the schema defines one or more _labels_ for the block type, a nested JSON
object or JSON array of objects is required for each labelling level. These
are flattened to a single ordered sequence of object properties using the
same algorithm as for body content as defined above. Each object property
serves as a label value at the corresponding level.
After any labelling levels, the next nested value is either a JSON object
representing a single block body, or a JSON array of JSON objects that each
@ -111,7 +130,8 @@ of zero blocks, though generators should prefer to omit the property entirely
in this scenario.
Given a schema that calls for a block type named "foo" with _two_ labels, the
extra label levels must be represented as objects as in the following examples:
extra label levels must be represented as objects or arrays of objects as in
the following examples:
```json
{
@ -132,6 +152,7 @@ extra label levels must be represented as objects as in the following examples:
}
}
```
```json
{
"foo": {
@ -157,10 +178,70 @@ extra label levels must be represented as objects as in the following examples:
}
```
Where multiple definitions are included for the same type and labels, the
JSON array is always the value of the property representing the final label,
and contains objects representing block bodies. It is not valid to use an array
at any other point in the block definition structure.
```json
{
"foo": [
{
"bar": {
"baz": {
"child_attr": "baz"
},
"boz": {
"child_attr": "baz"
}
}
},
{
"bar": {
"baz": [
{
"child_attr": "baz"
},
{
"child_attr": "boz"
}
]
}
}
]
}
```
```json
{
"foo": {
"bar": {
"baz": {
"child_attr": "baz"
},
"boz": {
"child_attr": "baz"
}
},
"bar": {
"baz": [
{
"child_attr": "baz"
},
{
"child_attr": "boz"
}
]
}
}
}
```
Arrays can be introduced at either the label definition or block body
definition levels to define multiple definitions of the same block type
or labels while preserving order.
A JSON HCL parser _must_ support duplicate definitions of the same property
name within a single object, preserving all of them and the relative ordering
between them. The array-based forms are also required so that JSON HCL
configurations can be produced with JSON producing libraries that are not
able to preserve property definition order and multiple definitions of
the same property.
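A hedged sketch of the array form in action, using this PR's `json` package
API directly (the block type name is arbitrary):

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	hcljson "github.com/hashicorp/hcl2/hcl/json"
)

func main() {
	// Two definitions of the same block type, kept in order via an array.
	src := []byte(`{"service": [{"name": "a"}, {"name": "b"}]}`)
	f, diags := hcljson.Parse(src, "config.hcl.json")
	if diags.HasErrors() {
		panic(diags.Error())
	}

	content, _ := f.Body.Content(&hcl.BodySchema{
		Blocks: []hcl.BlockHeaderSchema{{Type: "service"}},
	})
	fmt.Println(len(content.Blocks)) // 2
}
```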
## Expressions
@ -174,17 +255,24 @@ When interpreted as an expression, a JSON object represents a value of a HCL
object type.
Each property of the JSON object represents an attribute of the HCL object type.
The object type is constructed by enumerating the JSON object properties,
creating for each an attribute whose name exactly matches the property name,
and whose type is the result of recursively applying the expression mapping
rules.
The property name string given in the JSON input is interpreted as a string
expression as described below, and its result is converted to string as defined
by the syntax-agnostic information model. If such a conversion is not possible,
an error is produced and evaluation fails.
An instance of the constructed object type is then created, whose values
are interpreted by again recursively applying the mapping rules defined in
this section.
this section to each of the property values.
If any evaluated property name strings produce null values, an error is
produced and evaluation fails. If any produce _unknown_ values, the _entire
object's_ result is an unknown value of the dynamic pseudo-type, signalling
that the type of the object cannot be determined.
It is an error to define the same property name multiple times within a single
JSON object interpreted as an expression.
JSON object interpreted as an expression. In full expression mode, this
constraint applies to the name expression results after conversion to string,
rather than the raw string that may contain interpolation expressions.
### Arrays
@ -205,18 +293,25 @@ section.
When interpreted as an expression, a JSON number represents a HCL number value.
HCL numbers are arbitrary-precision decimal values, so an ideal implementation
of this specification will translate exactly the value given to a number of
corresponding precision.
HCL numbers are arbitrary-precision decimal values, so a JSON HCL parser must
be able to translate exactly the value given to a number of corresponding
precision, within the constraints set by the HCL syntax-agnostic information
model.
In practice, off-the-shelf JSON parsers often do not support customizing the
In practice, off-the-shelf JSON serializers often do not support customizing the
processing of numbers, and instead force processing as 32-bit or 64-bit
floating point values with a potential loss of precision. It is permissible
for a HCL JSON parser to pass on such limitations _if and only if_ the
available precision and other constraints are defined in its documentation.
Calling applications each have differing precision requirements, so calling
applications are free to select an implementation with more limited precision
capabilities should high precision not be required for that application.
floating point values.
A _producer_ of JSON HCL that uses such a serializer can provide numeric values
as JSON strings where they have precision too great for representation in the
serializer's chosen numeric type in situations where the result will be
converted to number (using the standard conversion rules) by a calling
application.
Alternatively, for expressions that are evaluated in full expression mode an
embedded template interpolation can be used to faithfully represent a number,
such as `"${1e150}"`, which will then be evaluated by the underlying HCL native
syntax expression evaluator.
### Boolean Values
@ -263,3 +358,48 @@ the result must be a number, rather than a string representation of a number:
```json
"${ a + b }"
```
## Static Analysis
The HCL static analysis operations are implemented for JSON values that
represent expressions, as described in the following sections.
Due to the limited expressive power of the JSON syntax alone, the use of these
static analysis functions rather than normal expression evaluation supplies
additional context for how a JSON value is to be interpreted, which means
that static analyses can result in a different interpretation of a given
expression than normal evaluation.
### Static List
An expression interpreted as a static list must be a JSON array. Each of the
values in the array is interpreted as an expression and returned.
### Static Map
An expression interpreted as a static map must be a JSON object. Each of the
key/value pairs in the object is presented as a pair of expressions. Since
object property names are always strings, evaluating the key expression with
a non-`nil` evaluation context will evaluate any template sequences given
in the property name.
### Static Call
An expression interpreted as a static call must be a string. The content of
the string is interpreted as a native syntax expression (not a _template_,
unlike normal evaluation) and then the static call analysis is delegated to
that expression.
If the original expression is not a string or its contents cannot be parsed
as a native syntax expression then static call analysis is not supported.
### Static Traversal
An expression interpreted as a static traversal must be a string. The content
of the string is interpreted as a native syntax expression (not a _template_,
unlike normal evaluation) and then static traversal analysis is delegated
to that expression.
If the original expression is not a string or its contents cannot be parsed
as a native syntax expression then static traversal analysis is not supported.
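A hedged sketch of static call analysis over JSON input, mirroring the
typeexpr use case mentioned at the start of this PR (the attribute name is
arbitrary, and this assumes the JSON expression type implements `ExprCall` as
this section describes):

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	hcljson "github.com/hashicorp/hcl2/hcl/json"
)

func main() {
	f, diags := hcljson.Parse([]byte(`{"type": "list(string)"}`), "config.hcl.json")
	if diags.HasErrors() {
		panic(diags.Error())
	}

	content, _ := f.Body.Content(&hcl.BodySchema{
		Attributes: []hcl.AttributeSchema{{Name: "type"}},
	})

	// Static analysis re-parses the string as a native syntax expression
	// rather than evaluating it as a template.
	call, diags := hcl.ExprCall(content.Attributes["type"].Expr)
	if diags.HasErrors() {
		panic(diags.Error())
	}
	fmt.Println(call.Name, len(call.Arguments)) // list 1
}
```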


@ -6,12 +6,13 @@ import (
"github.com/hashicorp/hcl2/hcl"
"github.com/hashicorp/hcl2/hcl/hclsyntax"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/convert"
)
// body is the implementation of "Body" used for files processed with the JSON
// parser.
type body struct {
val node
// If non-nil, the keys of this map cause the corresponding attributes to
// be treated as non-existing. This is used when Body.PartialContent is
@ -43,7 +44,11 @@ func (b *body) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostic
nameSuggestions = append(nameSuggestions, blockS.Type)
}
jsonAttrs, attrDiags := b.collectDeepAttrs(b.val, nil)
diags = append(diags, attrDiags...)
for _, attr := range jsonAttrs {
k := attr.Name
if k == "//" {
// Ignore "//" keys in objects representing bodies, to allow
// their use as comments.
@ -51,16 +56,15 @@ func (b *body) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostic
}
if _, ok := hiddenAttrs[k]; !ok {
suggestion := nameSuggestion(k, nameSuggestions)
if suggestion != "" {
suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
}
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Extraneous JSON object property",
Detail: fmt.Sprintf("No attribute or block type is named %q.%s", k, fixItHint),
Detail: fmt.Sprintf("No attribute or block type is named %q.%s", k, suggestion),
Subject: &attr.NameRange,
Context: attr.Range().Ptr(),
})
@ -71,16 +75,17 @@ func (b *body) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostic
}
func (b *body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) {
var diags hcl.Diagnostics
jsonAttrs, attrDiags := b.collectDeepAttrs(b.val, nil)
diags = append(diags, attrDiags...)
usedNames := map[string]struct{}{}
if b.hiddenAttrs != nil {
for k := range b.hiddenAttrs {
usedNames[k] = struct{}{}
}
}
content := &hcl.BodyContent{
Attributes: map[string]*hcl.Attribute{},
@ -89,43 +94,70 @@ func (b *body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Bod
MissingItemRange: b.MissingItemRange(),
}
// Create some more convenient data structures for our work below.
attrSchemas := map[string]hcl.AttributeSchema{}
blockSchemas := map[string]hcl.BlockHeaderSchema{}
for _, attrS := range schema.Attributes {
attrSchemas[attrS.Name] = attrS
}
for _, blockS := range schema.Blocks {
blockSchemas[blockS.Type] = blockS
}
for _, jsonAttr := range jsonAttrs {
attrName := jsonAttr.Name
if _, used := b.hiddenAttrs[attrName]; used {
continue
}
if attrS, defined := attrSchemas[attrName]; defined {
if existing, exists := content.Attributes[attrName]; exists {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Duplicate attribute definition",
Detail: fmt.Sprintf("The attribute %q was already defined at %s.", attrName, existing.Range),
Subject: &jsonAttr.NameRange,
Context: jsonAttr.Range().Ptr(),
})
continue
}
content.Attributes[attrS.Name] = &hcl.Attribute{
Name: attrS.Name,
Expr: &expression{src: jsonAttr.Value},
Range: hcl.RangeBetween(jsonAttr.NameRange, jsonAttr.Value.Range()),
NameRange: jsonAttr.NameRange,
}
usedNames[attrName] = struct{}{}
} else if blockS, defined := blockSchemas[attrName]; defined {
bv := jsonAttr.Value
blockDiags := b.unpackBlock(bv, blockS.Type, &jsonAttr.NameRange, blockS.LabelNames, nil, nil, &content.Blocks)
diags = append(diags, blockDiags...)
usedNames[attrName] = struct{}{}
}
// We ignore anything that isn't defined because that's the
// PartialContent contract. The Content method will catch leftovers.
}
// Make sure we got all the required attributes.
for _, attrS := range schema.Attributes {
if !attrS.Required {
continue
}
if _, defined := content.Attributes[attrS.Name]; !defined {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Missing required attribute",
Detail: fmt.Sprintf("The attribute %q is required, but no definition was found.", attrS.Name),
Subject: b.MissingItemRange().Ptr(),
})
}
}
unusedBody := &body{
val: b.val,
hiddenAttrs: usedNames,
}
@ -135,8 +167,22 @@ func (b *body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Bod
// JustAttributes for JSON bodies interprets all properties of the wrapped
// JSON object as attributes and returns them.
func (b *body) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
var diags hcl.Diagnostics
attrs := make(map[string]*hcl.Attribute)
obj, ok := b.val.(*objectVal)
if !ok {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Incorrect JSON value type",
Detail: "A JSON object is required here, defining the attributes for this block.",
Subject: b.val.StartRange().Ptr(),
})
return attrs, diags
}
for _, jsonAttr := range obj.Attrs {
name := jsonAttr.Name
if name == "//" {
// Ignore "//" keys in objects representing bodies, to allow
// their use as comments.
@ -146,6 +192,17 @@ func (b *body) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
if _, hidden := b.hiddenAttrs[name]; hidden {
continue
}
if existing, exists := attrs[name]; exists {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Duplicate attribute definition",
Detail: fmt.Sprintf("The attribute %q was already defined at %s.", name, existing.Range),
Subject: &jsonAttr.NameRange,
})
continue
}
attrs[name] = &hcl.Attribute{
Name: name,
Expr: &expression{src: jsonAttr.Value},
@ -156,27 +213,29 @@ func (b *body) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
return attrs, diags
}
func (b *body) MissingItemRange() hcl.Range {
switch tv := b.val.(type) {
case *objectVal:
return tv.CloseRange
case *arrayVal:
return tv.OpenRange
default:
// Should not happen in correct operation, but might show up if the
// input is invalid and we are producing partial results.
return tv.StartRange()
}
}
func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labelsLeft []string, labelsUsed []string, labelRanges []hcl.Range, blocks *hcl.Blocks) (diags hcl.Diagnostics) {
if len(labelsLeft) > 0 {
labelName := labelsLeft[0]
jsonAttrs, attrDiags := b.collectDeepAttrs(v, &labelName)
diags = append(diags, attrDiags...)
if len(jsonAttrs) == 0 {
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Missing block label",
@ -187,7 +246,8 @@ func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labels
}
labelsUsed := append(labelsUsed, "")
labelRanges := append(labelRanges, hcl.Range{})
for _, p := range jsonAttrs {
pk := p.Name
labelsUsed[len(labelsUsed)-1] = pk
labelRanges[len(labelRanges)-1] = p.NameRange
diags = append(diags, b.unpackBlock(p.Value, typeName, typeRange, labelsLeft[1:], labelsUsed, labelRanges, blocks)...)
@ -212,7 +272,7 @@ func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labels
Type: typeName,
Labels: labels,
Body: &body{
val: tv,
},
DefRange: tv.OpenRange,
@ -222,22 +282,11 @@ func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labels
case *arrayVal:
// Multiple instances of the block
for _, av := range tv.Values {
*blocks = append(*blocks, &hcl.Block{
Type: typeName,
Labels: labels,
Body: &body{
val: av, // might be mistyped; we'll find out when content is requested for this body
},
DefRange: tv.OpenRange,
@ -256,6 +305,74 @@ func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labels
return
}
// collectDeepAttrs takes either a single object or an array of objects and
// flattens it into a list of object attributes, collecting attributes from
// all of the objects in a given array.
//
// Ordering is preserved, so a list of objects that each have one property
// will result in those properties being returned in the same order as the
// objects appeared in the array.
//
// This is appropriate for use only for objects representing bodies or labels
// within a block.
//
// The labelName argument, if non-nil, is used to tailor returned error
// messages to refer to block labels rather than attributes and child blocks.
// It has no other effect.
func (b *body) collectDeepAttrs(v node, labelName *string) ([]*objectAttr, hcl.Diagnostics) {
var diags hcl.Diagnostics
var attrs []*objectAttr
switch tv := v.(type) {
case *objectVal:
attrs = append(attrs, tv.Attrs...)
case *arrayVal:
for _, ev := range tv.Values {
switch tev := ev.(type) {
case *objectVal:
attrs = append(attrs, tev.Attrs...)
default:
if labelName != nil {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Incorrect JSON value type",
Detail: fmt.Sprintf("A JSON object is required here, to specify %s labels for this block.", *labelName),
Subject: ev.StartRange().Ptr(),
})
} else {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Incorrect JSON value type",
Detail: "A JSON object is required here, to define attributes and child blocks.",
Subject: ev.StartRange().Ptr(),
})
}
}
}
default:
if labelName != nil {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Incorrect JSON value type",
Detail: fmt.Sprintf("Either a JSON object or JSON array of objects is required here, to specify %s labels for this block.", *labelName),
Subject: v.StartRange().Ptr(),
})
} else {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Incorrect JSON value type",
Detail: "Either a JSON object or JSON array of objects is required here, to define attributes and child blocks.",
Subject: v.StartRange().Ptr(),
})
}
}
return attrs, diags
}
func (e *expression) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
switch v := e.src.(type) {
case *stringVal:
@ -301,12 +418,75 @@ func (e *expression) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
}
return cty.TupleVal(vals), nil
case *objectVal:
var diags hcl.Diagnostics
attrs := map[string]cty.Value{}
attrRanges := map[string]hcl.Range{}
known := true
for _, jsonAttr := range v.Attrs {
// In this one context we allow keys to contain interpolation
// expressions too, assuming we're evaluating in interpolation
// mode. This achieves parity with the native syntax where
// object expressions can have dynamic keys, while block contents
// may not.
name, nameDiags := (&expression{src: &stringVal{
Value: jsonAttr.Name,
SrcRange: jsonAttr.NameRange,
}}).Value(ctx)
val, valDiags := (&expression{src: jsonAttr.Value}).Value(ctx)
diags = append(diags, nameDiags...)
diags = append(diags, valDiags...)
var err error
name, err = convert.Convert(name, cty.String)
if err != nil {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Invalid object key expression",
Detail: fmt.Sprintf("Cannot use this expression as an object key: %s.", err),
Subject: &jsonAttr.NameRange,
})
continue
}
if name.IsNull() {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Invalid object key expression",
Detail: "Cannot use null value as an object key.",
Subject: &jsonAttr.NameRange,
})
continue
}
if !name.IsKnown() {
// This is a bit of a weird case, since our usual rules require
// us to tolerate unknowns and just represent the result as
// best we can, but if we don't know the key then we can't
// know the type of our object at all, and thus we must turn
// the whole thing into cty.DynamicVal. This is consistent with
// how this situation is handled in the native syntax.
// We'll keep iterating so we can collect other errors in
// subsequent attributes.
known = false
continue
}
nameStr := name.AsString()
if _, defined := attrs[nameStr]; defined {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Duplicate object attribute",
Detail: fmt.Sprintf("An attribute named %q was already defined at %s.", nameStr, attrRanges[nameStr]),
Subject: &jsonAttr.NameRange,
})
continue
}
attrs[nameStr] = val
attrRanges[nameStr] = jsonAttr.NameRange
}
if !known {
// We encountered an unknown key somewhere along the way, so
// we can't know what our type will eventually be.
return cty.DynamicVal, diags
}
return cty.ObjectVal(attrs), diags
default:
// Default to DynamicVal so that ASTs containing invalid nodes can
// still be partially-evaluated.
@ -378,6 +558,29 @@ func (e *expression) AsTraversal() hcl.Traversal {
}
}
// Implementation for hcl.ExprCall.
func (e *expression) ExprCall() *hcl.StaticCall {
// In JSON-based syntax a static call is given as a string containing
// an expression in the native syntax that also supports ExprCall.
switch v := e.src.(type) {
case *stringVal:
expr, diags := hclsyntax.ParseExpression([]byte(v.Value), v.SrcRange.Filename, v.SrcRange.Start)
if diags.HasErrors() {
return nil
}
call, diags := hcl.ExprCall(expr)
if diags.HasErrors() {
return nil
}
return call
default:
return nil
}
}
// Implementation for hcl.ExprList.
func (e *expression) ExprList() []hcl.Expression {
switch v := e.src.(type) {
@ -391,3 +594,23 @@ func (e *expression) ExprList() []hcl.Expression {
return nil
}
}
// Implementation for hcl.ExprMap.
func (e *expression) ExprMap() []hcl.KeyValuePair {
switch v := e.src.(type) {
case *objectVal:
ret := make([]hcl.KeyValuePair, len(v.Attrs))
for i, jsonAttr := range v.Attrs {
ret[i] = hcl.KeyValuePair{
Key: &expression{src: &stringVal{
Value: jsonAttr.Name,
SrcRange: jsonAttr.NameRange,
}},
Value: &expression{src: jsonAttr.Value},
}
}
return ret
default:
return nil
}
}

View File

@ -2,7 +2,7 @@
package json
import "fmt"
import "strconv"
const _tokenType_name = "tokenInvalidtokenCommatokenColontokenEqualstokenKeywordtokenNumbertokenStringtokenBrackOtokenBrackCtokenBraceOtokenBraceCtokenEOF"
@ -25,5 +25,5 @@ func (i tokenType) String() string {
if str, ok := _tokenType_map[i]; ok {
return str
}
return fmt.Sprintf("tokenType(%d)", i)
return "tokenType(" + strconv.FormatInt(int64(i), 10) + ")"
}

View File

@ -616,6 +616,48 @@ Two tuple types of the same length unify constructing a new type of the
same length whose elements are the unification of the corresponding elements
in the two input types.
## Static Analysis
In most applications, full expression evaluation is sufficient for understanding
the provided configuration. However, some specialized applications require more
direct access to the physical structures in the expressions, which can, for
example, allow the construction of new language constructs in terms of the
existing syntax elements.
Since static analysis operates on the physical structure of the configuration,
the details will vary depending on syntax. Each syntax must decide which of its
physical structures corresponds to each of the following analyses, producing
error diagnostics if they are applied to inappropriate expressions.
The following are the required static analysis functions:
* **Static List**: Require list/tuple construction syntax to be used and
return a list of expressions for each of the elements given.
* **Static Map**: Require map/object construction syntax to be used and
return a list of key/value pairs -- both expressions -- for each of
the elements given. The usual constraint that a map key must be a string
must not apply to this analysis, thus allowing applications to interpret
arbitrary keys as they see fit.
* **Static Call**: Require function call syntax to be used and return an
object describing the called function name and a list of expressions
representing each of the call arguments.
* **Static Traversal**: Require a reference to a symbol in the variable
scope and return a description of the path from the root scope to the
accessed attribute or index.
The intent of a calling application using these features is to require a more
rigid interpretation of the configuration than in expression evaluation.
Syntax implementations should make use of the extra contextual information
provided in order to make an intuitive mapping onto the constructs of the
underlying syntax, possibly interpreting the expression slightly differently
than it would be interpreted in normal evaluation.
Each syntax must define which of its expression elements each of the analyses
above applies to, and how those analyses behave given those expression elements.
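As an illustrative sketch using the native syntax implementation in this
repository, static list analysis recovers the element expressions without
needing an evaluation context for the symbols they contain:

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	expr, diags := hclsyntax.ParseExpression(
		[]byte(`[foo, upper("bar"), 3]`), "example.hcl", hcl.Pos{Line: 1, Column: 1},
	)
	if diags.HasErrors() {
		panic(diags.Error())
	}
	// Static analysis: extract the element expressions without evaluating
	// them, even though `foo` and `upper` are not defined in any scope here.
	elems, diags := hcl.ExprList(expr)
	if diags.HasErrors() {
		panic(diags.Error())
	}
	for i, elemExpr := range elems {
		fmt.Printf("element %d spans %s\n", i, elemExpr.Range())
	}
}
```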
## Implementation Considerations
Implementations of this specification are free to adopt any strategy that
@ -639,6 +681,9 @@ are implemented separately for each syntax:
* Providing an evaluation function for all possible expressions that produces
a value given an evaluation context.
* Providing the static analysis functionality described above in a manner that
makes sense within the convention of the syntax.
The suggested implementation strategy is to use an implementation language's
closest concept to an _abstract type_, _virtual type_ or _interface type_
to represent both Body and Expression. Each language-specific implementation

21
vendor/github.com/zclconf/go-cty/LICENSE generated vendored Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017-2018 Martin Atkins
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -72,6 +72,27 @@ func getConversionKnown(in cty.Type, out cty.Type, unsafe bool) conversion {
}
return conversionCollectionToList(outEty, convEty)
case out.IsSetType() && (in.IsListType() || in.IsSetType()):
if in.IsListType() && !unsafe {
// Conversion from list to set is unsafe because it will lose
// information: the ordering will not be preserved, and any
// duplicate elements will be conflated.
return nil
}
inEty := in.ElementType()
outEty := out.ElementType()
convEty := getConversion(inEty, outEty, unsafe)
if inEty.Equals(outEty) {
// This indicates that we're converting to a set with the same
// element type, so we don't need an element converter.
return conversionCollectionToSet(outEty, nil)
}
if convEty == nil {
return nil
}
return conversionCollectionToSet(outEty, convEty)
case out.IsListType() && in.IsTupleType():
outEty := out.ElementType()
return conversionTupleToList(in, outEty, unsafe)

View File

@ -44,6 +44,46 @@ func conversionCollectionToList(ety cty.Type, conv conversion) conversion {
}
}
// conversionCollectionToSet returns a conversion that will apply the given
// conversion to all of the elements of a collection (something that supports
// ForEachElement and LengthInt) and then returns the result as a set.
//
// "conv" can be nil if the elements are expected to already be of the
// correct type and just need to be re-wrapped into a set. (For example,
// if we're converting from a list into a set of the same element type.)
func conversionCollectionToSet(ety cty.Type, conv conversion) conversion {
return func(val cty.Value, path cty.Path) (cty.Value, error) {
elems := make([]cty.Value, 0, val.LengthInt())
i := int64(0)
path = append(path, nil)
it := val.ElementIterator()
for it.Next() {
_, val := it.Element()
var err error
path[len(path)-1] = cty.IndexStep{
Key: cty.NumberIntVal(i),
}
if conv != nil {
val, err = conv(val, path)
if err != nil {
return cty.NilVal, err
}
}
elems = append(elems, val)
i++
}
if len(elems) == 0 {
return cty.SetValEmpty(ety), nil
}
return cty.SetVal(elems), nil
}
}
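// Usage sketch from a caller's perspective (illustrative values only):
//
//	listVal := cty.ListVal([]cty.Value{
//		cty.StringVal("a"), cty.StringVal("b"), cty.StringVal("a"),
//	})
//	setVal, err := convert.Convert(listVal, cty.Set(cty.String))
//	// List-to-set is permitted only as an unsafe conversion, so ordering
//	// is lost and the duplicate "a" is conflated: setVal.LengthInt() == 2.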
// conversionTupleToList returns a conversion that will take a value of the
// given tuple type and return a list of the given element type.
//

View File

@ -30,7 +30,7 @@ var primitiveConversionsSafe = map[cty.Type]map[cty.Type]conversion{
var primitiveConversionsUnsafe = map[cty.Type]map[cty.Type]conversion{
cty.String: {
cty.Number: func(val cty.Value, path cty.Path) (cty.Value, error) {
f, _, err := (&big.Float{}).Parse(val.AsString(), 10)
f, _, err := big.ParseFloat(val.AsString(), 10, 512, big.ToNearestEven)
if err != nil {
return cty.NilVal, path.NewErrorf("a number is required")
}

View File

@ -1,6 +1,6 @@
// Package function builds on the functionality of cty by modeling functions
// that operate on cty Values.
//
// Functions are, at their core, Go anonymous functions. However, this package
// wraps around them utility functions for parameter type checking, etc.
package function

View File

@ -0,0 +1,93 @@
package stdlib
import (
"encoding/csv"
"fmt"
"io"
"strings"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/function"
)
var CSVDecodeFunc = function.New(&function.Spec{
Params: []function.Parameter{
{
Name: "str",
Type: cty.String,
},
},
Type: func(args []cty.Value) (cty.Type, error) {
str := args[0]
if !str.IsKnown() {
return cty.DynamicPseudoType, nil
}
r := strings.NewReader(str.AsString())
cr := csv.NewReader(r)
headers, err := cr.Read()
if err == io.EOF {
return cty.DynamicPseudoType, fmt.Errorf("missing header line")
}
if err != nil {
return cty.DynamicPseudoType, err
}
atys := make(map[string]cty.Type, len(headers))
for _, name := range headers {
if _, exists := atys[name]; exists {
return cty.DynamicPseudoType, fmt.Errorf("duplicate column name %q", name)
}
atys[name] = cty.String
}
return cty.List(cty.Object(atys)), nil
},
Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
ety := retType.ElementType()
atys := ety.AttributeTypes()
str := args[0]
r := strings.NewReader(str.AsString())
cr := csv.NewReader(r)
cr.FieldsPerRecord = len(atys)
// Read the header row first, since that'll tell us which indices
// map to which attribute names.
headers, err := cr.Read()
if err != nil {
return cty.DynamicVal, err
}
var rows []cty.Value
for {
cols, err := cr.Read()
if err == io.EOF {
break
}
if err != nil {
return cty.DynamicVal, err
}
vals := make(map[string]cty.Value, len(cols))
for i, str := range cols {
name := headers[i]
vals[name] = cty.StringVal(str)
}
rows = append(rows, cty.ObjectVal(vals))
}
if len(rows) == 0 {
return cty.ListValEmpty(ety), nil
}
return cty.ListVal(rows), nil
},
})
// CSVDecode parses the given CSV (RFC 4180) string and, if it is valid,
// returns a list of objects representing the rows.
//
// The result is always a list of some object type. The first row of the
// input is used to determine the object attributes, and subsequent rows
// determine the values of those attributes.
func CSVDecode(str cty.Value) (cty.Value, error) {
return CSVDecodeFunc.Call([]cty.Value{str})
}
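// Usage sketch (illustrative input only):
//
//	v, err := CSVDecode(cty.StringVal("name,age\nalice,30\nbob,41"))
//	// v is a list of two objects whose attributes are "name" and "age";
//	// every attribute value is a string, per the Type function above.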

View File

@ -0,0 +1,496 @@
package stdlib
import (
"bytes"
"fmt"
"math/big"
"strings"
"github.com/apparentlymart/go-textseg/textseg"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/convert"
"github.com/zclconf/go-cty/cty/function"
"github.com/zclconf/go-cty/cty/json"
)
//go:generate ragel -Z format_fsm.rl
//go:generate gofmt -w format_fsm.go
var FormatFunc = function.New(&function.Spec{
Params: []function.Parameter{
{
Name: "format",
Type: cty.String,
},
},
VarParam: &function.Parameter{
Name: "args",
Type: cty.DynamicPseudoType,
AllowNull: true,
},
Type: function.StaticReturnType(cty.String),
Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
for _, arg := range args[1:] {
if !arg.IsWhollyKnown() {
// We require all nested values to be known because the only
// thing we can do for a collection/structural type is print
// it as JSON and that requires it to be wholly known.
return cty.UnknownVal(cty.String), nil
}
}
str, err := formatFSM(args[0].AsString(), args[1:])
return cty.StringVal(str), err
},
})
var FormatListFunc = function.New(&function.Spec{
Params: []function.Parameter{
{
Name: "format",
Type: cty.String,
},
},
VarParam: &function.Parameter{
Name: "args",
Type: cty.DynamicPseudoType,
AllowNull: true,
AllowUnknown: true,
},
Type: function.StaticReturnType(cty.List(cty.String)),
Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
fmtVal := args[0]
args = args[1:]
if len(args) == 0 {
// With no arguments, this function is equivalent to Format, but
// returning a single-element list result.
result, err := Format(fmtVal, args...)
return cty.ListVal([]cty.Value{result}), err
}
fmtStr := fmtVal.AsString()
// Each of our arguments will be dealt with either as an iterator
// or as a single value. Iterators are used for sequence-type values
// (lists, sets, tuples) while everything else is treated as a
// single value. The sequences we iterate over are required to be
// all the same length.
iterLen := -1
lenChooser := -1
iterators := make([]cty.ElementIterator, len(args))
singleVals := make([]cty.Value, len(args))
for i, arg := range args {
argTy := arg.Type()
switch {
case (argTy.IsListType() || argTy.IsSetType() || argTy.IsTupleType()) && !arg.IsNull():
thisLen := arg.LengthInt()
if iterLen == -1 {
iterLen = thisLen
lenChooser = i
} else {
if thisLen != iterLen {
return cty.NullVal(cty.List(cty.String)), function.NewArgErrorf(
i+1,
"argument %d has length %d, which is inconsistent with argument %d of length %d",
i+1, thisLen,
lenChooser+1, iterLen,
)
}
}
iterators[i] = arg.ElementIterator()
default:
singleVals[i] = arg
}
}
if iterLen == 0 {
// If our sequences are all empty then our result must be empty.
return cty.ListValEmpty(cty.String), nil
}
if iterLen == -1 {
// If we didn't encounter any iterables at all then we're going
// to just do one iteration with items from singleVals.
iterLen = 1
}
ret := make([]cty.Value, 0, iterLen)
fmtArgs := make([]cty.Value, len(iterators))
Results:
for iterIdx := 0; iterIdx < iterLen; iterIdx++ {
// Construct our arguments for a single format call
for i := range fmtArgs {
switch {
case iterators[i] != nil:
iterator := iterators[i]
iterator.Next()
_, val := iterator.Element()
fmtArgs[i] = val
default:
fmtArgs[i] = singleVals[i]
}
// If any of the arguments to this call would be unknown then
// this particular result is unknown, but we'll keep going
// to see if any other iterations can produce known values.
if !fmtArgs[i].IsWhollyKnown() {
// We require all nested values to be known because the only
// thing we can do for a collection/structural type is print
// it as JSON and that requires it to be wholly known.
ret = append(ret, cty.UnknownVal(cty.String))
continue Results
}
}
str, err := formatFSM(fmtStr, fmtArgs)
if err != nil {
return cty.NullVal(cty.List(cty.String)), fmt.Errorf(
"error on format iteration %d: %s", iterIdx, err,
)
}
ret = append(ret, cty.StringVal(str))
}
return cty.ListVal(ret), nil
},
})
// Format produces a string representation of zero or more values using a
// format string similar to the "printf" function in C.
//
// It supports the following "verbs":
//
// %% Literal percent sign, consuming no value
// %v A default formatting of the value based on type, as described below.
// %#v JSON serialization of the value
// %t Converts to boolean and then produces "true" or "false"
// %b Converts to number, requires integer, produces binary representation
// %d Converts to number, requires integer, produces decimal representation
// %o Converts to number, requires integer, produces octal representation
// %x Converts to number, requires integer, produces hexadecimal representation
// with lowercase letters
// %X Like %x but with uppercase letters
// %e Converts to number, produces scientific notation like -1.234456e+78
// %E Like %e but with an uppercase "E" representing the exponent
// %f Converts to number, produces decimal representation with fractional
// part but no exponent, like 123.456
// %g %e for large exponents or %f otherwise
// %G %E for large exponents or %f otherwise
// %s Converts to string and produces the string's characters
// %q Converts to string and produces JSON-quoted string representation,
// like %v.
//
// The default format selections made by %v are:
//
// string %s
// number %g
// bool %t
// other %#v
//
// Null values produce the literal keyword "null" for %v and %#v, and produce
// an error otherwise.
//
// Width is specified by an optional decimal number immediately preceding the
// verb letter. If absent, the width is whatever is necessary to represent the
// value. Precision is specified after the (optional) width by a period
// followed by a decimal number. If no period is present, a default precision
// is used. A period with no following number is invalid.
// For example:
//
// %f default width, default precision
// %9f width 9, default precision
// %.2f default width, precision 2
// %9.2f width 9, precision 2
//
// Width and precision are measured in Unicode characters (grapheme clusters).
//
// For most values, width is the minimum number of characters to output,
// padding the formatted form with spaces if necessary.
//
// For strings, precision limits the length of the input to be formatted (not
// the size of the output), truncating if necessary.
//
// For numbers, width sets the minimum width of the field and precision sets
// the number of places after the decimal, if appropriate, except that for
// %g/%G precision sets the total number of significant digits.
//
// The following additional symbols can be used immediately after the percent
// introducer as flags:
//
// (a space) leave a space where the sign would be if number is positive
// + Include a sign for a number even if it is positive (numeric only)
// - Pad with spaces on the left rather than the right
// 0 Pad with zeros rather than spaces.
//
// Flag characters are ignored for verbs that do not support them.
//
// By default, % sequences consume successive arguments starting with the first.
// Introducing a [n] sequence immediately before the verb letter, where n is a
// decimal integer, explicitly chooses a particular value argument by its
// one-based index. Subsequent calls without an explicit index will then
// proceed with n+1, n+2, etc.
//
// An error is produced if the format string calls for an impossible conversion
// or accesses more values than are given. An error is produced also for
// an unsupported format verb.
func Format(format cty.Value, vals ...cty.Value) (cty.Value, error) {
args := make([]cty.Value, 0, len(vals)+1)
args = append(args, format)
args = append(args, vals...)
return FormatFunc.Call(args)
}
// FormatList applies the same formatting behavior as Format, but accepts
// a mixture of list and non-list values as arguments. Any list arguments
// passed must have the same length, which dictates the length of the
// resulting list.
//
// Any non-list arguments are used repeatedly for each iteration over the
// list arguments. The list arguments are iterated in order by key, so
// corresponding items are formatted together.
func FormatList(format cty.Value, vals ...cty.Value) (cty.Value, error) {
args := make([]cty.Value, 0, len(vals)+1)
args = append(args, format)
args = append(args, vals...)
return FormatListFunc.Call(args)
}
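// Usage sketch (illustrative values only):
//
//	s, _ := Format(cty.StringVal("%s is %d"), cty.StringVal("x"), cty.NumberIntVal(4))
//	// s is cty.StringVal("x is 4")
//
//	l, _ := FormatList(cty.StringVal("%s=%d"),
//		cty.ListVal([]cty.Value{cty.StringVal("a"), cty.StringVal("b")}),
//		cty.NumberIntVal(1),
//	)
//	// l is a list of "a=1" and "b=1": the non-list argument is reused for
//	// each iteration over the list argument.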
type formatVerb struct {
Raw string
Offset int
ArgNum int
Mode rune
Zero bool
Sharp bool
Plus bool
Minus bool
Space bool
HasPrec bool
Prec int
HasWidth bool
Width int
}
// formatAppend is called by formatFSM (generated by format_fsm.rl) for each
// formatting sequence that is encountered.
func formatAppend(verb *formatVerb, buf *bytes.Buffer, args []cty.Value) error {
argIdx := verb.ArgNum - 1
if argIdx >= len(args) {
return fmt.Errorf(
"not enough arguments for %q at %d: need index %d but have %d total",
verb.Raw, verb.Offset,
verb.ArgNum, len(args),
)
}
arg := args[argIdx]
if verb.Mode != 'v' && arg.IsNull() {
return fmt.Errorf("unsupported value for %q at %d: null value cannot be formatted", verb.Raw, verb.Offset)
}
// Normalize to make some things easier for downstream formatters
if !verb.HasWidth {
verb.Width = -1
}
if !verb.HasPrec {
verb.Prec = -1
}
// For our first pass we'll ensure the verb is supported and then fan
// out to other functions based on what conversion is needed.
switch verb.Mode {
case 'v':
return formatAppendAsIs(verb, buf, arg)
case 't':
return formatAppendBool(verb, buf, arg)
case 'b', 'd', 'o', 'x', 'X', 'e', 'E', 'f', 'g', 'G':
return formatAppendNumber(verb, buf, arg)
case 's', 'q':
return formatAppendString(verb, buf, arg)
default:
return fmt.Errorf("unsupported format verb %q in %q at offset %d", verb.Mode, verb.Raw, verb.Offset)
}
}
func formatAppendAsIs(verb *formatVerb, buf *bytes.Buffer, arg cty.Value) error {
if !verb.Sharp && !arg.IsNull() {
// Unless the caller overrode it with the sharp flag, we'll try some
// specialized formats before we fall back on JSON.
switch arg.Type() {
case cty.String:
fmted := arg.AsString()
fmted = formatPadWidth(verb, fmted)
buf.WriteString(fmted)
return nil
case cty.Number:
bf := arg.AsBigFloat()
fmted := bf.Text('g', -1)
fmted = formatPadWidth(verb, fmted)
buf.WriteString(fmted)
return nil
}
}
jb, err := json.Marshal(arg, arg.Type())
if err != nil {
return fmt.Errorf("unsupported value for %q at %d: %s", verb.Raw, verb.Offset, err)
}
fmted := formatPadWidth(verb, string(jb))
buf.WriteString(fmted)
return nil
}
func formatAppendBool(verb *formatVerb, buf *bytes.Buffer, arg cty.Value) error {
var err error
arg, err = convert.Convert(arg, cty.Bool)
if err != nil {
return fmt.Errorf("unsupported value for %q at %d: %s", verb.Raw, verb.Offset, err)
}
if arg.True() {
buf.WriteString("true")
} else {
buf.WriteString("false")
}
return nil
}
func formatAppendNumber(verb *formatVerb, buf *bytes.Buffer, arg cty.Value) error {
var err error
arg, err = convert.Convert(arg, cty.Number)
if err != nil {
return fmt.Errorf("unsupported value for %q at %d: %s", verb.Raw, verb.Offset, err)
}
switch verb.Mode {
case 'b', 'd', 'o', 'x', 'X':
return formatAppendInteger(verb, buf, arg)
default:
bf := arg.AsBigFloat()
// For floats our format syntax is a subset of Go's, so it's
// safe for us to just lean on the existing Go implementation.
fmtstr := formatStripIndexSegment(verb.Raw)
fmted := fmt.Sprintf(fmtstr, bf)
buf.WriteString(fmted)
return nil
}
}
func formatAppendInteger(verb *formatVerb, buf *bytes.Buffer, arg cty.Value) error {
bf := arg.AsBigFloat()
bi, acc := bf.Int(nil)
if acc != big.Exact {
return fmt.Errorf("unsupported value for %q at %d: an integer is required", verb.Raw, verb.Offset)
}
// For integers our format syntax is a subset of Go's, so it's
// safe for us to just lean on the existing Go implementation.
fmtstr := formatStripIndexSegment(verb.Raw)
fmted := fmt.Sprintf(fmtstr, bi)
buf.WriteString(fmted)
return nil
}
func formatAppendString(verb *formatVerb, buf *bytes.Buffer, arg cty.Value) error {
var err error
arg, err = convert.Convert(arg, cty.String)
if err != nil {
return fmt.Errorf("unsupported value for %q at %d: %s", verb.Raw, verb.Offset, err)
}
// We _cannot_ directly use the Go fmt.Sprintf implementation for strings
// because it measures widths and precisions in runes rather than grapheme
// clusters.
str := arg.AsString()
if verb.Prec > 0 {
strB := []byte(str)
pos := 0
wanted := verb.Prec
for i := 0; i < wanted; i++ {
next := strB[pos:]
if len(next) == 0 {
// ran out of characters before we hit our precision limit
break
}
d, _, _ := textseg.ScanGraphemeClusters(strB[pos:], true)
pos += d
}
str = str[:pos]
}
switch verb.Mode {
case 's':
fmted := formatPadWidth(verb, str)
buf.WriteString(fmted)
case 'q':
jb, err := json.Marshal(cty.StringVal(str), cty.String)
if err != nil {
// Should never happen, since we know this is a known, non-null string
panic(fmt.Errorf("failed to marshal %#v as JSON: %s", arg, err))
}
fmted := formatPadWidth(verb, string(jb))
buf.WriteString(fmted)
default:
// Should never happen because formatAppend should've already validated
panic(fmt.Errorf("invalid string formatting mode %q", verb.Mode))
}
return nil
}
func formatPadWidth(verb *formatVerb, fmted string) string {
if verb.Width < 0 {
return fmted
}
// Safe to ignore errors because ScanGraphemeClusters cannot produce errors
givenLen, _ := textseg.TokenCount([]byte(fmted), textseg.ScanGraphemeClusters)
wantLen := verb.Width
if givenLen >= wantLen {
return fmted
}
padLen := wantLen - givenLen
padChar := " "
if verb.Zero {
padChar = "0"
}
pads := strings.Repeat(padChar, padLen)
if verb.Minus {
return fmted + pads
}
return pads + fmted
}
// formatStripIndexSegment strips out any [nnn] segment present in a verb
// string so that we can pass it through to Go's fmt.Sprintf with a single
// argument. This is used in cases where we're just leaning on Go's formatter
// because it's a superset of ours.
func formatStripIndexSegment(rawVerb string) string {
// We assume the string has already been validated here, since we should
// only be using this function with strings that were accepted by our
// scanner in formatFSM.
start := strings.Index(rawVerb, "[")
end := strings.Index(rawVerb, "]")
if start == -1 || end == -1 {
return rawVerb
}
return rawVerb[:start] + rawVerb[end+1:]
}
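// For example (hypothetical input): formatStripIndexSegment("%[2]d") returns
// "%d", which fmt.Sprintf can then apply to the single argument that
// formatAppend already selected.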

View File

@ -0,0 +1,358 @@
// line 1 "format_fsm.rl"
// This file is generated from format_fsm.rl. DO NOT EDIT.
// line 5 "format_fsm.rl"
package stdlib
import (
"bytes"
"fmt"
"unicode/utf8"
"github.com/zclconf/go-cty/cty"
)
// line 20 "format_fsm.go"
var _formatfsm_actions []byte = []byte{
0, 1, 0, 1, 1, 1, 2, 1, 4,
1, 5, 1, 6, 1, 7, 1, 8,
1, 9, 1, 10, 1, 11, 1, 14,
1, 16, 1, 17, 1, 18, 2, 3,
4, 2, 12, 10, 2, 12, 16, 2,
12, 18, 2, 13, 14, 2, 15, 10,
2, 15, 18,
}
var _formatfsm_key_offsets []byte = []byte{
0, 0, 14, 27, 34, 36, 39, 43,
51,
}
var _formatfsm_trans_keys []byte = []byte{
32, 35, 37, 43, 45, 46, 48, 91,
49, 57, 65, 90, 97, 122, 32, 35,
43, 45, 46, 48, 91, 49, 57, 65,
90, 97, 122, 91, 48, 57, 65, 90,
97, 122, 49, 57, 93, 48, 57, 65,
90, 97, 122, 46, 91, 48, 57, 65,
90, 97, 122, 37,
}
var _formatfsm_single_lengths []byte = []byte{
0, 8, 7, 1, 0, 1, 0, 2,
1,
}
var _formatfsm_range_lengths []byte = []byte{
0, 3, 3, 3, 1, 1, 2, 3,
0,
}
var _formatfsm_index_offsets []byte = []byte{
0, 0, 12, 23, 28, 30, 33, 36,
42,
}
var _formatfsm_indicies []byte = []byte{
1, 2, 3, 4, 5, 6, 7, 10,
8, 9, 9, 0, 1, 2, 4, 5,
6, 7, 10, 8, 9, 9, 0, 13,
11, 12, 12, 0, 14, 0, 15, 14,
0, 9, 9, 0, 16, 19, 17, 18,
18, 0, 20, 3,
}
var _formatfsm_trans_targs []byte = []byte{
0, 2, 2, 8, 2, 2, 3, 2,
7, 8, 4, 3, 8, 4, 5, 6,
3, 7, 8, 4, 1,
}
var _formatfsm_trans_actions []byte = []byte{
7, 17, 9, 3, 15, 13, 25, 11,
43, 29, 19, 27, 49, 46, 21, 0,
37, 23, 40, 34, 1,
}
var _formatfsm_eof_actions []byte = []byte{
0, 31, 31, 31, 31, 31, 31, 31,
5,
}
const formatfsm_start int = 8
const formatfsm_first_final int = 8
const formatfsm_error int = 0
const formatfsm_en_main int = 8
// line 19 "format_fsm.rl"
func formatFSM(format string, a []cty.Value) (string, error) {
var buf bytes.Buffer
data := format
nextArg := 1 // arg numbers are 1-based
var verb formatVerb
// line 153 "format_fsm.rl"
// Ragel state
p := 0 // "Pointer" into data
pe := len(data) // End-of-data "pointer"
cs := 0 // current state (will be initialized by ragel-generated code)
ts := 0
te := 0
eof := pe
// Keep Go compiler happy even if generated code doesn't use these
_ = ts
_ = te
_ = eof
// line 121 "format_fsm.go"
{
cs = formatfsm_start
}
// line 126 "format_fsm.go"
{
var _klen int
var _trans int
var _acts int
var _nacts uint
var _keys int
if p == pe {
goto _test_eof
}
if cs == 0 {
goto _out
}
_resume:
_keys = int(_formatfsm_key_offsets[cs])
_trans = int(_formatfsm_index_offsets[cs])
_klen = int(_formatfsm_single_lengths[cs])
if _klen > 0 {
_lower := int(_keys)
var _mid int
_upper := int(_keys + _klen - 1)
for {
if _upper < _lower {
break
}
_mid = _lower + ((_upper - _lower) >> 1)
switch {
case data[p] < _formatfsm_trans_keys[_mid]:
_upper = _mid - 1
case data[p] > _formatfsm_trans_keys[_mid]:
_lower = _mid + 1
default:
_trans += int(_mid - int(_keys))
goto _match
}
}
_keys += _klen
_trans += _klen
}
_klen = int(_formatfsm_range_lengths[cs])
if _klen > 0 {
_lower := int(_keys)
var _mid int
_upper := int(_keys + (_klen << 1) - 2)
for {
if _upper < _lower {
break
}
_mid = _lower + (((_upper - _lower) >> 1) & ^1)
switch {
case data[p] < _formatfsm_trans_keys[_mid]:
_upper = _mid - 2
case data[p] > _formatfsm_trans_keys[_mid+1]:
_lower = _mid + 2
default:
_trans += int((_mid - int(_keys)) >> 1)
goto _match
}
}
_trans += _klen
}
_match:
_trans = int(_formatfsm_indicies[_trans])
cs = int(_formatfsm_trans_targs[_trans])
if _formatfsm_trans_actions[_trans] == 0 {
goto _again
}
_acts = int(_formatfsm_trans_actions[_trans])
_nacts = uint(_formatfsm_actions[_acts])
_acts++
for ; _nacts > 0; _nacts-- {
_acts++
switch _formatfsm_actions[_acts-1] {
case 0:
// line 29 "format_fsm.rl"
verb = formatVerb{
ArgNum: nextArg,
Prec: -1,
Width: -1,
}
ts = p
case 1:
// line 38 "format_fsm.rl"
buf.WriteByte(data[p])
case 4:
// line 49 "format_fsm.rl"
// We'll try to slurp a whole UTF-8 sequence here, to give the user
// better feedback.
r, _ := utf8.DecodeRuneInString(data[p:])
return buf.String(), fmt.Errorf("unrecognized format character %q at offset %d", r, p)
case 5:
// line 56 "format_fsm.rl"
verb.Sharp = true
case 6:
// line 59 "format_fsm.rl"
verb.Zero = true
case 7:
// line 62 "format_fsm.rl"
verb.Minus = true
case 8:
// line 65 "format_fsm.rl"
verb.Plus = true
case 9:
// line 68 "format_fsm.rl"
verb.Space = true
case 10:
// line 72 "format_fsm.rl"
verb.ArgNum = 0
case 11:
// line 75 "format_fsm.rl"
verb.ArgNum = (10 * verb.ArgNum) + (int(data[p]) - '0')
case 12:
// line 79 "format_fsm.rl"
verb.HasWidth = true
case 13:
// line 82 "format_fsm.rl"
verb.Width = 0
case 14:
// line 85 "format_fsm.rl"
verb.Width = (10 * verb.Width) + (int(data[p]) - '0')
case 15:
// line 89 "format_fsm.rl"
verb.HasPrec = true
case 16:
// line 92 "format_fsm.rl"
verb.Prec = 0
case 17:
// line 95 "format_fsm.rl"
verb.Prec = (10 * verb.Prec) + (int(data[p]) - '0')
case 18:
// line 99 "format_fsm.rl"
verb.Mode = rune(data[p])
te = p + 1
verb.Raw = data[ts:te]
verb.Offset = ts
err := formatAppend(&verb, &buf, a)
if err != nil {
return buf.String(), err
}
nextArg = verb.ArgNum + 1
// line 324 "format_fsm.go"
}
}
_again:
if cs == 0 {
goto _out
}
p++
if p != pe {
goto _resume
}
_test_eof:
{
}
if p == eof {
__acts := _formatfsm_eof_actions[cs]
__nacts := uint(_formatfsm_actions[__acts])
__acts++
for ; __nacts > 0; __nacts-- {
__acts++
switch _formatfsm_actions[__acts-1] {
case 2:
// line 42 "format_fsm.rl"
case 3:
// line 45 "format_fsm.rl"
return buf.String(), fmt.Errorf("invalid format string starting at offset %d", p)
case 4:
// line 49 "format_fsm.rl"
// We'll try to slurp a whole UTF-8 sequence here, to give the user
// better feedback.
r, _ := utf8.DecodeRuneInString(data[p:])
return buf.String(), fmt.Errorf("unrecognized format character %q at offset %d", r, p)
// line 363 "format_fsm.go"
}
}
}
_out:
{
}
}
// line 171 "format_fsm.rl"
// If we fall out here without being in a final state then we've
// encountered something that the scanner can't match, which should
// be impossible (the scanner matches all bytes _somehow_) but we'll
// flag it anyway rather than just losing data from the end.
if cs < formatfsm_first_final {
return buf.String(), fmt.Errorf("extraneous characters beginning at offset %i", p)
}
return buf.String(), nil
}

View File

@ -0,0 +1,182 @@
// This file is generated from format_fsm.rl. DO NOT EDIT.
%%{
# (except you are actually in format_fsm.rl here, so edit away!)
machine formatfsm;
}%%
package stdlib
import (
"bytes"
"fmt"
"unicode/utf8"
"github.com/zclconf/go-cty/cty"
)
%%{
write data;
}%%
func formatFSM(format string, a []cty.Value) (string, error) {
var buf bytes.Buffer
data := format
nextArg := 1 // arg numbers are 1-based
var verb formatVerb
%%{
action begin {
verb = formatVerb{
ArgNum: nextArg,
Prec: -1,
Width: -1,
}
ts = p
}
action emit {
buf.WriteByte(fc);
}
action finish_ok {
}
action finish_err {
return buf.String(), fmt.Errorf("invalid format string starting at offset %d", p)
}
action err_char {
// We'll try to slurp a whole UTF-8 sequence here, to give the user
// better feedback.
r, _ := utf8.DecodeRuneInString(data[p:])
return buf.String(), fmt.Errorf("unrecognized format character %q at offset %d", r, p)
}
action flag_sharp {
verb.Sharp = true
}
action flag_zero {
verb.Zero = true
}
action flag_minus {
verb.Minus = true
}
action flag_plus {
verb.Plus = true
}
action flag_space {
verb.Space = true
}
action argidx_reset {
verb.ArgNum = 0
}
action argidx_num {
verb.ArgNum = (10 * verb.ArgNum) + (int(fc) - '0')
}
action has_width {
verb.HasWidth = true
}
action width_reset {
verb.Width = 0
}
action width_num {
verb.Width = (10 * verb.Width) + (int(fc) - '0')
}
action has_prec {
verb.HasPrec = true
}
action prec_reset {
verb.Prec = 0
}
action prec_num {
verb.Prec = (10 * verb.Prec) + (int(fc) - '0')
}
action mode {
verb.Mode = rune(fc)
te = p+1
verb.Raw = data[ts:te]
verb.Offset = ts
err := formatAppend(&verb, &buf, a)
if err != nil {
return buf.String(), err
}
nextArg = verb.ArgNum + 1
}
# a number that isn't zero and doesn't have a leading zero
num = [1-9] [0-9]*;
flags = (
'0' @flag_zero |
'#' @flag_sharp |
'-' @flag_minus |
'+' @flag_plus |
' ' @flag_space
)*;
argidx = ((
'[' (num $argidx_num) ']'
) >argidx_reset)?;
width = (
( num $width_num ) >width_reset %has_width
)?;
precision = (
('.' ( digit* $prec_num )) >prec_reset %has_prec
)?;
# We accept any letter here, but will be more picky in formatAppend
mode = ('a'..'z' | 'A'..'Z') @mode;
fmt_verb = (
'%' @begin
flags
width
precision
argidx
mode
);
main := (
[^%] @emit |
'%%' @emit |
fmt_verb
)* @/finish_err %/finish_ok $!err_char;
}%%
// Ragel state
p := 0 // "Pointer" into data
pe := len(data) // End-of-data "pointer"
cs := 0 // current state (will be initialized by ragel-generated code)
ts := 0
te := 0
eof := pe
// Keep Go compiler happy even if generated code doesn't use these
_ = ts
_ = te
_ = eof
%%{
write init;
write exec;
}%%
// If we fall out here without being in a final state then we've
// encountered something that the scanner can't match, which should
// be impossible (the scanner matches all bytes _somehow_) but we'll
// flag it anyway rather than just losing data from the end.
if cs < formatfsm_first_final {
return buf.String(), fmt.Errorf("extraneous characters beginning at offset %i", p)
}
return buf.String(), nil
}

View File

@ -17,6 +17,13 @@ var JSONEncodeFunc = function.New(&function.Spec{
Type: function.StaticReturnType(cty.String),
Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
val := args[0]
if !val.IsWhollyKnown() {
// We can't serialize unknowns, so if the value is unknown or
// contains any _nested_ unknowns then our result must be
// unknown.
return cty.UnknownVal(retType), nil
}
buf, err := json.Marshal(val, val.Type())
if err != nil {
return cty.NilVal, err

View File

@ -0,0 +1,195 @@
package stdlib
import (
"fmt"
"github.com/zclconf/go-cty/cty/convert"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/function"
)
var SetHasElementFunc = function.New(&function.Spec{
Params: []function.Parameter{
{
Name: "set",
Type: cty.Set(cty.DynamicPseudoType),
AllowDynamicType: true,
},
{
Name: "elem",
Type: cty.DynamicPseudoType,
AllowDynamicType: true,
},
},
Type: function.StaticReturnType(cty.Bool),
Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
return args[0].HasElement(args[1]), nil
},
})
var SetUnionFunc = function.New(&function.Spec{
Params: []function.Parameter{
{
Name: "first_set",
Type: cty.Set(cty.DynamicPseudoType),
AllowDynamicType: true,
},
},
VarParam: &function.Parameter{
Name: "other_sets",
Type: cty.Set(cty.DynamicPseudoType),
AllowDynamicType: true,
},
Type: setOperationReturnType,
Impl: setOperationImpl(func(s1, s2 cty.ValueSet) cty.ValueSet {
return s1.Union(s2)
}),
})
var SetIntersectionFunc = function.New(&function.Spec{
Params: []function.Parameter{
{
Name: "first_set",
Type: cty.Set(cty.DynamicPseudoType),
AllowDynamicType: true,
},
},
VarParam: &function.Parameter{
Name: "other_sets",
Type: cty.Set(cty.DynamicPseudoType),
AllowDynamicType: true,
},
Type: setOperationReturnType,
Impl: setOperationImpl(func(s1, s2 cty.ValueSet) cty.ValueSet {
return s1.Intersection(s2)
}),
})
var SetSubtractFunc = function.New(&function.Spec{
Params: []function.Parameter{
{
Name: "a",
Type: cty.Set(cty.DynamicPseudoType),
AllowDynamicType: true,
},
{
Name: "b",
Type: cty.Set(cty.DynamicPseudoType),
AllowDynamicType: true,
},
},
Type: setOperationReturnType,
Impl: setOperationImpl(func(s1, s2 cty.ValueSet) cty.ValueSet {
return s1.Subtract(s2)
}),
})
var SetSymmetricDifferenceFunc = function.New(&function.Spec{
Params: []function.Parameter{
{
Name: "first_set",
Type: cty.Set(cty.DynamicPseudoType),
AllowDynamicType: true,
},
},
VarParam: &function.Parameter{
Name: "other_sets",
Type: cty.Set(cty.DynamicPseudoType),
AllowDynamicType: true,
},
Type: setOperationReturnType,
Impl: setOperationImpl(func(s1, s2 cty.ValueSet) cty.ValueSet {
return s1.SymmetricDifference(s2)
}),
})
// SetHasElement determines whether the given set contains the given value as an
// element.
func SetHasElement(set cty.Value, elem cty.Value) (cty.Value, error) {
return SetHasElementFunc.Call([]cty.Value{set, elem})
}
// SetUnion returns a new set containing all of the elements from the given
// sets, which must have element types that can all be converted to some
// common type using the standard type unification rules. If conversion
// is not possible, an error is returned.
//
// The union operation is performed after type conversion, which may result
// in some previously-distinct values being conflated.
//
// At least one set must be provided.
func SetUnion(sets ...cty.Value) (cty.Value, error) {
return SetUnionFunc.Call(sets)
}
// SetIntersection returns a new set containing the elements that exist
// in all of the given sets, which must have element types that can all be
// converted to some common type using the standard type unification rules.
// If conversion is not possible, an error is returned.
//
// The intersection operation is performed after type conversion, which may
// result in some previously-distinct values being conflated.
//
// At least one set must be provided.
func SetIntersection(sets ...cty.Value) (cty.Value, error) {
return SetIntersectionFunc.Call(sets)
}
// SetSubtract returns a new set containing the elements from the
// first set that are not present in the second set. The sets must have
// element types that can both be converted to some common type using the
// standard type unification rules. If conversion is not possible, an error
// is returned.
//
// The subtract operation is performed after type conversion, which may
// result in some previously-distinct values being conflated.
func SetSubtract(a, b cty.Value) (cty.Value, error) {
return SetSubtractFunc.Call([]cty.Value{a, b})
}
// SetSymmetricDifference returns a new set containing elements that appear
// in any of the given sets but not in more than one of them. The sets must have
// element types that can all be converted to some common type using the
// standard type unification rules. If conversion is not possible, an error
// is returned.
//
// The difference operation is performed after type conversion, which may
// result in some previously-distinct values being conflated.
func SetSymmetricDifference(sets ...cty.Value) (cty.Value, error) {
return SetSymmetricDifferenceFunc.Call(sets)
}
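// Usage sketch (illustrative values only):
//
//	a := cty.SetVal([]cty.Value{cty.NumberIntVal(1), cty.NumberIntVal(2)})
//	b := cty.SetVal([]cty.Value{cty.NumberIntVal(2), cty.NumberIntVal(3)})
//	u, _ := SetUnion(a, b)        // set of 1, 2, 3
//	i, _ := SetIntersection(a, b) // set of 2
//	d, _ := SetSubtract(a, b)     // set of 1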
func setOperationReturnType(args []cty.Value) (ret cty.Type, err error) {
var etys []cty.Type
for _, arg := range args {
etys = append(etys, arg.Type().ElementType())
}
newEty, _ := convert.UnifyUnsafe(etys)
if newEty == cty.NilType {
return cty.NilType, fmt.Errorf("given sets must all have compatible element types")
}
return cty.Set(newEty), nil
}
func setOperationImpl(f func(s1, s2 cty.ValueSet) cty.ValueSet) function.ImplFunc {
return func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
first := args[0]
first, err = convert.Convert(first, retType)
if err != nil {
return cty.NilVal, function.NewArgError(0, err)
}
set := first.AsValueSet()
for i, arg := range args[1:] {
arg, err := convert.Convert(arg, retType)
if err != nil {
return cty.NilVal, function.NewArgError(i+1, err)
}
argSet := arg.AsValueSet()
set = f(set, argSet)
}
return cty.SetValFromValueSet(set), nil
}
}

View File

@ -0,0 +1,31 @@
package function
import (
"github.com/zclconf/go-cty/cty"
)
// Unpredictable wraps a given function such that it retains the same arguments
// and type checking behavior but will return an unknown value when called.
//
// It is recommended that most functions be "pure", which is to say that they
// will always produce the same value given particular input. However,
// sometimes it is necessary to offer functions whose behavior depends on
// some external state, such as reading a file or determining the current time.
// In such cases, an unpredictable wrapper might be used to stand in for
// the function during some sort of prior "checking" phase in order to delay
// the actual effect until later.
//
// While Unpredictable can support a function that isn't pure in its
// implementation, it still expects a function to be pure in its type checking
// behavior, except for the special case of returning cty.DynamicPseudoType
// if it is not yet able to predict its return value based on current argument
// information.
func Unpredictable(f Function) Function {
newSpec := *f.spec // shallow copy
newSpec.Impl = unpredictableImpl
return New(&newSpec)
}
func unpredictableImpl(args []cty.Value, retType cty.Type) (cty.Value, error) {
return cty.UnknownVal(retType), nil
}
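// Usage sketch, wrapping a hypothetical clock-reading function so that a
// prior checking phase sees only an unknown string:
//
//	checkingNow := Unpredictable(nowFunc) // nowFunc is illustrative only
//	v, _ := checkingNow.Call(nil)
//	// v is cty.UnknownVal(cty.String), regardless of the real implementation.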

View File

@ -14,9 +14,14 @@ type typeObject struct {
// After a map is passed to this function the caller must no longer access it,
// since ownership is transferred to this library.
func Object(attrTypes map[string]Type) Type {
attrTypesNorm := make(map[string]Type, len(attrTypes))
for k, v := range attrTypes {
attrTypesNorm[NormalizeString(k)] = v
}
return Type{
typeObject{
AttrTypes: attrTypes,
AttrTypes: attrTypesNorm,
},
}
}
@ -91,6 +96,7 @@ func (t Type) IsObjectType() bool {
// name, regardless of its type. Will panic if the reciever isn't an object
// type; use IsObjectType to determine whether this operation will succeed.
func (t Type) HasAttribute(name string) bool {
name = NormalizeString(name)
if ot, ok := t.typeImpl.(typeObject); ok {
_, hasAttr := ot.AttrTypes[name]
return hasAttr
@ -102,6 +108,7 @@ func (t Type) HasAttribute(name string) bool {
// panic if the receiver is not an object type (use IsObjectType to confirm)
// or if the object type has no such attribute (use HasAttribute to confirm).
func (t Type) AttributeType(name string) Type {
name = NormalizeString(name)
if ot, ok := t.typeImpl.(typeObject); ok {
aty, hasAttr := ot.AttrTypes[name]
if !hasAttr {

View File

@ -156,6 +156,10 @@ func (s IndexStep) Apply(val Value) (Value, error) {
return val.Index(s.Key), nil
}
func (s IndexStep) GoString() string {
return fmt.Sprintf("cty.IndexStep{Key:%#v}", s.Key)
}
// GetAttrStep is a Step implementation representing retrieving an attribute
// from a value, which must be of an object type.
type GetAttrStep struct {
@ -176,3 +180,7 @@ func (s GetAttrStep) Apply(val Value) (Value, error) {
return val.GetAttr(s.Name), nil
}
func (s GetAttrStep) GoString() string {
return fmt.Sprintf("cty.GetAttrStep{Name:%q}", s.Name)
}
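These GoString implementations make paths easier to read in test failures and debug output; a small sketch (the exact rendering of the key value is illustrative):

```go
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	path := cty.Path{
		cty.GetAttrStep{Name: "items"},
		cty.IndexStep{Key: cty.NumberIntVal(0)},
	}
	// Prints something like:
	// cty.Path{cty.GetAttrStep{Name:"items"}, cty.IndexStep{Key:cty.NumberIntVal(0)}}
	fmt.Printf("%#v\n", path)
}
```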

View File

@ -65,6 +65,16 @@ func (s Set) Has(val interface{}) bool {
return false
}
// Copy performs a shallow copy of the receiving set, returning a new set
// with the same rules and elements.
func (s Set) Copy() Set {
ret := NewSet(s.rules)
for k, v := range s.vals {
ret.vals[k] = v
}
return ret
}
// Iterator returns an iterator over values in the set, in an undefined order
// that callers should not depend on.
//

126
vendor/github.com/zclconf/go-cty/cty/set_helper.go generated vendored Normal file
View File

@ -0,0 +1,126 @@
package cty
import (
"fmt"
"github.com/zclconf/go-cty/cty/set"
)
// ValueSet is to cty.Set what []cty.Value is to cty.List and
// map[string]cty.Value is to cty.Map. It's provided to allow callers a
// convenient interface for manipulating sets before wrapping them in cty.Set
// values using cty.SetValFromValueSet.
//
// Unlike value slices and value maps, ValueSet instances have a single
// homogeneous element type because that is a requirement of the underlying
// set implementation, which uses the element type to select a suitable
// hashing function.
//
// Set mutations are not concurrency-safe.
type ValueSet struct {
// ValueSet is just a thin wrapper around a set.Set with our value-oriented
// "rules" applied. We do this so that the caller can work in terms of
// cty.Value objects even though the set internals use the raw values.
s set.Set
}
// NewValueSet creates and returns a new ValueSet with the given element type.
func NewValueSet(ety Type) ValueSet {
return newValueSet(set.NewSet(setRules{Type: ety}))
}
func newValueSet(s set.Set) ValueSet {
return ValueSet{
s: s,
}
}
// ElementType returns the element type for the receiving ValueSet.
func (s ValueSet) ElementType() Type {
return s.s.Rules().(setRules).Type
}
// Add inserts the given value into the receiving set.
func (s ValueSet) Add(v Value) {
s.requireElementType(v)
s.s.Add(v.v)
}
// Remove deletes the given value from the receiving set, if indeed it was
// there in the first place. If the value is not present, this is a no-op.
func (s ValueSet) Remove(v Value) {
s.requireElementType(v)
s.s.Remove(v.v)
}
// Has returns true if the given value is in the receiving set, or false if
// it is not.
func (s ValueSet) Has(v Value) bool {
s.requireElementType(v)
return s.s.Has(v.v)
}
// Copy performs a shallow copy of the receiving set, returning a new set
// with the same rules and elements.
func (s ValueSet) Copy() ValueSet {
return newValueSet(s.s.Copy())
}
// Length returns the number of values in the set.
func (s ValueSet) Length() int {
return s.s.Length()
}
// Values returns a slice of all of the values in the set in no particular
// order.
func (s ValueSet) Values() []Value {
l := s.s.Length()
if l == 0 {
return nil
}
ret := make([]Value, 0, l)
ety := s.ElementType()
for it := s.s.Iterator(); it.Next(); {
ret = append(ret, Value{
ty: ety,
v: it.Value(),
})
}
return ret
}
// Union returns a new set that contains all of the members of both the
// receiving set and the given set. Both sets must have the same element type,
// or else this function will panic.
func (s ValueSet) Union(other ValueSet) ValueSet {
return newValueSet(s.s.Union(other.s))
}
// Intersection returns a new set that contains the values that both the
// receiver and given sets have in common. Both sets must have the same element
// type, or else this function will panic.
func (s ValueSet) Intersection(other ValueSet) ValueSet {
return newValueSet(s.s.Intersection(other.s))
}
// Subtract returns a new set that contains all of the values from the receiver
// that are not also in the given set. Both sets must have the same element
// type, or else this function will panic.
func (s ValueSet) Subtract(other ValueSet) ValueSet {
return newValueSet(s.s.Subtract(other.s))
}
// SymmetricDifference returns a new set that contains all of the values from
// both the receiver and given sets, except those that both sets have in
// common. Both sets must have the same element type, or else this function
// will panic.
func (s ValueSet) SymmetricDifference(other ValueSet) ValueSet {
return newValueSet(s.s.SymmetricDifference(other.s))
}
// requireElementType panics if the given value is not of the set's element type.
func (s ValueSet) requireElementType(v Value) {
if !v.Type().Equals(s.ElementType()) {
panic(fmt.Errorf("attempt to use %#v value with set of %#v", v.Type(), s.ElementType()))
}
}
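A minimal sketch of building up a set through the ValueSet API and then wrapping the result as an immutable cty value:

```go
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	s := cty.NewValueSet(cty.String)
	s.Add(cty.StringVal("a"))
	s.Add(cty.StringVal("b"))
	s.Add(cty.StringVal("a")) // duplicate; coalesces with the earlier "a"

	other := cty.NewValueSet(cty.String)
	other.Add(cty.StringVal("c"))

	union := s.Union(other)
	fmt.Println(union.Length()) // 3

	// Wrap as a value of type cty.Set(cty.String).
	fmt.Printf("%#v\n", cty.SetValFromValueSet(union))
}
```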

View File

@ -69,3 +69,30 @@ var NilVal = Value{
ty: Type{typeImpl: nil},
v: nil,
}
// IsWhollyKnown is an extension of IsKnown that also recursively checks
// inside collections and structures to see if there are any nested unknown
// values.
func (val Value) IsWhollyKnown() bool {
if !val.IsKnown() {
return false
}
if val.IsNull() {
// Can't recurse into a null, so we're done
return true
}
switch {
case val.CanIterateElements():
for it := val.ElementIterator(); it.Next(); {
_, ev := it.Element()
if !ev.IsWhollyKnown() {
return false
}
}
return true
default:
return true
}
}
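A sketch contrasting IsKnown with IsWhollyKnown: a collection can itself be known while still containing unknown elements:

```go
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	complete := cty.ListVal([]cty.Value{cty.StringVal("a")})
	partial := cty.ListVal([]cty.Value{
		cty.StringVal("a"),
		cty.UnknownVal(cty.String), // nested unknown element
	})

	fmt.Println(complete.IsKnown(), complete.IsWhollyKnown()) // true true
	fmt.Println(partial.IsKnown(), partial.IsWhollyKnown())   // true false
}
```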

View File

@ -59,10 +59,19 @@ func NumberFloatVal(v float64) Value {
func StringVal(v string) Value {
return Value{
ty: String,
v: norm.NFC.String(v),
v: NormalizeString(v),
}
}
// NormalizeString applies the same normalization that cty applies when
// constructing string values.
//
// A return value from this function can be meaningfully compared byte-for-byte
// with a Value.AsString result.
func NormalizeString(s string) string {
return norm.NFC.String(s)
}
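A sketch of why this matters: the same text in NFD form (a base letter plus a combining mark) is stored in NFC form, so only a normalized string compares equal byte-for-byte with the stored value:

```go
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	// "é" written as 'e' followed by a combining acute accent (NFD form).
	decomposed := "e\u0301"

	v := cty.StringVal(decomposed)
	fmt.Println(v.AsString() == decomposed)                       // false
	fmt.Println(v.AsString() == cty.NormalizeString(decomposed))  // true
}
```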
// ObjectVal returns a Value of an object type whose structure is defined
// by the key names and value types in the given map.
func ObjectVal(attrs map[string]Value) Value {
@ -70,6 +79,7 @@ func ObjectVal(attrs map[string]Value) Value {
attrVals := make(map[string]interface{}, len(attrs))
for attr, val := range attrs {
attr = NormalizeString(attr)
attrTypes[attr] = val.ty
attrVals[attr] = val.v
}
@ -162,7 +172,7 @@ func MapVal(vals map[string]Value) Value {
))
}
rawMap[key] = val.v
rawMap[NormalizeString(key)] = val.v
}
return Value{
@ -214,6 +224,21 @@ func SetVal(vals []Value) Value {
}
}
// SetValFromValueSet returns a Value of set type based on an already-constructed
// ValueSet.
//
// The element type of the returned value is the element type of the given
// set.
func SetValFromValueSet(s ValueSet) Value {
ety := s.ElementType()
rawVal := s.s.Copy() // copy so caller can't mutate what we wrap
return Value{
ty: Set(ety),
v: rawVal,
}
}
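Because of the copy above, later mutations to the ValueSet do not leak into the already-constructed value; a small sketch:

```go
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	vs := cty.NewValueSet(cty.Number)
	vs.Add(cty.NumberIntVal(1))

	val := cty.SetValFromValueSet(vs)
	vs.Add(cty.NumberIntVal(2)) // does not affect val

	fmt.Println(val.LengthInt()) // still 1
}
```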
// SetValEmpty returns an empty set of the given element type.
func SetValEmpty(element Type) Value {
return Value{

View File

@ -15,14 +15,14 @@ func (val Value) GoString() string {
}
if val.ty == DynamicPseudoType {
return "cty.DynamicValue"
return "cty.DynamicVal"
}
if !val.IsKnown() {
return fmt.Sprintf("cty.Unknown(%#v)", val.ty)
return fmt.Sprintf("cty.UnknownVal(%#v)", val.ty)
}
if val.IsNull() {
return fmt.Sprintf("cty.Null(%#v)", val.ty)
return fmt.Sprintf("cty.NullVal(%#v)", val.ty)
}
// By the time we reach here we've dealt with all of the exceptions around
@ -540,6 +540,8 @@ func (val Value) GetAttr(name string) Value {
if !val.ty.IsObjectType() {
panic("value is not an object")
}
name = NormalizeString(name)
if !val.ty.HasAttribute(name) {
panic("value has no attribute of that name")
}
@ -756,6 +758,9 @@ func (val Value) HasElement(elem Value) Value {
if val.IsNull() {
panic("can't call HasElement on a nil value")
}
if ty.ElementType() != elem.Type() {
return False
}
s := val.v.(set.Set)
return BoolVal(s.Has(elem.v))
@ -967,7 +972,7 @@ func (val Value) AsString() string {
// cty.Number value, or panics if called on any other value.
//
// For more convenient conversions to other native numeric types, use the
// "convert" package.
// "gocty" package.
func (val Value) AsBigFloat() *big.Float {
if val.ty != Number {
panic("not a number")
@ -985,6 +990,72 @@ func (val Value) AsBigFloat() *big.Float {
return &ret
}
// AsValueSlice returns a []cty.Value representation of a non-null, non-unknown
// value of any type that CanIterateElements, or panics if called on
// any other value.
//
// For more convenient conversions to slices of more specific types, use
// the "gocty" package.
func (val Value) AsValueSlice() []Value {
l := val.LengthInt()
if l == 0 {
return nil
}
ret := make([]Value, 0, l)
for it := val.ElementIterator(); it.Next(); {
_, v := it.Element()
ret = append(ret, v)
}
return ret
}
// AsValueMap returns a map[string]cty.Value representation of a non-null,
// non-unknown value of any type that CanIterateElements, or panics if called
// on any other value.
//
// For more convenient conversions to maps of more specific types, use
// the "gocty" package.
func (val Value) AsValueMap() map[string]Value {
l := val.LengthInt()
if l == 0 {
return nil
}
ret := make(map[string]Value, l)
for it := val.ElementIterator(); it.Next(); {
k, v := it.Element()
ret[k.AsString()] = v
}
return ret
}
// AsValueSet returns a ValueSet representation of a non-null,
// non-unknown value of any collection type, or panics if called
// on any other value.
//
// Unlike AsValueSlice and AsValueMap, this method requires specifically a
// collection type (list, set or map) and does not allow structural types
// (tuple or object), because the ValueSet type requires homogeneous
// element types.
//
// The returned ValueSet can store only values of the receiver's element type.
func (val Value) AsValueSet() ValueSet {
if !val.Type().IsCollectionType() {
panic("not a collection type")
}
// We don't give the caller our own set.Set (assuming we're a cty.Set value)
// because then the caller could mutate our internals, which is forbidden.
// Instead, we will construct a new set and append our elements into it.
ret := NewValueSet(val.Type().ElementType())
for it := val.ElementIterator(); it.Next(); {
_, v := it.Element()
ret.Add(v)
}
return ret
}
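A sketch of the three accessors side by side; note that AsValueSet accepts any collection type, so a list works here too:

```go
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	list := cty.ListVal([]cty.Value{cty.StringVal("a"), cty.StringVal("b")})
	m := cty.MapVal(map[string]cty.Value{"x": cty.NumberIntVal(1)})

	for i, v := range list.AsValueSlice() {
		fmt.Println(i, v.AsString())
	}
	for k, v := range m.AsValueMap() {
		fmt.Printf("%s = %#v\n", k, v)
	}

	// Lists are collections, so this produces a ValueSet of the element type.
	set := list.AsValueSet()
	fmt.Println(set.Has(cty.StringVal("a"))) // true
}
```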
// EncapsulatedValue returns the native value encapsulated in a non-null,
// non-unknown capsule-typed value, or panics if called on any other value.
//

182
vendor/github.com/zclconf/go-cty/cty/walk.go generated vendored Normal file
View File

@ -0,0 +1,182 @@
package cty
// Walk visits all of the values in a possibly-complex structure, calling
// a given function for each value.
//
// For example, given a list of strings the callback would first be called
// with the whole list and then called once for each element of the list.
//
// The callback function may prevent recursive visits to child values by
// returning false. The callback function may halt the walk altogether by
// returning a non-nil error. If the returned error is about the element
// currently being visited, it is recommended to use the provided path
// value to produce a PathError describing that context.
//
// The path passed to the given function may not be used after that function
// returns, since its backing array is re-used for other calls.
func Walk(val Value, cb func(Path, Value) (bool, error)) error {
var path Path
return walk(path, val, cb)
}
func walk(path Path, val Value, cb func(Path, Value) (bool, error)) error {
deeper, err := cb(path, val)
if err != nil {
return err
}
if !deeper {
return nil
}
if val.IsNull() || !val.IsKnown() {
// Can't recurse into null or unknown values, regardless of type
return nil
}
ty := val.Type()
switch {
case ty.IsObjectType():
for it := val.ElementIterator(); it.Next(); {
nameVal, av := it.Element()
path := append(path, GetAttrStep{
Name: nameVal.AsString(),
})
err := walk(path, av, cb)
if err != nil {
return err
}
}
case val.CanIterateElements():
for it := val.ElementIterator(); it.Next(); {
kv, ev := it.Element()
path := append(path, IndexStep{
Key: kv,
})
err := walk(path, ev, cb)
if err != nil {
return err
}
}
}
return nil
}
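A sketch of walking a nested value; returning true keeps descending, and object attribute names and collection keys arrive via the path argument:

```go
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	val := cty.ObjectVal(map[string]cty.Value{
		"name": cty.StringVal("example"),
		"tags": cty.ListVal([]cty.Value{cty.StringVal("a")}),
	})

	err := cty.Walk(val, func(p cty.Path, v cty.Value) (bool, error) {
		// p is only valid during this call, per the doc comment above.
		fmt.Printf("%#v: %#v\n", p, v.Type())
		return true, nil // keep descending into child values
	})
	if err != nil {
		panic(err)
	}
}
```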
// Transform visits all of the values in a possibly-complex structure,
// calling a given function for each value which has an opportunity to
// replace that value.
//
// Unlike Walk, Transform visits child nodes first, so for a list of strings
// it would first visit the strings and then the _new_ list constructed
// from the transformed values of the list items.
//
// This is useful for creating the effect of being able to make deep mutations
// to a value even though values are immutable. However, it's the responsibility
// of the given function to preserve expected invariants, such as homogeneity of
// element types in collections; this function can panic if such invariants
// are violated, just as if new values were constructed directly using the
// value constructor functions. An easy way to preserve invariants is to
// ensure that the transform function never changes the value type.
//
// The callback function may halt the walk altogether by
// returning a non-nil error. If the returned error is about the element
// currently being visited, it is recommended to use the provided path
// value to produce a PathError describing that context.
//
// The path passed to the given function may not be used after that function
// returns, since its backing array is re-used for other calls.
func Transform(val Value, cb func(Path, Value) (Value, error)) (Value, error) {
var path Path
return transform(path, val, cb)
}
func transform(path Path, val Value, cb func(Path, Value) (Value, error)) (Value, error) {
ty := val.Type()
var newVal Value
switch {
case val.IsNull() || !val.IsKnown():
// Can't recurse into null or unknown values, regardless of type
newVal = val
case ty.IsListType() || ty.IsSetType() || ty.IsTupleType():
l := val.LengthInt()
switch l {
case 0:
// No deep transform for an empty sequence
newVal = val
default:
elems := make([]Value, 0, l)
for it := val.ElementIterator(); it.Next(); {
kv, ev := it.Element()
path := append(path, IndexStep{
Key: kv,
})
newEv, err := transform(path, ev, cb)
if err != nil {
return DynamicVal, err
}
elems = append(elems, newEv)
}
switch {
case ty.IsListType():
newVal = ListVal(elems)
case ty.IsSetType():
newVal = SetVal(elems)
case ty.IsTupleType():
newVal = TupleVal(elems)
default:
panic("unknown sequence type") // should never happen because of the case we are in
}
}
case ty.IsMapType():
l := val.LengthInt()
switch l {
case 0:
// No deep transform for an empty map
newVal = val
default:
elems := make(map[string]Value)
for it := val.ElementIterator(); it.Next(); {
kv, ev := it.Element()
path := append(path, IndexStep{
Key: kv,
})
newEv, err := transform(path, ev, cb)
if err != nil {
return DynamicVal, err
}
elems[kv.AsString()] = newEv
}
newVal = MapVal(elems)
}
case ty.IsObjectType():
switch {
case ty.Equals(EmptyObject):
// No deep transform for an empty object
newVal = val
default:
atys := ty.AttributeTypes()
newAVs := make(map[string]Value)
for name := range atys {
av := val.GetAttr(name)
path := append(path, GetAttrStep{
Name: name,
})
newAV, err := transform(path, av, cb)
if err != nil {
return DynamicVal, err
}
newAVs[name] = newAV
}
newVal = ObjectVal(newAVs)
}
default:
newVal = val
}
return cb(path, newVal)
}
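A sketch of a type-preserving transform that upper-cases every string leaf; non-string values (including the list itself, which is visited last) pass through unchanged, so the collection invariants hold:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	val := cty.ListVal([]cty.Value{cty.StringVal("a"), cty.StringVal("b")})

	upper, err := cty.Transform(val, func(p cty.Path, v cty.Value) (cty.Value, error) {
		if v.Type().Equals(cty.String) && !v.IsNull() && v.IsKnown() {
			// Same type in, same type out, preserving collection invariants.
			return cty.StringVal(strings.ToUpper(v.AsString())), nil
		}
		return v, nil
	})
	if err != nil {
		panic(err)
	}
	fmt.Printf("%#v\n", upper) // cty.ListVal of "A" and "B"
}
```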

88
vendor/vendor.json vendored
View File

@ -1715,58 +1715,64 @@
"revisionTime": "2017-05-04T19:02:34Z"
},
{
"checksumSHA1": "6kxMiZSmgazD/CZgmnEeEMJSAOM=",
"checksumSHA1": "Tpj2tK/XrhxbIKB5xEJlfTI46M0=",
"path": "github.com/hashicorp/hcl2/ext/typeexpr",
"revision": "5f8ed954abd873b2c09616ba0aa607892bbca7e9",
"revisionTime": "2018-03-08T16:30:58Z"
},
{
"checksumSHA1": "BRJaQcKriVKEirVC7YxBxPufQF0=",
"path": "github.com/hashicorp/hcl2/gohcl",
"revision": "5ca9713bf06addcefc0a4e16f779e43a2c88570c",
"revisionTime": "2018-02-05T02:55:09Z"
"revision": "5f8ed954abd873b2c09616ba0aa607892bbca7e9",
"revisionTime": "2018-03-08T16:30:58Z"
},
{
"checksumSHA1": "l2zkxDVi2EUwFdvsVcIfyuOr4zo=",
"checksumSHA1": "v1JCFNvhLqF3ErYcxkJJPboKO8c=",
"path": "github.com/hashicorp/hcl2/hcl",
"revision": "5ca9713bf06addcefc0a4e16f779e43a2c88570c",
"revisionTime": "2018-02-05T02:55:09Z"
"revision": "5f8ed954abd873b2c09616ba0aa607892bbca7e9",
"revisionTime": "2018-03-08T16:30:58Z"
},
{
"checksumSHA1": "iLOUzHOej23ORpmbXAndg5Ft5H0=",
"checksumSHA1": "ekhg+MJLLGkJQdh/tZ4A3EZwpNY=",
"path": "github.com/hashicorp/hcl2/hcl/hclsyntax",
"revision": "5ca9713bf06addcefc0a4e16f779e43a2c88570c",
"revisionTime": "2018-02-05T02:55:09Z"
"revision": "5f8ed954abd873b2c09616ba0aa607892bbca7e9",
"revisionTime": "2018-03-08T16:30:58Z"
},
{
"checksumSHA1": "O8jJfHiwuQFmAo0ivcBhni4pWyg=",
"checksumSHA1": "G40fCmu1bSWXv4Hw5JXwEUTVThk=",
"path": "github.com/hashicorp/hcl2/hcl/json",
"revision": "5ca9713bf06addcefc0a4e16f779e43a2c88570c",
"revisionTime": "2018-02-05T02:55:09Z"
"revision": "5f8ed954abd873b2c09616ba0aa607892bbca7e9",
"revisionTime": "2018-03-08T16:30:58Z"
},
{
"checksumSHA1": "672O/GQ9z+OFsG3eHLKq1yg3ZGM=",
"path": "github.com/hashicorp/hcl2/hcldec",
"revision": "5ca9713bf06addcefc0a4e16f779e43a2c88570c",
"revisionTime": "2018-02-05T02:55:09Z"
"revision": "5f8ed954abd873b2c09616ba0aa607892bbca7e9",
"revisionTime": "2018-03-08T16:30:58Z"
},
{
"checksumSHA1": "sySYF9Ew71VS/LfrG+s/0jK+1VQ=",
"path": "github.com/hashicorp/hcl2/hcled",
"revision": "5ca9713bf06addcefc0a4e16f779e43a2c88570c",
"revisionTime": "2018-02-05T02:55:09Z"
"revision": "5f8ed954abd873b2c09616ba0aa607892bbca7e9",
"revisionTime": "2018-03-08T16:30:58Z"
},
{
"checksumSHA1": "IzmftuG99BqNhbFGhxZaGwtiMtM=",
"path": "github.com/hashicorp/hcl2/hclparse",
"revision": "5ca9713bf06addcefc0a4e16f779e43a2c88570c",
"revisionTime": "2018-02-05T02:55:09Z"
"revision": "5f8ed954abd873b2c09616ba0aa607892bbca7e9",
"revisionTime": "2018-03-08T16:30:58Z"
},
{
"checksumSHA1": "v5qx2XghQ+EtvFLa4a0Efjiwt9I=",
"path": "github.com/hashicorp/hcl2/hcltest",
"revision": "5ca9713bf06addcefc0a4e16f779e43a2c88570c",
"revisionTime": "2018-02-05T02:55:09Z"
"revision": "5f8ed954abd873b2c09616ba0aa607892bbca7e9",
"revisionTime": "2018-03-08T16:30:58Z"
},
{
"checksumSHA1": "9UCSLRG+TEAsNKOZJUaJj/7d6r8=",
"path": "github.com/hashicorp/hcl2/hclwrite",
"revision": "5ca9713bf06addcefc0a4e16f779e43a2c88570c",
"revisionTime": "2018-02-05T02:55:09Z"
"revision": "5f8ed954abd873b2c09616ba0aa607892bbca7e9",
"revisionTime": "2018-03-08T16:30:58Z"
},
{
"checksumSHA1": "M09yxoBoCEtG7EcHR8aEWLzMMJc=",
@ -2218,46 +2224,46 @@
"revisionTime": "2016-10-29T10:40:18Z"
},
{
"checksumSHA1": "TudZOVOvOvR5zw7EFbvD3eZpmLI=",
"checksumSHA1": "lGCvuEPfb2vhxEgYamNhnd1jYH8=",
"path": "github.com/zclconf/go-cty/cty",
"revision": "709e4033eeb037dc543dbc2048065dfb814ce316",
"revisionTime": "2018-01-06T05:58:34Z"
"revision": "49fa5e03c418f95f78684c91e155af06aa901a32",
"revisionTime": "2018-03-02T16:03:48Z"
},
{
"checksumSHA1": "IjvfMUZ9S1L1NM0haXwMfKzkyvM=",
"checksumSHA1": "gDpi8g5VxCRM3JKm/kaYlGdFUdQ=",
"path": "github.com/zclconf/go-cty/cty/convert",
"revision": "709e4033eeb037dc543dbc2048065dfb814ce316",
"revisionTime": "2018-01-06T05:58:34Z"
"revision": "49fa5e03c418f95f78684c91e155af06aa901a32",
"revisionTime": "2018-03-02T16:03:48Z"
},
{
"checksumSHA1": "TU21yqpRZdbEbH8pp4I5YsQa00E=",
"checksumSHA1": "MyyLCGg3RREMllTJyK6ehZl/dHk=",
"path": "github.com/zclconf/go-cty/cty/function",
"revision": "709e4033eeb037dc543dbc2048065dfb814ce316",
"revisionTime": "2018-01-06T05:58:34Z"
"revision": "49fa5e03c418f95f78684c91e155af06aa901a32",
"revisionTime": "2018-03-02T16:03:48Z"
},
{
"checksumSHA1": "Ke4kpRBTSophcLSCrusR8XxSC0Y=",
"checksumSHA1": "4R+DQqBew6i9a4lYiLZW1OXVwTI=",
"path": "github.com/zclconf/go-cty/cty/function/stdlib",
"revision": "709e4033eeb037dc543dbc2048065dfb814ce316",
"revisionTime": "2018-01-06T05:58:34Z"
"revision": "49fa5e03c418f95f78684c91e155af06aa901a32",
"revisionTime": "2018-03-02T16:03:48Z"
},
{
"checksumSHA1": "tmCzwfNXOEB1sSO7TKVzilb2vjA=",
"path": "github.com/zclconf/go-cty/cty/gocty",
"revision": "709e4033eeb037dc543dbc2048065dfb814ce316",
"revisionTime": "2018-01-06T05:58:34Z"
"revision": "49fa5e03c418f95f78684c91e155af06aa901a32",
"revisionTime": "2018-03-02T16:03:48Z"
},
{
"checksumSHA1": "1ApmO+Q33+Oem/3f6BU6sztJWNc=",
"path": "github.com/zclconf/go-cty/cty/json",
"revision": "709e4033eeb037dc543dbc2048065dfb814ce316",
"revisionTime": "2018-01-06T05:58:34Z"
"revision": "49fa5e03c418f95f78684c91e155af06aa901a32",
"revisionTime": "2018-03-02T16:03:48Z"
},
{
"checksumSHA1": "gH4rRyzIQknMIXAJfpvC04KTsME=",
"checksumSHA1": "y5Sk+n6SOspFj8mlyb8swr4DMIs=",
"path": "github.com/zclconf/go-cty/cty/set",
"revision": "709e4033eeb037dc543dbc2048065dfb814ce316",
"revisionTime": "2018-01-06T05:58:34Z"
"revision": "49fa5e03c418f95f78684c91e155af06aa901a32",
"revisionTime": "2018-03-02T16:03:48Z"
},
{
"checksumSHA1": "vE43s37+4CJ2CDU6TlOUOYE0K9c=",