core: Remove the last few HIL remnants

We've not been using HIL in the main codepaths since Terraform 0.12, but
some references to it (and some supporting functionality in Terraform)
stuck around due to interactions with types we'd kept around to support
legacy shims.

However, removing the configs.RawConfig field from
terraform.ResourceConfig disconnects that subtree of dependencies from
everything else, allowing us to remove it. This is safe because the only
remaining uses of terraform.ResourceConfig are shims from values that
were already evaluated using the HCL 2 API, and thus they never need
the "just in time" HIL evaluation that ResourceConfig.interpolateForce
used to do.
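
As a rough illustration of that shimming path, the sketch below assumes the hcl2shim.ConfigValueFromHCL2 helper and the terraform.NewResourceConfigRaw constructor (both of which appear elsewhere in this diff), plus a value that was already evaluated through the HCL 2 / cty API, so no HIL evaluation is involved at any point:

```go
// Hypothetical sketch (not from this commit): building a ResourceConfig from a
// value that was already evaluated by the HCL 2 / cty machinery. No HIL
// "just in time" interpolation happens anywhere on this path.
package main

import (
	"fmt"

	"github.com/hashicorp/terraform/configs/hcl2shim"
	"github.com/hashicorp/terraform/terraform"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	// In real code this value would come out of HCL 2 expression evaluation;
	// here it is constructed directly for illustration.
	val := cty.ObjectVal(map[string]cty.Value{
		"ami":           cty.StringVal("ami-abc123"),
		"instance_type": cty.StringVal("t2.micro"),
	})

	// hcl2shim converts the fully-evaluated cty.Value into the legacy
	// map[string]interface{} representation that ResourceConfig carries...
	raw := hcl2shim.ConfigValueFromHCL2(val).(map[string]interface{})

	// ...and NewResourceConfigRaw wraps that map with no further evaluation
	// step (unlike the removed NewResourceConfig/interpolateForce path).
	rc := terraform.NewResourceConfigRaw(raw)
	fmt.Println(rc.Config["ami"]) // ami-abc123
}
```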

We also had some HIL references in configs/hcl2shim that were previously
in support of the "terraform 0.12upgrade" command, but the implementation
of that command is now removed.

There was one remaining reference to HIL in a now-unused function in the
helper/schema package, which I removed entirely here.

This then allows us to remove the HIL dependency entirely, and also to
clean up some old remnants of the legacy "config" package that we'd
recently moved into the "configs" package pending further pruning.
Martin Atkins 2020-08-28 15:28:59 -07:00
parent 56c0e35243
commit b0da5b1ce5
65 changed files with 0 additions and 8747 deletions

@@ -4,7 +4,6 @@ import (
"fmt"
"math/big"
"github.com/hashicorp/hil/ast"
"github.com/zclconf/go-cty/cty"
"github.com/hashicorp/terraform/configs/configschema"
@@ -229,125 +228,3 @@ func HCL2ValueFromConfigValue(v interface{}) cty.Value {
panic(fmt.Errorf("can't convert %#v to cty.Value", v))
}
}
func HILVariableFromHCL2Value(v cty.Value) ast.Variable {
if v.IsNull() {
// Caller should guarantee/check this before calling
panic("Null values cannot be represented in HIL")
}
if !v.IsKnown() {
return ast.Variable{
Type: ast.TypeUnknown,
Value: UnknownVariableValue,
}
}
switch v.Type() {
case cty.Bool:
return ast.Variable{
Type: ast.TypeBool,
Value: v.True(),
}
case cty.Number:
v := ConfigValueFromHCL2(v)
switch tv := v.(type) {
case int:
return ast.Variable{
Type: ast.TypeInt,
Value: tv,
}
case float64:
return ast.Variable{
Type: ast.TypeFloat,
Value: tv,
}
default:
// should never happen
panic("invalid return value for configValueFromHCL2")
}
case cty.String:
return ast.Variable{
Type: ast.TypeString,
Value: v.AsString(),
}
}
if v.Type().IsListType() || v.Type().IsSetType() || v.Type().IsTupleType() {
l := make([]ast.Variable, 0, v.LengthInt())
it := v.ElementIterator()
for it.Next() {
_, ev := it.Element()
l = append(l, HILVariableFromHCL2Value(ev))
}
// If we were given a tuple then this could actually produce an invalid
// list with non-homogenous types, which we expect to be caught inside
// HIL just like a user-supplied non-homogenous list would be.
return ast.Variable{
Type: ast.TypeList,
Value: l,
}
}
if v.Type().IsMapType() || v.Type().IsObjectType() {
l := make(map[string]ast.Variable)
it := v.ElementIterator()
for it.Next() {
ek, ev := it.Element()
l[ek.AsString()] = HILVariableFromHCL2Value(ev)
}
// If we were given an object then this could actually produce an invalid
// map with non-homogenous types, which we expect to be caught inside
// HIL just like a user-supplied non-homogenous map would be.
return ast.Variable{
Type: ast.TypeMap,
Value: l,
}
}
// If we fall out here then we have some weird type that we haven't
// accounted for. This should never happen unless the caller is using
// capsule types, and we don't currently have any such types defined.
panic(fmt.Errorf("can't convert %#v to HIL variable", v))
}
func HCL2ValueFromHILVariable(v ast.Variable) cty.Value {
switch v.Type {
case ast.TypeList:
vals := make([]cty.Value, len(v.Value.([]ast.Variable)))
for i, ev := range v.Value.([]ast.Variable) {
vals[i] = HCL2ValueFromHILVariable(ev)
}
return cty.TupleVal(vals)
case ast.TypeMap:
vals := make(map[string]cty.Value, len(v.Value.(map[string]ast.Variable)))
for k, ev := range v.Value.(map[string]ast.Variable) {
vals[k] = HCL2ValueFromHILVariable(ev)
}
return cty.ObjectVal(vals)
default:
return HCL2ValueFromConfigValue(v.Value)
}
}
func HCL2TypeForHILType(hilType ast.Type) cty.Type {
switch hilType {
case ast.TypeAny:
return cty.DynamicPseudoType
case ast.TypeUnknown:
return cty.DynamicPseudoType
case ast.TypeBool:
return cty.Bool
case ast.TypeInt:
return cty.Number
case ast.TypeFloat:
return cty.Number
case ast.TypeString:
return cty.String
case ast.TypeList:
return cty.List(cty.DynamicPseudoType)
case ast.TypeMap:
return cty.Map(cty.DynamicPseudoType)
default:
return cty.NilType // equivalent to ast.TypeInvalid
}
}
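
Only the HIL-specific helpers above are deleted; the cty-to-legacy-value conversions in configs/hcl2shim remain. A minimal sketch (not part of the diff) of the surviving pair, ConfigValueFromHCL2 and HCL2ValueFromConfigValue, with illustrative output noted in comments:

```go
// Sketch of the hcl2shim helpers that survive this commit; the HIL-specific
// conversions shown above are the only ones being removed.
package main

import (
	"fmt"

	"github.com/hashicorp/terraform/configs/hcl2shim"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	obj := cty.ObjectVal(map[string]cty.Value{
		"name":  cty.StringVal("example"),
		"count": cty.NumberIntVal(3),
	})

	// cty.Value -> legacy interface{} representation (maps, slices, strings, ints, ...).
	legacy := hcl2shim.ConfigValueFromHCL2(obj)
	fmt.Printf("%#v\n", legacy) // e.g. map[string]interface{}{"count":3, "name":"example"}

	// ...and back: legacy interface{} -> cty.Value (an object value here).
	val := hcl2shim.HCL2ValueFromConfigValue(legacy)
	fmt.Printf("%#v\n", val.GetAttr("name")) // cty.StringVal("example")
}
```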

@@ -1,436 +0,0 @@
package configs
import (
"fmt"
"strconv"
"strings"
"github.com/hashicorp/terraform/addrs"
"github.com/hashicorp/terraform/tfdiags"
"github.com/hashicorp/hil/ast"
)
// An InterpolatedVariable is a variable reference within an interpolation.
//
// Implementations of this interface represents various sources where
// variables can come from: user variables, resources, etc.
type InterpolatedVariable interface {
FullKey() string
SourceRange() tfdiags.SourceRange
}
// varRange can be embedded into an InterpolatedVariable implementation to
// implement the SourceRange method.
type varRange struct {
rng tfdiags.SourceRange
}
func (r varRange) SourceRange() tfdiags.SourceRange {
return r.rng
}
// CountVariable is a variable for referencing information about
// the count.
type CountVariable struct {
Type CountValueType
key string
varRange
}
// CountValueType is the type of the count variable that is referenced.
type CountValueType byte
const (
CountValueInvalid CountValueType = iota
CountValueIndex
)
// A ModuleVariable is a variable that is referencing the output
// of a module, such as "${module.foo.bar}"
type ModuleVariable struct {
Name string
Field string
key string
varRange
}
// A PathVariable is a variable that references path information about the
// module.
type PathVariable struct {
Type PathValueType
key string
varRange
}
type PathValueType byte
const (
PathValueInvalid PathValueType = iota
PathValueCwd
PathValueModule
PathValueRoot
)
// A ResourceVariable is a variable that is referencing the field
// of a resource, such as "${aws_instance.foo.ami}"
type ResourceVariable struct {
Mode addrs.ResourceMode
Type string // Resource type, i.e. "aws_instance"
Name string // Resource name
Field string // Resource field
Multi bool // True if multi-variable: aws_instance.foo.*.id
Index int // Index for multi-variable: aws_instance.foo.1.id == 1
key string
varRange
}
// SelfVariable is a variable that is referencing the same resource
// it is running on: "${self.address}"
type SelfVariable struct {
Field string
key string
varRange
}
// SimpleVariable is an unprefixed variable, which can show up when users have
// strings they are passing down to resources that use interpolation
// internally. The template_file resource is an example of this.
type SimpleVariable struct {
Key string
varRange
}
// TerraformVariable is a "terraform."-prefixed variable used to access
// metadata about the Terraform run.
type TerraformVariable struct {
Field string
key string
varRange
}
// A UserVariable is a variable that is referencing a user variable
// that is inputted from outside the configuration. This looks like
// "${var.foo}"
type UserVariable struct {
Name string
Elem string
key string
varRange
}
// A LocalVariable is a variable that references a local value defined within
// the current module, via a "locals" block. This looks like "${local.foo}".
type LocalVariable struct {
Name string
varRange
}
func NewInterpolatedVariable(v string) (InterpolatedVariable, error) {
if strings.HasPrefix(v, "count.") {
return NewCountVariable(v)
} else if strings.HasPrefix(v, "path.") {
return NewPathVariable(v)
} else if strings.HasPrefix(v, "self.") {
return NewSelfVariable(v)
} else if strings.HasPrefix(v, "terraform.") {
return NewTerraformVariable(v)
} else if strings.HasPrefix(v, "var.") {
return NewUserVariable(v)
} else if strings.HasPrefix(v, "local.") {
return NewLocalVariable(v)
} else if strings.HasPrefix(v, "module.") {
return NewModuleVariable(v)
} else if !strings.ContainsRune(v, '.') {
return NewSimpleVariable(v)
} else {
return NewResourceVariable(v)
}
}
func NewCountVariable(key string) (*CountVariable, error) {
var fieldType CountValueType
parts := strings.SplitN(key, ".", 2)
switch parts[1] {
case "index":
fieldType = CountValueIndex
}
return &CountVariable{
Type: fieldType,
key: key,
}, nil
}
func (c *CountVariable) FullKey() string {
return c.key
}
func NewModuleVariable(key string) (*ModuleVariable, error) {
parts := strings.SplitN(key, ".", 3)
if len(parts) < 3 {
return nil, fmt.Errorf(
"%s: module variables must be three parts: module.name.attr",
key)
}
return &ModuleVariable{
Name: parts[1],
Field: parts[2],
key: key,
}, nil
}
func (v *ModuleVariable) FullKey() string {
return v.key
}
func (v *ModuleVariable) GoString() string {
return fmt.Sprintf("*%#v", *v)
}
func NewPathVariable(key string) (*PathVariable, error) {
var fieldType PathValueType
parts := strings.SplitN(key, ".", 2)
switch parts[1] {
case "cwd":
fieldType = PathValueCwd
case "module":
fieldType = PathValueModule
case "root":
fieldType = PathValueRoot
}
return &PathVariable{
Type: fieldType,
key: key,
}, nil
}
func (v *PathVariable) FullKey() string {
return v.key
}
func NewResourceVariable(key string) (*ResourceVariable, error) {
var mode addrs.ResourceMode
var parts []string
if strings.HasPrefix(key, "data.") {
mode = addrs.DataResourceMode
parts = strings.SplitN(key, ".", 4)
if len(parts) < 4 {
return nil, fmt.Errorf(
"%s: data variables must be four parts: data.TYPE.NAME.ATTR",
key)
}
// Don't actually need the "data." prefix for parsing, since it's
// always constant.
parts = parts[1:]
} else {
mode = addrs.ManagedResourceMode
parts = strings.SplitN(key, ".", 3)
if len(parts) < 3 {
return nil, fmt.Errorf(
"%s: resource variables must be three parts: TYPE.NAME.ATTR",
key)
}
}
field := parts[2]
multi := false
var index int
if idx := strings.Index(field, "."); idx != -1 {
indexStr := field[:idx]
multi = indexStr == "*"
index = -1
if !multi {
indexInt, err := strconv.ParseInt(indexStr, 0, 0)
if err == nil {
multi = true
index = int(indexInt)
}
}
if multi {
field = field[idx+1:]
}
}
return &ResourceVariable{
Mode: mode,
Type: parts[0],
Name: parts[1],
Field: field,
Multi: multi,
Index: index,
key: key,
}, nil
}
func (v *ResourceVariable) ResourceId() string {
switch v.Mode {
case addrs.ManagedResourceMode:
return fmt.Sprintf("%s.%s", v.Type, v.Name)
case addrs.DataResourceMode:
return fmt.Sprintf("data.%s.%s", v.Type, v.Name)
default:
panic(fmt.Errorf("unknown resource mode %s", v.Mode))
}
}
func (v *ResourceVariable) FullKey() string {
return v.key
}
func NewSelfVariable(key string) (*SelfVariable, error) {
field := key[len("self."):]
return &SelfVariable{
Field: field,
key: key,
}, nil
}
func (v *SelfVariable) FullKey() string {
return v.key
}
func (v *SelfVariable) GoString() string {
return fmt.Sprintf("*%#v", *v)
}
func NewSimpleVariable(key string) (*SimpleVariable, error) {
return &SimpleVariable{Key: key}, nil
}
func (v *SimpleVariable) FullKey() string {
return v.Key
}
func (v *SimpleVariable) GoString() string {
return fmt.Sprintf("*%#v", *v)
}
func NewTerraformVariable(key string) (*TerraformVariable, error) {
field := key[len("terraform."):]
return &TerraformVariable{
Field: field,
key: key,
}, nil
}
func (v *TerraformVariable) FullKey() string {
return v.key
}
func (v *TerraformVariable) GoString() string {
return fmt.Sprintf("*%#v", *v)
}
func NewUserVariable(key string) (*UserVariable, error) {
name := key[len("var."):]
elem := ""
if idx := strings.Index(name, "."); idx > -1 {
elem = name[idx+1:]
name = name[:idx]
}
if len(elem) > 0 {
return nil, fmt.Errorf("Invalid dot index found: 'var.%s.%s'. Values in maps and lists can be referenced using square bracket indexing, like: 'var.mymap[\"key\"]' or 'var.mylist[1]'.", name, elem)
}
return &UserVariable{
key: key,
Name: name,
Elem: elem,
}, nil
}
func (v *UserVariable) FullKey() string {
return v.key
}
func (v *UserVariable) GoString() string {
return fmt.Sprintf("*%#v", *v)
}
func NewLocalVariable(key string) (*LocalVariable, error) {
name := key[len("local."):]
if idx := strings.Index(name, "."); idx > -1 {
return nil, fmt.Errorf("Can't use dot (.) attribute access in local.%s; use square bracket indexing", name)
}
return &LocalVariable{
Name: name,
}, nil
}
func (v *LocalVariable) FullKey() string {
return fmt.Sprintf("local.%s", v.Name)
}
func (v *LocalVariable) GoString() string {
return fmt.Sprintf("*%#v", *v)
}
// DetectVariables takes an AST root and returns all the interpolated
// variables that are detected in the AST tree.
func DetectVariables(root ast.Node) ([]InterpolatedVariable, error) {
var result []InterpolatedVariable
var resultErr error
// Visitor callback
fn := func(n ast.Node) ast.Node {
if resultErr != nil {
return n
}
switch vn := n.(type) {
case *ast.VariableAccess:
v, err := NewInterpolatedVariable(vn.Name)
if err != nil {
resultErr = err
return n
}
result = append(result, v)
case *ast.Index:
if va, ok := vn.Target.(*ast.VariableAccess); ok {
v, err := NewInterpolatedVariable(va.Name)
if err != nil {
resultErr = err
return n
}
result = append(result, v)
}
if va, ok := vn.Key.(*ast.VariableAccess); ok {
v, err := NewInterpolatedVariable(va.Name)
if err != nil {
resultErr = err
return n
}
result = append(result, v)
}
default:
return n
}
return n
}
// Visitor pattern
root.Accept(fn)
if resultErr != nil {
return nil, resultErr
}
return result, nil
}

@@ -1,301 +0,0 @@
package configs
import (
"reflect"
"strings"
"testing"
"github.com/hashicorp/hil"
"github.com/hashicorp/terraform/addrs"
)
func TestNewInterpolatedVariable(t *testing.T) {
tests := []struct {
Input string
Want InterpolatedVariable
Error bool
}{
{
"var.foo",
&UserVariable{
Name: "foo",
key: "var.foo",
},
false,
},
{
"local.foo",
&LocalVariable{
Name: "foo",
},
false,
},
{
"local.foo.nope",
nil,
true,
},
{
"module.foo.bar",
&ModuleVariable{
Name: "foo",
Field: "bar",
key: "module.foo.bar",
},
false,
},
{
"count.index",
&CountVariable{
Type: CountValueIndex,
key: "count.index",
},
false,
},
{
"count.nope",
&CountVariable{
Type: CountValueInvalid,
key: "count.nope",
},
false,
},
{
"path.module",
&PathVariable{
Type: PathValueModule,
key: "path.module",
},
false,
},
{
"self.address",
&SelfVariable{
Field: "address",
key: "self.address",
},
false,
},
{
"terraform.env",
&TerraformVariable{
Field: "env",
key: "terraform.env",
},
false,
},
}
for i, test := range tests {
t.Run(test.Input, func(t *testing.T) {
got, err := NewInterpolatedVariable(test.Input)
if err != nil != test.Error {
t.Errorf("%d. Error: %s", i, err)
}
if !test.Error && !reflect.DeepEqual(got, test.Want) {
t.Errorf(
"wrong result\ninput: %s\ngot: %#v\nwant: %#v",
test.Input, got, test.Want,
)
}
})
}
}
func TestNewResourceVariable(t *testing.T) {
v, err := NewResourceVariable("foo.bar.baz")
if err != nil {
t.Fatalf("err: %s", err)
}
if v.Mode != addrs.ManagedResourceMode {
t.Fatalf("bad: %#v", v)
}
if v.Type != "foo" {
t.Fatalf("bad: %#v", v)
}
if v.Name != "bar" {
t.Fatalf("bad: %#v", v)
}
if v.Field != "baz" {
t.Fatalf("bad: %#v", v)
}
if v.Multi {
t.Fatal("should not be multi")
}
if v.FullKey() != "foo.bar.baz" {
t.Fatalf("bad: %#v", v)
}
}
func TestNewResourceVariableData(t *testing.T) {
v, err := NewResourceVariable("data.foo.bar.baz")
if err != nil {
t.Fatalf("err: %s", err)
}
if v.Mode != addrs.DataResourceMode {
t.Fatalf("bad: %#v", v)
}
if v.Type != "foo" {
t.Fatalf("bad: %#v", v)
}
if v.Name != "bar" {
t.Fatalf("bad: %#v", v)
}
if v.Field != "baz" {
t.Fatalf("bad: %#v", v)
}
if v.Multi {
t.Fatal("should not be multi")
}
if v.FullKey() != "data.foo.bar.baz" {
t.Fatalf("bad: %#v", v)
}
}
func TestNewUserVariable(t *testing.T) {
v, err := NewUserVariable("var.bar")
if err != nil {
t.Fatalf("err: %s", err)
}
if v.Name != "bar" {
t.Fatalf("bad: %#v", v.Name)
}
if v.FullKey() != "var.bar" {
t.Fatalf("bad: %#v", v)
}
}
func TestNewUserVariable_oldMapDotIndexErr(t *testing.T) {
_, err := NewUserVariable("var.bar.baz")
if err == nil || !strings.Contains(err.Error(), "Invalid dot index") {
t.Fatalf("Expected dot index err, got: %#v", err)
}
}
func TestResourceVariable_impl(t *testing.T) {
var _ InterpolatedVariable = new(ResourceVariable)
}
func TestResourceVariable_Multi(t *testing.T) {
v, err := NewResourceVariable("foo.bar.*.baz")
if err != nil {
t.Fatalf("err: %s", err)
}
if v.Type != "foo" {
t.Fatalf("bad: %#v", v)
}
if v.Name != "bar" {
t.Fatalf("bad: %#v", v)
}
if v.Field != "baz" {
t.Fatalf("bad: %#v", v)
}
if !v.Multi {
t.Fatal("should be multi")
}
}
func TestResourceVariable_MultiIndex(t *testing.T) {
cases := []struct {
Input string
Index int
Field string
}{
{"foo.bar.*.baz", -1, "baz"},
{"foo.bar.0.baz", 0, "baz"},
{"foo.bar.5.baz", 5, "baz"},
}
for _, tc := range cases {
v, err := NewResourceVariable(tc.Input)
if err != nil {
t.Fatalf("err: %s", err)
}
if !v.Multi {
t.Fatalf("should be multi: %s", tc.Input)
}
if v.Index != tc.Index {
t.Fatalf("bad: %d\n\n%s", v.Index, tc.Input)
}
if v.Field != tc.Field {
t.Fatalf("bad: %s\n\n%s", v.Field, tc.Input)
}
}
}
func TestUserVariable_impl(t *testing.T) {
var _ InterpolatedVariable = new(UserVariable)
}
func TestDetectVariables(t *testing.T) {
cases := []struct {
Input string
Result []InterpolatedVariable
}{
{
"foo $${var.foo}",
nil,
},
{
"foo ${var.foo}",
[]InterpolatedVariable{
&UserVariable{
Name: "foo",
key: "var.foo",
},
},
},
{
"foo ${var.foo} ${var.bar}",
[]InterpolatedVariable{
&UserVariable{
Name: "foo",
key: "var.foo",
},
&UserVariable{
Name: "bar",
key: "var.bar",
},
},
},
{
`foo ${module.foo.output["key"]}`,
[]InterpolatedVariable{
&ModuleVariable{
Name: "foo",
Field: "output",
key: "module.foo.output",
},
&ModuleVariable{
Name: "foo",
Field: "output",
key: "module.foo.output",
},
},
},
}
for _, tc := range cases {
ast, err := hil.Parse(tc.Input)
if err != nil {
t.Fatalf("%s\n\nInput: %s", err, tc.Input)
}
actual, err := DetectVariables(ast)
if err != nil {
t.Fatalf("err: %s", err)
}
if !reflect.DeepEqual(actual, tc.Result) {
t.Fatalf("bad: %#v\n\nInput: %s", actual, tc.Input)
}
}
}

@@ -1,282 +0,0 @@
package configs
import (
"fmt"
"reflect"
"strings"
"github.com/hashicorp/hil"
"github.com/hashicorp/hil/ast"
"github.com/hashicorp/terraform/configs/hcl2shim"
"github.com/mitchellh/reflectwalk"
)
// interpolationWalker implements interfaces for the reflectwalk package
// (github.com/mitchellh/reflectwalk) that can be used to automatically
// execute a callback for an interpolation.
type interpolationWalker struct {
// F is the function to call for every interpolation. It can be nil.
//
// If Replace is true, then the return value of F will be used to
// replace the interpolation.
F interpolationWalkerFunc
Replace bool
// ContextF is an advanced version of F that also receives the
// location of where it is in the structure. This lets you do
// context-aware validation.
ContextF interpolationWalkerContextFunc
key []string
lastValue reflect.Value
loc reflectwalk.Location
cs []reflect.Value
csKey []reflect.Value
csData interface{}
sliceIndex []int
unknownKeys []string
}
// interpolationWalkerFunc is the callback called by interpolationWalk.
// It is called with any interpolation found. It should return a value
// to replace the interpolation with, along with any errors.
//
// If Replace is set to false in interpolationWalker, then the replace
// value can be anything as it will have no effect.
type interpolationWalkerFunc func(ast.Node) (interface{}, error)
// interpolationWalkerContextFunc is called by interpolationWalk if
// ContextF is set. This receives both the interpolation and the location
// where the interpolation is.
//
// This callback can be used to validate the location of the interpolation
// within the configuration.
type interpolationWalkerContextFunc func(reflectwalk.Location, ast.Node)
func (w *interpolationWalker) Enter(loc reflectwalk.Location) error {
w.loc = loc
return nil
}
func (w *interpolationWalker) Exit(loc reflectwalk.Location) error {
w.loc = reflectwalk.None
switch loc {
case reflectwalk.Map:
w.cs = w.cs[:len(w.cs)-1]
case reflectwalk.MapValue:
w.key = w.key[:len(w.key)-1]
w.csKey = w.csKey[:len(w.csKey)-1]
case reflectwalk.Slice:
// Split any values that need to be split
w.splitSlice()
w.cs = w.cs[:len(w.cs)-1]
case reflectwalk.SliceElem:
w.csKey = w.csKey[:len(w.csKey)-1]
w.sliceIndex = w.sliceIndex[:len(w.sliceIndex)-1]
}
return nil
}
func (w *interpolationWalker) Map(m reflect.Value) error {
w.cs = append(w.cs, m)
return nil
}
func (w *interpolationWalker) MapElem(m, k, v reflect.Value) error {
w.csData = k
w.csKey = append(w.csKey, k)
if l := len(w.sliceIndex); l > 0 {
w.key = append(w.key, fmt.Sprintf("%d.%s", w.sliceIndex[l-1], k.String()))
} else {
w.key = append(w.key, k.String())
}
w.lastValue = v
return nil
}
func (w *interpolationWalker) Slice(s reflect.Value) error {
w.cs = append(w.cs, s)
return nil
}
func (w *interpolationWalker) SliceElem(i int, elem reflect.Value) error {
w.csKey = append(w.csKey, reflect.ValueOf(i))
w.sliceIndex = append(w.sliceIndex, i)
return nil
}
func (w *interpolationWalker) Primitive(v reflect.Value) error {
setV := v
// We only care about strings
if v.Kind() == reflect.Interface {
setV = v
v = v.Elem()
}
if v.Kind() != reflect.String {
return nil
}
astRoot, err := hil.Parse(v.String())
if err != nil {
return err
}
// If the AST we got is just a literal string value with the same
// value then we ignore it. We have to check if it's the same value
// because it is possible to input a string, get out a string, and
// have it be different. For example: "foo-$${bar}" turns into
// "foo-${bar}"
if n, ok := astRoot.(*ast.LiteralNode); ok {
if s, ok := n.Value.(string); ok && s == v.String() {
return nil
}
}
if w.ContextF != nil {
w.ContextF(w.loc, astRoot)
}
if w.F == nil {
return nil
}
replaceVal, err := w.F(astRoot)
if err != nil {
return fmt.Errorf(
"%s in:\n\n%s",
err, v.String())
}
if w.Replace {
// We need to determine if we need to remove this element
// if the result contains any "UnknownVariableValue" which is
// set if it is computed. This behavior is different if we're
// splitting (in a SliceElem) or not.
remove := false
if w.loc == reflectwalk.SliceElem {
switch typedReplaceVal := replaceVal.(type) {
case string:
if typedReplaceVal == hcl2shim.UnknownVariableValue {
remove = true
}
case []interface{}:
if hasUnknownValue(typedReplaceVal) {
remove = true
}
}
} else if replaceVal == hcl2shim.UnknownVariableValue {
remove = true
}
if remove {
w.unknownKeys = append(w.unknownKeys, strings.Join(w.key, "."))
}
resultVal := reflect.ValueOf(replaceVal)
switch w.loc {
case reflectwalk.MapKey:
m := w.cs[len(w.cs)-1]
// Delete the old value
var zero reflect.Value
m.SetMapIndex(w.csData.(reflect.Value), zero)
// Set the new key with the existing value
m.SetMapIndex(resultVal, w.lastValue)
// Set the key to be the new key
w.csData = resultVal
case reflectwalk.MapValue:
// If we're in a map, then the only way to set a map value is
// to set it directly.
m := w.cs[len(w.cs)-1]
mk := w.csData.(reflect.Value)
m.SetMapIndex(mk, resultVal)
default:
// Otherwise, we should be addressable
setV.Set(resultVal)
}
}
return nil
}
func (w *interpolationWalker) replaceCurrent(v reflect.Value) {
// if we don't have at least 2 values, we're not going to find a map, but
// we could panic.
if len(w.cs) < 2 {
return
}
c := w.cs[len(w.cs)-2]
switch c.Kind() {
case reflect.Map:
// Get the key and delete it
k := w.csKey[len(w.csKey)-1]
c.SetMapIndex(k, v)
}
}
func hasUnknownValue(variable []interface{}) bool {
for _, value := range variable {
if strVal, ok := value.(string); ok {
if strVal == hcl2shim.UnknownVariableValue {
return true
}
}
}
return false
}
func (w *interpolationWalker) splitSlice() {
raw := w.cs[len(w.cs)-1]
var s []interface{}
switch v := raw.Interface().(type) {
case []interface{}:
s = v
case []map[string]interface{}:
return
}
split := false
for _, val := range s {
if varVal, ok := val.(ast.Variable); ok && varVal.Type == ast.TypeList {
split = true
}
if _, ok := val.([]interface{}); ok {
split = true
}
}
if !split {
return
}
result := make([]interface{}, 0)
for _, v := range s {
switch val := v.(type) {
case ast.Variable:
switch val.Type {
case ast.TypeList:
elements := val.Value.([]ast.Variable)
for _, element := range elements {
result = append(result, element.Value)
}
default:
result = append(result, val.Value)
}
case []interface{}:
result = append(result, val...)
default:
result = append(result, v)
}
}
w.replaceCurrent(reflect.ValueOf(result))
}

@@ -1,200 +0,0 @@
package configs
import (
"fmt"
"reflect"
"testing"
"github.com/hashicorp/hil/ast"
"github.com/hashicorp/terraform/configs/hcl2shim"
"github.com/mitchellh/reflectwalk"
)
func TestInterpolationWalker_detect(t *testing.T) {
cases := []struct {
Input interface{}
Result []string
}{
{
Input: map[string]interface{}{
"foo": "$${var.foo}",
},
Result: []string{
"Literal(TypeString, ${var.foo})",
},
},
{
Input: map[string]interface{}{
"foo": "${var.foo}",
},
Result: []string{
"Variable(var.foo)",
},
},
{
Input: map[string]interface{}{
"foo": "${aws_instance.foo.*.num}",
},
Result: []string{
"Variable(aws_instance.foo.*.num)",
},
},
{
Input: map[string]interface{}{
"foo": "${lookup(var.foo)}",
},
Result: []string{
"Call(lookup, Variable(var.foo))",
},
},
{
Input: map[string]interface{}{
"foo": `${file("test.txt")}`,
},
Result: []string{
"Call(file, Literal(TypeString, test.txt))",
},
},
{
Input: map[string]interface{}{
"foo": `${file("foo/bar.txt")}`,
},
Result: []string{
"Call(file, Literal(TypeString, foo/bar.txt))",
},
},
{
Input: map[string]interface{}{
"foo": `${join(",", foo.bar.*.id)}`,
},
Result: []string{
"Call(join, Literal(TypeString, ,), Variable(foo.bar.*.id))",
},
},
{
Input: map[string]interface{}{
"foo": `${concat("localhost", ":8080")}`,
},
Result: []string{
"Call(concat, Literal(TypeString, localhost), Literal(TypeString, :8080))",
},
},
}
for i, tc := range cases {
var actual []string
detectFn := func(root ast.Node) (interface{}, error) {
actual = append(actual, fmt.Sprintf("%s", root))
return "", nil
}
w := &interpolationWalker{F: detectFn}
if err := reflectwalk.Walk(tc.Input, w); err != nil {
t.Fatalf("err: %s", err)
}
if !reflect.DeepEqual(actual, tc.Result) {
t.Fatalf("%d: bad:\n\n%#v", i, actual)
}
}
}
func TestInterpolationWalker_replace(t *testing.T) {
cases := []struct {
Input interface{}
Output interface{}
Value interface{}
}{
{
Input: map[string]interface{}{
"foo": "$${var.foo}",
},
Output: map[string]interface{}{
"foo": "bar",
},
Value: "bar",
},
{
Input: map[string]interface{}{
"foo": "hello, ${var.foo}",
},
Output: map[string]interface{}{
"foo": "bar",
},
Value: "bar",
},
{
Input: map[string]interface{}{
"foo": map[string]interface{}{
"${var.foo}": "bar",
},
},
Output: map[string]interface{}{
"foo": map[string]interface{}{
"bar": "bar",
},
},
Value: "bar",
},
{
Input: map[string]interface{}{
"foo": []interface{}{
"${var.foo}",
"bing",
},
},
Output: map[string]interface{}{
"foo": []interface{}{
"bar",
"baz",
"bing",
},
},
Value: []interface{}{"bar", "baz"},
},
{
Input: map[string]interface{}{
"foo": []interface{}{
"${var.foo}",
"bing",
},
},
Output: map[string]interface{}{
"foo": []interface{}{
hcl2shim.UnknownVariableValue,
"baz",
"bing",
},
},
Value: []interface{}{hcl2shim.UnknownVariableValue, "baz"},
},
}
for i, tc := range cases {
fn := func(ast.Node) (interface{}, error) {
return tc.Value, nil
}
t.Run(fmt.Sprintf("walk-%d", i), func(t *testing.T) {
w := &interpolationWalker{F: fn, Replace: true}
if err := reflectwalk.Walk(tc.Input, w); err != nil {
t.Fatalf("err: %s", err)
}
if !reflect.DeepEqual(tc.Input, tc.Output) {
t.Fatalf("%d: bad:\n\nexpected:%#v\ngot:%#v", i, tc.Output, tc.Input)
}
})
}
}

@@ -1,406 +0,0 @@
package configs
import (
"bytes"
"encoding/gob"
"errors"
"strconv"
"sync"
hcl2 "github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hil"
"github.com/hashicorp/hil/ast"
"github.com/mitchellh/copystructure"
"github.com/mitchellh/reflectwalk"
)
// RawConfig is a structure that holds a piece of configuration
// where the overall structure is unknown since it will be used
// to configure a plugin or some other similar external component.
//
// RawConfigs can be interpolated with variables that come from
// other resources, user variables, etc.
//
// RawConfig supports a query-like interface to request
// information from deep within the structure.
type RawConfig struct {
Key string
// Only _one_ of Raw and Body may be populated at a time.
//
// In the normal case, Raw is populated and Body is nil.
//
// When the experimental HCL2 parsing mode is enabled, "Body"
// is populated and RawConfig serves only to transport the hcl2.Body
// through the rest of Terraform core so we can ultimately decode it
// once its schema is known.
//
// Once we transition to HCL2 as the primary representation, RawConfig
// should be removed altogether and the hcl2.Body should be passed
// around directly.
Raw map[string]interface{}
Body hcl2.Body
Interpolations []ast.Node
Variables map[string]InterpolatedVariable
lock sync.Mutex
config map[string]interface{}
unknownKeys []string
}
// NewRawConfig creates a new RawConfig structure and populates the
// publicly readable struct fields.
func NewRawConfig(raw map[string]interface{}) (*RawConfig, error) {
result := &RawConfig{Raw: raw}
if err := result.init(); err != nil {
return nil, err
}
return result, nil
}
// NewRawConfigHCL2 creates a new RawConfig that is serving as a capsule
// to transport a hcl2.Body. In this mode, the publicly-readable struct
// fields are not populated since all operations should instead be diverted
// to the HCL2 body.
//
// For a RawConfig object constructed with this function, the only valid use
// is to later retrieve the Body value and call its own methods. Callers
// may choose to set and then later handle the Key field, in a manner
// consistent with how it is handled by the Value method, but the Value
// method itself must not be used.
//
// This is an experimental codepath to be used only by the HCL2 config loader.
// Non-experimental parsing should _always_ use NewRawConfig to produce a
// fully-functional RawConfig object.
func NewRawConfigHCL2(body hcl2.Body) *RawConfig {
return &RawConfig{
Body: body,
}
}
// RawMap returns a copy of the RawConfig.Raw map.
func (r *RawConfig) RawMap() map[string]interface{} {
r.lock.Lock()
defer r.lock.Unlock()
m := make(map[string]interface{})
for k, v := range r.Raw {
m[k] = v
}
return m
}
// Copy returns a copy of this RawConfig, uninterpolated.
func (r *RawConfig) Copy() *RawConfig {
if r == nil {
return nil
}
r.lock.Lock()
defer r.lock.Unlock()
if r.Body != nil {
return NewRawConfigHCL2(r.Body)
}
newRaw := make(map[string]interface{})
for k, v := range r.Raw {
newRaw[k] = v
}
result, err := NewRawConfig(newRaw)
if err != nil {
panic("copy failed: " + err.Error())
}
result.Key = r.Key
return result
}
// Value returns the value of the configuration if this configuration
// has a Key set. If this does not have a Key set, nil will be returned.
func (r *RawConfig) Value() interface{} {
if c := r.Config(); c != nil {
if v, ok := c[r.Key]; ok {
return v
}
}
r.lock.Lock()
defer r.lock.Unlock()
return r.Raw[r.Key]
}
// Config returns the entire configuration with the variables
// interpolated from any call to Interpolate.
//
// If any interpolated variables are unknown (value set to
// UnknownVariableValue), the first non-container (map, slice, etc.) element
// will be removed from the config. The keys of unknown variables
// can be found using the UnknownKeys function.
//
// By pruning out unknown keys from the configuration, the raw
// structure will always successfully decode into its ultimate
// structure using something like mapstructure.
func (r *RawConfig) Config() map[string]interface{} {
r.lock.Lock()
defer r.lock.Unlock()
return r.config
}
// Interpolate uses the given mapping of variable values and uses
// those as the values to replace any variables in this raw
// configuration.
//
// Any prior calls to Interpolate are replaced with this one.
//
// If a variable key is missing, this will panic.
func (r *RawConfig) Interpolate(vs map[string]ast.Variable) error {
r.lock.Lock()
defer r.lock.Unlock()
// Create the evaluation configuration we use to execute
config := &hil.EvalConfig{
GlobalScope: &ast.BasicScope{
VarMap: vs,
},
}
return r.interpolate(func(root ast.Node) (interface{}, error) {
// None of the variables we need are computed, meaning we should
// be able to properly evaluate.
result, err := hil.Eval(root, config)
if err != nil {
return "", err
}
return result.Value, nil
})
}
// Merge merges another RawConfig into this one (overriding any conflicting
// values in this config) and returns a new config. The original config
// is not modified.
func (r *RawConfig) Merge(other *RawConfig) *RawConfig {
r.lock.Lock()
defer r.lock.Unlock()
// Merge the raw configurations
raw := make(map[string]interface{})
for k, v := range r.Raw {
raw[k] = v
}
for k, v := range other.Raw {
raw[k] = v
}
// Create the result
result, err := NewRawConfig(raw)
if err != nil {
panic(err)
}
// Merge the interpolated results
result.config = make(map[string]interface{})
for k, v := range r.config {
result.config[k] = v
}
for k, v := range other.config {
result.config[k] = v
}
// Build the unknown keys
if len(r.unknownKeys) > 0 || len(other.unknownKeys) > 0 {
unknownKeys := make(map[string]struct{})
for _, k := range r.unknownKeys {
unknownKeys[k] = struct{}{}
}
for _, k := range other.unknownKeys {
unknownKeys[k] = struct{}{}
}
result.unknownKeys = make([]string, 0, len(unknownKeys))
for k, _ := range unknownKeys {
result.unknownKeys = append(result.unknownKeys, k)
}
}
return result
}
func (r *RawConfig) init() error {
r.lock.Lock()
defer r.lock.Unlock()
r.config = r.Raw
r.Interpolations = nil
r.Variables = nil
fn := func(node ast.Node) (interface{}, error) {
r.Interpolations = append(r.Interpolations, node)
vars, err := DetectVariables(node)
if err != nil {
return "", err
}
for _, v := range vars {
if r.Variables == nil {
r.Variables = make(map[string]InterpolatedVariable)
}
r.Variables[v.FullKey()] = v
}
return "", nil
}
walker := &interpolationWalker{F: fn}
if err := reflectwalk.Walk(r.Raw, walker); err != nil {
return err
}
return nil
}
func (r *RawConfig) interpolate(fn interpolationWalkerFunc) error {
if r.Body != nil {
// For RawConfigs created for the HCL2 experiment, callers must
// use the HCL2 Body API directly rather than interpolating via
// the RawConfig.
return errors.New("this feature is not yet supported under the HCL2 experiment")
}
config, err := copystructure.Copy(r.Raw)
if err != nil {
return err
}
r.config = config.(map[string]interface{})
w := &interpolationWalker{F: fn, Replace: true}
err = reflectwalk.Walk(r.config, w)
if err != nil {
return err
}
r.unknownKeys = w.unknownKeys
return nil
}
func (r *RawConfig) merge(r2 *RawConfig) *RawConfig {
if r == nil && r2 == nil {
return nil
}
if r == nil {
r = &RawConfig{}
}
rawRaw, err := copystructure.Copy(r.Raw)
if err != nil {
panic(err)
}
raw := rawRaw.(map[string]interface{})
if r2 != nil {
for k, v := range r2.Raw {
raw[k] = v
}
}
result, err := NewRawConfig(raw)
if err != nil {
panic(err)
}
return result
}
// couldBeInteger is a helper that determines if the represented value could
// result in an integer.
//
// This function only works for RawConfigs that have "Key" set, meaning that
// a single result can be produced. Calling this function will overwrite
// the Config and Value results to be a test value.
//
// This function is conservative. If there is some doubt about whether the
// result could be an integer -- for example, if it depends on a variable
// whose type we don't know yet -- it will still return true.
func (r *RawConfig) couldBeInteger() bool {
if r.Key == "" {
// un-keyed RawConfigs can never produce numbers
return false
}
if r.Body == nil {
// Normal path: using the interpolator in this package
// Interpolate with a fixed number to verify that it's a number.
r.interpolate(func(root ast.Node) (interface{}, error) {
// Execute the node but transform the AST so that it returns
// a fixed value of "5" for all interpolations.
result, err := hil.Eval(
hil.FixedValueTransform(
root, &ast.LiteralNode{Value: "5", Typex: ast.TypeString}),
nil)
if err != nil {
return "", err
}
return result.Value, nil
})
_, err := strconv.ParseInt(r.Value().(string), 0, 0)
return err == nil
} else {
// We briefly tried to gradually implement HCL2 support by adding a
// branch here, but that experiment was not successful.
panic("HCL2 experimental path no longer supported")
}
}
// UnknownKeys returns the keys of the configuration that are unknown
// because they had interpolated variables that must be computed.
func (r *RawConfig) UnknownKeys() []string {
r.lock.Lock()
defer r.lock.Unlock()
return r.unknownKeys
}
// See GobEncode
func (r *RawConfig) GobDecode(b []byte) error {
var data gobRawConfig
err := gob.NewDecoder(bytes.NewReader(b)).Decode(&data)
if err != nil {
return err
}
r.Key = data.Key
r.Raw = data.Raw
return r.init()
}
// GobEncode is a custom Gob encoder to use so that we only include the
// raw configuration. Interpolated variables and such are lost and the
// tree of interpolated variables is recomputed on decode, since it is
// referentially transparent.
func (r *RawConfig) GobEncode() ([]byte, error) {
r.lock.Lock()
defer r.lock.Unlock()
data := gobRawConfig{
Key: r.Key,
Raw: r.Raw,
}
var buf bytes.Buffer
if err := gob.NewEncoder(&buf).Encode(data); err != nil {
return nil, err
}
return buf.Bytes(), nil
}
type gobRawConfig struct {
Key string
Raw map[string]interface{}
}

@@ -1,514 +0,0 @@
package configs
import (
"encoding/gob"
"reflect"
"testing"
hcl2 "github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hil/ast"
"github.com/hashicorp/terraform/configs/hcl2shim"
)
func TestNewRawConfig(t *testing.T) {
raw := map[string]interface{}{
"foo": "${var.bar}",
"bar": `${file("boom.txt")}`,
}
rc, err := NewRawConfig(raw)
if err != nil {
t.Fatalf("err: %s", err)
}
if len(rc.Interpolations) != 2 {
t.Fatalf("bad: %#v", rc.Interpolations)
}
if len(rc.Variables) != 1 {
t.Fatalf("bad: %#v", rc.Variables)
}
}
func TestRawConfig_basic(t *testing.T) {
raw := map[string]interface{}{
"foo": "${var.bar}",
}
rc, err := NewRawConfig(raw)
if err != nil {
t.Fatalf("err: %s", err)
}
// Before interpolate, Config() should be the raw
if !reflect.DeepEqual(rc.Config(), raw) {
t.Fatalf("bad: %#v", rc.Config())
}
vars := map[string]ast.Variable{
"var.bar": ast.Variable{
Value: "baz",
Type: ast.TypeString,
},
}
if err := rc.Interpolate(vars); err != nil {
t.Fatalf("err: %s", err)
}
actual := rc.Config()
expected := map[string]interface{}{
"foo": "baz",
}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("bad: %#v", actual)
}
if len(rc.UnknownKeys()) != 0 {
t.Fatalf("bad: %#v", rc.UnknownKeys())
}
}
func TestRawConfig_double(t *testing.T) {
raw := map[string]interface{}{
"foo": "${var.bar}",
}
rc, err := NewRawConfig(raw)
if err != nil {
t.Fatalf("err: %s", err)
}
vars := map[string]ast.Variable{
"var.bar": ast.Variable{
Value: "baz",
Type: ast.TypeString,
},
}
if err := rc.Interpolate(vars); err != nil {
t.Fatalf("err: %s", err)
}
actual := rc.Config()
expected := map[string]interface{}{
"foo": "baz",
}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("bad: %#v", actual)
}
vars = map[string]ast.Variable{
"var.bar": ast.Variable{
Value: "what",
Type: ast.TypeString,
},
}
if err := rc.Interpolate(vars); err != nil {
t.Fatalf("err: %s", err)
}
actual = rc.Config()
expected = map[string]interface{}{
"foo": "what",
}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("bad: %#v", actual)
}
}
func TestRawConfigInterpolate_escaped(t *testing.T) {
raw := map[string]interface{}{
"foo": "bar-$${baz}",
}
rc, err := NewRawConfig(raw)
if err != nil {
t.Fatalf("err: %s", err)
}
// Before interpolate, Config() should be the raw
if !reflect.DeepEqual(rc.Config(), raw) {
t.Fatalf("bad: %#v", rc.Config())
}
if err := rc.Interpolate(nil); err != nil {
t.Fatalf("err: %s", err)
}
actual := rc.Config()
expected := map[string]interface{}{
"foo": "bar-${baz}",
}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("bad: %#v", actual)
}
if len(rc.UnknownKeys()) != 0 {
t.Fatalf("bad: %#v", rc.UnknownKeys())
}
}
func TestRawConfig_merge(t *testing.T) {
raw1 := map[string]interface{}{
"foo": "${var.foo}",
"bar": "${var.bar}",
}
rc1, err := NewRawConfig(raw1)
if err != nil {
t.Fatalf("err: %s", err)
}
{
vars := map[string]ast.Variable{
"var.foo": ast.Variable{
Value: "foovalue",
Type: ast.TypeString,
},
"var.bar": ast.Variable{
Value: "nope",
Type: ast.TypeString,
},
}
if err := rc1.Interpolate(vars); err != nil {
t.Fatalf("err: %s", err)
}
}
raw2 := map[string]interface{}{
"bar": "${var.bar}",
"baz": "${var.baz}",
}
rc2, err := NewRawConfig(raw2)
if err != nil {
t.Fatalf("err: %s", err)
}
{
vars := map[string]ast.Variable{
"var.bar": ast.Variable{
Value: "barvalue",
Type: ast.TypeString,
},
"var.baz": ast.Variable{
Value: hcl2shim.UnknownVariableValue,
Type: ast.TypeUnknown,
},
}
if err := rc2.Interpolate(vars); err != nil {
t.Fatalf("err: %s", err)
}
}
// Merge the two
rc3 := rc1.Merge(rc2)
// Raw should be merged
raw3 := map[string]interface{}{
"foo": "${var.foo}",
"bar": "${var.bar}",
"baz": "${var.baz}",
}
if !reflect.DeepEqual(rc3.Raw, raw3) {
t.Fatalf("bad: %#v", rc3.Raw)
}
actual := rc3.Config()
expected := map[string]interface{}{
"foo": "foovalue",
"bar": "barvalue",
"baz": hcl2shim.UnknownVariableValue,
}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("bad: %#v", actual)
}
expectedKeys := []string{"baz"}
if !reflect.DeepEqual(rc3.UnknownKeys(), expectedKeys) {
t.Fatalf("bad: %#v", rc3.UnknownKeys())
}
}
func TestRawConfig_syntax(t *testing.T) {
raw := map[string]interface{}{
"foo": "${var",
}
if _, err := NewRawConfig(raw); err == nil {
t.Fatal("should error")
}
}
func TestRawConfig_unknown(t *testing.T) {
raw := map[string]interface{}{
"foo": "${var.bar}",
}
rc, err := NewRawConfig(raw)
if err != nil {
t.Fatalf("err: %s", err)
}
vars := map[string]ast.Variable{
"var.bar": ast.Variable{
Value: hcl2shim.UnknownVariableValue,
Type: ast.TypeUnknown,
},
}
if err := rc.Interpolate(vars); err != nil {
t.Fatalf("err: %s", err)
}
actual := rc.Config()
expected := map[string]interface{}{"foo": hcl2shim.UnknownVariableValue}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("bad: %#v", actual)
}
expectedKeys := []string{"foo"}
if !reflect.DeepEqual(rc.UnknownKeys(), expectedKeys) {
t.Fatalf("bad: %#v", rc.UnknownKeys())
}
}
func TestRawConfig_unknownPartial(t *testing.T) {
raw := map[string]interface{}{
"foo": "${var.bar}/32",
}
rc, err := NewRawConfig(raw)
if err != nil {
t.Fatalf("err: %s", err)
}
vars := map[string]ast.Variable{
"var.bar": ast.Variable{
Value: hcl2shim.UnknownVariableValue,
Type: ast.TypeUnknown,
},
}
if err := rc.Interpolate(vars); err != nil {
t.Fatalf("err: %s", err)
}
actual := rc.Config()
expected := map[string]interface{}{"foo": hcl2shim.UnknownVariableValue}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("bad: %#v", actual)
}
expectedKeys := []string{"foo"}
if !reflect.DeepEqual(rc.UnknownKeys(), expectedKeys) {
t.Fatalf("bad: %#v", rc.UnknownKeys())
}
}
func TestRawConfig_unknownPartialList(t *testing.T) {
raw := map[string]interface{}{
"foo": []interface{}{
"${var.bar}/32",
},
}
rc, err := NewRawConfig(raw)
if err != nil {
t.Fatalf("err: %s", err)
}
vars := map[string]ast.Variable{
"var.bar": ast.Variable{
Value: hcl2shim.UnknownVariableValue,
Type: ast.TypeUnknown,
},
}
if err := rc.Interpolate(vars); err != nil {
t.Fatalf("err: %s", err)
}
actual := rc.Config()
expected := map[string]interface{}{"foo": []interface{}{hcl2shim.UnknownVariableValue}}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("bad: %#v", actual)
}
expectedKeys := []string{"foo"}
if !reflect.DeepEqual(rc.UnknownKeys(), expectedKeys) {
t.Fatalf("bad: %#v", rc.UnknownKeys())
}
}
// This tests a race found where we were not maintaining the "slice index"
// accounting properly. The result would be that some computed keys would
// look like they had no slice index when they in fact do. This test is not
// very reliable but it did fail before the fix and passed after.
func TestRawConfig_sliceIndexLoss(t *testing.T) {
raw := map[string]interface{}{
"slice": []map[string]interface{}{
map[string]interface{}{
"foo": []interface{}{"foo/${var.unknown}"},
"bar": []interface{}{"bar"},
},
},
}
vars := map[string]ast.Variable{
"var.unknown": ast.Variable{
Value: hcl2shim.UnknownVariableValue,
Type: ast.TypeUnknown,
},
"var.known": ast.Variable{
Value: "123456",
Type: ast.TypeString,
},
}
// We run it a lot because it's fast and we try to get a race out
for i := 0; i < 50; i++ {
rc, err := NewRawConfig(raw)
if err != nil {
t.Fatalf("err: %s", err)
}
if err := rc.Interpolate(vars); err != nil {
t.Fatalf("err: %s", err)
}
expectedKeys := []string{"slice.0.foo"}
if !reflect.DeepEqual(rc.UnknownKeys(), expectedKeys) {
t.Fatalf("bad: %#v", rc.UnknownKeys())
}
}
}
func TestRawConfigCopy(t *testing.T) {
raw := map[string]interface{}{
"foo": "${var.bar}",
}
rc, err := NewRawConfig(raw)
if err != nil {
t.Fatalf("err: %s", err)
}
rc.Key = "foo"
if rc.Value() != "${var.bar}" {
t.Fatalf("err: %#v", rc.Value())
}
// Interpolate the first one
vars := map[string]ast.Variable{
"var.bar": ast.Variable{
Value: "baz",
Type: ast.TypeString,
},
}
if err := rc.Interpolate(vars); err != nil {
t.Fatalf("err: %s", err)
}
if rc.Value() != "baz" {
t.Fatalf("bad: %#v", rc.Value())
}
// Copy and interpolate
{
rc2 := rc.Copy()
if rc2.Value() != "${var.bar}" {
t.Fatalf("err: %#v", rc2.Value())
}
vars := map[string]ast.Variable{
"var.bar": ast.Variable{
Value: "qux",
Type: ast.TypeString,
},
}
if err := rc2.Interpolate(vars); err != nil {
t.Fatalf("err: %s", err)
}
if rc2.Value() != "qux" {
t.Fatalf("bad: %#v", rc2.Value())
}
}
}
func TestRawConfigCopyHCL2(t *testing.T) {
rc := NewRawConfigHCL2(hcl2.EmptyBody())
rc2 := rc.Copy()
if rc.Body == nil {
t.Errorf("RawConfig copy has a nil Body")
}
if rc2.Raw != nil {
t.Errorf("RawConfig copy got a non-nil Raw")
}
}
func TestRawConfigValue(t *testing.T) {
raw := map[string]interface{}{
"foo": "${var.bar}",
}
rc, err := NewRawConfig(raw)
if err != nil {
t.Fatalf("err: %s", err)
}
rc.Key = ""
if rc.Value() != nil {
t.Fatalf("bad: %#v", rc.Value())
}
rc.Key = "foo"
if rc.Value() != "${var.bar}" {
t.Fatalf("err: %#v", rc.Value())
}
vars := map[string]ast.Variable{
"var.bar": ast.Variable{
Value: "baz",
Type: ast.TypeString,
},
}
if err := rc.Interpolate(vars); err != nil {
t.Fatalf("err: %s", err)
}
if rc.Value() != "baz" {
t.Fatalf("bad: %#v", rc.Value())
}
}
func TestRawConfig_implGob(t *testing.T) {
var _ gob.GobDecoder = new(RawConfig)
var _ gob.GobEncoder = new(RawConfig)
}
// verify that RawMap returns an identical copy
func TestNewRawConfig_rawMap(t *testing.T) {
raw := map[string]interface{}{
"foo": "${var.bar}",
"bar": `${file("boom.txt")}`,
}
rc, err := NewRawConfig(raw)
if err != nil {
t.Fatalf("err: %s", err)
}
rawCopy := rc.RawMap()
if !reflect.DeepEqual(raw, rawCopy) {
t.Fatalf("bad: %#v", rawCopy)
}
// make sure they aren't the same map
raw["test"] = "value"
if reflect.DeepEqual(raw, rawCopy) {
t.Fatal("RawMap() didn't return a copy")
}
}

go.mod

@@ -22,7 +22,6 @@ require (
github.com/aws/aws-sdk-go v1.31.9
github.com/baiyubin/aliyun-sts-go-sdk v0.0.0-20180326062324-cfa1a18b161f // indirect
github.com/bgentry/speakeasy v0.1.0
github.com/blang/semver v3.5.1+incompatible
github.com/bmatcuk/doublestar v1.1.5
github.com/boltdb/bolt v1.3.1 // indirect
github.com/chzyer/logex v1.1.10 // indirect
@@ -71,7 +70,6 @@ require (
github.com/hashicorp/go-version v1.2.0
github.com/hashicorp/hcl v0.0.0-20170504190234-a4b07c25de5f
github.com/hashicorp/hcl/v2 v2.6.0
github.com/hashicorp/hil v0.0.0-20190212112733-ab17b08d6590
github.com/hashicorp/memberlist v0.1.0 // indirect
github.com/hashicorp/serf v0.0.0-20160124182025-e4ec8cc423bb // indirect
github.com/hashicorp/terraform-config-inspect v0.0.0-20191212124732-c6ae6269b9d7
@@ -101,7 +99,6 @@ require (
github.com/mitchellh/go-linereader v0.0.0-20190213213312-1b945b3263eb
github.com/mitchellh/go-wordwrap v1.0.0
github.com/mitchellh/gox v1.0.1
github.com/mitchellh/hashstructure v1.0.0
github.com/mitchellh/mapstructure v1.1.2
github.com/mitchellh/panicwrap v1.0.0
github.com/mitchellh/prefixedio v0.0.0-20190213213902-5733675afd51

go.sum

@@ -94,8 +94,6 @@ github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d h1:xDfNPAt8lFiC1U
github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ00z/TKoufEY6K/a0k6AhaJrQKdFe6OfVXsa4=
github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY=
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/blang/semver v3.5.1+incompatible h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ=
github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=
github.com/bmatcuk/doublestar v1.1.5 h1:2bNwBOmhyFEFcoB3tGvTD5xanq+4kyOZlB8wFYbMjkk=
github.com/bmatcuk/doublestar v1.1.5/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE=
github.com/boltdb/bolt v1.3.1 h1:JQmyP4ZBrce+ZQu0dY660FMfatumYDLun9hBCUVIkF4=
@@ -279,8 +277,6 @@ github.com/hashicorp/hcl v0.0.0-20170504190234-a4b07c25de5f/go.mod h1:oZtUIOe8dh
github.com/hashicorp/hcl/v2 v2.0.0/go.mod h1:oVVDG71tEinNGYCxinCYadcmKU9bglqW9pV3txagJ90=
github.com/hashicorp/hcl/v2 v2.6.0 h1:3krZOfGY6SziUXa6H9PJU6TyohHn7I+ARYnhbeNBz+o=
github.com/hashicorp/hcl/v2 v2.6.0/go.mod h1:bQTN5mpo+jewjJgh8jr0JUguIi7qPHUF6yIfAEN3jqY=
github.com/hashicorp/hil v0.0.0-20190212112733-ab17b08d6590 h1:2yzhWGdgQUWZUCNK+AoO35V+HTsgEmcM4J9IkArh7PI=
github.com/hashicorp/hil v0.0.0-20190212112733-ab17b08d6590/go.mod h1:n2TSygSNwsLJ76m8qFXTSc7beTb+auJxYdqrnoqwZWE=
github.com/hashicorp/memberlist v0.1.0 h1:qSsCiC0WYD39lbSitKNt40e30uorm2Ss/d4JGU1hzH8=
github.com/hashicorp/memberlist v0.1.0/go.mod h1:ncdBp14cuox2iFOq3kDiquKU6fqsTBc3W6JvZwjxxsE=
github.com/hashicorp/serf v0.0.0-20160124182025-e4ec8cc423bb h1:ZbgmOQt8DOg796figP87/EFCVx2v2h9yRvwHF/zceX4=
@@ -385,8 +381,6 @@ github.com/mitchellh/go-wordwrap v1.0.0 h1:6GlHJ/LTGMrIJbwgdqdl2eEH8o+Exx/0m8ir9
github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo=
github.com/mitchellh/gox v1.0.1 h1:x0jD3dcHk9a9xPSDN6YEL4xL6Qz0dvNYm8yZqui5chI=
github.com/mitchellh/gox v1.0.1/go.mod h1:ED6BioOGXMswlXa2zxfh/xdd5QhwYliBFn9V18Ap4z4=
github.com/mitchellh/hashstructure v1.0.0 h1:ZkRJX1CyOoTkar7p/mLS5TZU4nJ1Rn/F8u9dGS02Q3Y=
github.com/mitchellh/hashstructure v1.0.0/go.mod h1:QjSHrPWS+BGUVBYkbTZWEnOh3G1DutKwClXU/ABz6AQ=
github.com/mitchellh/iochan v1.0.0 h1:C+X3KsSTLFVBr/tK1eYN/vs4rJcvsiLU338UhYPJWeY=
github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY=
github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE=

@@ -11,8 +11,6 @@ import (
"strings"
"testing"
"github.com/hashicorp/hil"
"github.com/hashicorp/hil/ast"
"github.com/hashicorp/terraform/configs/hcl2shim"
"github.com/hashicorp/terraform/helper/hashcode"
"github.com/hashicorp/terraform/terraform"
@@ -127,11 +125,6 @@ func TestValueType_Zero(t *testing.T) {
}
}
func interfaceToVariableSwallowError(input interface{}) ast.Variable {
variable, _ := hil.InterfaceToVariable(input)
return variable
}
func TestSchemaMap_Diff(t *testing.T) {
cases := []struct {
Name string

@@ -12,7 +12,6 @@ import (
"github.com/zclconf/go-cty/cty"
"github.com/hashicorp/terraform/addrs"
"github.com/hashicorp/terraform/configs"
"github.com/hashicorp/terraform/configs/configschema"
"github.com/hashicorp/terraform/configs/hcl2shim"
)
@@ -185,15 +184,6 @@ type ResourceConfig struct {
ComputedKeys []string
Raw map[string]interface{}
Config map[string]interface{}
raw *configs.RawConfig
}
// NewResourceConfig creates a new ResourceConfig from a configs.RawConfig.
func NewResourceConfig(c *configs.RawConfig) *ResourceConfig {
result := &ResourceConfig{raw: c}
result.interpolateForce()
return result
}
// NewResourceConfigRaw constructs a ResourceConfig whose content is exactly
@@ -512,31 +502,6 @@ func (c *ResourceConfig) get(
return current, true
}
// interpolateForce is a temporary thing. We want to get rid of interpolate
// above and likewise this, but it can only be done after the f-ast-graph
// refactor is complete.
func (c *ResourceConfig) interpolateForce() {
if c.raw == nil {
// If we don't have a lowercase "raw" but we _do_ have the uppercase
// Raw populated then this indicates that we're receiving a shim
// ResourceConfig created by NewResourceConfigShimmed, which is already
// fully evaluated and thus this function doesn't need to do anything.
if c.Raw != nil {
return
}
var err error
c.raw, err = configs.NewRawConfig(make(map[string]interface{}))
if err != nil {
panic(err)
}
}
c.ComputedKeys = c.raw.UnknownKeys()
c.Raw = c.raw.RawMap()
c.Config = c.raw.Config()
}
// unknownCheckWalker
type unknownCheckWalker struct {
Unknown bool

@@ -1,21 +0,0 @@
language: go
matrix:
include:
- go: 1.4.3
- go: 1.5.4
- go: 1.6.3
- go: 1.7
- go: tip
allow_failures:
- go: tip
install:
- go get golang.org/x/tools/cmd/cover
- go get github.com/mattn/goveralls
script:
- echo "Test and track coverage" ; $HOME/gopath/bin/goveralls -package "." -service=travis-ci
-repotoken $COVERALLS_TOKEN
- echo "Build examples" ; cd examples && go build
- echo "Check if gofmt'd" ; diff -u <(echo -n) <(gofmt -d -s .)
env:
global:
secure: HroGEAUQpVq9zX1b1VIkraLiywhGbzvNnTZq2TMxgK7JHP8xqNplAeF1izrR2i4QLL9nsY+9WtYss4QuPvEtZcVHUobw6XnL6radF7jS1LgfYZ9Y7oF+zogZ2I5QUMRLGA7rcxQ05s7mKq3XZQfeqaNts4bms/eZRefWuaFZbkw=

@@ -1,22 +0,0 @@
The MIT License
Copyright (c) 2014 Benedikt Lang <github at benediktlang.de>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

@@ -1,194 +0,0 @@
semver for golang [![Build Status](https://travis-ci.org/blang/semver.svg?branch=master)](https://travis-ci.org/blang/semver) [![GoDoc](https://godoc.org/github.com/blang/semver?status.png)](https://godoc.org/github.com/blang/semver) [![Coverage Status](https://img.shields.io/coveralls/blang/semver.svg)](https://coveralls.io/r/blang/semver?branch=master)
======
semver is a [Semantic Versioning](http://semver.org/) library written in golang. It fully covers spec version `2.0.0`.
Usage
-----
```bash
$ go get github.com/blang/semver
```
Note: Always vendor your dependencies or fix on a specific version tag.
```go
import "github.com/blang/semver"
v1, err := semver.Make("1.0.0-beta")
v2, err := semver.Make("2.0.0-beta")
v1.Compare(v2)
```
Also check the [GoDocs](http://godoc.org/github.com/blang/semver).
Why should I use this lib?
-----
- Fully spec compatible
- No reflection
- No regex
- Fully tested (Coverage >99%)
- Readable parsing/validation errors
- Fast (See [Benchmarks](#benchmarks))
- Only Stdlib
- Uses values instead of pointers
- Many features, see below
Features
-----
- Parsing and validation at all levels
- Comparator-like comparisons
- Compare Helper Methods
- InPlace manipulation
- Ranges `>=1.0.0 <2.0.0 || >=3.0.0 !3.0.1-beta.1`
- Wildcards `>=1.x`, `<=2.5.x`
- Sortable (implements sort.Interface)
- database/sql compatible (sql.Scanner/Valuer)
- encoding/json compatible (json.Marshaler/Unmarshaler)
Ranges
------
A `Range` is a set of conditions which specify which versions satisfy the range.
A condition is composed of an operator and a version. The supported operators are:
- `<1.0.0` Less than `1.0.0`
- `<=1.0.0` Less than or equal to `1.0.0`
- `>1.0.0` Greater than `1.0.0`
- `>=1.0.0` Greater than or equal to `1.0.0`
- `1.0.0`, `=1.0.0`, `==1.0.0` Equal to `1.0.0`
- `!1.0.0`, `!=1.0.0` Not equal to `1.0.0`. Excludes version `1.0.0`.
Note that spaces between the operator and the version will be gracefully tolerated.
A `Range` can combine multiple conditions, separated by spaces:
Ranges can be linked by logical AND:
- `>1.0.0 <2.0.0` would match between both ranges, so `1.1.1` and `1.8.7` but not `1.0.0` or `2.0.0`
- `>1.0.0 <3.0.0 !2.0.3-beta.2` would match every version between `1.0.0` and `3.0.0` except `2.0.3-beta.2`
Ranges can also be linked by logical OR:
- `<2.0.0 || >=3.0.0` would match `1.x.x` and `3.x.x` but not `2.x.x`
AND has a higher precedence than OR. It's not possible to use brackets.
Ranges can be combined by both AND and OR
- `>1.0.0 <2.0.0 || >3.0.0 !4.2.1` would match `1.2.3`, `1.9.9`, `3.1.1`, but not `4.2.1`, `2.1.1`
Range usage:
```go
v, err := semver.Parse("1.2.3")
validRange, err := semver.ParseRange(">1.0.0 <2.0.0 || >=3.0.0")
if validRange(v) {
	// valid
}
```
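The wildcard forms listed under Features work the same way; here is a minimal, self-contained sketch (the expansion behaviour assumed here, e.g. `>=1.x` acting like `>=1.0.0`, follows the rules documented in this package's range parsing code):
```go
package main

import (
	"fmt"

	"github.com/blang/semver"
)

func main() {
	// ">=1.x" expands to ">=1.0.0" and "<=2.5.x" expands to "<2.6.0".
	wildcardRange, err := semver.ParseRange(">=1.x <=2.5.x")
	if err != nil {
		panic(err)
	}
	fmt.Println(wildcardRange(semver.MustParse("2.4.9"))) // true
	fmt.Println(wildcardRange(semver.MustParse("2.6.0"))) // false
}
```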
Example
-----
Have a look at full examples in [examples/main.go](examples/main.go)
```go
import "github.com/blang/semver"
v, err := semver.Make("0.0.1-alpha.preview+123.github")
fmt.Printf("Major: %d\n", v.Major)
fmt.Printf("Minor: %d\n", v.Minor)
fmt.Printf("Patch: %d\n", v.Patch)
fmt.Printf("Pre: %s\n", v.Pre)
fmt.Printf("Build: %s\n", v.Build)
// Prerelease versions array
if len(v.Pre) > 0 {
fmt.Println("Prerelease versions:")
for i, pre := range v.Pre {
fmt.Printf("%d: %q\n", i, pre)
}
}
// Build meta data array
if len(v.Build) > 0 {
fmt.Println("Build meta data:")
for i, build := range v.Build {
fmt.Printf("%d: %q\n", i, build)
}
}
v001, err := semver.Make("0.0.1")
// Compare using helpers: v.GT(v2), v.LT, v.GTE, v.LTE
v001.GT(v)  // == true
v.LT(v001)  // == true
v.GTE(v)    // == true
v.LTE(v)    // == true
// Or use v.Compare(v2) for comparisons (-1, 0, 1):
v001.Compare(v)  // == 1
v.Compare(v001)  // == -1
v.Compare(v)     // == 0
// Manipulate Version in place:
v.Pre[0], err = semver.NewPRVersion("beta")
if err != nil {
fmt.Printf("Error parsing pre release version: %q", err)
}
fmt.Println("\nValidate versions:")
v.Build[0] = "?"
err = v.Validate()
if err != nil {
fmt.Printf("Validation failed: %s\n", err)
}
```
Benchmarks
-----
BenchmarkParseSimple-4 5000000 390 ns/op 48 B/op 1 allocs/op
BenchmarkParseComplex-4 1000000 1813 ns/op 256 B/op 7 allocs/op
BenchmarkParseAverage-4 1000000 1171 ns/op 163 B/op 4 allocs/op
BenchmarkStringSimple-4 20000000 119 ns/op 16 B/op 1 allocs/op
BenchmarkStringLarger-4 10000000 206 ns/op 32 B/op 2 allocs/op
BenchmarkStringComplex-4 5000000 324 ns/op 80 B/op 3 allocs/op
BenchmarkStringAverage-4 5000000 273 ns/op 53 B/op 2 allocs/op
BenchmarkValidateSimple-4 200000000 9.33 ns/op 0 B/op 0 allocs/op
BenchmarkValidateComplex-4 3000000 469 ns/op 0 B/op 0 allocs/op
BenchmarkValidateAverage-4 5000000 256 ns/op 0 B/op 0 allocs/op
BenchmarkCompareSimple-4 100000000 11.8 ns/op 0 B/op 0 allocs/op
BenchmarkCompareComplex-4 50000000 30.8 ns/op 0 B/op 0 allocs/op
BenchmarkCompareAverage-4 30000000 41.5 ns/op 0 B/op 0 allocs/op
BenchmarkSort-4 3000000 419 ns/op 256 B/op 2 allocs/op
BenchmarkRangeParseSimple-4 2000000 850 ns/op 192 B/op 5 allocs/op
BenchmarkRangeParseAverage-4 1000000 1677 ns/op 400 B/op 10 allocs/op
BenchmarkRangeParseComplex-4 300000 5214 ns/op 1440 B/op 30 allocs/op
BenchmarkRangeMatchSimple-4 50000000 25.6 ns/op 0 B/op 0 allocs/op
BenchmarkRangeMatchAverage-4 30000000 56.4 ns/op 0 B/op 0 allocs/op
BenchmarkRangeMatchComplex-4 10000000 153 ns/op 0 B/op 0 allocs/op
See benchmark cases at [semver_test.go](semver_test.go)
Motivation
-----
I simply couldn't find any lib supporting the full spec. Others were just wrong or used reflection and regex, which I don't like.
Contribution
-----
Feel free to make a pull request. For bigger changes, create an issue first to discuss it.
License
-----
See [LICENSE](LICENSE) file.

View File

@ -1,23 +0,0 @@
package semver
import (
"encoding/json"
)
// MarshalJSON implements the encoding/json.Marshaler interface.
func (v Version) MarshalJSON() ([]byte, error) {
return json.Marshal(v.String())
}
// UnmarshalJSON implements the encoding/json.Unmarshaler interface.
func (v *Version) UnmarshalJSON(data []byte) (err error) {
var versionString string
if err = json.Unmarshal(data, &versionString); err != nil {
return
}
*v, err = Parse(versionString)
return
}
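A brief usage sketch for these two methods; the `release` struct and its field names are made up for illustration:
```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/blang/semver"
)

type release struct {
	Name    string         `json:"name"`
	Version semver.Version `json:"version"`
}

func main() {
	// MarshalJSON encodes the version as its canonical string form.
	out, _ := json.Marshal(release{Name: "demo", Version: semver.MustParse("1.2.3-beta+001")})
	fmt.Println(string(out)) // {"name":"demo","version":"1.2.3-beta+001"}

	// UnmarshalJSON parses the string form back into a Version.
	var r release
	_ = json.Unmarshal(out, &r)
	fmt.Println(r.Version.Minor) // 2
}
```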

View File

@ -1,17 +0,0 @@
{
"author": "blang",
"bugs": {
"URL": "https://github.com/blang/semver/issues",
"url": "https://github.com/blang/semver/issues"
},
"gx": {
"dvcsimport": "github.com/blang/semver"
},
"gxVersion": "0.10.0",
"language": "go",
"license": "MIT",
"name": "semver",
"releaseCmd": "git commit -a -m \"gx publish $VERSION\"",
"version": "3.5.1"
}

View File

@ -1,416 +0,0 @@
package semver
import (
"fmt"
"strconv"
"strings"
"unicode"
)
type wildcardType int
const (
noneWildcard wildcardType = iota
majorWildcard wildcardType = 1
minorWildcard wildcardType = 2
patchWildcard wildcardType = 3
)
func wildcardTypefromInt(i int) wildcardType {
switch i {
case 1:
return majorWildcard
case 2:
return minorWildcard
case 3:
return patchWildcard
default:
return noneWildcard
}
}
type comparator func(Version, Version) bool
var (
compEQ comparator = func(v1 Version, v2 Version) bool {
return v1.Compare(v2) == 0
}
compNE = func(v1 Version, v2 Version) bool {
return v1.Compare(v2) != 0
}
compGT = func(v1 Version, v2 Version) bool {
return v1.Compare(v2) == 1
}
compGE = func(v1 Version, v2 Version) bool {
return v1.Compare(v2) >= 0
}
compLT = func(v1 Version, v2 Version) bool {
return v1.Compare(v2) == -1
}
compLE = func(v1 Version, v2 Version) bool {
return v1.Compare(v2) <= 0
}
)
type versionRange struct {
v Version
c comparator
}
// rangeFunc creates a Range from the given versionRange.
func (vr *versionRange) rangeFunc() Range {
return Range(func(v Version) bool {
return vr.c(v, vr.v)
})
}
// Range represents a range of versions.
// A Range can be used to check if a Version satisfies it:
//
// r, err := semver.ParseRange(">1.0.0 <2.0.0")
// r(semver.MustParse("1.1.1")) // returns true
type Range func(Version) bool
// OR combines the existing Range with another Range using logical OR.
func (rf Range) OR(f Range) Range {
return Range(func(v Version) bool {
return rf(v) || f(v)
})
}
// AND combines the existing Range with another Range using logical AND.
func (rf Range) AND(f Range) Range {
return Range(func(v Version) bool {
return rf(v) && f(v)
})
}
// ParseRange parses a range and returns a Range.
// If the range could not be parsed an error is returned.
//
// Valid ranges are:
// - "<1.0.0"
// - "<=1.0.0"
// - ">1.0.0"
// - ">=1.0.0"
// - "1.0.0", "=1.0.0", "==1.0.0"
// - "!1.0.0", "!=1.0.0"
//
// A Range can consist of multiple ranges separated by space:
// Ranges can be linked by logical AND:
// - ">1.0.0 <2.0.0" would match between both ranges, so "1.1.1" and "1.8.7" but not "1.0.0" or "2.0.0"
// - ">1.0.0 <3.0.0 !2.0.3-beta.2" would match every version between 1.0.0 and 3.0.0 except 2.0.3-beta.2
//
// Ranges can also be linked by logical OR:
// - "<2.0.0 || >=3.0.0" would match "1.x.x" and "3.x.x" but not "2.x.x"
//
// AND has a higher precedence than OR. It's not possible to use brackets.
//
// Ranges can be combined by both AND and OR
//
// - `>1.0.0 <2.0.0 || >3.0.0 !4.2.1` would match `1.2.3`, `1.9.9`, `3.1.1`, but not `4.2.1`, `2.1.1`
func ParseRange(s string) (Range, error) {
parts := splitAndTrim(s)
orParts, err := splitORParts(parts)
if err != nil {
return nil, err
}
expandedParts, err := expandWildcardVersion(orParts)
if err != nil {
return nil, err
}
var orFn Range
for _, p := range expandedParts {
var andFn Range
for _, ap := range p {
opStr, vStr, err := splitComparatorVersion(ap)
if err != nil {
return nil, err
}
vr, err := buildVersionRange(opStr, vStr)
if err != nil {
return nil, fmt.Errorf("Could not parse Range %q: %s", ap, err)
}
rf := vr.rangeFunc()
// Set function
if andFn == nil {
andFn = rf
} else { // Combine with existing function
andFn = andFn.AND(rf)
}
}
if orFn == nil {
orFn = andFn
} else {
orFn = orFn.OR(andFn)
}
}
return orFn, nil
}
// splitORParts splits the already cleaned parts by '||'.
// Checks for invalid positions of the operator and returns an
// error if found.
func splitORParts(parts []string) ([][]string, error) {
var ORparts [][]string
last := 0
for i, p := range parts {
if p == "||" {
if i == 0 {
return nil, fmt.Errorf("First element in range is '||'")
}
ORparts = append(ORparts, parts[last:i])
last = i + 1
}
}
if last == len(parts) {
return nil, fmt.Errorf("Last element in range is '||'")
}
ORparts = append(ORparts, parts[last:])
return ORparts, nil
}
// buildVersionRange takes an operator and a version string
// and builds a versionRange, or returns an error.
func buildVersionRange(opStr, vStr string) (*versionRange, error) {
c := parseComparator(opStr)
if c == nil {
return nil, fmt.Errorf("Could not parse comparator %q in %q", opStr, strings.Join([]string{opStr, vStr}, ""))
}
v, err := Parse(vStr)
if err != nil {
return nil, fmt.Errorf("Could not parse version %q in %q: %s", vStr, strings.Join([]string{opStr, vStr}, ""), err)
}
return &versionRange{
v: v,
c: c,
}, nil
}
// inArray checks if a byte is contained in an array of bytes
func inArray(s byte, list []byte) bool {
for _, el := range list {
if el == s {
return true
}
}
return false
}
// splitAndTrim splits a range string by spaces and cleans whitespaces
func splitAndTrim(s string) (result []string) {
last := 0
var lastChar byte
excludeFromSplit := []byte{'>', '<', '='}
for i := 0; i < len(s); i++ {
if s[i] == ' ' && !inArray(lastChar, excludeFromSplit) {
if last < i-1 {
result = append(result, s[last:i])
}
last = i + 1
} else if s[i] != ' ' {
lastChar = s[i]
}
}
if last < len(s)-1 {
result = append(result, s[last:])
}
for i, v := range result {
result[i] = strings.Replace(v, " ", "", -1)
}
// parts := strings.Split(s, " ")
// for _, x := range parts {
// if s := strings.TrimSpace(x); len(s) != 0 {
// result = append(result, s)
// }
// }
return
}
// splitComparatorVersion splits the comparator from the version.
// Input must be free of leading or trailing spaces.
func splitComparatorVersion(s string) (string, string, error) {
i := strings.IndexFunc(s, unicode.IsDigit)
if i == -1 {
return "", "", fmt.Errorf("Could not get version from string: %q", s)
}
return strings.TrimSpace(s[0:i]), s[i:], nil
}
// getWildcardType will return the type of wildcard that the
// passed version contains
func getWildcardType(vStr string) wildcardType {
parts := strings.Split(vStr, ".")
nparts := len(parts)
wildcard := parts[nparts-1]
possibleWildcardType := wildcardTypefromInt(nparts)
if wildcard == "x" {
return possibleWildcardType
}
return noneWildcard
}
// createVersionFromWildcard will convert a wildcard version
// into a regular version, replacing 'x's with '0's, handling
// special cases like '1.x.x' and '1.x'
func createVersionFromWildcard(vStr string) string {
// handle 1.x.x
vStr2 := strings.Replace(vStr, ".x.x", ".x", 1)
vStr2 = strings.Replace(vStr2, ".x", ".0", 1)
parts := strings.Split(vStr2, ".")
// handle 1.x
if len(parts) == 2 {
return vStr2 + ".0"
}
return vStr2
}
// incrementMajorVersion will increment the major version
// of the passed version
func incrementMajorVersion(vStr string) (string, error) {
parts := strings.Split(vStr, ".")
i, err := strconv.Atoi(parts[0])
if err != nil {
return "", err
}
parts[0] = strconv.Itoa(i + 1)
return strings.Join(parts, "."), nil
}
// incrementMinorVersion will increment the minor version
// of the passed version
func incrementMinorVersion(vStr string) (string, error) {
parts := strings.Split(vStr, ".")
i, err := strconv.Atoi(parts[1])
if err != nil {
return "", err
}
parts[1] = strconv.Itoa(i + 1)
return strings.Join(parts, "."), nil
}
// expandWildcardVersion will expand wildcards inside versions
// following these rules:
//
// * when dealing with patch wildcards:
// >= 1.2.x will become >= 1.2.0
// <= 1.2.x will become < 1.3.0
// > 1.2.x will become >= 1.3.0
// < 1.2.x will become < 1.2.0
// != 1.2.x will become < 1.2.0 >= 1.3.0
//
// * when dealing with minor wildcards:
// >= 1.x will become >= 1.0.0
// <= 1.x will become < 2.0.0
// > 1.x will become >= 2.0.0
// < 1.x will become < 1.0.0
// != 1.x will become < 1.0.0 >= 2.0.0
//
// * when dealing with wildcards without
// version operator:
// 1.2.x will become >= 1.2.0 < 1.3.0
// 1.x will become >= 1.0.0 < 2.0.0
func expandWildcardVersion(parts [][]string) ([][]string, error) {
var expandedParts [][]string
for _, p := range parts {
var newParts []string
for _, ap := range p {
if strings.Index(ap, "x") != -1 {
opStr, vStr, err := splitComparatorVersion(ap)
if err != nil {
return nil, err
}
versionWildcardType := getWildcardType(vStr)
flatVersion := createVersionFromWildcard(vStr)
var resultOperator string
var shouldIncrementVersion bool
switch opStr {
case ">":
resultOperator = ">="
shouldIncrementVersion = true
case ">=":
resultOperator = ">="
case "<":
resultOperator = "<"
case "<=":
resultOperator = "<"
shouldIncrementVersion = true
case "", "=", "==":
newParts = append(newParts, ">="+flatVersion)
resultOperator = "<"
shouldIncrementVersion = true
case "!=", "!":
newParts = append(newParts, "<"+flatVersion)
resultOperator = ">="
shouldIncrementVersion = true
}
var resultVersion string
if shouldIncrementVersion {
switch versionWildcardType {
case patchWildcard:
resultVersion, _ = incrementMinorVersion(flatVersion)
case minorWildcard:
resultVersion, _ = incrementMajorVersion(flatVersion)
}
} else {
resultVersion = flatVersion
}
ap = resultOperator + resultVersion
}
newParts = append(newParts, ap)
}
expandedParts = append(expandedParts, newParts)
}
return expandedParts, nil
}
func parseComparator(s string) comparator {
switch s {
case "==":
fallthrough
case "":
fallthrough
case "=":
return compEQ
case ">":
return compGT
case ">=":
return compGE
case "<":
return compLT
case "<=":
return compLE
case "!":
fallthrough
case "!=":
return compNE
}
return nil
}
// MustParseRange is like ParseRange but panics if the range cannot be parsed.
func MustParseRange(s string) Range {
r, err := ParseRange(s)
if err != nil {
panic(`semver: ParseRange(` + s + `): ` + err.Error())
}
return r
}
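A small sketch of composing ranges programmatically with the `AND`/`OR` combinators defined above; the version numbers are illustrative:
```go
package main

import (
	"fmt"

	"github.com/blang/semver"
)

func main() {
	stable := semver.MustParseRange(">=1.0.0 <2.0.0")
	backport := semver.MustParseRange(">=0.9.0 <0.10.0")

	// supported matches anything in either range.
	supported := stable.OR(backport)
	fmt.Println(supported(semver.MustParse("0.9.3"))) // true
	fmt.Println(supported(semver.MustParse("2.0.0"))) // false
}
```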

View File

@ -1,418 +0,0 @@
package semver
import (
"errors"
"fmt"
"strconv"
"strings"
)
const (
numbers string = "0123456789"
alphas = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-"
alphanum = alphas + numbers
)
// SpecVersion is the latest fully supported spec version of semver
var SpecVersion = Version{
Major: 2,
Minor: 0,
Patch: 0,
}
// Version represents a semver compatible version
type Version struct {
Major uint64
Minor uint64
Patch uint64
Pre []PRVersion
Build []string //No Precedence
}
// Version to string
func (v Version) String() string {
b := make([]byte, 0, 5)
b = strconv.AppendUint(b, v.Major, 10)
b = append(b, '.')
b = strconv.AppendUint(b, v.Minor, 10)
b = append(b, '.')
b = strconv.AppendUint(b, v.Patch, 10)
if len(v.Pre) > 0 {
b = append(b, '-')
b = append(b, v.Pre[0].String()...)
for _, pre := range v.Pre[1:] {
b = append(b, '.')
b = append(b, pre.String()...)
}
}
if len(v.Build) > 0 {
b = append(b, '+')
b = append(b, v.Build[0]...)
for _, build := range v.Build[1:] {
b = append(b, '.')
b = append(b, build...)
}
}
return string(b)
}
// Equals checks if v is equal to o.
func (v Version) Equals(o Version) bool {
return (v.Compare(o) == 0)
}
// EQ checks if v is equal to o.
func (v Version) EQ(o Version) bool {
return (v.Compare(o) == 0)
}
// NE checks if v is not equal to o.
func (v Version) NE(o Version) bool {
return (v.Compare(o) != 0)
}
// GT checks if v is greater than o.
func (v Version) GT(o Version) bool {
return (v.Compare(o) == 1)
}
// GTE checks if v is greater than or equal to o.
func (v Version) GTE(o Version) bool {
return (v.Compare(o) >= 0)
}
// GE checks if v is greater than or equal to o.
func (v Version) GE(o Version) bool {
return (v.Compare(o) >= 0)
}
// LT checks if v is less than o.
func (v Version) LT(o Version) bool {
return (v.Compare(o) == -1)
}
// LTE checks if v is less than or equal to o.
func (v Version) LTE(o Version) bool {
return (v.Compare(o) <= 0)
}
// LE checks if v is less than or equal to o.
func (v Version) LE(o Version) bool {
return (v.Compare(o) <= 0)
}
// Compare compares Versions v to o:
// -1 == v is less than o
// 0 == v is equal to o
// 1 == v is greater than o
func (v Version) Compare(o Version) int {
if v.Major != o.Major {
if v.Major > o.Major {
return 1
}
return -1
}
if v.Minor != o.Minor {
if v.Minor > o.Minor {
return 1
}
return -1
}
if v.Patch != o.Patch {
if v.Patch > o.Patch {
return 1
}
return -1
}
// Quick comparison if a version has no prerelease versions
if len(v.Pre) == 0 && len(o.Pre) == 0 {
return 0
} else if len(v.Pre) == 0 && len(o.Pre) > 0 {
return 1
} else if len(v.Pre) > 0 && len(o.Pre) == 0 {
return -1
}
i := 0
for ; i < len(v.Pre) && i < len(o.Pre); i++ {
if comp := v.Pre[i].Compare(o.Pre[i]); comp == 0 {
continue
} else if comp == 1 {
return 1
} else {
return -1
}
}
// If all compared prerelease identifiers are equal, the version with more of them is greater
if i == len(v.Pre) && i == len(o.Pre) {
return 0
} else if i == len(v.Pre) && i < len(o.Pre) {
return -1
} else {
return 1
}
}
// Validate validates v and returns error in case
func (v Version) Validate() error {
// Major, Minor, Patch already validated using uint64
for _, pre := range v.Pre {
if !pre.IsNum { //Numeric prerelease versions already uint64
if len(pre.VersionStr) == 0 {
return fmt.Errorf("Prerelease can not be empty %q", pre.VersionStr)
}
if !containsOnly(pre.VersionStr, alphanum) {
return fmt.Errorf("Invalid character(s) found in prerelease %q", pre.VersionStr)
}
}
}
for _, build := range v.Build {
if len(build) == 0 {
return fmt.Errorf("Build meta data can not be empty %q", build)
}
if !containsOnly(build, alphanum) {
return fmt.Errorf("Invalid character(s) found in build meta data %q", build)
}
}
return nil
}
// New is like Parse but returns a pointer: it parses the version string and returns a pointer to the validated Version, or an error
func New(s string) (vp *Version, err error) {
v, err := Parse(s)
vp = &v
return
}
// Make is an alias for Parse, parses version string and returns a validated Version or error
func Make(s string) (Version, error) {
return Parse(s)
}
// ParseTolerant allows for certain version specifications that do not strictly adhere to semver
// specs to be parsed by this library. It does so by normalizing versions before passing them to
// Parse(). It currently trims spaces, removes a "v" prefix, and adds a 0 patch number to versions
// with only major and minor components specified
func ParseTolerant(s string) (Version, error) {
s = strings.TrimSpace(s)
s = strings.TrimPrefix(s, "v")
// Split into major.minor.(patch+pr+meta)
parts := strings.SplitN(s, ".", 3)
if len(parts) < 3 {
if strings.ContainsAny(parts[len(parts)-1], "+-") {
return Version{}, errors.New("Short version cannot contain PreRelease/Build meta data")
}
for len(parts) < 3 {
parts = append(parts, "0")
}
s = strings.Join(parts, ".")
}
return Parse(s)
}
// Parse parses version string and returns a validated Version or error
func Parse(s string) (Version, error) {
if len(s) == 0 {
return Version{}, errors.New("Version string empty")
}
// Split into major.minor.(patch+pr+meta)
parts := strings.SplitN(s, ".", 3)
if len(parts) != 3 {
return Version{}, errors.New("No Major.Minor.Patch elements found")
}
// Major
if !containsOnly(parts[0], numbers) {
return Version{}, fmt.Errorf("Invalid character(s) found in major number %q", parts[0])
}
if hasLeadingZeroes(parts[0]) {
return Version{}, fmt.Errorf("Major number must not contain leading zeroes %q", parts[0])
}
major, err := strconv.ParseUint(parts[0], 10, 64)
if err != nil {
return Version{}, err
}
// Minor
if !containsOnly(parts[1], numbers) {
return Version{}, fmt.Errorf("Invalid character(s) found in minor number %q", parts[1])
}
if hasLeadingZeroes(parts[1]) {
return Version{}, fmt.Errorf("Minor number must not contain leading zeroes %q", parts[1])
}
minor, err := strconv.ParseUint(parts[1], 10, 64)
if err != nil {
return Version{}, err
}
v := Version{}
v.Major = major
v.Minor = minor
var build, prerelease []string
patchStr := parts[2]
if buildIndex := strings.IndexRune(patchStr, '+'); buildIndex != -1 {
build = strings.Split(patchStr[buildIndex+1:], ".")
patchStr = patchStr[:buildIndex]
}
if preIndex := strings.IndexRune(patchStr, '-'); preIndex != -1 {
prerelease = strings.Split(patchStr[preIndex+1:], ".")
patchStr = patchStr[:preIndex]
}
if !containsOnly(patchStr, numbers) {
return Version{}, fmt.Errorf("Invalid character(s) found in patch number %q", patchStr)
}
if hasLeadingZeroes(patchStr) {
return Version{}, fmt.Errorf("Patch number must not contain leading zeroes %q", patchStr)
}
patch, err := strconv.ParseUint(patchStr, 10, 64)
if err != nil {
return Version{}, err
}
v.Patch = patch
// Prerelease
for _, prstr := range prerelease {
parsedPR, err := NewPRVersion(prstr)
if err != nil {
return Version{}, err
}
v.Pre = append(v.Pre, parsedPR)
}
// Build meta data
for _, str := range build {
if len(str) == 0 {
return Version{}, errors.New("Build meta data is empty")
}
if !containsOnly(str, alphanum) {
return Version{}, fmt.Errorf("Invalid character(s) found in build meta data %q", str)
}
v.Build = append(v.Build, str)
}
return v, nil
}
// MustParse is like Parse but panics if the version cannot be parsed.
func MustParse(s string) Version {
v, err := Parse(s)
if err != nil {
panic(`semver: Parse(` + s + `): ` + err.Error())
}
return v
}
// PRVersion represents a PreRelease Version
type PRVersion struct {
VersionStr string
VersionNum uint64
IsNum bool
}
// NewPRVersion creates a new valid prerelease version
func NewPRVersion(s string) (PRVersion, error) {
if len(s) == 0 {
return PRVersion{}, errors.New("Prerelease is empty")
}
v := PRVersion{}
if containsOnly(s, numbers) {
if hasLeadingZeroes(s) {
return PRVersion{}, fmt.Errorf("Numeric PreRelease version must not contain leading zeroes %q", s)
}
num, err := strconv.ParseUint(s, 10, 64)
// Might never be hit, but just in case
if err != nil {
return PRVersion{}, err
}
v.VersionNum = num
v.IsNum = true
} else if containsOnly(s, alphanum) {
v.VersionStr = s
v.IsNum = false
} else {
return PRVersion{}, fmt.Errorf("Invalid character(s) found in prerelease %q", s)
}
return v, nil
}
// IsNumeric checks if prerelease-version is numeric
func (v PRVersion) IsNumeric() bool {
return v.IsNum
}
// Compare compares two PreRelease Versions v and o:
// -1 == v is less than o
// 0 == v is equal to o
// 1 == v is greater than o
func (v PRVersion) Compare(o PRVersion) int {
if v.IsNum && !o.IsNum {
return -1
} else if !v.IsNum && o.IsNum {
return 1
} else if v.IsNum && o.IsNum {
if v.VersionNum == o.VersionNum {
return 0
} else if v.VersionNum > o.VersionNum {
return 1
} else {
return -1
}
} else { // both are Alphas
if v.VersionStr == o.VersionStr {
return 0
} else if v.VersionStr > o.VersionStr {
return 1
} else {
return -1
}
}
}
// PreRelease version to string
func (v PRVersion) String() string {
if v.IsNum {
return strconv.FormatUint(v.VersionNum, 10)
}
return v.VersionStr
}
func containsOnly(s string, set string) bool {
return strings.IndexFunc(s, func(r rune) bool {
return !strings.ContainsRune(set, r)
}) == -1
}
func hasLeadingZeroes(s string) bool {
return len(s) > 1 && s[0] == '0'
}
// NewBuildVersion creates a new valid build version
func NewBuildVersion(s string) (string, error) {
if len(s) == 0 {
return "", errors.New("Buildversion is empty")
}
if !containsOnly(s, alphanum) {
return "", fmt.Errorf("Invalid character(s) found in build meta data %q", s)
}
return s, nil
}
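To show the difference between the strict `Parse` and the more forgiving `ParseTolerant` defined above, a short sketch (not part of the original file):
```go
package main

import (
	"fmt"

	"github.com/blang/semver"
)

func main() {
	// Parse requires a full Major.Minor.Patch version.
	if _, err := semver.Parse("v1.2"); err != nil {
		fmt.Println("Parse rejects it:", err)
	}

	// ParseTolerant trims spaces, strips a leading "v" and pads the patch number.
	v, err := semver.ParseTolerant("v1.2")
	fmt.Println(v, err) // 1.2.0 <nil>
}
```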

View File

@ -1,28 +0,0 @@
package semver
import (
"sort"
)
// Versions represents multiple versions.
type Versions []Version
// Len returns length of version collection
func (s Versions) Len() int {
return len(s)
}
// Swap swaps two versions inside the collection by its indices
func (s Versions) Swap(i, j int) {
s[i], s[j] = s[j], s[i]
}
// Less checks if version at index i is less than version at index j
func (s Versions) Less(i, j int) bool {
return s[i].LT(s[j])
}
// Sort sorts a slice of versions
func Sort(versions []Version) {
sort.Sort(Versions(versions))
}
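A minimal usage sketch for the sort helpers above:
```go
package main

import (
	"fmt"

	"github.com/blang/semver"
)

func main() {
	versions := []semver.Version{
		semver.MustParse("1.10.0"),
		semver.MustParse("1.2.0"),
		semver.MustParse("1.2.0-beta"),
	}
	// Sort orders ascending; prerelease versions sort before their releases.
	semver.Sort(versions)
	fmt.Println(versions) // [1.2.0-beta 1.2.0 1.10.0]
}
```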

View File

@ -1,30 +0,0 @@
package semver
import (
"database/sql/driver"
"fmt"
)
// Scan implements the database/sql.Scanner interface.
func (v *Version) Scan(src interface{}) (err error) {
var str string
switch src := src.(type) {
case string:
str = src
case []byte:
str = string(src)
default:
return fmt.Errorf("Version.Scan: cannot convert %T to string.", src)
}
// Parse the string and report any error through the named return value.
t, err := Parse(str)
if err == nil {
*v = t
}
return
}
// Value implements the database/sql/driver.Valuer interface.
func (v Version) Value() (driver.Value, error) {
return v.String(), nil
}
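A short sketch of how these hooks behave when called directly; in practice a `database/sql` driver calls them for you:
```go
package main

import (
	"fmt"

	"github.com/blang/semver"
)

func main() {
	// Value returns the canonical string form, suitable for storage.
	v := semver.MustParse("2.1.0")
	stored, _ := v.Value()
	fmt.Println(stored) // 2.1.0

	// Scan accepts a string or []byte, as a database driver would supply.
	var decoded semver.Version
	_ = decoded.Scan([]byte("3.0.0-rc.1"))
	fmt.Println(decoded.Equals(semver.MustParse("3.0.0-rc.1"))) // true
}
```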

View File

@ -1,3 +0,0 @@
.DS_Store
.idea
*.iml

View File

@ -1,3 +0,0 @@
sudo: false
language: go
go: 1.7

View File

@ -1,353 +0,0 @@
Mozilla Public License, version 2.0
1. Definitions
1.1. “Contributor”
means each individual or legal entity that creates, contributes to the
creation of, or owns Covered Software.
1.2. “Contributor Version”
means the combination of the Contributions of others (if any) used by a
Contributor and that particular Contributor's Contribution.
1.3. “Contribution”
means Covered Software of a particular Contributor.
1.4. “Covered Software”
means Source Code Form to which the initial Contributor has attached the
notice in Exhibit A, the Executable Form of such Source Code Form, and
Modifications of such Source Code Form, in each case including portions
thereof.
1.5. “Incompatible With Secondary Licenses”
means
a. that the initial Contributor has attached the notice described in
Exhibit B to the Covered Software; or
b. that the Covered Software was made available under the terms of version
1.1 or earlier of the License, but not also under the terms of a
Secondary License.
1.6. “Executable Form”
means any form of the work other than Source Code Form.
1.7. “Larger Work”
means a work that combines Covered Software with other material, in a separate
file or files, that is not Covered Software.
1.8. “License”
means this document.
1.9. “Licensable”
means having the right to grant, to the maximum extent possible, whether at the
time of the initial grant or subsequently, any and all of the rights conveyed by
this License.
1.10. “Modifications”
means any of the following:
a. any file in Source Code Form that results from an addition to, deletion
from, or modification of the contents of Covered Software; or
b. any new file in Source Code Form that contains any Covered Software.
1.11. “Patent Claims” of a Contributor
means any patent claim(s), including without limitation, method, process,
and apparatus claims, in any patent Licensable by such Contributor that
would be infringed, but for the grant of the License, by the making,
using, selling, offering for sale, having made, import, or transfer of
either its Contributions or its Contributor Version.
1.12. “Secondary License”
means either the GNU General Public License, Version 2.0, the GNU Lesser
General Public License, Version 2.1, the GNU Affero General Public
License, Version 3.0, or any later versions of those licenses.
1.13. “Source Code Form”
means the form of the work preferred for making modifications.
1.14. “You” (or “Your”)
means an individual or a legal entity exercising rights under this
License. For legal entities, “You” includes any entity that controls, is
controlled by, or is under common control with You. For purposes of this
definition, “control” means (a) the power, direct or indirect, to cause
the direction or management of such entity, whether by contract or
otherwise, or (b) ownership of more than fifty percent (50%) of the
outstanding shares or beneficial ownership of such entity.
2. License Grants and Conditions
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
a. under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or as
part of a Larger Work; and
b. under Patent Claims of such Contributor to make, use, sell, offer for
sale, have made, import, and otherwise transfer either its Contributions
or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution become
effective for each Contribution on the date the Contributor first distributes
such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under this
License. No additional rights or licenses will be implied from the distribution
or licensing of Covered Software under this License. Notwithstanding Section
2.1(b) above, no patent license is granted by a Contributor:
a. for any code that a Contributor has removed from Covered Software; or
b. for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
c. under Patent Claims infringed by Covered Software in the absence of its
Contributions.
This License does not grant any rights in the trademarks, service marks, or
logos of any Contributor (except as may be necessary to comply with the
notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this License
(see Section 10.2) or under the terms of a Secondary License (if permitted
under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its Contributions
are its original creation(s) or it has sufficient rights to grant the
rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under applicable
copyright doctrines of fair use, fair dealing, or other equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
Section 2.1.
3. Responsibilities
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under the
terms of this License. You must inform recipients that the Source Code Form
of the Covered Software is governed by the terms of this License, and how
they can obtain a copy of this License. You may not attempt to alter or
restrict the recipients' rights in the Source Code Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
a. such Covered Software must also be made available in Source Code Form,
as described in Section 3.1, and You must inform recipients of the
Executable Form how they can obtain a copy of such Source Code Form by
reasonable means in a timely manner, at a charge no more than the cost
of distribution to the recipient; and
b. You may distribute such Executable Form under the terms of this License,
or sublicense it under different terms, provided that the license for
the Executable Form does not attempt to limit or alter the recipients'
rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for the
Covered Software. If the Larger Work is a combination of Covered Software
with a work governed by one or more Secondary Licenses, and the Covered
Software is not Incompatible With Secondary Licenses, this License permits
You to additionally distribute such Covered Software under the terms of
such Secondary License(s), so that the recipient of the Larger Work may, at
their option, further distribute the Covered Software under the terms of
either this License or such Secondary License(s).
3.4. Notices
You may not remove or alter the substance of any license notices (including
copyright notices, patent notices, disclaimers of warranty, or limitations
of liability) contained within the Source Code Form of the Covered
Software, except that You may alter any license notices to the extent
required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on behalf
of any Contributor. You must make it absolutely clear that any such
warranty, support, indemnity, or liability obligation is offered by You
alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
If it is impossible for You to comply with any of the terms of this License
with respect to some or all of the Covered Software due to statute, judicial
order, or regulation then You must: (a) comply with the terms of this License
to the maximum extent possible; and (b) describe the limitations and the code
they affect. Such description must be placed in a text file included with all
distributions of the Covered Software under this License. Except to the
extent prohibited by statute or regulation, such description must be
sufficiently detailed for a recipient of ordinary skill to be able to
understand it.
5. Termination
5.1. The rights granted under this License will terminate automatically if You
fail to comply with any of its terms. However, if You become compliant,
then the rights granted under this License from a particular Contributor
are reinstated (a) provisionally, unless and until such Contributor
explicitly and finally terminates Your grants, and (b) on an ongoing basis,
if such Contributor fails to notify You of the non-compliance by some
reasonable means prior to 60 days after You have come back into compliance.
Moreover, Your grants from a particular Contributor are reinstated on an
ongoing basis if such Contributor notifies You of the non-compliance by
some reasonable means, this is the first time You have received notice of
non-compliance with this License from such Contributor, and You become
compliant prior to 30 days after Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions, counter-claims,
and cross-claims) alleging that a Contributor Version directly or
indirectly infringes any patent, then the rights granted to You by any and
all Contributors for the Covered Software under Section 2.1 of this License
shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
license agreements (excluding distributors and resellers) which have been
validly granted by You or Your distributors under this License prior to
termination shall survive termination.
6. Disclaimer of Warranty
Covered Software is provided under this License on an “as is” basis, without
warranty of any kind, either expressed, implied, or statutory, including,
without limitation, warranties that the Covered Software is free of defects,
merchantable, fit for a particular purpose or non-infringing. The entire
risk as to the quality and performance of the Covered Software is with You.
Should any Covered Software prove defective in any respect, You (not any
Contributor) assume the cost of any necessary servicing, repair, or
correction. This disclaimer of warranty constitutes an essential part of this
License. No use of any Covered Software is authorized under this License
except under this disclaimer.
7. Limitation of Liability
Under no circumstances and under no legal theory, whether tort (including
negligence), contract, or otherwise, shall any Contributor, or anyone who
distributes Covered Software as permitted above, be liable to You for any
direct, indirect, special, incidental, or consequential damages of any
character including, without limitation, damages for lost profits, loss of
goodwill, work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses, even if such party shall have been
informed of the possibility of such damages. This limitation of liability
shall not apply to liability for death or personal injury resulting from such
party's negligence to the extent applicable law prohibits such limitation.
Some jurisdictions do not allow the exclusion or limitation of incidental or
consequential damages, so this exclusion and limitation may not apply to You.
8. Litigation
Any litigation relating to this License may be brought only in the courts of
a jurisdiction where the defendant maintains its principal place of business
and such litigation shall be governed by laws of that jurisdiction, without
reference to its conflict-of-law provisions. Nothing in this Section shall
prevent a party's ability to bring cross-claims or counter-claims.
9. Miscellaneous
This License represents the complete agreement concerning the subject matter
hereof. If any provision of this License is held to be unenforceable, such
provision shall be reformed only to the extent necessary to make it
enforceable. Any law or regulation which provides that the language of a
contract shall be construed against the drafter shall not be used to construe
this License against a Contributor.
10. Versions of the License
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version of
the License under which You originally received the Covered Software, or
under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a modified
version of this License if you rename the license and remove any
references to the name of the license steward (except to note that such
modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
This Source Code Form is subject to the
terms of the Mozilla Public License, v.
2.0. If a copy of the MPL was not
distributed with this file, You can
obtain one at
http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular file, then
You may include the notice in a location (such as a LICENSE file in a relevant
directory) where a recipient would be likely to look for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - “Incompatible With Secondary Licenses” Notice
This Source Code Form is “Incompatible
With Secondary Licenses”, as defined by
the Mozilla Public License, v. 2.0.

View File

@ -1,102 +0,0 @@
# HIL
[![GoDoc](https://godoc.org/github.com/hashicorp/hil?status.png)](https://godoc.org/github.com/hashicorp/hil) [![Build Status](https://travis-ci.org/hashicorp/hil.svg?branch=master)](https://travis-ci.org/hashicorp/hil)
HIL (HashiCorp Interpolation Language) is a lightweight embedded language used
primarily for configuration interpolation. The goal of HIL is to make a simple
language for interpolations in the various configurations of HashiCorp tools.
HIL is built to interpolate any string, but is in use by HashiCorp primarily
with [HCL](https://github.com/hashicorp/hcl). HCL is _not required_ in any
way for use with HIL.
HIL isn't meant to be a general purpose language. It was built for basic
configuration interpolations. Therefore, you can't currently write functions,
have conditionals, set intermediary variables, etc. within HIL itself. It is
possible some of these may be added later but the right use case must exist.
## Why?
Many of our tools have support for something similar to templates, but
within the configuration itself. The most prominent requirement was in
[Terraform](https://github.com/hashicorp/terraform) where we wanted the
configuration to be able to reference values from elsewhere in the
configuration. Example:
foo = "hi ${var.world}"
We originally used a full templating language for this, but found it
was too heavyweight. Additionally, many full languages required bindings
to C (and thus the usage of cgo), which we try to avoid to make cross-compilation
easier. We then moved to very basic regular-expression-based
string replacement, but found that the need for basic arithmetic and function
calls resulted in overly complex regular expressions.
Ultimately, we wrote our own mini-language within Terraform itself. As
we built other projects such as [Nomad](https://nomadproject.io) and
[Otto](https://ottoproject.io), the need for basic interpolations arose
again.
Thus HIL was born. It is extracted from Terraform, cleaned up, and
better tested for general purpose use.
## Syntax
For a complete grammar, please see the parser itself. A high-level overview
of the syntax and grammar is given here.
Code begins within `${` and `}`. Outside of this, text is treated
literally. For example, `foo` is a valid HIL program that is just the
string "foo", but `foo ${bar}` is an HIL program that is the string "foo "
concatenated with the value of `bar`. For the remainder of the syntax
docs, we'll assume you're within `${}`; a short parse-and-evaluate sketch
follows the list below.
* Identifiers are any text in the format of `[a-zA-Z0-9-.]`. Example
identifiers: `foo`, `var.foo`, `foo-bar`.
* Strings are double quoted and can contain any UTF-8 characters.
Example: `"Hello, World"`
* Numbers are assumed to be base 10. If you prefix a number with 0x,
it is treated as a hexadecimal. If it is prefixed with 0, it is
treated as an octal. Numbers can be in scientific notation: "1e10".
* Unary `-` can be used for negative numbers. Example: `-10` or `-0.2`
* Boolean values: `true`, `false`
* The following arithmetic operations are allowed: +, -, *, /, %.
* Function calls are in the form of `name(arg1, arg2, ...)`. Example:
`add(1, 5)`. Arguments can be any valid HIL expression, example:
`add(1, var.foo)` or even nested function calls:
`add(1, get("some value"))`.
* Within strings, further interpolations can be opened with `${}`.
Example: `"Hello ${nested}"`. A full example including the
original `${}` (remember this list assumes we're inside of one
already) could be: `foo ${func("hello ${var.foo}")}`.
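A minimal parse-and-evaluate sketch, assuming the `hil.Parse`/`hil.Eval` entry points and the `ast.BasicScope` type from this repository (this is my reading of the public API, not text taken from the original README):
```go
package main

import (
	"fmt"

	"github.com/hashicorp/hil"
	"github.com/hashicorp/hil/ast"
)

func main() {
	tree, err := hil.Parse("hi ${var.world}")
	if err != nil {
		panic(err)
	}

	config := &hil.EvalConfig{
		GlobalScope: &ast.BasicScope{
			VarMap: map[string]ast.Variable{
				"var.world": {Type: ast.TypeString, Value: "world"},
			},
		},
	}

	result, err := hil.Eval(tree, config)
	if err != nil {
		panic(err)
	}
	fmt.Println(result.Value) // hi world
}
```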
## Language Changes
We've used this mini-language in Terraform for years. For backwards compatibility
reasons, we're unlikely to make an incompatible change to the language but
we're not currently making that promise, either.
The internal API of this project may very well change as we evolve it
to work with more of our projects. We recommend using some sort of dependency
management solution with this package.
## Future Changes
The following changes are already planned to be made at some point:
* Richer types: lists, maps, etc.
* Convert to a more standard Go parser structure similar to HCL. This
will improve our error messaging as well as allow us to have automatic
formatting.
* Allow interpolations to result in more types than just a string. While
within the interpolation basic types are honored, the result is always
a string.

View File

@ -1,18 +0,0 @@
version: "build-{branch}-{build}"
image: Visual Studio 2015
clone_folder: c:\gopath\src\github.com\hashicorp\hil
environment:
GOPATH: c:\gopath
init:
- git config --global core.autocrlf true
install:
- cmd: >-
echo %Path%
go version
go env
go get -d -v -t ./...
build_script:
- cmd: go test -v ./...

View File

@ -1,43 +0,0 @@
package ast
import (
"bytes"
"fmt"
)
// Arithmetic represents a node where the result is arithmetic of
// two or more operands in the order given.
type Arithmetic struct {
Op ArithmeticOp
Exprs []Node
Posx Pos
}
func (n *Arithmetic) Accept(v Visitor) Node {
for i, expr := range n.Exprs {
n.Exprs[i] = expr.Accept(v)
}
return v(n)
}
func (n *Arithmetic) Pos() Pos {
return n.Posx
}
func (n *Arithmetic) GoString() string {
return fmt.Sprintf("*%#v", *n)
}
func (n *Arithmetic) String() string {
var b bytes.Buffer
for _, expr := range n.Exprs {
b.WriteString(fmt.Sprintf("%s", expr))
}
return b.String()
}
func (n *Arithmetic) Type(Scope) (Type, error) {
return TypeInt, nil
}

View File

@ -1,24 +0,0 @@
package ast
// ArithmeticOp is the operation to use for the math.
type ArithmeticOp int
const (
ArithmeticOpInvalid ArithmeticOp = 0
ArithmeticOpAdd ArithmeticOp = iota
ArithmeticOpSub
ArithmeticOpMul
ArithmeticOpDiv
ArithmeticOpMod
ArithmeticOpLogicalAnd
ArithmeticOpLogicalOr
ArithmeticOpEqual
ArithmeticOpNotEqual
ArithmeticOpLessThan
ArithmeticOpLessThanOrEqual
ArithmeticOpGreaterThan
ArithmeticOpGreaterThanOrEqual
)

View File

@ -1,99 +0,0 @@
package ast
import (
"fmt"
)
// Node is the interface that all AST nodes must implement.
type Node interface {
// Accept is called to dispatch to the visitors. It must return the
// resulting Node (which might be different in an AST transform).
Accept(Visitor) Node
// Pos returns the position of this node in some source.
Pos() Pos
// Type returns the type of this node for the given context.
Type(Scope) (Type, error)
}
// Pos is the starting position of an AST node
type Pos struct {
Column, Line int // Column/Line number, starting at 1
Filename string // Optional source filename, if known
}
func (p Pos) String() string {
if p.Filename == "" {
return fmt.Sprintf("%d:%d", p.Line, p.Column)
} else {
return fmt.Sprintf("%s:%d:%d", p.Filename, p.Line, p.Column)
}
}
// InitPos is an initial position value. This should be used as
// the starting position (presets the column and line to 1).
var InitPos = Pos{Column: 1, Line: 1}
// Visitors are just implementations of this function.
//
// The function must return the Node to replace this node with. "nil" is
// _not_ a valid return value. If there is no replacement, the original node
// should be returned. We build this replacement directly into the visitor
// pattern since AST transformations are a common and useful tool and
// building it into the AST itself makes it required for future Node
// implementations and very easy to do.
//
// Note that this isn't a true implementation of the visitor pattern, which
// generally requires proper type dispatch on the function. However,
// implementing this basic visitor pattern style is still very useful even
// if you have to type switch.
type Visitor func(Node) Node
//go:generate stringer -type=Type
// Type is the type of any value.
type Type uint32
const (
TypeInvalid Type = 0
TypeAny Type = 1 << iota
TypeBool
TypeString
TypeInt
TypeFloat
TypeList
TypeMap
// This is a special type used by Terraform to mark "unknown" values.
// It is impossible for this type to be introduced into your HIL programs
// unless you explicitly set a variable to this value. In that case,
// any operation including the variable will return "TypeUnknown" as the
// type.
TypeUnknown
)
func (t Type) Printable() string {
switch t {
case TypeInvalid:
return "invalid type"
case TypeAny:
return "any type"
case TypeBool:
return "type bool"
case TypeString:
return "type string"
case TypeInt:
return "type int"
case TypeFloat:
return "type float"
case TypeList:
return "type list"
case TypeMap:
return "type map"
case TypeUnknown:
return "type unknown"
default:
return "unknown type"
}
}
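As a sketch of the Visitor contract described above: a visitor that replaces every `VariableAccess` node with a string literal (using `hil.Parse` from the root package to build a tree is an assumption made for brevity):
```go
package main

import (
	"fmt"

	"github.com/hashicorp/hil"
	"github.com/hashicorp/hil/ast"
)

func main() {
	tree, err := hil.Parse("hello ${name}")
	if err != nil {
		panic(err)
	}

	// A Visitor must always return a node; returning the input unchanged
	// means "no replacement". Here every VariableAccess becomes a literal.
	rewrite := func(n ast.Node) ast.Node {
		if va, ok := n.(*ast.VariableAccess); ok {
			return &ast.LiteralNode{
				Value: "<" + va.Name + ">",
				Typex: ast.TypeString,
				Posx:  va.Pos(),
			}
		}
		return n
	}

	tree = tree.Accept(rewrite)
	fmt.Println(tree) // the ${name} access is now a plain string literal node
}
```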

View File

@ -1,47 +0,0 @@
package ast
import (
"fmt"
"strings"
)
// Call represents a function call.
type Call struct {
Func string
Args []Node
Posx Pos
}
func (n *Call) Accept(v Visitor) Node {
for i, a := range n.Args {
n.Args[i] = a.Accept(v)
}
return v(n)
}
func (n *Call) Pos() Pos {
return n.Posx
}
func (n *Call) String() string {
args := make([]string, len(n.Args))
for i, arg := range n.Args {
args[i] = fmt.Sprintf("%s", arg)
}
return fmt.Sprintf("Call(%s, %s)", n.Func, strings.Join(args, ", "))
}
func (n *Call) Type(s Scope) (Type, error) {
f, ok := s.LookupFunc(n.Func)
if !ok {
return TypeInvalid, fmt.Errorf("unknown function: %s", n.Func)
}
return f.ReturnType, nil
}
func (n *Call) GoString() string {
return fmt.Sprintf("*%#v", *n)
}

View File

@ -1,36 +0,0 @@
package ast
import (
"fmt"
)
type Conditional struct {
CondExpr Node
TrueExpr Node
FalseExpr Node
Posx Pos
}
// Accept passes the given visitor to the child nodes in this order:
// CondExpr, TrueExpr, FalseExpr. It then finally passes itself to the visitor.
func (n *Conditional) Accept(v Visitor) Node {
n.CondExpr = n.CondExpr.Accept(v)
n.TrueExpr = n.TrueExpr.Accept(v)
n.FalseExpr = n.FalseExpr.Accept(v)
return v(n)
}
func (n *Conditional) Pos() Pos {
return n.Posx
}
func (n *Conditional) Type(Scope) (Type, error) {
// This is not actually a useful value; the type checker ignores
// this function when analyzing conditionals, just as with Arithmetic.
return TypeInt, nil
}
func (n *Conditional) GoString() string {
return fmt.Sprintf("*%#v", *n)
}

View File

@ -1,76 +0,0 @@
package ast
import (
"fmt"
"strings"
)
// Index represents an indexing operation into another data structure
type Index struct {
Target Node
Key Node
Posx Pos
}
func (n *Index) Accept(v Visitor) Node {
n.Target = n.Target.Accept(v)
n.Key = n.Key.Accept(v)
return v(n)
}
func (n *Index) Pos() Pos {
return n.Posx
}
func (n *Index) String() string {
return fmt.Sprintf("Index(%s, %s)", n.Target, n.Key)
}
func (n *Index) Type(s Scope) (Type, error) {
variableAccess, ok := n.Target.(*VariableAccess)
if !ok {
return TypeInvalid, fmt.Errorf("target is not a variable")
}
variable, ok := s.LookupVar(variableAccess.Name)
if !ok {
return TypeInvalid, fmt.Errorf("unknown variable accessed: %s", variableAccess.Name)
}
switch variable.Type {
case TypeList:
return n.typeList(variable, variableAccess.Name)
case TypeMap:
return n.typeMap(variable, variableAccess.Name)
default:
return TypeInvalid, fmt.Errorf("invalid index operation into non-indexable type: %s", variable.Type)
}
}
func (n *Index) typeList(variable Variable, variableName string) (Type, error) {
// We assume type checking has already determined that this is a list
list := variable.Value.([]Variable)
return VariableListElementTypesAreHomogenous(variableName, list)
}
func (n *Index) typeMap(variable Variable, variableName string) (Type, error) {
// We assume type checking has already determined that this is a map
vmap := variable.Value.(map[string]Variable)
return VariableMapValueTypesAreHomogenous(variableName, vmap)
}
func reportTypes(typesFound map[Type]struct{}) string {
stringTypes := make([]string, len(typesFound))
i := 0
for k := range typesFound {
stringTypes[i] = k.String()
i++
}
return strings.Join(stringTypes, ", ")
}
func (n *Index) GoString() string {
return fmt.Sprintf("*%#v", *n)
}

View File

@ -1,88 +0,0 @@
package ast
import (
"fmt"
"reflect"
)
// LiteralNode represents a single literal value, such as "foo" or
// 42 or 3.14159. Based on the Type, the Value can be safely cast.
type LiteralNode struct {
Value interface{}
Typex Type
Posx Pos
}
// NewLiteralNode returns a new literal node representing the given
// literal Go value, which must correspond to one of the primitive types
// supported by HIL. Lists and maps cannot currently be constructed via
// this function.
//
// If an inappropriately-typed value is provided, this function will
// return an error. The main intended use of this function is to produce
// "synthetic" literals from constants in code, where the value type is
// well known at compile time. To easily store these in global variables,
// see also MustNewLiteralNode.
func NewLiteralNode(value interface{}, pos Pos) (*LiteralNode, error) {
goType := reflect.TypeOf(value)
var hilType Type
switch goType.Kind() {
case reflect.Bool:
hilType = TypeBool
case reflect.Int:
hilType = TypeInt
case reflect.Float64:
hilType = TypeFloat
case reflect.String:
hilType = TypeString
default:
return nil, fmt.Errorf("unsupported literal node type: %T", value)
}
return &LiteralNode{
Value: value,
Typex: hilType,
Posx: pos,
}, nil
}
// MustNewLiteralNode wraps NewLiteralNode and panics if an error is
// returned, thus allowing valid literal nodes to be easily assigned to
// global variables.
func MustNewLiteralNode(value interface{}, pos Pos) *LiteralNode {
node, err := NewLiteralNode(value, pos)
if err != nil {
panic(err)
}
return node
}
func (n *LiteralNode) Accept(v Visitor) Node {
return v(n)
}
func (n *LiteralNode) Pos() Pos {
return n.Posx
}
func (n *LiteralNode) GoString() string {
return fmt.Sprintf("*%#v", *n)
}
func (n *LiteralNode) String() string {
return fmt.Sprintf("Literal(%s, %v)", n.Typex, n.Value)
}
func (n *LiteralNode) Type(Scope) (Type, error) {
return n.Typex, nil
}
// IsUnknown returns true either if the node's value is itself unknown
or if it is a collection containing any unknown elements, deeply.
func (n *LiteralNode) IsUnknown() bool {
return IsUnknown(Variable{
Type: n.Typex,
Value: n.Value,
})
}

View File

@ -1,78 +0,0 @@
package ast
import (
"bytes"
"fmt"
)
// Output represents the root node of all interpolation evaluations. If the
// output only has one expression which is either a TypeList or TypeMap, the
// Output can be type-asserted to []interface{} or map[string]interface{}
// respectively. Otherwise the Output evaluates as a string, and concatenates
// the evaluation of each expression.
type Output struct {
Exprs []Node
Posx Pos
}
func (n *Output) Accept(v Visitor) Node {
for i, expr := range n.Exprs {
n.Exprs[i] = expr.Accept(v)
}
return v(n)
}
func (n *Output) Pos() Pos {
return n.Posx
}
func (n *Output) GoString() string {
return fmt.Sprintf("*%#v", *n)
}
func (n *Output) String() string {
var b bytes.Buffer
for _, expr := range n.Exprs {
b.WriteString(fmt.Sprintf("%s", expr))
}
return b.String()
}
func (n *Output) Type(s Scope) (Type, error) {
// Special case no expressions for backward compatibility
if len(n.Exprs) == 0 {
return TypeString, nil
}
// Special case a single expression of types list or map
if len(n.Exprs) == 1 {
exprType, err := n.Exprs[0].Type(s)
if err != nil {
return TypeInvalid, err
}
switch exprType {
case TypeList:
return TypeList, nil
case TypeMap:
return TypeMap, nil
}
}
// Otherwise ensure all our expressions are strings
for index, expr := range n.Exprs {
exprType, err := expr.Type(s)
if err != nil {
return TypeInvalid, err
}
// We only look for things we know we can't coerce with an implicit conversion func
if exprType == TypeList || exprType == TypeMap {
return TypeInvalid, fmt.Errorf(
"multi-expression HIL outputs may only have string inputs: %d is type %s",
index, exprType)
}
}
return TypeString, nil
}

View File

@ -1,90 +0,0 @@
package ast
import (
"fmt"
"reflect"
)
// Scope is the interface used to look up variables and functions while
// evaluating. How these functions/variables are defined are up to the caller.
type Scope interface {
LookupFunc(string) (Function, bool)
LookupVar(string) (Variable, bool)
}
// Variable is a variable value for execution given as input to the engine.
// It records the value of a variable along with its type.
type Variable struct {
Value interface{}
Type Type
}
// NewVariable creates a new Variable for the given value. This will
// attempt to infer the correct type. If it can't, an error will be returned.
func NewVariable(v interface{}) (result Variable, err error) {
switch v := reflect.ValueOf(v); v.Kind() {
case reflect.String:
result.Type = TypeString
default:
err = fmt.Errorf("Unknown type: %s", v.Kind())
}
result.Value = v
return
}
// String implements Stringer on Variable, displaying the type and value
// of the Variable.
func (v Variable) String() string {
return fmt.Sprintf("{Variable (%s): %+v}", v.Type, v.Value)
}
// Function defines a function that can be executed by the engine.
// The type checker will validate that the proper types will be called
// to the callback.
type Function struct {
// ArgTypes is the list of types in argument order. These are the
// required arguments.
//
// ReturnType is the type of the returned value. The Callback MUST
// return this type.
ArgTypes []Type
ReturnType Type
// Variadic, if true, says that this function is variadic, meaning
// it takes a variable number of arguments. In this case, the
// VariadicType must be set.
Variadic bool
VariadicType Type
// Callback is the function called for a function. The argument
// types are guaranteed to match the spec above by the type checker.
// The length of the args is strictly == len(ArgTypes) unless Variadic
// is true, in which case it's >= len(ArgTypes).
Callback func([]interface{}) (interface{}, error)
}
// BasicScope is a simple scope that looks up variables and functions
// using a map.
type BasicScope struct {
FuncMap map[string]Function
VarMap map[string]Variable
}
func (s *BasicScope) LookupFunc(n string) (Function, bool) {
if s == nil {
return Function{}, false
}
v, ok := s.FuncMap[n]
return v, ok
}
func (s *BasicScope) LookupVar(n string) (Variable, bool) {
if s == nil {
return Variable{}, false
}
v, ok := s.VarMap[n]
return v, ok
}
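
A small usage sketch for BasicScope, using made-up variable names; it also shows the deliberate nil-receiver behaviour of the lookup methods:

package main

import (
	"fmt"

	"github.com/hashicorp/hil/ast"
)

func main() {
	scope := &ast.BasicScope{
		VarMap: map[string]ast.Variable{
			"var.region": {Type: ast.TypeString, Value: "us-east-1"},
		},
	}

	if v, ok := scope.LookupVar("var.region"); ok {
		fmt.Println(v) // {Variable (TypeString): us-east-1}
	}

	// Lookups on a nil *BasicScope are safe and simply report "not found".
	var empty *ast.BasicScope
	_, ok := empty.LookupVar("anything")
	fmt.Println(ok) // false
}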

View File

@ -1,25 +0,0 @@
package ast
// Stack is a stack of Node.
type Stack struct {
stack []Node
}
func (s *Stack) Len() int {
return len(s.stack)
}
func (s *Stack) Push(n Node) {
s.stack = append(s.stack, n)
}
func (s *Stack) Pop() Node {
x := s.stack[len(s.stack)-1]
s.stack[len(s.stack)-1] = nil
s.stack = s.stack[:len(s.stack)-1]
return x
}
func (s *Stack) Reset() {
s.stack = nil
}

View File

@ -1,54 +0,0 @@
// Code generated by "stringer -type=Type"; DO NOT EDIT
package ast
import "fmt"
const (
_Type_name_0 = "TypeInvalid"
_Type_name_1 = "TypeAny"
_Type_name_2 = "TypeBool"
_Type_name_3 = "TypeString"
_Type_name_4 = "TypeInt"
_Type_name_5 = "TypeFloat"
_Type_name_6 = "TypeList"
_Type_name_7 = "TypeMap"
_Type_name_8 = "TypeUnknown"
)
var (
_Type_index_0 = [...]uint8{0, 11}
_Type_index_1 = [...]uint8{0, 7}
_Type_index_2 = [...]uint8{0, 8}
_Type_index_3 = [...]uint8{0, 10}
_Type_index_4 = [...]uint8{0, 7}
_Type_index_5 = [...]uint8{0, 9}
_Type_index_6 = [...]uint8{0, 8}
_Type_index_7 = [...]uint8{0, 7}
_Type_index_8 = [...]uint8{0, 11}
)
func (i Type) String() string {
switch {
case i == 0:
return _Type_name_0
case i == 2:
return _Type_name_1
case i == 4:
return _Type_name_2
case i == 8:
return _Type_name_3
case i == 16:
return _Type_name_4
case i == 32:
return _Type_name_5
case i == 64:
return _Type_name_6
case i == 128:
return _Type_name_7
case i == 256:
return _Type_name_8
default:
return fmt.Sprintf("Type(%d)", i)
}
}

View File

@ -1,30 +0,0 @@
package ast
// IsUnknown reports whether a variable is unknown or contains any value
// that is unknown. This will recurse into lists and maps and so on.
func IsUnknown(v Variable) bool {
// If it is unknown itself, return true
if v.Type == TypeUnknown {
return true
}
// If it is a container type, check the values
switch v.Type {
case TypeList:
for _, el := range v.Value.([]Variable) {
if IsUnknown(el) {
return true
}
}
case TypeMap:
for _, el := range v.Value.(map[string]Variable) {
if IsUnknown(el) {
return true
}
}
default:
}
// Not a container type, or it survived the above checks without finding an unknown
return false
}
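
A quick illustration of the deep recursion described above, with hypothetical values and the removed ast package assumed importable:

package main

import (
	"fmt"

	"github.com/hashicorp/hil/ast"
)

func main() {
	list := ast.Variable{
		Type: ast.TypeList,
		Value: []ast.Variable{
			{Type: ast.TypeString, Value: "known"},
			{Type: ast.TypeUnknown},
		},
	}
	// One unknown element is enough to make the whole collection unknown.
	fmt.Println(ast.IsUnknown(list)) // true
}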

View File

@ -1,36 +0,0 @@
package ast
import (
"fmt"
)
// VariableAccess represents a variable access.
type VariableAccess struct {
Name string
Posx Pos
}
func (n *VariableAccess) Accept(v Visitor) Node {
return v(n)
}
func (n *VariableAccess) Pos() Pos {
return n.Posx
}
func (n *VariableAccess) GoString() string {
return fmt.Sprintf("*%#v", *n)
}
func (n *VariableAccess) String() string {
return fmt.Sprintf("Variable(%s)", n.Name)
}
func (n *VariableAccess) Type(s Scope) (Type, error) {
v, ok := s.LookupVar(n.Name)
if !ok {
return TypeInvalid, fmt.Errorf("unknown variable: %s", n.Name)
}
return v.Type, nil
}

View File

@ -1,63 +0,0 @@
package ast
import "fmt"
func VariableListElementTypesAreHomogenous(variableName string, list []Variable) (Type, error) {
if len(list) == 0 {
return TypeInvalid, fmt.Errorf("list %q does not have any elements so cannot determine type.", variableName)
}
elemType := TypeUnknown
for _, v := range list {
if v.Type == TypeUnknown {
continue
}
if elemType == TypeUnknown {
elemType = v.Type
continue
}
if v.Type != elemType {
return TypeInvalid, fmt.Errorf(
"list %q does not have homogenous types. found %s and then %s",
variableName,
elemType, v.Type,
)
}
elemType = v.Type
}
return elemType, nil
}
func VariableMapValueTypesAreHomogenous(variableName string, vmap map[string]Variable) (Type, error) {
if len(vmap) == 0 {
return TypeInvalid, fmt.Errorf("map %q does not have any elements so cannot determine type.", variableName)
}
elemType := TypeUnknown
for _, v := range vmap {
if v.Type == TypeUnknown {
continue
}
if elemType == TypeUnknown {
elemType = v.Type
continue
}
if v.Type != elemType {
return TypeInvalid, fmt.Errorf(
"map %q does not have homogenous types. found %s and then %s",
variableName,
elemType, v.Type,
)
}
elemType = v.Type
}
return elemType, nil
}
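
A sketch of how these homogeneity helpers behave, including the rule that unknown elements are skipped (hypothetical data):

package main

import (
	"fmt"

	"github.com/hashicorp/hil/ast"
)

func main() {
	mixed := []ast.Variable{
		{Type: ast.TypeString, Value: "a"},
		{Type: ast.TypeInt, Value: 1},
	}
	_, err := ast.VariableListElementTypesAreHomogenous("example", mixed)
	fmt.Println(err) // list "example" does not have homogenous types. found TypeString and then TypeInt

	// Unknown elements are ignored, so an all-unknown list reports TypeUnknown.
	unknowns := []ast.Variable{{Type: ast.TypeUnknown}, {Type: ast.TypeUnknown}}
	t, _ := ast.VariableListElementTypesAreHomogenous("example", unknowns)
	fmt.Println(t) // TypeUnknown
}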

View File

@ -1,331 +0,0 @@
package hil
import (
"errors"
"strconv"
"github.com/hashicorp/hil/ast"
)
// NOTE: All builtins are tested in engine_test.go
func registerBuiltins(scope *ast.BasicScope) *ast.BasicScope {
if scope == nil {
scope = new(ast.BasicScope)
}
if scope.FuncMap == nil {
scope.FuncMap = make(map[string]ast.Function)
}
// Implicit conversions
scope.FuncMap["__builtin_BoolToString"] = builtinBoolToString()
scope.FuncMap["__builtin_FloatToInt"] = builtinFloatToInt()
scope.FuncMap["__builtin_FloatToString"] = builtinFloatToString()
scope.FuncMap["__builtin_IntToFloat"] = builtinIntToFloat()
scope.FuncMap["__builtin_IntToString"] = builtinIntToString()
scope.FuncMap["__builtin_StringToInt"] = builtinStringToInt()
scope.FuncMap["__builtin_StringToFloat"] = builtinStringToFloat()
scope.FuncMap["__builtin_StringToBool"] = builtinStringToBool()
// Math operations
scope.FuncMap["__builtin_IntMath"] = builtinIntMath()
scope.FuncMap["__builtin_FloatMath"] = builtinFloatMath()
scope.FuncMap["__builtin_BoolCompare"] = builtinBoolCompare()
scope.FuncMap["__builtin_FloatCompare"] = builtinFloatCompare()
scope.FuncMap["__builtin_IntCompare"] = builtinIntCompare()
scope.FuncMap["__builtin_StringCompare"] = builtinStringCompare()
scope.FuncMap["__builtin_Logical"] = builtinLogical()
return scope
}
func builtinFloatMath() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeInt},
Variadic: true,
VariadicType: ast.TypeFloat,
ReturnType: ast.TypeFloat,
Callback: func(args []interface{}) (interface{}, error) {
op := args[0].(ast.ArithmeticOp)
result := args[1].(float64)
for _, raw := range args[2:] {
arg := raw.(float64)
switch op {
case ast.ArithmeticOpAdd:
result += arg
case ast.ArithmeticOpSub:
result -= arg
case ast.ArithmeticOpMul:
result *= arg
case ast.ArithmeticOpDiv:
result /= arg
}
}
return result, nil
},
}
}
func builtinIntMath() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeInt},
Variadic: true,
VariadicType: ast.TypeInt,
ReturnType: ast.TypeInt,
Callback: func(args []interface{}) (interface{}, error) {
op := args[0].(ast.ArithmeticOp)
result := args[1].(int)
for _, raw := range args[2:] {
arg := raw.(int)
switch op {
case ast.ArithmeticOpAdd:
result += arg
case ast.ArithmeticOpSub:
result -= arg
case ast.ArithmeticOpMul:
result *= arg
case ast.ArithmeticOpDiv:
if arg == 0 {
return nil, errors.New("divide by zero")
}
result /= arg
case ast.ArithmeticOpMod:
if arg == 0 {
return nil, errors.New("divide by zero")
}
result = result % arg
}
}
return result, nil
},
}
}
func builtinBoolCompare() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeInt, ast.TypeBool, ast.TypeBool},
Variadic: false,
ReturnType: ast.TypeBool,
Callback: func(args []interface{}) (interface{}, error) {
op := args[0].(ast.ArithmeticOp)
lhs := args[1].(bool)
rhs := args[2].(bool)
switch op {
case ast.ArithmeticOpEqual:
return lhs == rhs, nil
case ast.ArithmeticOpNotEqual:
return lhs != rhs, nil
default:
return nil, errors.New("invalid comparison operation")
}
},
}
}
func builtinFloatCompare() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeInt, ast.TypeFloat, ast.TypeFloat},
Variadic: false,
ReturnType: ast.TypeBool,
Callback: func(args []interface{}) (interface{}, error) {
op := args[0].(ast.ArithmeticOp)
lhs := args[1].(float64)
rhs := args[2].(float64)
switch op {
case ast.ArithmeticOpEqual:
return lhs == rhs, nil
case ast.ArithmeticOpNotEqual:
return lhs != rhs, nil
case ast.ArithmeticOpLessThan:
return lhs < rhs, nil
case ast.ArithmeticOpLessThanOrEqual:
return lhs <= rhs, nil
case ast.ArithmeticOpGreaterThan:
return lhs > rhs, nil
case ast.ArithmeticOpGreaterThanOrEqual:
return lhs >= rhs, nil
default:
return nil, errors.New("invalid comparison operation")
}
},
}
}
func builtinIntCompare() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeInt, ast.TypeInt, ast.TypeInt},
Variadic: false,
ReturnType: ast.TypeBool,
Callback: func(args []interface{}) (interface{}, error) {
op := args[0].(ast.ArithmeticOp)
lhs := args[1].(int)
rhs := args[2].(int)
switch op {
case ast.ArithmeticOpEqual:
return lhs == rhs, nil
case ast.ArithmeticOpNotEqual:
return lhs != rhs, nil
case ast.ArithmeticOpLessThan:
return lhs < rhs, nil
case ast.ArithmeticOpLessThanOrEqual:
return lhs <= rhs, nil
case ast.ArithmeticOpGreaterThan:
return lhs > rhs, nil
case ast.ArithmeticOpGreaterThanOrEqual:
return lhs >= rhs, nil
default:
return nil, errors.New("invalid comparison operation")
}
},
}
}
func builtinStringCompare() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeInt, ast.TypeString, ast.TypeString},
Variadic: false,
ReturnType: ast.TypeBool,
Callback: func(args []interface{}) (interface{}, error) {
op := args[0].(ast.ArithmeticOp)
lhs := args[1].(string)
rhs := args[2].(string)
switch op {
case ast.ArithmeticOpEqual:
return lhs == rhs, nil
case ast.ArithmeticOpNotEqual:
return lhs != rhs, nil
default:
return nil, errors.New("invalid comparison operation")
}
},
}
}
func builtinLogical() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeInt},
Variadic: true,
VariadicType: ast.TypeBool,
ReturnType: ast.TypeBool,
Callback: func(args []interface{}) (interface{}, error) {
op := args[0].(ast.ArithmeticOp)
result := args[1].(bool)
for _, raw := range args[2:] {
arg := raw.(bool)
switch op {
case ast.ArithmeticOpLogicalOr:
result = result || arg
case ast.ArithmeticOpLogicalAnd:
result = result && arg
default:
return nil, errors.New("invalid logical operator")
}
}
return result, nil
},
}
}
func builtinFloatToInt() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeFloat},
ReturnType: ast.TypeInt,
Callback: func(args []interface{}) (interface{}, error) {
return int(args[0].(float64)), nil
},
}
}
func builtinFloatToString() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeFloat},
ReturnType: ast.TypeString,
Callback: func(args []interface{}) (interface{}, error) {
return strconv.FormatFloat(
args[0].(float64), 'g', -1, 64), nil
},
}
}
func builtinIntToFloat() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeInt},
ReturnType: ast.TypeFloat,
Callback: func(args []interface{}) (interface{}, error) {
return float64(args[0].(int)), nil
},
}
}
func builtinIntToString() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeInt},
ReturnType: ast.TypeString,
Callback: func(args []interface{}) (interface{}, error) {
return strconv.FormatInt(int64(args[0].(int)), 10), nil
},
}
}
func builtinStringToInt() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeString},
ReturnType: ast.TypeInt,
Callback: func(args []interface{}) (interface{}, error) {
v, err := strconv.ParseInt(args[0].(string), 0, 0)
if err != nil {
return nil, err
}
return int(v), nil
},
}
}
func builtinStringToFloat() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeString},
ReturnType: ast.TypeFloat,
Callback: func(args []interface{}) (interface{}, error) {
v, err := strconv.ParseFloat(args[0].(string), 64)
if err != nil {
return nil, err
}
return v, nil
},
}
}
func builtinBoolToString() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeBool},
ReturnType: ast.TypeString,
Callback: func(args []interface{}) (interface{}, error) {
return strconv.FormatBool(args[0].(bool)), nil
},
}
}
func builtinStringToBool() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeString},
ReturnType: ast.TypeBool,
Callback: func(args []interface{}) (interface{}, error) {
v, err := strconv.ParseBool(args[0].(string))
if err != nil {
return nil, err
}
return v, nil
},
}
}
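
These builtins are registered by the evaluator (see eval.go below) and are mostly reached through its implicit-conversion table rather than called directly. A hedged end-to-end sketch of that behaviour, assuming the removed hil module is still importable:

package main

import (
	"fmt"

	"github.com/hashicorp/hil"
)

func main() {
	tree, err := hil.Parse("${1 + 1.5}")
	if err != nil {
		panic(err)
	}
	// Mixing int and float promotes the expression to float math via
	// __builtin_IntToFloat, and the result is stringified through
	// __builtin_FloatToString because interpolation output is a string.
	result, err := hil.Eval(tree, &hil.EvalConfig{})
	if err != nil {
		panic(err)
	}
	fmt.Println(result.Type, result.Value) // TypeString 2.5
}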

View File

@ -1,88 +0,0 @@
package hil
import (
"fmt"
"sync"
"github.com/hashicorp/hil/ast"
)
// IdentifierCheck is a SemanticCheck that checks that all identifiers
// resolve properly and that the right number of arguments are passed
// to functions.
type IdentifierCheck struct {
Scope ast.Scope
err error
lock sync.Mutex
}
func (c *IdentifierCheck) Visit(root ast.Node) error {
c.lock.Lock()
defer c.lock.Unlock()
defer c.reset()
root.Accept(c.visit)
return c.err
}
func (c *IdentifierCheck) visit(raw ast.Node) ast.Node {
if c.err != nil {
return raw
}
switch n := raw.(type) {
case *ast.Call:
c.visitCall(n)
case *ast.VariableAccess:
c.visitVariableAccess(n)
case *ast.Output:
// Ignore
case *ast.LiteralNode:
// Ignore
default:
// Ignore
}
// We never do replacement with this visitor
return raw
}
func (c *IdentifierCheck) visitCall(n *ast.Call) {
// Look up the function in the map
function, ok := c.Scope.LookupFunc(n.Func)
if !ok {
c.createErr(n, fmt.Sprintf("unknown function called: %s", n.Func))
return
}
// Break up the args into what is variadic and what is required
args := n.Args
if function.Variadic && len(args) > len(function.ArgTypes) {
args = n.Args[:len(function.ArgTypes)]
}
// Verify the number of arguments
if len(args) != len(function.ArgTypes) {
c.createErr(n, fmt.Sprintf(
"%s: expected %d arguments, got %d",
n.Func, len(function.ArgTypes), len(n.Args)))
return
}
}
func (c *IdentifierCheck) visitVariableAccess(n *ast.VariableAccess) {
// Look up the variable in the map
if _, ok := c.Scope.LookupVar(n.Name); !ok {
c.createErr(n, fmt.Sprintf(
"unknown variable accessed: %s", n.Name))
return
}
}
func (c *IdentifierCheck) createErr(n ast.Node, str string) {
c.err = fmt.Errorf("%s: %s", n.Pos(), str)
}
func (c *IdentifierCheck) reset() {
c.err = nil
}
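
A sketch of running the identifier check on its own; the upper function and var.missing variable are made-up, and hil.Eval normally runs this check for you (see eval.go below):

package main

import (
	"fmt"

	"github.com/hashicorp/hil"
	"github.com/hashicorp/hil/ast"
)

func main() {
	tree, err := hil.Parse("${upper(var.missing)}")
	if err != nil {
		panic(err)
	}
	scope := &ast.BasicScope{
		FuncMap: map[string]ast.Function{
			"upper": {
				ArgTypes:   []ast.Type{ast.TypeString},
				ReturnType: ast.TypeString,
				Callback:   func(args []interface{}) (interface{}, error) { return args[0], nil },
			},
		},
	}
	ic := &hil.IdentifierCheck{Scope: scope}
	// The function resolves but the variable does not, so Visit reports an
	// "unknown variable accessed: var.missing" error along with its position.
	fmt.Println(ic.Visit(tree))
}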

View File

@ -1,668 +0,0 @@
package hil
import (
"fmt"
"sync"
"github.com/hashicorp/hil/ast"
)
// TypeCheck implements ast.Visitor for type checking an AST tree.
// It requires some configuration to look up the type of nodes.
//
// It also optionally will not type error and will insert an implicit
// type conversions for specific types if specified by the Implicit
// field. Note that this is kind of organizationally weird to put into
// this structure but we'd rather do that than duplicate the type checking
// logic multiple times.
type TypeCheck struct {
Scope ast.Scope
// Implicit is a map of implicit type conversions that we can do,
// and that shouldn't error. The key of the first map is the from type,
// the key of the second map is the to type, and the final string
// value is the function to call (which must be registered in the Scope).
Implicit map[ast.Type]map[ast.Type]string
// Stack of types. This shouldn't be used directly except by implementations
// of TypeCheckNode.
Stack []ast.Type
err error
lock sync.Mutex
}
// TypeCheckNode is the interface that must be implemented by any
// ast.Node that wants to support type-checking. If the type checker
// encounters a node that doesn't implement this, it will error.
type TypeCheckNode interface {
TypeCheck(*TypeCheck) (ast.Node, error)
}
func (v *TypeCheck) Visit(root ast.Node) error {
v.lock.Lock()
defer v.lock.Unlock()
defer v.reset()
root.Accept(v.visit)
// If the resulting type is unknown, then just let the whole thing go.
if v.err == errExitUnknown {
v.err = nil
}
return v.err
}
func (v *TypeCheck) visit(raw ast.Node) ast.Node {
if v.err != nil {
return raw
}
var result ast.Node
var err error
switch n := raw.(type) {
case *ast.Arithmetic:
tc := &typeCheckArithmetic{n}
result, err = tc.TypeCheck(v)
case *ast.Call:
tc := &typeCheckCall{n}
result, err = tc.TypeCheck(v)
case *ast.Conditional:
tc := &typeCheckConditional{n}
result, err = tc.TypeCheck(v)
case *ast.Index:
tc := &typeCheckIndex{n}
result, err = tc.TypeCheck(v)
case *ast.Output:
tc := &typeCheckOutput{n}
result, err = tc.TypeCheck(v)
case *ast.LiteralNode:
tc := &typeCheckLiteral{n}
result, err = tc.TypeCheck(v)
case *ast.VariableAccess:
tc := &typeCheckVariableAccess{n}
result, err = tc.TypeCheck(v)
default:
tc, ok := raw.(TypeCheckNode)
if !ok {
err = fmt.Errorf("unknown node for type check: %#v", raw)
break
}
result, err = tc.TypeCheck(v)
}
if err != nil {
pos := raw.Pos()
v.err = fmt.Errorf("At column %d, line %d: %s",
pos.Column, pos.Line, err)
}
return result
}
type typeCheckArithmetic struct {
n *ast.Arithmetic
}
func (tc *typeCheckArithmetic) TypeCheck(v *TypeCheck) (ast.Node, error) {
// The arguments are on the stack in reverse order, so pop them off.
exprs := make([]ast.Type, len(tc.n.Exprs))
for i, _ := range tc.n.Exprs {
exprs[len(tc.n.Exprs)-1-i] = v.StackPop()
}
// If any operand is unknown then our result is automatically unknown
for _, ty := range exprs {
if ty == ast.TypeUnknown {
v.StackPush(ast.TypeUnknown)
return tc.n, nil
}
}
switch tc.n.Op {
case ast.ArithmeticOpLogicalAnd, ast.ArithmeticOpLogicalOr:
return tc.checkLogical(v, exprs)
case ast.ArithmeticOpEqual, ast.ArithmeticOpNotEqual,
ast.ArithmeticOpLessThan, ast.ArithmeticOpGreaterThan,
ast.ArithmeticOpGreaterThanOrEqual, ast.ArithmeticOpLessThanOrEqual:
return tc.checkComparison(v, exprs)
default:
return tc.checkNumeric(v, exprs)
}
}
func (tc *typeCheckArithmetic) checkNumeric(v *TypeCheck, exprs []ast.Type) (ast.Node, error) {
// Determine the resulting type we want. We do this by going over
// every expression until we find one with a type we recognize.
// We do this because the first expr might be a string ("var.foo")
// and we need to know what type to implicitly convert to.
mathFunc := "__builtin_IntMath"
mathType := ast.TypeInt
for _, v := range exprs {
// We assume int math but if we find ANY float, the entire
// expression turns into floating point math.
if v == ast.TypeFloat {
mathFunc = "__builtin_FloatMath"
mathType = v
break
}
}
// Verify the args
for i, arg := range exprs {
if arg != mathType {
cn := v.ImplicitConversion(exprs[i], mathType, tc.n.Exprs[i])
if cn != nil {
tc.n.Exprs[i] = cn
continue
}
return nil, fmt.Errorf(
"operand %d should be %s, got %s",
i+1, mathType, arg)
}
}
// Modulo doesn't work for floats
if mathType == ast.TypeFloat && tc.n.Op == ast.ArithmeticOpMod {
return nil, fmt.Errorf("modulo cannot be used with floats")
}
// Return type
v.StackPush(mathType)
// Replace our node with a call to the proper function. This isn't
// type checked but we already verified types.
args := make([]ast.Node, len(tc.n.Exprs)+1)
args[0] = &ast.LiteralNode{
Value: tc.n.Op,
Typex: ast.TypeInt,
Posx: tc.n.Pos(),
}
copy(args[1:], tc.n.Exprs)
return &ast.Call{
Func: mathFunc,
Args: args,
Posx: tc.n.Pos(),
}, nil
}
func (tc *typeCheckArithmetic) checkComparison(v *TypeCheck, exprs []ast.Type) (ast.Node, error) {
if len(exprs) != 2 {
// This should never happen, because the parser never produces
// nodes that violate this.
return nil, fmt.Errorf(
"comparison operators must have exactly two operands",
)
}
// The first operand always dictates the type for a comparison.
compareFunc := ""
compareType := exprs[0]
switch compareType {
case ast.TypeBool:
compareFunc = "__builtin_BoolCompare"
case ast.TypeFloat:
compareFunc = "__builtin_FloatCompare"
case ast.TypeInt:
compareFunc = "__builtin_IntCompare"
case ast.TypeString:
compareFunc = "__builtin_StringCompare"
default:
return nil, fmt.Errorf(
"comparison operators apply only to bool, float, int, and string",
)
}
// For non-equality comparisons, we will do implicit conversions to
// integer types if possible. In this case, we need to go through and
// determine the type of comparison we're doing to enable the implicit
// conversion.
if tc.n.Op != ast.ArithmeticOpEqual && tc.n.Op != ast.ArithmeticOpNotEqual {
compareFunc = "__builtin_IntCompare"
compareType = ast.TypeInt
for _, expr := range exprs {
if expr == ast.TypeFloat {
compareFunc = "__builtin_FloatCompare"
compareType = ast.TypeFloat
break
}
}
}
// Verify (and possibly, convert) the args
for i, arg := range exprs {
if arg != compareType {
cn := v.ImplicitConversion(exprs[i], compareType, tc.n.Exprs[i])
if cn != nil {
tc.n.Exprs[i] = cn
continue
}
return nil, fmt.Errorf(
"operand %d should be %s, got %s",
i+1, compareType, arg,
)
}
}
// Only ints and floats can have the <, >, <= and >= operators applied
switch tc.n.Op {
case ast.ArithmeticOpEqual, ast.ArithmeticOpNotEqual:
// anything goes
default:
switch compareType {
case ast.TypeFloat, ast.TypeInt:
// fine
default:
return nil, fmt.Errorf(
"<, >, <= and >= may apply only to int and float values",
)
}
}
// Comparison operators always return bool
v.StackPush(ast.TypeBool)
// Replace our node with a call to the proper function. This isn't
// type checked but we already verified types.
args := make([]ast.Node, len(tc.n.Exprs)+1)
args[0] = &ast.LiteralNode{
Value: tc.n.Op,
Typex: ast.TypeInt,
Posx: tc.n.Pos(),
}
copy(args[1:], tc.n.Exprs)
return &ast.Call{
Func: compareFunc,
Args: args,
Posx: tc.n.Pos(),
}, nil
}
func (tc *typeCheckArithmetic) checkLogical(v *TypeCheck, exprs []ast.Type) (ast.Node, error) {
for i, t := range exprs {
if t != ast.TypeBool {
cn := v.ImplicitConversion(t, ast.TypeBool, tc.n.Exprs[i])
if cn == nil {
return nil, fmt.Errorf(
"logical operators require boolean operands, not %s",
t,
)
}
tc.n.Exprs[i] = cn
}
}
// Return type is always boolean
v.StackPush(ast.TypeBool)
// Arithmetic nodes are replaced with a call to a built-in function
args := make([]ast.Node, len(tc.n.Exprs)+1)
args[0] = &ast.LiteralNode{
Value: tc.n.Op,
Typex: ast.TypeInt,
Posx: tc.n.Pos(),
}
copy(args[1:], tc.n.Exprs)
return &ast.Call{
Func: "__builtin_Logical",
Args: args,
Posx: tc.n.Pos(),
}, nil
}
type typeCheckCall struct {
n *ast.Call
}
func (tc *typeCheckCall) TypeCheck(v *TypeCheck) (ast.Node, error) {
// Look up the function in the map
function, ok := v.Scope.LookupFunc(tc.n.Func)
if !ok {
return nil, fmt.Errorf("unknown function called: %s", tc.n.Func)
}
// The arguments are on the stack in reverse order, so pop them off.
args := make([]ast.Type, len(tc.n.Args))
for i, _ := range tc.n.Args {
args[len(tc.n.Args)-1-i] = v.StackPop()
}
// Verify the args
for i, expected := range function.ArgTypes {
if expected == ast.TypeAny {
continue
}
if args[i] == ast.TypeUnknown {
v.StackPush(ast.TypeUnknown)
return tc.n, nil
}
if args[i] != expected {
cn := v.ImplicitConversion(args[i], expected, tc.n.Args[i])
if cn != nil {
tc.n.Args[i] = cn
continue
}
return nil, fmt.Errorf(
"%s: argument %d should be %s, got %s",
tc.n.Func, i+1, expected.Printable(), args[i].Printable())
}
}
// If we're variadic, then verify the types there
if function.Variadic && function.VariadicType != ast.TypeAny {
args = args[len(function.ArgTypes):]
for i, t := range args {
if t == ast.TypeUnknown {
v.StackPush(ast.TypeUnknown)
return tc.n, nil
}
if t != function.VariadicType {
realI := i + len(function.ArgTypes)
cn := v.ImplicitConversion(
t, function.VariadicType, tc.n.Args[realI])
if cn != nil {
tc.n.Args[realI] = cn
continue
}
return nil, fmt.Errorf(
"%s: argument %d should be %s, got %s",
tc.n.Func, realI,
function.VariadicType.Printable(), t.Printable())
}
}
}
// Return type
v.StackPush(function.ReturnType)
return tc.n, nil
}
type typeCheckConditional struct {
n *ast.Conditional
}
func (tc *typeCheckConditional) TypeCheck(v *TypeCheck) (ast.Node, error) {
// On the stack we have the types of the condition, true and false
// expressions, but they are in reverse order.
falseType := v.StackPop()
trueType := v.StackPop()
condType := v.StackPop()
if condType == ast.TypeUnknown {
v.StackPush(ast.TypeUnknown)
return tc.n, nil
}
if condType != ast.TypeBool {
cn := v.ImplicitConversion(condType, ast.TypeBool, tc.n.CondExpr)
if cn == nil {
return nil, fmt.Errorf(
"condition must be type bool, not %s", condType.Printable(),
)
}
tc.n.CondExpr = cn
}
// The types of the true and false expression must match
if trueType != falseType && trueType != ast.TypeUnknown && falseType != ast.TypeUnknown {
// Since passing around stringified versions of other types is
// common, we pragmatically allow the false expression to dictate
// the result type when the true expression is a string.
if trueType == ast.TypeString {
cn := v.ImplicitConversion(trueType, falseType, tc.n.TrueExpr)
if cn == nil {
return nil, fmt.Errorf(
"true and false expression types must match; have %s and %s",
trueType.Printable(), falseType.Printable(),
)
}
tc.n.TrueExpr = cn
trueType = falseType
} else {
cn := v.ImplicitConversion(falseType, trueType, tc.n.FalseExpr)
if cn == nil {
return nil, fmt.Errorf(
"true and false expression types must match; have %s and %s",
trueType.Printable(), falseType.Printable(),
)
}
tc.n.FalseExpr = cn
falseType = trueType
}
}
// Currently list and map types cannot be used, because we cannot
// generally assert that their element types are consistent.
// Such support might be added later, either by improving the type
// system or restricting usage to only variable and literal expressions,
// but for now this is simply prohibited because it doesn't seem to
// be a common enough case to be worth the complexity.
switch trueType {
case ast.TypeList:
return nil, fmt.Errorf(
"conditional operator cannot be used with list values",
)
case ast.TypeMap:
return nil, fmt.Errorf(
"conditional operator cannot be used with map values",
)
}
// Result type (guaranteed to also match falseType due to the above)
if trueType == ast.TypeUnknown {
// falseType may also be unknown, but that's okay because two
// unknowns means our result is unknown anyway.
v.StackPush(falseType)
} else {
v.StackPush(trueType)
}
return tc.n, nil
}
type typeCheckOutput struct {
n *ast.Output
}
func (tc *typeCheckOutput) TypeCheck(v *TypeCheck) (ast.Node, error) {
n := tc.n
types := make([]ast.Type, len(n.Exprs))
for i, _ := range n.Exprs {
types[len(n.Exprs)-1-i] = v.StackPop()
}
for _, ty := range types {
if ty == ast.TypeUnknown {
v.StackPush(ast.TypeUnknown)
return tc.n, nil
}
}
// If there is only one argument and it is a list or map, we evaluate to that type
if len(types) == 1 {
switch t := types[0]; t {
case ast.TypeList:
fallthrough
case ast.TypeMap:
v.StackPush(t)
return n, nil
}
}
// Otherwise, all concat args must be strings, so validate that
resultType := ast.TypeString
for i, t := range types {
if t == ast.TypeUnknown {
resultType = ast.TypeUnknown
continue
}
if t != ast.TypeString {
cn := v.ImplicitConversion(t, ast.TypeString, n.Exprs[i])
if cn != nil {
n.Exprs[i] = cn
continue
}
return nil, fmt.Errorf(
"output of an HIL expression must be a string, or a single list (argument %d is %s)", i+1, t)
}
}
// This always results in type string, unless there are unknowns
v.StackPush(resultType)
return n, nil
}
type typeCheckLiteral struct {
n *ast.LiteralNode
}
func (tc *typeCheckLiteral) TypeCheck(v *TypeCheck) (ast.Node, error) {
v.StackPush(tc.n.Typex)
return tc.n, nil
}
type typeCheckVariableAccess struct {
n *ast.VariableAccess
}
func (tc *typeCheckVariableAccess) TypeCheck(v *TypeCheck) (ast.Node, error) {
// Look up the variable in the map
variable, ok := v.Scope.LookupVar(tc.n.Name)
if !ok {
return nil, fmt.Errorf(
"unknown variable accessed: %s", tc.n.Name)
}
// Add the type to the stack
v.StackPush(variable.Type)
return tc.n, nil
}
type typeCheckIndex struct {
n *ast.Index
}
func (tc *typeCheckIndex) TypeCheck(v *TypeCheck) (ast.Node, error) {
keyType := v.StackPop()
targetType := v.StackPop()
if keyType == ast.TypeUnknown || targetType == ast.TypeUnknown {
v.StackPush(ast.TypeUnknown)
return tc.n, nil
}
// Ensure we have a VariableAccess as the target
varAccessNode, ok := tc.n.Target.(*ast.VariableAccess)
if !ok {
return nil, fmt.Errorf(
"target of an index must be a VariableAccess node, was %T", tc.n.Target)
}
// Get the variable
variable, ok := v.Scope.LookupVar(varAccessNode.Name)
if !ok {
return nil, fmt.Errorf(
"unknown variable accessed: %s", varAccessNode.Name)
}
switch targetType {
case ast.TypeList:
if keyType != ast.TypeInt {
tc.n.Key = v.ImplicitConversion(keyType, ast.TypeInt, tc.n.Key)
if tc.n.Key == nil {
return nil, fmt.Errorf(
"key of an index must be an int, was %s", keyType)
}
}
valType, err := ast.VariableListElementTypesAreHomogenous(
varAccessNode.Name, variable.Value.([]ast.Variable))
if err != nil {
return tc.n, err
}
v.StackPush(valType)
return tc.n, nil
case ast.TypeMap:
if keyType != ast.TypeString {
tc.n.Key = v.ImplicitConversion(keyType, ast.TypeString, tc.n.Key)
if tc.n.Key == nil {
return nil, fmt.Errorf(
"key of an index must be a string, was %s", keyType)
}
}
valType, err := ast.VariableMapValueTypesAreHomogenous(
varAccessNode.Name, variable.Value.(map[string]ast.Variable))
if err != nil {
return tc.n, err
}
v.StackPush(valType)
return tc.n, nil
default:
return nil, fmt.Errorf("invalid index operation into non-indexable type: %s", variable.Type)
}
}
func (v *TypeCheck) ImplicitConversion(
actual ast.Type, expected ast.Type, n ast.Node) ast.Node {
if v.Implicit == nil {
return nil
}
fromMap, ok := v.Implicit[actual]
if !ok {
return nil
}
toFunc, ok := fromMap[expected]
if !ok {
return nil
}
return &ast.Call{
Func: toFunc,
Args: []ast.Node{n},
Posx: n.Pos(),
}
}
func (v *TypeCheck) reset() {
v.Stack = nil
v.err = nil
}
func (v *TypeCheck) StackPush(t ast.Type) {
v.Stack = append(v.Stack, t)
}
func (v *TypeCheck) StackPop() ast.Type {
var x ast.Type
x, v.Stack = v.Stack[len(v.Stack)-1], v.Stack[:len(v.Stack)-1]
return x
}
func (v *TypeCheck) StackPeek() ast.Type {
if len(v.Stack) == 0 {
return ast.TypeInvalid
}
return v.Stack[len(v.Stack)-1]
}
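
A sketch of driving TypeCheck directly: without an Implicit table there is no way to coerce the string operands into numbers, so the check fails. (hil.Eval wires up the full implicit-conversion table, as eval.go below shows; the exact error text is approximate.)

package main

import (
	"fmt"

	"github.com/hashicorp/hil"
	"github.com/hashicorp/hil/ast"
)

func main() {
	tree, err := hil.Parse(`${"a" + "b"}`)
	if err != nil {
		panic(err)
	}
	tc := &hil.TypeCheck{Scope: &ast.BasicScope{}}
	// With Implicit left nil, ImplicitConversion returns nil and the
	// arithmetic check reports something like:
	//   At column 3, line 1: operand 1 should be TypeInt, got TypeString
	fmt.Println(tc.Visit(tree))
}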

View File

@ -1,174 +0,0 @@
package hil
import (
"fmt"
"reflect"
"github.com/hashicorp/hil/ast"
"github.com/mitchellh/mapstructure"
)
// UnknownValue is a sentinel value that can be used to denote
// that a value of a variable (or map element, list element, etc.)
// is unknown. This will always have the type ast.TypeUnknown.
const UnknownValue = "74D93920-ED26-11E3-AC10-0800200C9A66"
var hilMapstructureDecodeHookSlice []interface{}
var hilMapstructureDecodeHookStringSlice []string
var hilMapstructureDecodeHookMap map[string]interface{}
// hilMapstructureWeakDecode behaves in the same way as mapstructure.WeakDecode
// but has a DecodeHook which defeats the backward compatibility mode of mapstructure
// which WeakDecodes []interface{}{} into an empty map[string]interface{}. This
// allows us to use WeakDecode (desirable), but not fail on empty lists.
func hilMapstructureWeakDecode(m interface{}, rawVal interface{}) error {
config := &mapstructure.DecoderConfig{
DecodeHook: func(source reflect.Type, target reflect.Type, val interface{}) (interface{}, error) {
sliceType := reflect.TypeOf(hilMapstructureDecodeHookSlice)
stringSliceType := reflect.TypeOf(hilMapstructureDecodeHookStringSlice)
mapType := reflect.TypeOf(hilMapstructureDecodeHookMap)
if (source == sliceType || source == stringSliceType) && target == mapType {
return nil, fmt.Errorf("Cannot convert %s into a %s", source, target)
}
return val, nil
},
WeaklyTypedInput: true,
Result: rawVal,
}
decoder, err := mapstructure.NewDecoder(config)
if err != nil {
return err
}
return decoder.Decode(m)
}
func InterfaceToVariable(input interface{}) (ast.Variable, error) {
if iv, ok := input.(ast.Variable); ok {
return iv, nil
}
// This is just to maintain backward compatibility
// after https://github.com/mitchellh/mapstructure/pull/98
if v, ok := input.([]ast.Variable); ok {
return ast.Variable{
Type: ast.TypeList,
Value: v,
}, nil
}
if v, ok := input.(map[string]ast.Variable); ok {
return ast.Variable{
Type: ast.TypeMap,
Value: v,
}, nil
}
var stringVal string
if err := hilMapstructureWeakDecode(input, &stringVal); err == nil {
// Special case the unknown value to turn into "unknown"
if stringVal == UnknownValue {
return ast.Variable{Value: UnknownValue, Type: ast.TypeUnknown}, nil
}
// Otherwise return the string value
return ast.Variable{
Type: ast.TypeString,
Value: stringVal,
}, nil
}
var mapVal map[string]interface{}
if err := hilMapstructureWeakDecode(input, &mapVal); err == nil {
elements := make(map[string]ast.Variable)
for i, element := range mapVal {
varElement, err := InterfaceToVariable(element)
if err != nil {
return ast.Variable{}, err
}
elements[i] = varElement
}
return ast.Variable{
Type: ast.TypeMap,
Value: elements,
}, nil
}
var sliceVal []interface{}
if err := hilMapstructureWeakDecode(input, &sliceVal); err == nil {
elements := make([]ast.Variable, len(sliceVal))
for i, element := range sliceVal {
varElement, err := InterfaceToVariable(element)
if err != nil {
return ast.Variable{}, err
}
elements[i] = varElement
}
return ast.Variable{
Type: ast.TypeList,
Value: elements,
}, nil
}
return ast.Variable{}, fmt.Errorf("value for conversion must be a string, interface{} or map[string]interface: got %T", input)
}
func VariableToInterface(input ast.Variable) (interface{}, error) {
if input.Type == ast.TypeString {
if inputStr, ok := input.Value.(string); ok {
return inputStr, nil
} else {
return nil, fmt.Errorf("ast.Variable with type string has value which is not a string")
}
}
if input.Type == ast.TypeList {
inputList, ok := input.Value.([]ast.Variable)
if !ok {
return nil, fmt.Errorf("ast.Variable with type list has value which is not a []ast.Variable")
}
result := make([]interface{}, 0)
if len(inputList) == 0 {
return result, nil
}
for _, element := range inputList {
if convertedElement, err := VariableToInterface(element); err == nil {
result = append(result, convertedElement)
} else {
return nil, err
}
}
return result, nil
}
if input.Type == ast.TypeMap {
inputMap, ok := input.Value.(map[string]ast.Variable)
if !ok {
return nil, fmt.Errorf("ast.Variable with type map has value which is not a map[string]ast.Variable")
}
result := make(map[string]interface{}, 0)
if len(inputMap) == 0 {
return result, nil
}
for key, value := range inputMap {
if convertedValue, err := VariableToInterface(value); err == nil {
result[key] = convertedValue
} else {
return nil, err
}
}
return result, nil
}
return nil, fmt.Errorf("unknown input type: %s", input.Type)
}
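
A round-trip sketch for these conversion helpers, using hypothetical data and assuming the removed hil module is still importable:

package main

import (
	"fmt"

	"github.com/hashicorp/hil"
)

func main() {
	// Native Go values are weakly decoded into an ast.Variable tree...
	v, err := hil.InterfaceToVariable(map[string]interface{}{
		"names": []interface{}{"a", "b"},
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(v.Type) // TypeMap

	// ...and can be flattened back into plain interface{} values.
	native, err := hil.VariableToInterface(v)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%#v\n", native) // map[string]interface {}{"names":[]interface {}{"a", "b"}}
}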

View File

@ -1,472 +0,0 @@
package hil
import (
"bytes"
"errors"
"fmt"
"sync"
"github.com/hashicorp/hil/ast"
)
// EvalConfig is the configuration for evaluating.
type EvalConfig struct {
// GlobalScope is the global scope of execution for evaluation.
GlobalScope *ast.BasicScope
// SemanticChecks is a list of additional semantic checks that will be run
// on the tree prior to evaluating it. The type checker, identifier checker,
// etc. will be run after these automatically.
SemanticChecks []SemanticChecker
}
// SemanticChecker is the type that must be implemented to do a
// semantic check on an AST tree. This will be called with the root node.
type SemanticChecker func(ast.Node) error
// EvaluationResult is a struct returned from the hil.Eval function,
// representing the result of an interpolation. Results are returned in their
// "natural" Go structure rather than in terms of the HIL AST. For the types
// currently implemented, this means that the Value field can be interpreted as
// the following Go types:
// TypeInvalid: undefined
// TypeString: string
// TypeList: []interface{}
// TypeMap: map[string]interface{}
// TypeBool: bool
type EvaluationResult struct {
Type EvalType
Value interface{}
}
// InvalidResult is a structure representing the result of a HIL interpolation
// which has invalid syntax, missing variables, or some other type of error.
// The error is described out of band in the accompanying error return value.
var InvalidResult = EvaluationResult{Type: TypeInvalid, Value: nil}
// errExitUnknown is an internal error that when returned means the result
// is an unknown value. We use this for early exit.
var errExitUnknown = errors.New("unknown value")
func Eval(root ast.Node, config *EvalConfig) (EvaluationResult, error) {
output, outputType, err := internalEval(root, config)
if err != nil {
return InvalidResult, err
}
// If the result contains any nested unknowns then the result as a whole
// is unknown, so that callers only have to deal with "entirely known"
// or "entirely unknown" as outcomes.
if ast.IsUnknown(ast.Variable{Type: outputType, Value: output}) {
outputType = ast.TypeUnknown
output = UnknownValue
}
switch outputType {
case ast.TypeList:
val, err := VariableToInterface(ast.Variable{
Type: ast.TypeList,
Value: output,
})
return EvaluationResult{
Type: TypeList,
Value: val,
}, err
case ast.TypeMap:
val, err := VariableToInterface(ast.Variable{
Type: ast.TypeMap,
Value: output,
})
return EvaluationResult{
Type: TypeMap,
Value: val,
}, err
case ast.TypeString:
return EvaluationResult{
Type: TypeString,
Value: output,
}, nil
case ast.TypeBool:
return EvaluationResult{
Type: TypeBool,
Value: output,
}, nil
case ast.TypeUnknown:
return EvaluationResult{
Type: TypeUnknown,
Value: UnknownValue,
}, nil
default:
return InvalidResult, fmt.Errorf("unknown type %s as interpolation output", outputType)
}
}
// internalEval evaluates the given AST tree and returns its output value,
// the type of the output, and any error that occurred.
func internalEval(root ast.Node, config *EvalConfig) (interface{}, ast.Type, error) {
// Copy the scope so we can add our builtins
if config == nil {
config = new(EvalConfig)
}
scope := registerBuiltins(config.GlobalScope)
implicitMap := map[ast.Type]map[ast.Type]string{
ast.TypeFloat: {
ast.TypeInt: "__builtin_FloatToInt",
ast.TypeString: "__builtin_FloatToString",
},
ast.TypeInt: {
ast.TypeFloat: "__builtin_IntToFloat",
ast.TypeString: "__builtin_IntToString",
},
ast.TypeString: {
ast.TypeInt: "__builtin_StringToInt",
ast.TypeFloat: "__builtin_StringToFloat",
ast.TypeBool: "__builtin_StringToBool",
},
ast.TypeBool: {
ast.TypeString: "__builtin_BoolToString",
},
}
// Build our own semantic checks that we always run
tv := &TypeCheck{Scope: scope, Implicit: implicitMap}
ic := &IdentifierCheck{Scope: scope}
// Build up the semantic checks for execution
checks := make(
[]SemanticChecker,
len(config.SemanticChecks),
len(config.SemanticChecks)+2)
copy(checks, config.SemanticChecks)
checks = append(checks, ic.Visit)
checks = append(checks, tv.Visit)
// Run the semantic checks
for _, check := range checks {
if err := check(root); err != nil {
return nil, ast.TypeInvalid, err
}
}
// Execute
v := &evalVisitor{Scope: scope}
return v.Visit(root)
}
// EvalNode is the interface that must be implemented by any ast.Node
// to support evaluation. This will be called in visitor pattern order.
// The result of each call to Eval is automatically pushed onto the
// stack as a LiteralNode. Pop elements off the stack to get child
// values.
type EvalNode interface {
Eval(ast.Scope, *ast.Stack) (interface{}, ast.Type, error)
}
type evalVisitor struct {
Scope ast.Scope
Stack ast.Stack
err error
lock sync.Mutex
}
func (v *evalVisitor) Visit(root ast.Node) (interface{}, ast.Type, error) {
// Run the actual visitor pattern
root.Accept(v.visit)
// Get our result and clear out everything else
var result *ast.LiteralNode
if v.Stack.Len() > 0 {
result = v.Stack.Pop().(*ast.LiteralNode)
} else {
result = new(ast.LiteralNode)
}
resultErr := v.err
if resultErr == errExitUnknown {
// This means the return value is unknown and we used the error
// as an early exit mechanism. Reset since the value on the stack
// should be the unknown value.
resultErr = nil
}
// Clear everything else so we aren't just dangling
v.Stack.Reset()
v.err = nil
t, err := result.Type(v.Scope)
if err != nil {
return nil, ast.TypeInvalid, err
}
return result.Value, t, resultErr
}
func (v *evalVisitor) visit(raw ast.Node) ast.Node {
if v.err != nil {
return raw
}
en, err := evalNode(raw)
if err != nil {
v.err = err
return raw
}
out, outType, err := en.Eval(v.Scope, &v.Stack)
if err != nil {
v.err = err
return raw
}
v.Stack.Push(&ast.LiteralNode{
Value: out,
Typex: outType,
})
if outType == ast.TypeUnknown {
// Halt immediately
v.err = errExitUnknown
return raw
}
return raw
}
// evalNode is a private function that returns an EvalNode for built-in
// types as well as any other EvalNode implementations.
func evalNode(raw ast.Node) (EvalNode, error) {
switch n := raw.(type) {
case *ast.Index:
return &evalIndex{n}, nil
case *ast.Call:
return &evalCall{n}, nil
case *ast.Conditional:
return &evalConditional{n}, nil
case *ast.Output:
return &evalOutput{n}, nil
case *ast.LiteralNode:
return &evalLiteralNode{n}, nil
case *ast.VariableAccess:
return &evalVariableAccess{n}, nil
default:
en, ok := n.(EvalNode)
if !ok {
return nil, fmt.Errorf("node doesn't support evaluation: %#v", raw)
}
return en, nil
}
}
type evalCall struct{ *ast.Call }
func (v *evalCall) Eval(s ast.Scope, stack *ast.Stack) (interface{}, ast.Type, error) {
// Look up the function in the map
function, ok := s.LookupFunc(v.Func)
if !ok {
return nil, ast.TypeInvalid, fmt.Errorf(
"unknown function called: %s", v.Func)
}
// The arguments are on the stack in reverse order, so pop them off.
args := make([]interface{}, len(v.Args))
for i, _ := range v.Args {
node := stack.Pop().(*ast.LiteralNode)
if node.IsUnknown() {
// If any arguments are unknown then the result is automatically unknown
return UnknownValue, ast.TypeUnknown, nil
}
args[len(v.Args)-1-i] = node.Value
}
// Call the function
result, err := function.Callback(args)
if err != nil {
return nil, ast.TypeInvalid, fmt.Errorf("%s: %s", v.Func, err)
}
return result, function.ReturnType, nil
}
type evalConditional struct{ *ast.Conditional }
func (v *evalConditional) Eval(s ast.Scope, stack *ast.Stack) (interface{}, ast.Type, error) {
// On the stack we have literal nodes representing the resulting values
// of the condition, true and false expressions, but they are in reverse
// order.
falseLit := stack.Pop().(*ast.LiteralNode)
trueLit := stack.Pop().(*ast.LiteralNode)
condLit := stack.Pop().(*ast.LiteralNode)
if condLit.IsUnknown() {
// If our conditional is unknown then our result is also unknown
return UnknownValue, ast.TypeUnknown, nil
}
if condLit.Value.(bool) {
return trueLit.Value, trueLit.Typex, nil
} else {
return falseLit.Value, falseLit.Typex, nil
}
}
type evalIndex struct{ *ast.Index }
func (v *evalIndex) Eval(scope ast.Scope, stack *ast.Stack) (interface{}, ast.Type, error) {
key := stack.Pop().(*ast.LiteralNode)
target := stack.Pop().(*ast.LiteralNode)
variableName := v.Index.Target.(*ast.VariableAccess).Name
if key.IsUnknown() {
// If our key is unknown then our result is also unknown
return UnknownValue, ast.TypeUnknown, nil
}
// For target, we'll accept collections containing unknown values but
// we still need to catch when the collection itself is unknown, shallowly.
if target.Typex == ast.TypeUnknown {
return UnknownValue, ast.TypeUnknown, nil
}
switch target.Typex {
case ast.TypeList:
return v.evalListIndex(variableName, target.Value, key.Value)
case ast.TypeMap:
return v.evalMapIndex(variableName, target.Value, key.Value)
default:
return nil, ast.TypeInvalid, fmt.Errorf(
"target %q for indexing must be ast.TypeList or ast.TypeMap, is %s",
variableName, target.Typex)
}
}
func (v *evalIndex) evalListIndex(variableName string, target interface{}, key interface{}) (interface{}, ast.Type, error) {
// We assume type checking was already done and we can assume that target
// is a list and key is an int
list, ok := target.([]ast.Variable)
if !ok {
return nil, ast.TypeInvalid, fmt.Errorf(
"cannot cast target to []Variable, is: %T", target)
}
keyInt, ok := key.(int)
if !ok {
return nil, ast.TypeInvalid, fmt.Errorf(
"cannot cast key to int, is: %T", key)
}
if len(list) == 0 {
return nil, ast.TypeInvalid, fmt.Errorf("list is empty")
}
if keyInt < 0 || len(list) < keyInt+1 {
return nil, ast.TypeInvalid, fmt.Errorf(
"index %d out of range for list %s (max %d)",
keyInt, variableName, len(list))
}
returnVal := list[keyInt].Value
returnType := list[keyInt].Type
return returnVal, returnType, nil
}
func (v *evalIndex) evalMapIndex(variableName string, target interface{}, key interface{}) (interface{}, ast.Type, error) {
// We assume type checking was already done and we can assume that target
// is a map and key is a string
vmap, ok := target.(map[string]ast.Variable)
if !ok {
return nil, ast.TypeInvalid, fmt.Errorf(
"cannot cast target to map[string]Variable, is: %T", target)
}
keyString, ok := key.(string)
if !ok {
return nil, ast.TypeInvalid, fmt.Errorf(
"cannot cast key to string, is: %T", key)
}
if len(vmap) == 0 {
return nil, ast.TypeInvalid, fmt.Errorf("map is empty")
}
value, ok := vmap[keyString]
if !ok {
return nil, ast.TypeInvalid, fmt.Errorf(
"key %q does not exist in map %s", keyString, variableName)
}
return value.Value, value.Type, nil
}
type evalOutput struct{ *ast.Output }
func (v *evalOutput) Eval(s ast.Scope, stack *ast.Stack) (interface{}, ast.Type, error) {
// The expressions should all be on the stack in reverse
// order. So pop them off, reverse their order, and concatenate.
nodes := make([]*ast.LiteralNode, 0, len(v.Exprs))
haveUnknown := false
for range v.Exprs {
n := stack.Pop().(*ast.LiteralNode)
nodes = append(nodes, n)
// If we have any unknowns then the whole result is unknown
// (we must deal with this first, because the type checker can
// skip type conversions in the presence of unknowns, and thus
// any of our other nodes may be incorrectly typed.)
if n.IsUnknown() {
haveUnknown = true
}
}
if haveUnknown {
return UnknownValue, ast.TypeUnknown, nil
}
// Special case the single list and map
if len(nodes) == 1 {
switch t := nodes[0].Typex; t {
case ast.TypeList:
fallthrough
case ast.TypeMap:
fallthrough
case ast.TypeUnknown:
return nodes[0].Value, t, nil
}
}
// Otherwise concatenate the strings
var buf bytes.Buffer
for i := len(nodes) - 1; i >= 0; i-- {
if nodes[i].Typex != ast.TypeString {
return nil, ast.TypeInvalid, fmt.Errorf(
"invalid output with %s value at index %d: %#v",
nodes[i].Typex,
i,
nodes[i].Value,
)
}
buf.WriteString(nodes[i].Value.(string))
}
return buf.String(), ast.TypeString, nil
}
type evalLiteralNode struct{ *ast.LiteralNode }
func (v *evalLiteralNode) Eval(ast.Scope, *ast.Stack) (interface{}, ast.Type, error) {
return v.Value, v.Typex, nil
}
type evalVariableAccess struct{ *ast.VariableAccess }
func (v *evalVariableAccess) Eval(scope ast.Scope, _ *ast.Stack) (interface{}, ast.Type, error) {
// Look up the variable in the map
variable, ok := scope.LookupVar(v.Name)
if !ok {
return nil, ast.TypeInvalid, fmt.Errorf(
"unknown variable accessed: %s", v.Name)
}
return variable.Value, variable.Type, nil
}
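
A sketch of the unknown-value behaviour described above: any unknown input short-circuits evaluation, and the whole result comes back as TypeUnknown rather than a partially interpolated string. The var.id name is illustrative only.

package main

import (
	"fmt"

	"github.com/hashicorp/hil"
	"github.com/hashicorp/hil/ast"
)

func main() {
	tree, err := hil.Parse("${var.id}-suffix")
	if err != nil {
		panic(err)
	}
	scope := &ast.BasicScope{
		VarMap: map[string]ast.Variable{
			"var.id": {Type: ast.TypeUnknown, Value: hil.UnknownValue},
		},
	}
	result, err := hil.Eval(tree, &hil.EvalConfig{GlobalScope: scope})
	if err != nil {
		panic(err)
	}
	// The unknown variable makes the entire interpolation unknown.
	fmt.Println(result.Type == hil.TypeUnknown, result.Value == hil.UnknownValue) // true true
}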

View File

@ -1,16 +0,0 @@
package hil
//go:generate stringer -type=EvalType eval_type.go
// EvalType represents the type of the output returned from a HIL
// evaluation.
type EvalType uint32
const (
TypeInvalid EvalType = 0
TypeString EvalType = 1 << iota
TypeBool
TypeList
TypeMap
TypeUnknown
)

View File

@ -1,42 +0,0 @@
// Code generated by "stringer -type=EvalType eval_type.go"; DO NOT EDIT
package hil
import "fmt"
const (
_EvalType_name_0 = "TypeInvalid"
_EvalType_name_1 = "TypeString"
_EvalType_name_2 = "TypeBool"
_EvalType_name_3 = "TypeList"
_EvalType_name_4 = "TypeMap"
_EvalType_name_5 = "TypeUnknown"
)
var (
_EvalType_index_0 = [...]uint8{0, 11}
_EvalType_index_1 = [...]uint8{0, 10}
_EvalType_index_2 = [...]uint8{0, 8}
_EvalType_index_3 = [...]uint8{0, 8}
_EvalType_index_4 = [...]uint8{0, 7}
_EvalType_index_5 = [...]uint8{0, 11}
)
func (i EvalType) String() string {
switch {
case i == 0:
return _EvalType_name_0
case i == 2:
return _EvalType_name_1
case i == 4:
return _EvalType_name_2
case i == 8:
return _EvalType_name_3
case i == 16:
return _EvalType_name_4
case i == 32:
return _EvalType_name_5
default:
return fmt.Sprintf("EvalType(%d)", i)
}
}

View File

@ -1,6 +0,0 @@
module github.com/hashicorp/hil
require (
github.com/mitchellh/mapstructure v1.1.2
github.com/mitchellh/reflectwalk v1.0.0
)

View File

@ -1,4 +0,0 @@
github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/reflectwalk v1.0.0 h1:9D+8oIskB4VJBN5SFlmc27fSlIBZaov1Wpk/IfikLNY=
github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=

View File

@ -1,29 +0,0 @@
package hil
import (
"github.com/hashicorp/hil/ast"
"github.com/hashicorp/hil/parser"
"github.com/hashicorp/hil/scanner"
)
// Parse parses the given program and returns an executable AST tree.
//
// Syntax errors are returned as an error with the dynamic type
// *parser.ParseError, which gives the caller access to the source position
// where the error was found; this allows (for example) combining it with
// a known source filename to add context to the error message.
func Parse(v string) (ast.Node, error) {
return ParseWithPosition(v, ast.Pos{Line: 1, Column: 1})
}
// ParseWithPosition is like Parse except that it overrides the source
// row and column position of the first character in the string, which should
// be 1-based.
//
// This can be used when HIL is embedded in another language and the outer
// parser knows the row and column where the HIL expression started within
// the overall source file.
func ParseWithPosition(v string, pos ast.Pos) (ast.Node, error) {
ch := scanner.Scan(v, pos)
return parser.Parse(ch)
}
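
For reference, the classic end-to-end flow of the removed API: parse an interpolation string, then evaluate it against a scope. The upper function and var.name variable are illustrative only.

package main

import (
	"fmt"
	"strings"

	"github.com/hashicorp/hil"
	"github.com/hashicorp/hil/ast"
)

func main() {
	tree, err := hil.Parse("hello ${upper(var.name)}")
	if err != nil {
		panic(err)
	}
	config := &hil.EvalConfig{
		GlobalScope: &ast.BasicScope{
			VarMap: map[string]ast.Variable{
				"var.name": {Type: ast.TypeString, Value: "world"},
			},
			FuncMap: map[string]ast.Function{
				"upper": {
					ArgTypes:   []ast.Type{ast.TypeString},
					ReturnType: ast.TypeString,
					Callback: func(args []interface{}) (interface{}, error) {
						return strings.ToUpper(args[0].(string)), nil
					},
				},
			},
		},
	}
	result, err := hil.Eval(tree, config)
	if err != nil {
		panic(err)
	}
	fmt.Println(result.Value) // hello WORLD
}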

View File

@ -1,45 +0,0 @@
package parser
import (
"github.com/hashicorp/hil/ast"
"github.com/hashicorp/hil/scanner"
)
var binaryOps []map[scanner.TokenType]ast.ArithmeticOp
func init() {
// This operation table maps from the operator's scanner token type
// to the AST arithmetic operation. All expressions produced from
// binary operators are *ast.Arithmetic nodes.
//
// Binary operator groups are listed in order of precedence, with
// the *lowest* precedence first. Operators within the same group
// have left-to-right associativity.
binaryOps = []map[scanner.TokenType]ast.ArithmeticOp{
{
scanner.OR: ast.ArithmeticOpLogicalOr,
},
{
scanner.AND: ast.ArithmeticOpLogicalAnd,
},
{
scanner.EQUAL: ast.ArithmeticOpEqual,
scanner.NOTEQUAL: ast.ArithmeticOpNotEqual,
},
{
scanner.GT: ast.ArithmeticOpGreaterThan,
scanner.GTE: ast.ArithmeticOpGreaterThanOrEqual,
scanner.LT: ast.ArithmeticOpLessThan,
scanner.LTE: ast.ArithmeticOpLessThanOrEqual,
},
{
scanner.PLUS: ast.ArithmeticOpAdd,
scanner.MINUS: ast.ArithmeticOpSub,
},
{
scanner.STAR: ast.ArithmeticOpMul,
scanner.SLASH: ast.ArithmeticOpDiv,
scanner.PERCENT: ast.ArithmeticOpMod,
},
}
}
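
Because the groups above are listed lowest-precedence first, multiplication binds tighter than addition, which in turn binds tighter than the comparison and logical groups. A quick sketch, assuming the removed hil module is still importable:

package main

import (
	"fmt"

	"github.com/hashicorp/hil"
)

func main() {
	tree, err := hil.Parse("${2 + 3 * 4}")
	if err != nil {
		panic(err)
	}
	result, err := hil.Eval(tree, &hil.EvalConfig{})
	if err != nil {
		panic(err)
	}
	// Parses as 2 + (3 * 4), not (2 + 3) * 4; the interpolation output is
	// then converted to the string "14".
	fmt.Println(result.Value) // 14
}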

View File

@ -1,38 +0,0 @@
package parser
import (
"fmt"
"github.com/hashicorp/hil/ast"
"github.com/hashicorp/hil/scanner"
)
type ParseError struct {
Message string
Pos ast.Pos
}
func Errorf(pos ast.Pos, format string, args ...interface{}) error {
return &ParseError{
Message: fmt.Sprintf(format, args...),
Pos: pos,
}
}
// TokenErrorf is a convenient wrapper around Errorf that uses the
// position of the given token.
func TokenErrorf(token *scanner.Token, format string, args ...interface{}) error {
return Errorf(token.Pos, format, args...)
}
func ExpectationError(wanted string, got *scanner.Token) error {
return TokenErrorf(got, "expected %s but found %s", wanted, got)
}
func (e *ParseError) Error() string {
return fmt.Sprintf("parse error at %s: %s", e.Pos, e.Message)
}
func (e *ParseError) String() string {
return e.Error()
}

View File

@ -1,28 +0,0 @@
// +build gofuzz
package parser
import (
"github.com/hashicorp/hil/ast"
"github.com/hashicorp/hil/scanner"
)
// This is a fuzz testing function designed to be used with go-fuzz:
// https://github.com/dvyukov/go-fuzz
//
// It's not included in a normal build due to the gofuzz build tag above.
//
// There are some input files that you can use as a seed corpus for go-fuzz
// in the ./fuzz-corpus directory.
func Fuzz(data []byte) int {
str := string(data)
ch := scanner.Scan(str, ast.Pos{Line: 1, Column: 1})
_, err := Parse(ch)
if err != nil {
return 0
}
return 1
}

View File

@ -1,522 +0,0 @@
package parser
import (
"strconv"
"unicode/utf8"
"github.com/hashicorp/hil/ast"
"github.com/hashicorp/hil/scanner"
)
func Parse(ch <-chan *scanner.Token) (ast.Node, error) {
peeker := scanner.NewPeeker(ch)
parser := &parser{peeker}
output, err := parser.ParseTopLevel()
peeker.Close()
return output, err
}
type parser struct {
peeker *scanner.Peeker
}
func (p *parser) ParseTopLevel() (ast.Node, error) {
return p.parseInterpolationSeq(false)
}
func (p *parser) ParseQuoted() (ast.Node, error) {
return p.parseInterpolationSeq(true)
}
// parseInterpolationSeq parses either the top-level sequence of literals
// and interpolation expressions or a similar sequence within a quoted
// string inside an interpolation expression. The latter case is requested
// by setting 'quoted' to true.
func (p *parser) parseInterpolationSeq(quoted bool) (ast.Node, error) {
literalType := scanner.LITERAL
endType := scanner.EOF
if quoted {
// exceptions for quoted sequences
literalType = scanner.STRING
endType = scanner.CQUOTE
}
startPos := p.peeker.Peek().Pos
if quoted {
tok := p.peeker.Read()
if tok.Type != scanner.OQUOTE {
return nil, ExpectationError("open quote", tok)
}
}
var exprs []ast.Node
for {
tok := p.peeker.Read()
if tok.Type == endType {
break
}
switch tok.Type {
case literalType:
val, err := p.parseStringToken(tok)
if err != nil {
return nil, err
}
exprs = append(exprs, &ast.LiteralNode{
Value: val,
Typex: ast.TypeString,
Posx: tok.Pos,
})
case scanner.BEGIN:
expr, err := p.ParseInterpolation()
if err != nil {
return nil, err
}
exprs = append(exprs, expr)
default:
return nil, ExpectationError(`"${"`, tok)
}
}
if len(exprs) == 0 {
// If we have no parts at all then the input must've
// been an empty string.
exprs = append(exprs, &ast.LiteralNode{
Value: "",
Typex: ast.TypeString,
Posx: startPos,
})
}
// As a special case, if our "Output" contains only one expression
// and it's a literal string then we'll hoist it up to be our
// direct return value, so callers can easily recognize a string
// that has no interpolations at all.
if len(exprs) == 1 {
if lit, ok := exprs[0].(*ast.LiteralNode); ok {
if lit.Typex == ast.TypeString {
return lit, nil
}
}
}
return &ast.Output{
Exprs: exprs,
Posx: startPos,
}, nil
}
// parseStringToken takes a token of either LITERAL or STRING type and
// returns the interpreted string, after processing any relevant
// escape sequences.
func (p *parser) parseStringToken(tok *scanner.Token) (string, error) {
var backslashes bool
switch tok.Type {
case scanner.LITERAL:
backslashes = false
case scanner.STRING:
backslashes = true
default:
panic("unsupported string token type")
}
raw := []byte(tok.Content)
buf := make([]byte, 0, len(raw))
for i := 0; i < len(raw); i++ {
b := raw[i]
more := len(raw) > (i + 1)
if b == '$' {
if more && raw[i+1] == '$' {
// skip over the second dollar sign
i++
}
} else if backslashes && b == '\\' {
if !more {
return "", Errorf(
ast.Pos{
Column: tok.Pos.Column + utf8.RuneCount(raw[:i]),
Line: tok.Pos.Line,
},
`unfinished backslash escape sequence`,
)
}
escapeType := raw[i+1]
switch escapeType {
case '\\':
// skip over the second slash
i++
case 'n':
b = '\n'
i++
case '"':
b = '"'
i++
default:
return "", Errorf(
ast.Pos{
Column: tok.Pos.Column + utf8.RuneCount(raw[:i]),
Line: tok.Pos.Line,
},
`invalid backslash escape sequence`,
)
}
}
buf = append(buf, b)
}
return string(buf), nil
}
func (p *parser) ParseInterpolation() (ast.Node, error) {
// By the time we're called, we're already "inside" the ${ sequence
// because the caller consumed the ${ token.
expr, err := p.ParseExpression()
if err != nil {
return nil, err
}
err = p.requireTokenType(scanner.END, `"}"`)
if err != nil {
return nil, err
}
return expr, nil
}
func (p *parser) ParseExpression() (ast.Node, error) {
return p.parseTernaryCond()
}
func (p *parser) parseTernaryCond() (ast.Node, error) {
// The ternary condition operator (.. ? .. : ..) behaves somewhat
// like a binary operator except that the "operator" is itself
// an expression enclosed in two punctuation characters.
// The middle expression is parsed as if the ? and : symbols
// were parentheses. The "rhs" (the "false expression") is then
// treated right-associatively so it behaves similarly to the
// middle in terms of precedence.
startPos := p.peeker.Peek().Pos
var cond, trueExpr, falseExpr ast.Node
var err error
cond, err = p.parseBinaryOps(binaryOps)
if err != nil {
return nil, err
}
next := p.peeker.Peek()
if next.Type != scanner.QUESTION {
return cond, nil
}
p.peeker.Read() // eat question mark
trueExpr, err = p.ParseExpression()
if err != nil {
return nil, err
}
colon := p.peeker.Read()
if colon.Type != scanner.COLON {
return nil, ExpectationError(":", colon)
}
falseExpr, err = p.ParseExpression()
if err != nil {
return nil, err
}
return &ast.Conditional{
CondExpr: cond,
TrueExpr: trueExpr,
FalseExpr: falseExpr,
Posx: startPos,
}, nil
}
// parseBinaryOps calls itself recursively to work through all of the
// operator precedence groups, and then eventually calls ParseExpressionTerm
// for each operand.
func (p *parser) parseBinaryOps(ops []map[scanner.TokenType]ast.ArithmeticOp) (ast.Node, error) {
if len(ops) == 0 {
// We've run out of operators, so now we'll just try to parse a term.
return p.ParseExpressionTerm()
}
thisLevel := ops[0]
remaining := ops[1:]
startPos := p.peeker.Peek().Pos
var lhs, rhs ast.Node
operator := ast.ArithmeticOpInvalid
var err error
// parse a term that might be the first operand of a binary
// expression or it might just be a standalone term, but
// we won't know until we've parsed it and can look ahead
// to see if there's an operator token.
lhs, err = p.parseBinaryOps(remaining)
if err != nil {
return nil, err
}
// We'll keep eating up arithmetic operators until we run
// out, so that operators with the same precedence will combine in a
// left-associative manner:
// a+b+c => (a+b)+c, not a+(b+c)
//
// Should we later want to have right-associative operators, a way
// to achieve that would be to call back up to ParseExpression here
// instead of iteratively parsing only the remaining operators.
for {
next := p.peeker.Peek()
var newOperator ast.ArithmeticOp
var ok bool
if newOperator, ok = thisLevel[next.Type]; !ok {
break
}
// Are we extending an expression started on
// the previous iteration?
if operator != ast.ArithmeticOpInvalid {
lhs = &ast.Arithmetic{
Op: operator,
Exprs: []ast.Node{lhs, rhs},
Posx: startPos,
}
}
operator = newOperator
p.peeker.Read() // eat operator token
rhs, err = p.parseBinaryOps(remaining)
if err != nil {
return nil, err
}
}
if operator != ast.ArithmeticOpInvalid {
return &ast.Arithmetic{
Op: operator,
Exprs: []ast.Node{lhs, rhs},
Posx: startPos,
}, nil
} else {
return lhs, nil
}
}
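// Illustrative sketch only (not part of the original file): because operators
// of equal precedence fold left-associatively, the expression 10 - 2 - 3 is
// parsed by parseBinaryOps as (10 - 2) - 3, i.e. a structure equivalent to
// the value below (positions omitted).
var _ ast.Node = &ast.Arithmetic{
	Op: ast.ArithmeticOpSub,
	Exprs: []ast.Node{
		&ast.Arithmetic{
			Op: ast.ArithmeticOpSub,
			Exprs: []ast.Node{
				&ast.LiteralNode{Value: 10, Typex: ast.TypeInt},
				&ast.LiteralNode{Value: 2, Typex: ast.TypeInt},
			},
		},
		&ast.LiteralNode{Value: 3, Typex: ast.TypeInt},
	},
}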
func (p *parser) ParseExpressionTerm() (ast.Node, error) {
next := p.peeker.Peek()
switch next.Type {
case scanner.OPAREN:
p.peeker.Read()
expr, err := p.ParseExpression()
if err != nil {
return nil, err
}
err = p.requireTokenType(scanner.CPAREN, `")"`)
return expr, err
case scanner.OQUOTE:
return p.ParseQuoted()
case scanner.INTEGER:
tok := p.peeker.Read()
val, err := strconv.Atoi(tok.Content)
if err != nil {
return nil, TokenErrorf(tok, "invalid integer: %s", err)
}
return &ast.LiteralNode{
Value: val,
Typex: ast.TypeInt,
Posx: tok.Pos,
}, nil
case scanner.FLOAT:
tok := p.peeker.Read()
val, err := strconv.ParseFloat(tok.Content, 64)
if err != nil {
return nil, TokenErrorf(tok, "invalid float: %s", err)
}
return &ast.LiteralNode{
Value: val,
Typex: ast.TypeFloat,
Posx: tok.Pos,
}, nil
case scanner.BOOL:
tok := p.peeker.Read()
// the scanner guarantees that tok.Content is either "true" or "false"
var val bool
if tok.Content[0] == 't' {
val = true
} else {
val = false
}
return &ast.LiteralNode{
Value: val,
Typex: ast.TypeBool,
Posx: tok.Pos,
}, nil
case scanner.MINUS:
opTok := p.peeker.Read()
// important to use ParseExpressionTerm rather than ParseExpression
// here, otherwise we can capture a following binary expression into
// our negation.
// e.g. -46+5 should parse as (0-46)+5, not 0-(46+5)
operand, err := p.ParseExpressionTerm()
if err != nil {
return nil, err
}
// The AST currently represents negative numbers as
// a binary subtraction of the number from zero.
return &ast.Arithmetic{
Op: ast.ArithmeticOpSub,
Exprs: []ast.Node{
&ast.LiteralNode{
Value: 0,
Typex: ast.TypeInt,
Posx: opTok.Pos,
},
operand,
},
Posx: opTok.Pos,
}, nil
case scanner.BANG:
opTok := p.peeker.Read()
// important to use ParseExpressionTerm rather than ParseExpression
// here, otherwise we can capture a following binary expression into
// our negation.
operand, err := p.ParseExpressionTerm()
if err != nil {
return nil, err
}
// The AST currently represents binary negation as an equality
// test with "false".
return &ast.Arithmetic{
Op: ast.ArithmeticOpEqual,
Exprs: []ast.Node{
&ast.LiteralNode{
Value: false,
Typex: ast.TypeBool,
Posx: opTok.Pos,
},
operand,
},
Posx: opTok.Pos,
}, nil
case scanner.IDENTIFIER:
return p.ParseScopeInteraction()
default:
return nil, ExpectationError("expression", next)
}
}
// ParseScopeInteraction parses the expression types that interact
// with the evaluation scope: variable access, function calls, and
// indexing.
//
// Indexing should actually be a distinct operator in its own right,
// so that e.g. it can be applied to the result of a function call,
// but for now we're preserving the behavior of the older yacc-based
// parser.
func (p *parser) ParseScopeInteraction() (ast.Node, error) {
first := p.peeker.Read()
startPos := first.Pos
if first.Type != scanner.IDENTIFIER {
return nil, ExpectationError("identifier", first)
}
next := p.peeker.Peek()
if next.Type == scanner.OPAREN {
// function call
funcName := first.Content
p.peeker.Read() // eat paren
var args []ast.Node
for {
if p.peeker.Peek().Type == scanner.CPAREN {
break
}
arg, err := p.ParseExpression()
if err != nil {
return nil, err
}
args = append(args, arg)
if p.peeker.Peek().Type == scanner.COMMA {
p.peeker.Read() // eat comma
continue
} else {
break
}
}
err := p.requireTokenType(scanner.CPAREN, `")"`)
if err != nil {
return nil, err
}
return &ast.Call{
Func: funcName,
Args: args,
Posx: startPos,
}, nil
}
varNode := &ast.VariableAccess{
Name: first.Content,
Posx: startPos,
}
if p.peeker.Peek().Type == scanner.OBRACKET {
// index operator
startPos := p.peeker.Read().Pos // eat bracket
indexExpr, err := p.ParseExpression()
if err != nil {
return nil, err
}
err = p.requireTokenType(scanner.CBRACKET, `"]"`)
if err != nil {
return nil, err
}
return &ast.Index{
Target: varNode,
Key: indexExpr,
Posx: startPos,
}, nil
}
return varNode, nil
}
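// Illustrative sketch only (not part of the original file): the expression
// format("ami-%s", region) is parsed by ParseScopeInteraction into a call
// node roughly equivalent to the value below (positions omitted; the
// function and variable names are assumptions).
var _ ast.Node = &ast.Call{
	Func: "format",
	Args: []ast.Node{
		&ast.LiteralNode{Value: "ami-%s", Typex: ast.TypeString},
		&ast.VariableAccess{Name: "region"},
	},
}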
// requireTokenType consumes the next token and returns an error if its
// type does not match the given type. nil is returned if the type matches.
//
// This is a helper around peeker.Read() for situations where the parser just
// wants to assert that a particular token type must be present.
func (p *parser) requireTokenType(wantType scanner.TokenType, wantName string) error {
token := p.peeker.Read()
if token.Type != wantType {
return ExpectationError(wantName, token)
}
return nil
}


@ -1,55 +0,0 @@
package scanner
// Peeker is a utility that wraps a token channel returned by Scan and
// provides an interface that allows a caller (e.g. the parser) to
// work with the token stream in a mode that allows one token of lookahead,
// and provides utilities for more convenient processing of the stream.
type Peeker struct {
ch <-chan *Token
peeked *Token
}
func NewPeeker(ch <-chan *Token) *Peeker {
return &Peeker{
ch: ch,
}
}
// Peek returns the next token in the stream without consuming it. A
// subsequent call to Read will return the same token.
func (p *Peeker) Peek() *Token {
if p.peeked == nil {
p.peeked = <-p.ch
}
return p.peeked
}
// Read consumes the next token in the stream and returns it.
func (p *Peeker) Read() *Token {
token := p.Peek()
// As a special case, we will produce the EOF token forever once
// it is reached.
if token.Type != EOF {
p.peeked = nil
}
return token
}
// Close ensures that the token stream has been exhausted, to prevent
// the goroutine in the underlying scanner from leaking.
//
// It's not necessary to call this if the caller reads the token stream
// to EOF, since that implicitly closes the scanner.
func (p *Peeker) Close() {
for range p.ch {
// discard
}
// Install a synthetic EOF token in 'peeked' in case someone
// erroneously calls Peek() or Read() after we've closed.
p.peeked = &Token{
Type: EOF,
Content: "",
}
}
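// Usage sketch only (not part of the original file): a caller can look
// ahead with Peek and commit with Read. The helper name below is an
// assumption for illustration.
func collectLeadingIdentifiers(ch <-chan *Token) []*Token {
	p := NewPeeker(ch)
	// Drain whatever remains so the scanning goroutine can exit.
	defer p.Close()

	var idents []*Token
	for p.Peek().Type == IDENTIFIER {
		idents = append(idents, p.Read())
	}
	return idents
}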


@ -1,556 +0,0 @@
package scanner
import (
"unicode"
"unicode/utf8"
"github.com/hashicorp/hil/ast"
)
// Scan returns a channel that receives Tokens from the given input string.
//
// The scanner's job is just to partition the string into meaningful parts.
// It doesn't do any transformation of the raw input string, so the caller
// must deal with any further interpretation required, such as parsing INTEGER
// tokens into real ints, or dealing with escape sequences in LITERAL or
// STRING tokens.
//
// Strings in the returned tokens are slices from the original string.
//
// startPos should be set to ast.InitPos unless the caller knows that
// this interpolation string is part of a larger file and knows the position
// of the first character in that larger file.
func Scan(s string, startPos ast.Pos) <-chan *Token {
ch := make(chan *Token)
go scan(s, ch, startPos)
return ch
}
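// Usage sketch only (not part of the original file): collect the token
// types produced for an input string. The channel is closed after the
// final EOF (or synthetic EOF) token, so ranging over it terminates.
// The helper name is an assumption for illustration.
func tokenTypesOf(input string) []TokenType {
	var types []TokenType
	for tok := range Scan(input, ast.InitPos) {
		types = append(types, tok.Type)
	}
	return types
}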
func scan(s string, ch chan<- *Token, pos ast.Pos) {
// 'remain' starts off as the whole string but we gradually
// slice off the front of it as we work our way through.
remain := s
// nesting keeps track of how many ${ .. } sequences we are
// inside, so we can recognize the minor differences in syntax
// between outer string literals (LITERAL tokens) and quoted
// string literals (STRING tokens).
nesting := 0
// We're going to flip back and forth between parsing literals/strings
// and parsing interpolation sequences ${ .. } until we reach EOF or
// some INVALID token.
All:
for {
startPos := pos
// Literal string processing first, since the beginning of
// a string is always outside of an interpolation sequence.
literalVal, terminator := scanLiteral(remain, pos, nesting > 0)
if len(literalVal) > 0 {
litType := LITERAL
if nesting > 0 {
litType = STRING
}
ch <- &Token{
Type: litType,
Content: literalVal,
Pos: startPos,
}
remain = remain[len(literalVal):]
}
ch <- terminator
remain = remain[len(terminator.Content):]
pos = terminator.Pos
// Safe to use len() here because none of the terminator tokens
// can contain UTF-8 sequences.
pos.Column = pos.Column + len(terminator.Content)
switch terminator.Type {
case INVALID:
// Synthetic EOF after invalid token, since further scanning
// is likely to just produce more garbage.
ch <- &Token{
Type: EOF,
Content: "",
Pos: pos,
}
break All
case EOF:
// All done!
break All
case BEGIN:
nesting++
case CQUOTE:
// nothing special to do
default:
// Should never happen
panic("invalid string/literal terminator")
}
// Now we do the processing of the insides of ${ .. } sequences.
// This loop terminates when we encounter either a closing } or
// an opening ", which will cause us to return to literal processing.
Interpolation:
for {
token, size, newPos := scanInterpolationToken(remain, pos)
ch <- token
remain = remain[size:]
pos = newPos
switch token.Type {
case INVALID:
// Synthetic EOF after invalid token, since further scanning
// is likely to just produce more garbage.
ch <- &Token{
Type: EOF,
Content: "",
Pos: pos,
}
break All
case EOF:
// All done
// (though a syntax error that we'll catch in the parser)
break All
case END:
nesting--
if nesting < 0 {
// Can happen if there are unbalanced ${ and } sequences
// in the input, which we'll catch in the parser.
nesting = 0
}
break Interpolation
case OQUOTE:
// Beginning of nested quoted string
break Interpolation
}
}
}
close(ch)
}
// Returns the token found at the start of the given string, followed by
// the number of bytes that were consumed from the string and the adjusted
// source position.
//
// Note that the number of bytes consumed can be more than the length of
// the returned token contents if the string begins with whitespace, since
// it will be silently consumed before reading the token.
func scanInterpolationToken(s string, startPos ast.Pos) (*Token, int, ast.Pos) {
pos := startPos
size := 0
// Consume whitespace, if any
for len(s) > 0 && byteIsSpace(s[0]) {
if s[0] == '\n' {
pos.Column = 1
pos.Line++
} else {
pos.Column++
}
size++
s = s[1:]
}
// Unexpected EOF during sequence
if len(s) == 0 {
return &Token{
Type: EOF,
Content: "",
Pos: pos,
}, size, pos
}
next := s[0]
var token *Token
switch next {
case '(', ')', '[', ']', ',', '.', '+', '-', '*', '/', '%', '?', ':':
// Easy punctuation symbols that don't have any special meaning
// during scanning, and that stand for themselves in the
// TokenType enumeration.
token = &Token{
Type: TokenType(next),
Content: s[:1],
Pos: pos,
}
case '}':
token = &Token{
Type: END,
Content: s[:1],
Pos: pos,
}
case '"':
token = &Token{
Type: OQUOTE,
Content: s[:1],
Pos: pos,
}
case '!':
if len(s) >= 2 && s[:2] == "!=" {
token = &Token{
Type: NOTEQUAL,
Content: s[:2],
Pos: pos,
}
} else {
token = &Token{
Type: BANG,
Content: s[:1],
Pos: pos,
}
}
case '<':
if len(s) >= 2 && s[:2] == "<=" {
token = &Token{
Type: LTE,
Content: s[:2],
Pos: pos,
}
} else {
token = &Token{
Type: LT,
Content: s[:1],
Pos: pos,
}
}
case '>':
if len(s) >= 2 && s[:2] == ">=" {
token = &Token{
Type: GTE,
Content: s[:2],
Pos: pos,
}
} else {
token = &Token{
Type: GT,
Content: s[:1],
Pos: pos,
}
}
case '=':
if len(s) >= 2 && s[:2] == "==" {
token = &Token{
Type: EQUAL,
Content: s[:2],
Pos: pos,
}
} else {
// A single equals is not a valid operator
token = &Token{
Type: INVALID,
Content: s[:1],
Pos: pos,
}
}
case '&':
if len(s) >= 2 && s[:2] == "&&" {
token = &Token{
Type: AND,
Content: s[:2],
Pos: pos,
}
} else {
token = &Token{
Type: INVALID,
Content: s[:1],
Pos: pos,
}
}
case '|':
if len(s) >= 2 && s[:2] == "||" {
token = &Token{
Type: OR,
Content: s[:2],
Pos: pos,
}
} else {
token = &Token{
Type: INVALID,
Content: s[:1],
Pos: pos,
}
}
default:
if next >= '0' && next <= '9' {
num, numType := scanNumber(s)
token = &Token{
Type: numType,
Content: num,
Pos: pos,
}
} else if stringStartsWithIdentifier(s) {
ident, runeLen := scanIdentifier(s)
tokenType := IDENTIFIER
if ident == "true" || ident == "false" {
tokenType = BOOL
}
token = &Token{
Type: tokenType,
Content: ident,
Pos: pos,
}
// Skip usual token handling because it doesn't
// know how to deal with UTF-8 sequences.
pos.Column = pos.Column + runeLen
return token, size + len(ident), pos
} else {
_, byteLen := utf8.DecodeRuneInString(s)
token = &Token{
Type: INVALID,
Content: s[:byteLen],
Pos: pos,
}
// Skip usual token handling because it doesn't
// know how to deal with UTF-8 sequences.
pos.Column = pos.Column + 1
return token, size + byteLen, pos
}
}
// Here we assume that the token content contains no UTF-8 sequences,
// because we dealt with UTF-8 characters as a special case where
// necessary above.
size = size + len(token.Content)
pos.Column = pos.Column + len(token.Content)
return token, size, pos
}
// Returns the (possibly-empty) prefix of the given string that represents
// a literal, followed by the token that marks the end of the literal.
func scanLiteral(s string, startPos ast.Pos, nested bool) (string, *Token) {
litLen := 0
pos := startPos
var terminator *Token
for {
if litLen >= len(s) {
if nested {
// We've ended in the middle of a quoted string,
// which means this token is actually invalid.
return "", &Token{
Type: INVALID,
Content: s,
Pos: startPos,
}
}
terminator = &Token{
Type: EOF,
Content: "",
Pos: pos,
}
break
}
next := s[litLen]
if next == '$' && len(s) > litLen+1 {
follow := s[litLen+1]
if follow == '{' {
terminator = &Token{
Type: BEGIN,
Content: s[litLen : litLen+2],
Pos: pos,
}
pos.Column = pos.Column + 2
break
} else if follow == '$' {
// Double-$ escapes the special processing of $,
// so we will consume both characters here.
pos.Column = pos.Column + 2
litLen = litLen + 2
continue
}
}
// special handling that applies only to quoted strings
if nested {
if next == '"' {
terminator = &Token{
Type: CQUOTE,
Content: s[litLen : litLen+1],
Pos: pos,
}
pos.Column = pos.Column + 1
break
}
// Escaped quote marks do not terminate the string.
//
// All we do here in the scanner is avoid terminating a string
// due to an escaped quote. The parser is responsible for the
// full handling of escape sequences, since it's able to produce
// better error messages than we can produce in here.
if next == '\\' && len(s) > litLen+1 {
follow := s[litLen+1]
if follow == '"' {
// \" escapes the special processing of ",
// so we will consume both characters here.
pos.Column = pos.Column + 2
litLen = litLen + 2
continue
} else if follow == '\\' {
// \\ escapes \
// so we will consume both characters here.
pos.Column = pos.Column + 2
litLen = litLen + 2
continue
}
}
}
if next == '\n' {
pos.Column = 1
pos.Line++
litLen++
} else {
pos.Column++
// "Column" measures runes, so we need to actually consume
// a valid UTF-8 character here.
_, size := utf8.DecodeRuneInString(s[litLen:])
litLen = litLen + size
}
}
return s[:litLen], terminator
}
// scanNumber returns the extent of the prefix of the string that represents
// a valid number, along with what type of number it represents: INT or FLOAT.
//
// scanNumber does only basic character analysis: numbers consist of digits
// and periods, with at least one period signalling a FLOAT. It's the parser's
// responsibility to validate the form and range of the number, such as ensuring
// that a FLOAT actually contains only one period, etc.
func scanNumber(s string) (string, TokenType) {
period := -1
byteLen := 0
numType := INTEGER
for {
if byteLen >= len(s) {
break
}
next := s[byteLen]
if next != '.' && (next < '0' || next > '9') {
// If our last value was a period, then we're not a float,
// we're just an integer that ends in a period.
if period == byteLen-1 {
byteLen--
numType = INTEGER
}
break
}
if next == '.' {
// If we've already seen a period, break out
if period >= 0 {
break
}
period = byteLen
numType = FLOAT
}
byteLen++
}
return s[:byteLen], numType
}
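// Illustrative sketch only (not part of the original file): a hypothetical
// helper showing the prefixes scanNumber recognizes for a few inputs.
func scanNumberExamples() map[string]TokenType {
	out := map[string]TokenType{}
	for _, in := range []string{"42+1", "12.34}", "7.+1"} {
		num, typ := scanNumber(in)
		out[num] = typ // "42": INTEGER, "12.34": FLOAT, "7": INTEGER
	}
	return out
}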
// scanIdentifier returns the extent of the prefix of the string that
// represents a valid identifier, along with the length of that prefix
// in runes.
//
// Identifiers may contain utf8-encoded non-Latin letters, which will
// cause the returned "rune length" to be shorter than the byte length
// of the returned string.
func scanIdentifier(s string) (string, int) {
byteLen := 0
runeLen := 0
for {
if byteLen >= len(s) {
break
}
nextRune, size := utf8.DecodeRuneInString(s[byteLen:])
if !(nextRune == '_' ||
nextRune == '-' ||
nextRune == '.' ||
nextRune == '*' ||
unicode.IsNumber(nextRune) ||
unicode.IsLetter(nextRune) ||
unicode.IsMark(nextRune)) {
break
}
// If we reach a star, it must be between periods to be part
// of the same identifier.
if nextRune == '*' && s[byteLen-1] != '.' {
break
}
// If our previous character was a star, then the current must
// be period. Otherwise, undo that and exit.
if byteLen > 0 && s[byteLen-1] == '*' && nextRune != '.' {
byteLen--
if s[byteLen-1] == '.' {
byteLen--
}
break
}
byteLen = byteLen + size
runeLen = runeLen + 1
}
return s[:byteLen], runeLen
}
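// Illustrative sketch only (not part of the original file): identifiers may
// contain non-ASCII letters, so the returned rune length can be smaller
// than the byte length of the returned prefix. The helper name and input
// are assumptions for illustration.
func scanIdentifierExample() (string, int) {
	// Returns "café.id" (8 bytes) and a rune length of 7.
	return scanIdentifier("café.id + 1")
}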
// byteIsSpace implements a restrictive interpretation of spaces that includes
// only what's valid inside interpolation sequences: spaces, tabs, carriage returns, and newlines.
func byteIsSpace(b byte) bool {
switch b {
case ' ', '\t', '\r', '\n':
return true
default:
return false
}
}
// stringStartsWithIdentifier returns true if the given string begins with
// a character that is a legal start of an identifier: an underscore or
// any character that Unicode considers to be a letter.
func stringStartsWithIdentifier(s string) bool {
if len(s) == 0 {
return false
}
first := s[0]
// Easy ASCII cases first
if (first >= 'a' && first <= 'z') || (first >= 'A' && first <= 'Z') || first == '_' {
return true
}
// If our first byte begins a UTF-8 sequence then the sequence might
// be a unicode letter.
if utf8.RuneStart(first) {
firstRune, _ := utf8.DecodeRuneInString(s)
if unicode.IsLetter(firstRune) {
return true
}
}
return false
}


@ -1,105 +0,0 @@
package scanner
import (
"fmt"
"github.com/hashicorp/hil/ast"
)
type Token struct {
Type TokenType
Content string
Pos ast.Pos
}
//go:generate stringer -type=TokenType
type TokenType rune
const (
// Raw string data outside of ${ .. } sequences
LITERAL TokenType = 'o'
// STRING is like a LITERAL but it's inside a quoted string
// within a ${ ... } sequence, and so it can contain backslash
// escaping.
STRING TokenType = 'S'
// Other Literals
INTEGER TokenType = 'I'
FLOAT TokenType = 'F'
BOOL TokenType = 'B'
BEGIN TokenType = '$' // actually "${"
END TokenType = '}'
OQUOTE TokenType = '“' // Opening quote of a nested quoted sequence
CQUOTE TokenType = '”' // Closing quote of a nested quoted sequence
OPAREN TokenType = '('
CPAREN TokenType = ')'
OBRACKET TokenType = '['
CBRACKET TokenType = ']'
COMMA TokenType = ','
IDENTIFIER TokenType = 'i'
PERIOD TokenType = '.'
PLUS TokenType = '+'
MINUS TokenType = '-'
STAR TokenType = '*'
SLASH TokenType = '/'
PERCENT TokenType = '%'
AND TokenType = '∧'
OR TokenType = '∨'
BANG TokenType = '!'
EQUAL TokenType = '='
NOTEQUAL TokenType = '≠'
GT TokenType = '>'
LT TokenType = '<'
GTE TokenType = '≥'
LTE TokenType = '≤'
QUESTION TokenType = '?'
COLON TokenType = ':'
EOF TokenType = '␄'
// Produced for sequences that cannot be understood as valid tokens
// e.g. due to use of unrecognized punctuation.
INVALID TokenType = '�'
)
func (t *Token) String() string {
switch t.Type {
case EOF:
return "end of string"
case INVALID:
return fmt.Sprintf("invalid sequence %q", t.Content)
case INTEGER:
return fmt.Sprintf("integer %s", t.Content)
case FLOAT:
return fmt.Sprintf("float %s", t.Content)
case STRING:
return fmt.Sprintf("string %q", t.Content)
case LITERAL:
return fmt.Sprintf("literal %q", t.Content)
case OQUOTE:
return "opening quote"
case CQUOTE:
return "closing quote"
case AND:
return "&&"
case OR:
return "||"
case NOTEQUAL:
return "!="
case GTE:
return ">="
case LTE:
return "<="
default:
// The remaining token types have content that
// speaks for itself.
return fmt.Sprintf("%q", t.Content)
}
}


@ -1,51 +0,0 @@
// Code generated by "stringer -type=TokenType"; DO NOT EDIT
package scanner
import "fmt"
const _TokenType_name = "BANGBEGINPERCENTOPARENCPARENSTARPLUSCOMMAMINUSPERIODSLASHCOLONLTEQUALGTQUESTIONBOOLFLOATINTEGERSTRINGOBRACKETCBRACKETIDENTIFIERLITERALENDOQUOTECQUOTEANDORNOTEQUALLTEGTEEOFINVALID"
var _TokenType_map = map[TokenType]string{
33: _TokenType_name[0:4],
36: _TokenType_name[4:9],
37: _TokenType_name[9:16],
40: _TokenType_name[16:22],
41: _TokenType_name[22:28],
42: _TokenType_name[28:32],
43: _TokenType_name[32:36],
44: _TokenType_name[36:41],
45: _TokenType_name[41:46],
46: _TokenType_name[46:52],
47: _TokenType_name[52:57],
58: _TokenType_name[57:62],
60: _TokenType_name[62:64],
61: _TokenType_name[64:69],
62: _TokenType_name[69:71],
63: _TokenType_name[71:79],
66: _TokenType_name[79:83],
70: _TokenType_name[83:88],
73: _TokenType_name[88:95],
83: _TokenType_name[95:101],
91: _TokenType_name[101:109],
93: _TokenType_name[109:117],
105: _TokenType_name[117:127],
111: _TokenType_name[127:134],
125: _TokenType_name[134:137],
8220: _TokenType_name[137:143],
8221: _TokenType_name[143:149],
8743: _TokenType_name[149:152],
8744: _TokenType_name[152:154],
8800: _TokenType_name[154:162],
8804: _TokenType_name[162:165],
8805: _TokenType_name[165:168],
9220: _TokenType_name[168:171],
65533: _TokenType_name[171:178],
}
func (i TokenType) String() string {
if str, ok := _TokenType_map[i]; ok {
return str
}
return fmt.Sprintf("TokenType(%d)", i)
}


@ -1,29 +0,0 @@
package hil
import (
"github.com/hashicorp/hil/ast"
)
// FixedValueTransform transforms an AST to return a fixed value for
// all interpolations. i.e. you can make "hi ${anything}" always
// turn into "hi foo".
//
// The primary use case for this is for config validations where you can
// verify that interpolations result in a certain type of string.
func FixedValueTransform(root ast.Node, Value *ast.LiteralNode) ast.Node {
// We visit the nodes in top-down order
result := root
switch n := result.(type) {
case *ast.Output:
for i, v := range n.Exprs {
n.Exprs[i] = FixedValueTransform(v, Value)
}
case *ast.LiteralNode:
// We keep it as-is
default:
// Anything else we replace
result = Value
}
return result
}
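// Usage sketch only (not part of the original file): replace every
// interpolation in a parsed program with a fixed placeholder string.
// The helper name and placeholder value are assumptions for illustration.
func fixedPlaceholder(root ast.Node) ast.Node {
	return FixedValueTransform(root, &ast.LiteralNode{
		Value: "placeholder",
		Typex: ast.TypeString,
	})
}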


@ -1,266 +0,0 @@
package hil
import (
"fmt"
"reflect"
"strings"
"github.com/hashicorp/hil/ast"
"github.com/mitchellh/reflectwalk"
)
// WalkFn is the type of function to pass to Walk. Modify fields within
// WalkData to control whether replacement happens.
type WalkFn func(*WalkData) error
// WalkData is the structure passed to the callback of the Walk function.
//
// This structure contains data passed in as well as fields that are expected
// to be written by the caller as a result. Please see the documentation for
// each field for more information.
type WalkData struct {
// Root is the parsed root of this HIL program
Root ast.Node
// Location is the location within the structure where this
// value was found. This can be used to modify behavior within
// slices and so on.
Location reflectwalk.Location
// The below two values must be set by the callback to have any effect.
//
// Replace, if true, will replace the value in the structure with
// ReplaceValue. It is up to the caller to make sure this is a string.
Replace bool
ReplaceValue string
}
// Walk will walk an arbitrary Go structure and parse any string as an
// HIL program and call the callback cb to determine what to replace it
// with.
//
// This function is very useful for arbitrary HIL program interpolation
// across a complex configuration structure. Due to the heavy use of
// reflection in this function, it is recommended to write many unit tests
// with your typical configuration structures to help mitigate the risk
// of panics.
func Walk(v interface{}, cb WalkFn) error {
walker := &interpolationWalker{F: cb}
return reflectwalk.Walk(v, walker)
}
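// Usage sketch only (not part of the original file): walk an arbitrary
// configuration structure and replace every string containing an
// interpolation with a fixed marker. The helper name and marker text are
// assumptions for illustration.
func replaceInterpolations(config interface{}) error {
	return Walk(config, func(d *WalkData) error {
		d.Replace = true
		d.ReplaceValue = "<computed>"
		return nil
	})
}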
// interpolationWalker implements interfaces for the reflectwalk package
// (github.com/mitchellh/reflectwalk) that can be used to automatically
// execute a callback for an interpolation.
type interpolationWalker struct {
F WalkFn
key []string
lastValue reflect.Value
loc reflectwalk.Location
cs []reflect.Value
csKey []reflect.Value
csData interface{}
sliceIndex int
unknownKeys []string
}
func (w *interpolationWalker) Enter(loc reflectwalk.Location) error {
w.loc = loc
return nil
}
func (w *interpolationWalker) Exit(loc reflectwalk.Location) error {
w.loc = reflectwalk.None
switch loc {
case reflectwalk.Map:
w.cs = w.cs[:len(w.cs)-1]
case reflectwalk.MapValue:
w.key = w.key[:len(w.key)-1]
w.csKey = w.csKey[:len(w.csKey)-1]
case reflectwalk.Slice:
// Split any values that need to be split
w.splitSlice()
w.cs = w.cs[:len(w.cs)-1]
case reflectwalk.SliceElem:
w.csKey = w.csKey[:len(w.csKey)-1]
}
return nil
}
func (w *interpolationWalker) Map(m reflect.Value) error {
w.cs = append(w.cs, m)
return nil
}
func (w *interpolationWalker) MapElem(m, k, v reflect.Value) error {
w.csData = k
w.csKey = append(w.csKey, k)
w.key = append(w.key, k.String())
w.lastValue = v
return nil
}
func (w *interpolationWalker) Slice(s reflect.Value) error {
w.cs = append(w.cs, s)
return nil
}
func (w *interpolationWalker) SliceElem(i int, elem reflect.Value) error {
w.csKey = append(w.csKey, reflect.ValueOf(i))
w.sliceIndex = i
return nil
}
func (w *interpolationWalker) Primitive(v reflect.Value) error {
setV := v
// We only care about strings
if v.Kind() == reflect.Interface {
setV = v
v = v.Elem()
}
if v.Kind() != reflect.String {
return nil
}
astRoot, err := Parse(v.String())
if err != nil {
return err
}
// If the AST we got is just a literal string value with the same
// value then we ignore it. We have to check if it's the same value
// because it is possible to input a string, get out a string, and
// have it be different. For example: "foo-$${bar}" turns into
// "foo-${bar}"
if n, ok := astRoot.(*ast.LiteralNode); ok {
if s, ok := n.Value.(string); ok && s == v.String() {
return nil
}
}
if w.F == nil {
return nil
}
data := WalkData{Root: astRoot, Location: w.loc}
if err := w.F(&data); err != nil {
return fmt.Errorf(
"%s in:\n\n%s",
err, v.String())
}
if data.Replace {
/*
if remove {
w.removeCurrent()
return nil
}
*/
resultVal := reflect.ValueOf(data.ReplaceValue)
switch w.loc {
case reflectwalk.MapKey:
m := w.cs[len(w.cs)-1]
// Delete the old value
var zero reflect.Value
m.SetMapIndex(w.csData.(reflect.Value), zero)
// Set the new key with the existing value
m.SetMapIndex(resultVal, w.lastValue)
// Set the key to be the new key
w.csData = resultVal
case reflectwalk.MapValue:
// If we're in a map, then the only way to set a map value is
// to set it directly.
m := w.cs[len(w.cs)-1]
mk := w.csData.(reflect.Value)
m.SetMapIndex(mk, resultVal)
default:
// Otherwise, we should be addressable
setV.Set(resultVal)
}
}
return nil
}
func (w *interpolationWalker) removeCurrent() {
// Append the key to the unknown keys
w.unknownKeys = append(w.unknownKeys, strings.Join(w.key, "."))
for i := 1; i <= len(w.cs); i++ {
c := w.cs[len(w.cs)-i]
switch c.Kind() {
case reflect.Map:
// Zero value so that we delete the map key
var val reflect.Value
// Get the key and delete it
k := w.csData.(reflect.Value)
c.SetMapIndex(k, val)
return
}
}
panic("No container found for removeCurrent")
}
func (w *interpolationWalker) replaceCurrent(v reflect.Value) {
c := w.cs[len(w.cs)-2]
switch c.Kind() {
case reflect.Map:
// Get the key and delete it
k := w.csKey[len(w.csKey)-1]
c.SetMapIndex(k, v)
}
}
func (w *interpolationWalker) splitSlice() {
// Get the []interface{} slice so we can do some operations on
// it without dealing with reflection. We'll document each step
// here to be clear.
var s []interface{}
raw := w.cs[len(w.cs)-1]
switch v := raw.Interface().(type) {
case []interface{}:
s = v
case []map[string]interface{}:
return
default:
panic("Unknown kind: " + raw.Kind().String())
}
// Check if we have any elements that we need to split. If not, then
// just return since we're done.
split := false
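// NOTE: split is never set to true, so the check below always returns
// early and the remainder of this function is unreachable.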
if !split {
return
}
// Make a new result slice that is twice the capacity to fit our growth.
result := make([]interface{}, 0, len(s)*2)
// Go over each element of the original slice and start building up
// the resulting slice by splitting where we have to.
for _, v := range s {
sv, ok := v.(string)
if !ok {
// Not a string, so just set it
result = append(result, v)
continue
}
// A plain string, so keep it as-is
result = append(result, sv)
}
// Our slice is now done, we have to replace the slice now
// with this new one that we have.
w.replaceCurrent(reflect.ValueOf(result))
}


@ -1,21 +0,0 @@
The MIT License (MIT)
Copyright (c) 2016 Mitchell Hashimoto
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.


@ -1,65 +0,0 @@
# hashstructure [![GoDoc](https://godoc.org/github.com/mitchellh/hashstructure?status.svg)](https://godoc.org/github.com/mitchellh/hashstructure)
hashstructure is a Go library for creating a unique hash value
for arbitrary values in Go.
This can be used to key values in a hash (for use in a map, set, etc.)
that are complex. The most common use case is comparing two values without
sending data across the network, caching values locally (de-dup), and so on.
## Features
* Hash any arbitrary Go value, including complex types.
* Tag a struct field to ignore it and not affect the hash value.
* Tag a slice type struct field to treat it as a set where ordering
doesn't affect the hash code but the field itself is still taken into
account to create the hash value.
* Optionally specify a custom hash function to optimize for speed, collision
avoidance for your data set, etc.
* Optionally hash the output of `.String()` on structs that implement fmt.Stringer,
allowing effective hashing of time.Time
## Installation
Standard `go get`:
```
$ go get github.com/mitchellh/hashstructure
```
## Usage & Example
For usage and examples see the [Godoc](http://godoc.org/github.com/mitchellh/hashstructure).
A quick code example is shown below:
```go
type ComplexStruct struct {
Name string
Age uint
Metadata map[string]interface{}
}
v := ComplexStruct{
Name: "mitchellh",
Age: 64,
Metadata: map[string]interface{}{
"car": true,
"location": "California",
"siblings": []string{"Bob", "John"},
},
}
hash, err := hashstructure.Hash(v, nil)
if err != nil {
panic(err)
}
fmt.Printf("%d", hash)
// Output:
// 2307517237273902113
```
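The struct tags described above can be combined on a single struct. The snippet below is an illustrative sketch (not from the original README); the type and field names are assumptions:

```go
type Instance struct {
	Name    string
	Tags    []string  `hash:"set"`    // ordering of Tags does not affect the hash
	Scratch string    `hash:"ignore"` // excluded from the hash entirely
	Created time.Time `hash:"string"` // hashed via Created.String()
}
```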


@ -1 +0,0 @@
module github.com/mitchellh/hashstructure


@ -1,358 +0,0 @@
package hashstructure
import (
"encoding/binary"
"fmt"
"hash"
"hash/fnv"
"reflect"
)
// ErrNotStringer is returned when there's an error with hash:"string"
type ErrNotStringer struct {
Field string
}
// Error implements error for ErrNotStringer
func (ens *ErrNotStringer) Error() string {
return fmt.Sprintf("hashstructure: %s has hash:\"string\" set, but does not implement fmt.Stringer", ens.Field)
}
// HashOptions are options that are available for hashing.
type HashOptions struct {
// Hasher is the hash function to use. If this isn't set, it will
// default to FNV.
Hasher hash.Hash64
// TagName is the struct tag to look at when hashing the structure.
// By default this is "hash".
TagName string
// ZeroNil is a flag determining whether a nil pointer should be treated as
// equal to the zero value of the type it points to. By default this is false.
ZeroNil bool
}
// Hash returns the hash value of an arbitrary value.
//
// If opts is nil, then default options will be used. See HashOptions
// for the default values. The same *HashOptions value cannot be used
// concurrently. None of the values within a *HashOptions struct are
// safe to read/write while hashing is being done.
//
// Notes on the value:
//
// * Unexported fields on structs are ignored and do not affect the
// hash value.
//
// * Adding an exported field to a struct with the zero value will change
// the hash value.
//
// For structs, the hashing can be controlled using tags. For example:
//
// struct {
// Name string
// UUID string `hash:"ignore"`
// }
//
// The available tag values are:
//
// * "ignore" or "-" - The field will be ignored and not affect the hash code.
//
// * "set" - The field will be treated as a set, where ordering doesn't
// affect the hash code. This only works for slices.
//
// * "string" - The field will be hashed as a string, only works when the
// field implements fmt.Stringer
//
func Hash(v interface{}, opts *HashOptions) (uint64, error) {
// Create default options
if opts == nil {
opts = &HashOptions{}
}
if opts.Hasher == nil {
opts.Hasher = fnv.New64()
}
if opts.TagName == "" {
opts.TagName = "hash"
}
// Reset the hash
opts.Hasher.Reset()
// Create our walker and walk the structure
w := &walker{
h: opts.Hasher,
tag: opts.TagName,
zeronil: opts.ZeroNil,
}
return w.visit(reflect.ValueOf(v), nil)
}
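// Usage sketch only (not part of the original file): hashing with
// non-default options. The helper name and option values are assumptions
// for illustration.
func hashWithOptions(v interface{}) (uint64, error) {
	return Hash(v, &HashOptions{
		// Treat nil pointers as the zero value of the type they point to.
		ZeroNil: true,
	})
}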
type walker struct {
h hash.Hash64
tag string
zeronil bool
}
type visitOpts struct {
// Flags are a bitmask of flags to affect behavior of this visit
Flags visitFlag
// Information about the struct containing this field
Struct interface{}
StructField string
}
func (w *walker) visit(v reflect.Value, opts *visitOpts) (uint64, error) {
t := reflect.TypeOf(0)
// Loop since these can be wrapped in multiple layers of pointers
// and interfaces.
for {
// If we have an interface, dereference it. We have to do this up
// here because it might be a nil in there and the check below must
// catch that.
if v.Kind() == reflect.Interface {
v = v.Elem()
continue
}
if v.Kind() == reflect.Ptr {
if w.zeronil {
t = v.Type().Elem()
}
v = reflect.Indirect(v)
continue
}
break
}
// If it is nil, treat it like a zero.
if !v.IsValid() {
v = reflect.Zero(t)
}
// binary.Write cannot encode machine-sized ints directly, so we convert
// them to the largest fixed-size equivalents.
switch v.Kind() {
case reflect.Int:
v = reflect.ValueOf(int64(v.Int()))
case reflect.Uint:
v = reflect.ValueOf(uint64(v.Uint()))
case reflect.Bool:
var tmp int8
if v.Bool() {
tmp = 1
}
v = reflect.ValueOf(tmp)
}
k := v.Kind()
// We can shortcut numeric values by directly binary writing them
if k >= reflect.Int && k <= reflect.Complex64 {
// A direct hash calculation
w.h.Reset()
err := binary.Write(w.h, binary.LittleEndian, v.Interface())
return w.h.Sum64(), err
}
switch k {
case reflect.Array:
var h uint64
l := v.Len()
for i := 0; i < l; i++ {
current, err := w.visit(v.Index(i), nil)
if err != nil {
return 0, err
}
h = hashUpdateOrdered(w.h, h, current)
}
return h, nil
case reflect.Map:
var includeMap IncludableMap
if opts != nil && opts.Struct != nil {
if v, ok := opts.Struct.(IncludableMap); ok {
includeMap = v
}
}
// Build the hash for the map. We do this by XOR-ing all the key
// and value hashes. This makes it deterministic despite ordering.
var h uint64
for _, k := range v.MapKeys() {
v := v.MapIndex(k)
if includeMap != nil {
incl, err := includeMap.HashIncludeMap(
opts.StructField, k.Interface(), v.Interface())
if err != nil {
return 0, err
}
if !incl {
continue
}
}
kh, err := w.visit(k, nil)
if err != nil {
return 0, err
}
vh, err := w.visit(v, nil)
if err != nil {
return 0, err
}
fieldHash := hashUpdateOrdered(w.h, kh, vh)
h = hashUpdateUnordered(h, fieldHash)
}
return h, nil
case reflect.Struct:
parent := v.Interface()
var include Includable
if impl, ok := parent.(Includable); ok {
include = impl
}
t := v.Type()
h, err := w.visit(reflect.ValueOf(t.Name()), nil)
if err != nil {
return 0, err
}
l := v.NumField()
for i := 0; i < l; i++ {
if innerV := v.Field(i); v.CanSet() || t.Field(i).Name != "_" {
var f visitFlag
fieldType := t.Field(i)
if fieldType.PkgPath != "" {
// Unexported
continue
}
tag := fieldType.Tag.Get(w.tag)
if tag == "ignore" || tag == "-" {
// Ignore this field
continue
}
// if string is set, use the string value
if tag == "string" {
if impl, ok := innerV.Interface().(fmt.Stringer); ok {
innerV = reflect.ValueOf(impl.String())
} else {
return 0, &ErrNotStringer{
Field: v.Type().Field(i).Name,
}
}
}
// Check if we implement includable and check it
if include != nil {
incl, err := include.HashInclude(fieldType.Name, innerV)
if err != nil {
return 0, err
}
if !incl {
continue
}
}
switch tag {
case "set":
f |= visitFlagSet
}
kh, err := w.visit(reflect.ValueOf(fieldType.Name), nil)
if err != nil {
return 0, err
}
vh, err := w.visit(innerV, &visitOpts{
Flags: f,
Struct: parent,
StructField: fieldType.Name,
})
if err != nil {
return 0, err
}
fieldHash := hashUpdateOrdered(w.h, kh, vh)
h = hashUpdateUnordered(h, fieldHash)
}
}
return h, nil
case reflect.Slice:
// We have two behaviors here. If it isn't a set, then we just
// visit all the elements. If it is a set, then we do a deterministic
// hash code.
var h uint64
var set bool
if opts != nil {
set = (opts.Flags & visitFlagSet) != 0
}
l := v.Len()
for i := 0; i < l; i++ {
current, err := w.visit(v.Index(i), nil)
if err != nil {
return 0, err
}
if set {
h = hashUpdateUnordered(h, current)
} else {
h = hashUpdateOrdered(w.h, h, current)
}
}
return h, nil
case reflect.String:
// Directly hash
w.h.Reset()
_, err := w.h.Write([]byte(v.String()))
return w.h.Sum64(), err
default:
return 0, fmt.Errorf("unknown kind to hash: %s", k)
}
}
func hashUpdateOrdered(h hash.Hash64, a, b uint64) uint64 {
// For ordered updates, use a real hash function
h.Reset()
// We just panic if the binary writes fail, because we are writing
// fixed-size integers that should never fail to encode.
e1 := binary.Write(h, binary.LittleEndian, a)
e2 := binary.Write(h, binary.LittleEndian, b)
if e1 != nil {
panic(e1)
}
if e2 != nil {
panic(e2)
}
return h.Sum64()
}
func hashUpdateUnordered(a, b uint64) uint64 {
return a ^ b
}
// visitFlag is used as a bitmask for affecting visit behavior
type visitFlag uint
const (
visitFlagInvalid visitFlag = iota
visitFlagSet = iota << 1
)


@ -1,15 +0,0 @@
package hashstructure
// Includable is an interface that can optionally be implemented by
// a struct. It will be called for each field in the struct to check whether
// it should be included in the hash.
type Includable interface {
HashInclude(field string, v interface{}) (bool, error)
}
// IncludableMap is an interface that can optionally be implemented by
// a struct. It will be called when a map-type field is found to ask the
// struct if the map item should be included in the hash.
type IncludableMap interface {
HashIncludeMap(field string, k, v interface{}) (bool, error)
}
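// Illustrative sketch only (not part of the original file): a type that
// excludes one field from hashing by implementing Includable. The type
// and field names are assumptions.
type exampleResource struct {
	ID        string
	Ephemeral string
}

func (r exampleResource) HashInclude(field string, v interface{}) (bool, error) {
	// Hash every field except Ephemeral.
	return field != "Ephemeral", nil
}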

vendor/modules.txt vendored

@ -172,9 +172,6 @@ github.com/bgentry/go-netrc/netrc
# github.com/bgentry/speakeasy v0.1.0
## explicit
github.com/bgentry/speakeasy
# github.com/blang/semver v3.5.1+incompatible
## explicit
github.com/blang/semver
# github.com/bmatcuk/doublestar v1.1.5
## explicit
github.com/bmatcuk/doublestar
@ -399,12 +396,6 @@ github.com/hashicorp/hcl/v2/hclsyntax
github.com/hashicorp/hcl/v2/hcltest
github.com/hashicorp/hcl/v2/hclwrite
github.com/hashicorp/hcl/v2/json
# github.com/hashicorp/hil v0.0.0-20190212112733-ab17b08d6590
## explicit
github.com/hashicorp/hil
github.com/hashicorp/hil/ast
github.com/hashicorp/hil/parser
github.com/hashicorp/hil/scanner
# github.com/hashicorp/memberlist v0.1.0
## explicit
# github.com/hashicorp/serf v0.0.0-20160124182025-e4ec8cc423bb
@ -510,9 +501,6 @@ github.com/mitchellh/go-wordwrap
# github.com/mitchellh/gox v1.0.1
## explicit
github.com/mitchellh/gox
# github.com/mitchellh/hashstructure v1.0.0
## explicit
github.com/mitchellh/hashstructure
# github.com/mitchellh/iochan v1.0.0
github.com/mitchellh/iochan
# github.com/mitchellh/mapstructure v1.1.2