command: Experimental "terraform test" command

This is just a prototype to gather some feedback in our ongoing research
on integration testing of Terraform modules. The hope is that by having a
command integrated into Terraform itself it'll be easier for interested
module authors to give it a try, and also easier for us to iterate quickly
based on feedback without having to coordinate across multiple codebases.

Everything about this is subject to change even in future patch releases.
Since it's a CLI command rather than a configuration language feature it's
not using the language experiments mechanism, but generates a warning
similar to the one language experiments generate in order to be clear that
backward compatibility is not guaranteed.
This commit is contained in:
Martin Atkins 2021-02-08 16:03:15 -08:00
parent 8330f8e991
commit 7f78342953
20 changed files with 1648 additions and 25 deletions

63
command/arguments/test.go Normal file
View File

@ -0,0 +1,63 @@
package arguments
import (
"flag"
"io/ioutil"
"github.com/hashicorp/terraform/tfdiags"
)
// Test represents the command line arguments for the "terraform test" command.
type Test struct {
	// Output collects the arguments that control how results are presented,
	// which the command passes on to its view rather than using directly.
	Output TestOutput
}
// TestOutput represents a subset of the arguments for "terraform test"
// related to how it presents its results. That is, it's the arguments that
// are relevant to the command's view rather than its controller.
type TestOutput struct {
	// If not an empty string, JUnitXMLFile gives a filename where JUnit-style
	// XML test result output should be written, in addition to the normal
	// output printed to the standard output and error streams.
	// (The typical usage pattern for tools that can consume this file format
	// is to configure them to look for a separate test result file on disk
	// after running the tests.)
	JUnitXMLFile string
}
// ParseTest interprets a slice of raw command line arguments into a
// Test value.
func ParseTest(args []string) (Test, tfdiags.Diagnostics) {
	var diags tfdiags.Diagnostics
	var ret Test

	// NOTE: Even on error this function returns at least a partial Test,
	// so the command has enough information to report error diagnostics
	// in a suitable way.
	flags := flag.NewFlagSet("test", flag.ContinueOnError)
	flags.SetOutput(ioutil.Discard) // suppress the flag package's own error printing
	flags.Usage = func() {}
	flags.StringVar(&ret.Output.JUnitXMLFile, "junit-xml", "", "Write a JUnit XML file describing the results")

	if err := flags.Parse(args); err != nil {
		diags = diags.Append(err)
		return ret, diags
	}

	// Whatever the flag package didn't consume is a positional argument,
	// and this command accepts none of those.
	if remain := flags.Args(); len(remain) != 0 {
		diags = diags.Append(tfdiags.Sourceless(
			tfdiags.Error,
			"Invalid command arguments",
			"The test command doesn't expect any positional command-line arguments.",
		))
	}
	return ret, diags
}

View File

@ -0,0 +1,83 @@
package arguments
import (
"testing"
"github.com/apparentlymart/go-shquot/shquot"
"github.com/google/go-cmp/cmp"
"github.com/hashicorp/terraform/tfdiags"
)
// TestParseTest exercises ParseTest with a table of representative
// command lines, covering the default case, an unknown flag, a valid
// flag, and an unexpected positional argument.
func TestParseTest(t *testing.T) {
	testCases := []struct {
		args      []string
		want      Test
		wantError string
	}{
		{
			args:      nil,
			want:      Test{Output: TestOutput{JUnitXMLFile: ""}},
			wantError: ``,
		},
		{
			args:      []string{"-invalid"},
			want:      Test{Output: TestOutput{JUnitXMLFile: ""}},
			wantError: `flag provided but not defined: -invalid`,
		},
		{
			args:      []string{"-junit-xml=result.xml"},
			want:      Test{Output: TestOutput{JUnitXMLFile: "result.xml"}},
			wantError: ``,
		},
		{
			args:      []string{"baz"},
			want:      Test{Output: TestOutput{JUnitXMLFile: ""}},
			wantError: `Invalid command arguments`,
		},
	}

	baseCmdline := []string{"terraform", "test"}
	for _, tc := range testCases {
		// Name each subtest after the shell-quoted full command line.
		name := shquot.POSIXShell(append(baseCmdline, tc.args...))
		t.Run(name, func(t *testing.T) {
			t.Log(name)
			got, diags := ParseTest(tc.args)

			switch {
			case tc.wantError != "":
				// Exactly one error diagnostic, with the expected summary.
				if len(diags) != 1 {
					t.Fatalf("got %d diagnostics; want exactly 1\n%s", len(diags), diags.Err().Error())
				}
				if diags[0].Severity() != tfdiags.Error {
					t.Fatalf("got a warning; want an error\n%s", diags.Err().Error())
				}
				if desc := diags[0].Description(); desc.Summary != tc.wantError {
					t.Fatalf("wrong error\ngot: %s\nwant: %s", desc.Summary, tc.wantError)
				}
			default:
				if len(diags) != 0 {
					t.Fatalf("got %d diagnostics; want none\n%s", len(diags), diags.Err().Error())
				}
			}

			// The parsed arguments must match regardless of error outcome,
			// because ParseTest promises at least a partial result.
			if diff := cmp.Diff(tc.want, got); diff != "" {
				t.Errorf("wrong result\n%s", diff)
			}
		})
	}
}

View File

@ -15,6 +15,7 @@ import (
terraformProvider "github.com/hashicorp/terraform/builtin/providers/terraform"
"github.com/hashicorp/terraform/internal/getproviders"
"github.com/hashicorp/terraform/internal/logging"
"github.com/hashicorp/terraform/internal/moduletest"
"github.com/hashicorp/terraform/internal/providercache"
tfplugin "github.com/hashicorp/terraform/plugin"
tfplugin6 "github.com/hashicorp/terraform/plugin6"
@ -326,6 +327,9 @@ func (m *Meta) internalProviders() map[string]providers.Factory {
"terraform": func() (providers.Interface, error) {
return terraformProvider.NewProvider(), nil
},
"test": func() (providers.Interface, error) {
return moduletest.NewProvider(), nil
},
}
}

707
command/test.go Normal file
View File

@ -0,0 +1,707 @@
package command
import (
"context"
"fmt"
"io/ioutil"
"log"
"os"
"path/filepath"
"strings"
ctyjson "github.com/zclconf/go-cty/cty/json"
"github.com/hashicorp/terraform/addrs"
"github.com/hashicorp/terraform/command/arguments"
"github.com/hashicorp/terraform/command/format"
"github.com/hashicorp/terraform/command/views"
"github.com/hashicorp/terraform/configs"
"github.com/hashicorp/terraform/configs/configload"
"github.com/hashicorp/terraform/internal/depsfile"
"github.com/hashicorp/terraform/internal/initwd"
"github.com/hashicorp/terraform/internal/moduletest"
"github.com/hashicorp/terraform/internal/providercache"
"github.com/hashicorp/terraform/plans"
"github.com/hashicorp/terraform/providers"
"github.com/hashicorp/terraform/states"
"github.com/hashicorp/terraform/terraform"
"github.com/hashicorp/terraform/tfdiags"
)
// TestCommand is the implementation of "terraform test".
type TestCommand struct {
	// Meta provides the shared CLI plumbing (views, provider installer,
	// registry client, etc.) used by all Terraform commands.
	Meta
}
// Run is the CLI entry point for "terraform test". It parses arguments,
// emits the experimental-command warning, runs every discovered suite,
// and returns a non-zero status if anything failed.
func (c *TestCommand) Run(rawArgs []string) int {
	// Parse and apply global view arguments
	common, rawArgs := arguments.ParseView(rawArgs)
	c.View.Configure(common)

	args, diags := arguments.ParseTest(rawArgs)
	view := views.NewTest(c.View, args.Output)
	if diags.HasErrors() {
		view.Diagnostics(diags)
		return 1
	}

	// Always remind the user that this command is experimental and that
	// its behavior may change without the usual compatibility promises.
	diags = diags.Append(tfdiags.Sourceless(
		tfdiags.Warning,
		`The "terraform test" command is experimental`,
		"We'd like to invite adventurous module authors to write integration tests for their modules using this command, but all of the behaviors of this command are currently experimental and may change based on feedback.\n\nFor more information on the testing experiment, including ongoing research goals and avenues for feedback, see:\n https://www.terraform.io/docs/language/modules/testing-experiment.html",
	))

	ctx, cancel := c.InterruptibleContext()
	defer cancel()

	results, moreDiags := c.run(ctx, args)
	diags = diags.Append(moreDiags)

	initFailed := diags.HasErrors()
	view.Diagnostics(diags)
	diags = view.Results(results)
	resultsFailed := diags.HasErrors()
	view.Diagnostics(diags) // possible additional errors from saving the results

	// The tests themselves fail if any single assertion's outcome would
	// prevent its suite from passing.
	testsFailed := false
	for _, suite := range results {
		for _, comp := range suite.Components {
			for _, assert := range comp.Assertions {
				if !assert.Outcome.SuiteCanPass() {
					testsFailed = true
				}
			}
		}
	}

	// Lots of things can possibly have failed
	if initFailed || resultsFailed || testsFailed {
		return 1
	}
	return 0
}
// run discovers all test suites under the "tests" directory and runs each
// one in turn, collecting per-suite results keyed by suite name.
func (c *TestCommand) run(ctx context.Context, args arguments.Test) (results map[string]*moduletest.Suite, diags tfdiags.Diagnostics) {
	suiteNames, err := c.collectSuiteNames()
	if err != nil {
		diags = diags.Append(tfdiags.Sourceless(
			tfdiags.Error,
			"Error while searching for test configurations",
			fmt.Sprintf("While attempting to scan the 'tests' subdirectory for potential test configurations, Terraform encountered an error: %s.", err),
		))
		return nil, diags
	}

	suites := make(map[string]*moduletest.Suite, len(suiteNames))
	for _, name := range suiteNames {
		// If the context has already failed in some way then we'll
		// halt early and report whatever's already happened.
		if ctx.Err() != nil {
			break
		}
		suite, moreDiags := c.runSuite(ctx, name)
		diags = diags.Append(moreDiags)
		suites[name] = suite
	}

	return suites, diags
}
// runSuite prepares the working directories for a single test suite,
// runs the suite, and then inspects the final state for any leftover
// remote objects, reporting each one as a loud error so the user
// notices resources that may continue to cost money.
func (c *TestCommand) runSuite(ctx context.Context, suiteName string) (*moduletest.Suite, tfdiags.Diagnostics) {
	var diags tfdiags.Diagnostics
	ret := moduletest.Suite{
		Name:       suiteName,
		Components: map[string]*moduletest.Component{},
	}

	// In order to make this initial round of "terraform test" pretty self
	// contained while it's experimental, it's largely just mimicking what
	// would happen when running the main Terraform workflow commands, which
	// comes at the expense of a few irritants that we'll hopefully resolve
	// in future iterations as the design solidifies:
	// - We need to install remote modules separately for each of the
	//   test suites, because we don't have any sense of a shared cache
	//   of modules that multiple configurations can refer to at once.
	// - We _do_ have a sense of a cache of remote providers, but it's fixed
	//   at being specifically a two-level cache (global vs. directory-specific)
	//   and so we can't easily capture a third level of "all of the test suites
	//   for this module" that sits between the two. Consequently, we need to
	//   dynamically choose between creating a directory-specific "global"
	//   cache or using the user's existing global cache, to avoid any
	//   situation where we'd be re-downloading the same providers for every
	//   one of the test suites.
	// - We need to do something a bit horrid in order to have our test
	//   provider instance persist between the plan and apply steps, because
	//   normally that is the exact opposite of what we want.
	// The above notes are here mainly as an aid to someone who might be
	// planning a subsequent phase of this R&D effort, to help distinguish
	// between things we're doing here because they are valuable vs. things
	// we're doing just to make it work without doing any disruptive
	// refactoring.

	suiteDirs, moreDiags := c.prepareSuiteDir(ctx, suiteName)
	diags = diags.Append(moreDiags)
	if diags.HasErrors() {
		// Generate a special failure representing the test initialization
		// having failed, since we therefore won't be able to run the actual
		// tests defined inside.
		ret.Components["(init)"] = &moduletest.Component{
			Assertions: map[string]*moduletest.Assertion{
				"(init)": {
					Outcome:     moduletest.Error,
					Description: "terraform init",
					Message:     "failed to install test suite dependencies",
					Diagnostics: diags,
				},
			},
		}
		// NOTE: nil diagnostics here — the init failure is reported via the
		// synthetic "(init)" component rather than as top-level diagnostics.
		return &ret, nil
	}

	// When we run the suite itself, we collect up diagnostics associated
	// with individual components, so ret.Components may or may not contain
	// failed/errored components after runTestSuite returns.
	var finalState *states.State
	ret.Components, finalState = c.runTestSuite(ctx, suiteDirs)

	// Regardless of the success or failure of the test suite, if there are
	// any objects left in the state then we'll generate a top-level error
	// about each one to minimize the chance of the user failing to notice
	// that there are leftover objects that might continue to cost money
	// unless manually deleted.
	for _, ms := range finalState.Modules {
		for _, rs := range ms.Resources {
			for instanceKey, is := range rs.Instances {
				// Both the current object and any deposed objects count as
				// leftovers that the user must clean up manually.
				var objs []*states.ResourceInstanceObjectSrc
				if is.Current != nil {
					objs = append(objs, is.Current)
				}
				for _, obj := range is.Deposed {
					objs = append(objs, obj)
				}
				for _, obj := range objs {
					// Unfortunately we don't have provider schemas out here
					// and so we're limited in what we can achieve with these
					// ResourceInstanceObjectSrc values, but we can try some
					// heuristicy things to try to give some useful information
					// in common cases.
					var k, v string
					if ty, err := ctyjson.ImpliedType(obj.AttrsJSON); err == nil {
						if approxV, err := ctyjson.Unmarshal(obj.AttrsJSON, ty); err == nil {
							k, v = format.ObjectValueIDOrName(approxV)
						}
					}

					var detail string
					if k != "" {
						// We can be more specific if we were able to infer
						// an identifying attribute for this object.
						detail = fmt.Sprintf(
							"Due to errors during destroy, test suite %q has left behind an object for %s, with the following identity:\n    %s = %q\n\nYou will need to delete this object manually in the remote system, or else it may have an ongoing cost.",
							suiteName,
							rs.Addr.Instance(instanceKey),
							k, v,
						)
					} else {
						// If our heuristics for finding a suitable identifier
						// failed then unfortunately we must be more vague.
						// (We can't just print the entire object, because it
						// might be overly large and it might contain sensitive
						// values.)
						detail = fmt.Sprintf(
							"Due to errors during destroy, test suite %q has left behind an object for %s. You will need to delete this object manually in the remote system, or else it may have an ongoing cost.",
							suiteName,
							rs.Addr.Instance(instanceKey),
						)
					}
					diags = diags.Append(tfdiags.Sourceless(
						tfdiags.Error,
						"Failed to clean up after tests",
						detail,
					))
				}
			}
		}
	}

	return &ret, diags
}
// prepareSuiteDir installs the dependencies for a single test suite,
// performing an abbreviated form of "terraform init": module installation,
// configuration loading, and provider installation into a suite-local
// cache directory. It returns the populated directory/config metadata,
// which is partially filled even on error.
func (c *TestCommand) prepareSuiteDir(ctx context.Context, suiteName string) (testCommandSuiteDirs, tfdiags.Diagnostics) {
	var diags tfdiags.Diagnostics
	configDir := filepath.Join("tests", suiteName)
	log.Printf("[TRACE] terraform test: Prepare directory for suite %q in %s", suiteName, configDir)

	suiteDirs := testCommandSuiteDirs{
		SuiteName: suiteName,
		ConfigDir: configDir,
	}

	// Before we can run a test suite we need to make sure that we have all of
	// its dependencies available, so the following is essentially an
	// abbreviated form of what happens during "terraform init", with some
	// extra trickery in places.

	// First, module installation. This will include linking in the module
	// under test, but also includes grabbing the dependencies of that module
	// if it has any.
	suiteDirs.ModulesDir = filepath.Join(configDir, ".terraform", "modules")
	os.MkdirAll(suiteDirs.ModulesDir, 0755) // if this fails then we'll ignore it and let InstallModules below fail instead
	reg := c.registryClient()
	moduleInst := initwd.NewModuleInstaller(suiteDirs.ModulesDir, reg)
	_, moreDiags := moduleInst.InstallModules(configDir, true, nil)
	diags = diags.Append(moreDiags)
	if diags.HasErrors() {
		return suiteDirs, diags
	}

	// The installer puts the files in a suitable place on disk, but we
	// still need to actually load the configuration. We need to do this
	// with a separate config loader because the Meta.configLoader instance
	// is intended for interacting with the current working directory, not
	// with the test suite subdirectories.
	loader, err := configload.NewLoader(&configload.Config{
		ModulesDir: suiteDirs.ModulesDir,
		Services:   c.Services,
	})
	if err != nil {
		diags = diags.Append(tfdiags.Sourceless(
			tfdiags.Error,
			"Failed to create test configuration loader",
			fmt.Sprintf("Failed to prepare loader for test configuration %s: %s.", configDir, err),
		))
		return suiteDirs, diags
	}
	cfg, hclDiags := loader.LoadConfig(configDir)
	diags = diags.Append(hclDiags)
	if diags.HasErrors() {
		return suiteDirs, diags
	}
	suiteDirs.Config = cfg

	// With the full configuration tree available, we can now install
	// the necessary providers. We'll use a separate local cache directory
	// here, because the test configuration might have additional requirements
	// compared to the module itself.
	suiteDirs.ProvidersDir = filepath.Join(configDir, ".terraform", "providers")
	os.MkdirAll(suiteDirs.ProvidersDir, 0755) // if this fails then we'll ignore it and operations below fail instead
	localCacheDir := providercache.NewDir(suiteDirs.ProvidersDir)
	providerInst := c.providerInstaller().Clone(localCacheDir)
	if !providerInst.HasGlobalCacheDir() {
		// If the user already configured a global cache directory then we'll
		// just use it for caching the test providers too, because then we
		// can potentially reuse cache entries they already have. However,
		// if they didn't configure one then we'll still establish one locally
		// in the working directory, which we'll then share across all tests
		// to avoid downloading the same providers repeatedly.
		cachePath := filepath.Join(c.DataDir(), "testing-providers") // note this is _not_ under the suite dir
		err := os.MkdirAll(cachePath, 0755)
		// If we were unable to create the directory for any reason then we'll
		// just proceed without a cache, at the expense of repeated downloads.
		// (With that said, later installing might end up failing for the
		// same reason anyway...)
		if err == nil || os.IsExist(err) {
			cacheDir := providercache.NewDir(cachePath)
			providerInst.SetGlobalCacheDir(cacheDir)
		}
	}
	reqs, hclDiags := cfg.ProviderRequirements()
	diags = diags.Append(hclDiags)
	if diags.HasErrors() {
		return suiteDirs, diags
	}

	// For test suites we only retain the "locks" in memory for the duration
	// for one run, just to make sure that we use the same providers when we
	// eventually run the test suite.
	locks := depsfile.NewLocks()
	evts := &providercache.InstallerEvents{
		QueryPackagesFailure: func(provider addrs.Provider, err error) {
			if err != nil && provider.IsDefault() && provider.Type == "test" {
				// This is some additional context for the failure error
				// we'll generate afterwards. Not the most ideal UX but
				// good enough for this prototype implementation, to help
				// hint about the special builtin provider we use here.
				diags = diags.Append(tfdiags.Sourceless(
					tfdiags.Warning,
					"Probably-unintended reference to \"hashicorp/test\" provider",
					"For the purposes of this experimental implementation of module test suites, you must use the built-in test provider terraform.io/builtin/test, which requires an explicit required_providers declaration.",
				))
			}
		},
	}
	ctx = evts.OnContext(ctx)
	locks, err = providerInst.EnsureProviderVersions(ctx, locks, reqs, providercache.InstallUpgrades)
	if err != nil {
		diags = diags.Append(tfdiags.Sourceless(
			tfdiags.Error,
			"Failed to install required providers",
			fmt.Sprintf("Couldn't install necessary providers for test configuration %s: %s.", configDir, err),
		))
		return suiteDirs, diags
	}
	suiteDirs.ProviderLocks = locks
	suiteDirs.ProviderCache = localCacheDir

	return suiteDirs, diags
}
// runTestSuite executes one prepared test suite through the full
// plan/apply/destroy lifecycle, collecting assertion results from the
// shared in-memory "test" provider instance. It always returns the final
// state so the caller can report any leftover remote objects.
func (c *TestCommand) runTestSuite(ctx context.Context, suiteDirs testCommandSuiteDirs) (map[string]*moduletest.Component, *states.State) {
	log.Printf("[TRACE] terraform test: Run test suite %q", suiteDirs.SuiteName)

	ret := make(map[string]*moduletest.Component)

	// To collect test results we'll use an instance of the special "test"
	// provider, which records the intention to make a test assertion during
	// planning and then hopefully updates that to an actual assertion result
	// during apply, unless an apply error causes the graph walk to exit early.
	// For this to work correctly, we must ensure we're using the same provider
	// instance for both plan and apply.
	testProvider := moduletest.NewProvider()

	// synthError is a helper to return early with a synthetic failing
	// component, for problems that prevent us from even discovering what an
	// appropriate component and assertion name might be.
	// NOTE: It mutates ret as a side effect, so calls that ignore its return
	// values still record the failure in the result map.
	state := states.NewState()
	synthError := func(name string, desc string, msg string, diags tfdiags.Diagnostics) (map[string]*moduletest.Component, *states.State) {
		key := "(" + name + ")" // parens ensure this can't conflict with an actual component/assertion key
		ret[key] = &moduletest.Component{
			Assertions: map[string]*moduletest.Assertion{
				key: {
					Outcome:     moduletest.Error,
					Description: desc,
					Message:     msg,
					Diagnostics: diags,
				},
			},
		}
		return ret, state
	}

	// NOTE: This function intentionally deviates from the usual pattern of
	// gradually appending more diagnostics to the same diags, because
	// here we're associating each set of diagnostics with the specific
	// operation it belongs to.

	providerFactories, diags := c.testSuiteProviders(suiteDirs, testProvider)
	if diags.HasErrors() {
		// It should be unusual to get in here, because testSuiteProviders
		// should rely only on things guaranteed by prepareSuiteDir, but
		// since we're doing external I/O here there is always the risk that
		// the filesystem changes or fails between setting up and using the
		// providers.
		return synthError(
			"init",
			"terraform init",
			"failed to resolve the required providers",
			diags,
		)
	}

	plan, diags := c.testSuitePlan(ctx, suiteDirs, providerFactories)
	if diags.HasErrors() {
		// It should be unusual to get in here, because testSuitePlan
		// should rely only on things guaranteed by prepareSuiteDir, but
		// since we're doing external I/O here there is always the risk that
		// the filesystem changes or fails between setting up and using the
		// providers.
		return synthError(
			"plan",
			"terraform plan",
			"failed to create a plan",
			diags,
		)
	}

	// Now we'll apply the plan. Once we try to apply, we might've created
	// real remote objects, and so we must try to run destroy even if the
	// apply returns errors, and we must return whatever state we end up
	// with so the caller can generate additional loud errors if anything
	// is left in it.
	state, diags = c.testSuiteApply(ctx, plan, suiteDirs, providerFactories)
	if diags.HasErrors() {
		// We don't return here, unlike the others above, because we want to
		// continue to the destroy below even if there are apply errors.
		// (synthError's return values are deliberately discarded; it has
		// already recorded the failure in ret.)
		synthError(
			"apply",
			"terraform apply",
			"failed to apply the created plan",
			diags,
		)
	}

	// By the time we get here, the test provider will have gathered up all
	// of the planned assertions and the final results for any assertions that
	// were not blocked by an error. This also resets the provider so that
	// the destroy operation below won't get tripped up on stale results.
	ret = testProvider.Reset()

	state, diags = c.testSuiteDestroy(ctx, state, suiteDirs, providerFactories)
	if diags.HasErrors() {
		// As with apply above, the failure is recorded in ret via side
		// effect and we still return the resulting state.
		synthError(
			"destroy",
			"terraform destroy",
			"failed to destroy objects created during test (NOTE: leftover remote objects may still exist)",
			diags,
		)
	}

	return ret, state
}
// testSuiteProviders assembles the provider factories a test suite needs:
// all of the built-in providers, every provider recorded in the in-memory
// lock set (resolved from the suite's local cache), and the caller's shared
// "test" provider instance.
func (c *TestCommand) testSuiteProviders(suiteDirs testCommandSuiteDirs, testProvider *moduletest.Provider) (map[addrs.Provider]providers.Factory, tfdiags.Diagnostics) {
	var diags tfdiags.Diagnostics
	factories := make(map[addrs.Provider]providers.Factory)

	// We can safely use the internal providers returned by Meta here because
	// the built-in provider versions can never vary based on the configuration
	// and thus we don't need to worry about potential version differences
	// between main module and test suite modules.
	for name, factory := range c.internalProviders() {
		factories[addrs.NewBuiltInProvider(name)] = factory
	}

	// For the remaining non-builtin providers, we'll just take whatever we
	// recorded earlier in the in-memory-only "lock file". All of these should
	// typically still be available because we would've only just installed
	// them, but this could fail if e.g. the filesystem has been somehow
	// damaged in the meantime.
	for provider, lock := range suiteDirs.ProviderLocks.AllProviders() {
		version := lock.Version()
		cached := suiteDirs.ProviderCache.ProviderVersion(provider, version)
		if cached == nil {
			diags = diags.Append(tfdiags.Sourceless(
				tfdiags.Error,
				"Required provider not found",
				fmt.Sprintf("Although installation previously succeeded for %s v%s, it no longer seems to be present in the cache directory.", provider.ForDisplay(), version.String()),
			))
			continue // potentially collect up multiple errors
		}

		// NOTE: We don't consider the checksums for test suite dependencies,
		// because we're creating a fresh "lock file" each time we run anyway
		// and so they wouldn't actually guarantee anything useful.
		factories[provider] = providerFactory(cached)
	}

	// We'll replace the test provider instance with the one our caller
	// provided, so it'll be able to interrogate the test results directly.
	factories[addrs.NewBuiltInProvider("test")] = func() (providers.Interface, error) {
		return testProvider, nil
	}

	return factories, diags
}
// testSuiteContext constructs a terraform.Context for one lifecycle step of
// a suite. state and plan may each be nil when the step doesn't need them;
// destroy selects a destroy-mode walk.
func (c *TestCommand) testSuiteContext(suiteDirs testCommandSuiteDirs, providerFactories map[addrs.Provider]providers.Factory, state *states.State, plan *plans.Plan, destroy bool) (*terraform.Context, tfdiags.Diagnostics) {
	var changes *plans.Changes
	if plan != nil {
		changes = plan.Changes
	}

	opts := &terraform.ContextOpts{
		Config:    suiteDirs.Config,
		Providers: providerFactories,

		// We just use the provisioners from the main Meta here, because
		// unlike providers provisioner plugins are not automatically
		// installable anyway, and so we'll need to hunt for them in the same
		// legacy way that normal Terraform operations do.
		Provisioners: c.provisionerFactories(),

		Meta: &terraform.ContextMeta{
			Env: "test_" + suiteDirs.SuiteName,
		},

		State:   state,
		Changes: changes,
		Destroy: destroy,
	}
	return terraform.NewContext(opts)
}
// testSuitePlan validates the suite configuration and then creates a plan
// for it, returning the plan along with any diagnostics from either step.
func (c *TestCommand) testSuitePlan(ctx context.Context, suiteDirs testCommandSuiteDirs, providerFactories map[addrs.Provider]providers.Factory) (*plans.Plan, tfdiags.Diagnostics) {
	log.Printf("[TRACE] terraform test: create plan for suite %q", suiteDirs.SuiteName)

	tfCtx, diags := c.testSuiteContext(suiteDirs, providerFactories, nil, nil, false)
	if diags.HasErrors() {
		return nil, diags
	}

	// We'll also validate as part of planning, since the "terraform plan"
	// command would typically do that and so inconsistencies we detect only
	// during planning typically produce error messages saying that they are
	// a bug in Terraform.
	// (It's safe to use the same context for both validate and plan, because
	// validate doesn't generate any new sticky content inside the context
	// as plan and apply both do.)
	if diags = diags.Append(tfCtx.Validate()); diags.HasErrors() {
		return nil, diags
	}

	plan, moreDiags := tfCtx.Plan()
	diags = diags.Append(moreDiags)
	return plan, diags
}
// testSuiteApply applies a previously-created plan for the suite. It always
// returns a usable (possibly empty) state so that callers can inspect it
// even when context creation fails.
func (c *TestCommand) testSuiteApply(ctx context.Context, plan *plans.Plan, suiteDirs testCommandSuiteDirs, providerFactories map[addrs.Provider]providers.Factory) (*states.State, tfdiags.Diagnostics) {
	log.Printf("[TRACE] terraform test: apply plan for suite %q", suiteDirs.SuiteName)

	tfCtx, diags := c.testSuiteContext(suiteDirs, providerFactories, nil, plan, false)
	if diags.HasErrors() {
		// To make things easier on the caller, we'll return a valid empty
		// state even in this case.
		return states.NewState(), diags
	}

	newState, moreDiags := tfCtx.Apply()
	diags = diags.Append(moreDiags)
	return newState, diags
}
// testSuiteDestroy plans and applies a destroy operation against the given
// state, returning the state as it stands after each step so that leftover
// objects remain visible to the caller even on failure.
func (c *TestCommand) testSuiteDestroy(ctx context.Context, state *states.State, suiteDirs testCommandSuiteDirs, providerFactories map[addrs.Provider]providers.Factory) (*states.State, tfdiags.Diagnostics) {
	log.Printf("[TRACE] terraform test: plan to destroy any existing objects for suite %q", suiteDirs.SuiteName)

	// First a destroy-mode plan against the current state...
	tfCtx, diags := c.testSuiteContext(suiteDirs, providerFactories, state, nil, true)
	if diags.HasErrors() {
		return state, diags
	}
	plan, moreDiags := tfCtx.Plan()
	if diags = diags.Append(moreDiags); diags.HasErrors() {
		return state, diags
	}

	log.Printf("[TRACE] terraform test: apply the plan to destroy any existing objects for suite %q", suiteDirs.SuiteName)

	// ...then a fresh context to apply that destroy plan.
	tfCtx, moreDiags = c.testSuiteContext(suiteDirs, providerFactories, state, plan, true)
	if diags = diags.Append(moreDiags); diags.HasErrors() {
		return state, diags
	}
	state, moreDiags = tfCtx.Apply()
	diags = diags.Append(moreDiags)
	return state, diags
}
// collectSuiteNames scans the "tests" subdirectory of the current working
// directory and returns the names of subdirectories that contain at least
// one .tf or .tf.json file. A missing "tests" directory is not an error;
// it just yields no suites.
func (c *TestCommand) collectSuiteNames() ([]string, error) {
	entries, err := ioutil.ReadDir("tests")
	switch {
	case os.IsNotExist(err):
		return nil, nil
	case err != nil:
		return nil, err
	}

	names := make([]string, 0, len(entries))
	for _, entry := range entries {
		if !entry.IsDir() {
			continue
		}
		name := entry.Name()
		suitePath := filepath.Join("tests", name)

		// Glob errors are treated as if the directory simply had no
		// matching files, rather than aborting the whole scan.
		tfFiles, err := filepath.Glob(filepath.Join(suitePath, "*.tf"))
		if err != nil {
			tfFiles = nil
		}
		tfJSONFiles, err := filepath.Glob(filepath.Join(suitePath, "*.tf.json"))
		if err != nil {
			tfJSONFiles = nil
		}

		// A directory with no Terraform configuration files isn't a suite.
		if len(tfFiles)+len(tfJSONFiles) > 0 {
			names = append(names, name)
		}
	}
	return names, nil
}
// Help returns the long-form usage text for "terraform test", shown by
// "terraform test -help". The text describes the experimental status,
// the expected tests/ directory layout, and the supported options.
func (c *TestCommand) Help() string {
	helpText := `
Usage: terraform test [options]
This is an experimental command to help with automated integration
testing of shared modules. The usage and behavior of this command is
likely to change in breaking ways in subsequent releases, as we
are currently using this command primarily for research purposes.
In its current experimental form, "test" will look under the current
working directory for a subdirectory called "tests", and then within
that directory search for one or more subdirectories that contain
".tf" or ".tf.json" files. For any that it finds, it will perform
Terraform operations similar to the following sequence of commands
in each of those directories:
terraform validate
terraform apply
terraform destroy
The test configurations should not declare any input variables and
should at least contain a call to the module being tested, which
will always be available at the path ../.. due to the expected
filesystem layout.
The tests are considered to be successful if all of the above steps
succeed.
Test configurations may optionally include uses of the special
built-in test provider terraform.io/builtin/test, which allows
writing explicit test assertions which must also all pass in order
for the test run to be considered successful.
This initial implementation is intended as a minimally-viable
product to use for further research and experimentation, and in
particular it currently lacks the following capabilities that we
expect to consider in later iterations, based on feedback:
- Testing of subsequent updates to existing infrastructure,
where currently it only supports initial creation and
then destruction.
- Testing top-level modules that are intended to be used for
"real" environments, which typically have hard-coded values
that don't permit creating a separate "copy" for testing.
- Some sort of support for unit test runs that don't interact
with remote systems at all, e.g. for use in checking pull
requests from untrusted contributors.
In the meantime, we'd like to hear feedback from module authors
who have tried writing some experimental tests for their modules
about what sorts of tests you were able to write, what sorts of
tests you weren't able to write, and any tests that you were
able to write but that were difficult to model in some way.
Options:
-compact-warnings Use a more compact representation for warnings, if
this command produces only warnings and no errors.
-junit-xml=FILE In addition to the usual output, also write test
results to the given file path in JUnit XML format.
This format is commonly supported by CI systems, and
they typically expect to be given a filename to search
for in the test workspace after the test run finishes.
-no-color Don't include virtual terminal formatting sequences in
the output.
`
	return strings.TrimSpace(helpText)
}
// Synopsis returns the one-line summary shown in the "terraform" command list.
func (c *TestCommand) Synopsis() string {
	const summary = "Experimental support for module integration testing"
	return summary
}
// testCommandSuiteDirs bundles together the per-suite filesystem locations
// and dependency-resolution results produced by prepareSuiteDir, for use by
// the later plan/apply/destroy steps of a suite run.
type testCommandSuiteDirs struct {
	// SuiteName is the name of the suite, matching its directory name
	// under "tests".
	SuiteName string

	// ConfigDir is the suite's root configuration directory ("tests/<name>").
	ConfigDir string
	// ModulesDir is where the module installer placed the suite's modules.
	ModulesDir string
	// ProvidersDir is the suite-local provider cache directory.
	ProvidersDir string

	// Config is the loaded configuration tree for the suite.
	Config *configs.Config
	// ProviderCache is the local cache dir backing ProvidersDir.
	ProviderCache *providercache.Dir
	// ProviderLocks is the in-memory-only lock set created during install.
	ProviderLocks *depsfile.Locks
}

166
command/test_test.go Normal file
View File

@ -0,0 +1,166 @@
package command
import (
"bytes"
"io/ioutil"
"os"
"strings"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/hashicorp/terraform/command/views"
"github.com/hashicorp/terraform/internal/terminal"
)
// These are the main tests for the "terraform test" command.
//
// Each subtest copies a fixture directory into a temporary working
// directory, runs the command with JUnit XML output enabled and color
// disabled, and then checks the exit status, the stdout/stderr text,
// and the generated junit.xml file.
func TestTest(t *testing.T) {
	t.Run("passes", func(t *testing.T) {
		// Fixture whose single assertion is expected to succeed.
		td := tempDir(t)
		testCopyDir(t, testFixturePath("test-passes"), td)
		defer os.RemoveAll(td)
		defer testChdir(t, td)()

		streams, close := terminal.StreamsForTesting(t)
		cmd := &TestCommand{
			Meta: Meta{
				Streams: streams,
				View:    views.NewView(streams),
			},
		}
		exitStatus := cmd.Run([]string{"-junit-xml=junit.xml", "-no-color"})
		outp := close(t)
		// A fully-passing run exits with status zero.
		if got, want := exitStatus, 0; got != want {
			t.Fatalf("wrong exit status %d; want %d\nstderr:\n%s", got, want, outp.Stderr())
		}

		// Stdout should carry only the experimental-command warning.
		gotStdout := strings.TrimSpace(outp.Stdout())
		wantStdout := strings.TrimSpace(`
Warning: The "terraform test" command is experimental
We'd like to invite adventurous module authors to write integration tests for
their modules using this command, but all of the behaviors of this command
are currently experimental and may change based on feedback.
For more information on the testing experiment, including ongoing research
goals and avenues for feedback, see:
https://www.terraform.io/docs/language/modules/testing-experiment.html
`)
		if diff := cmp.Diff(wantStdout, gotStdout); diff != "" {
			t.Errorf("wrong stdout\n%s", diff)
		}

		// The test results themselves are reported on stderr.
		gotStderr := strings.TrimSpace(outp.Stderr())
		wantStderr := strings.TrimSpace(`
Success! All of the test assertions passed.
`)
		if diff := cmp.Diff(wantStderr, gotStderr); diff != "" {
			t.Errorf("wrong stderr\n%s", diff)
		}

		// The -junit-xml option should also have produced a report file
		// in the working directory.
		gotXMLSrc, err := ioutil.ReadFile("junit.xml")
		if err != nil {
			t.Fatal(err)
		}
		gotXML := string(bytes.TrimSpace(gotXMLSrc))
		wantXML := strings.TrimSpace(`
<testsuites>
<errors>0</errors>
<failures>0</failures>
<tests>1</tests>
<testsuite>
<name>hello</name>
<tests>1</tests>
<skipped>0</skipped>
<errors>0</errors>
<failures>0</failures>
<testcase>
<name>output</name>
<classname>foo</classname>
</testcase>
</testsuite>
</testsuites>
`)
		if diff := cmp.Diff(wantXML, gotXML); diff != "" {
			t.Errorf("wrong JUnit XML\n%s", diff)
		}
	})
	t.Run("fails", func(t *testing.T) {
		// Fixture whose single assertion is expected to fail, so the
		// command must report the failure and exit with status 1.
		td := tempDir(t)
		testCopyDir(t, testFixturePath("test-fails"), td)
		defer os.RemoveAll(td)
		defer testChdir(t, td)()

		streams, close := terminal.StreamsForTesting(t)
		cmd := &TestCommand{
			Meta: Meta{
				Streams: streams,
				View:    views.NewView(streams),
			},
		}
		exitStatus := cmd.Run([]string{"-junit-xml=junit.xml", "-no-color"})
		outp := close(t)
		// A failing assertion is reflected in a nonzero exit status.
		if got, want := exitStatus, 1; got != want {
			t.Fatalf("wrong exit status %d; want %d\nstderr:\n%s", got, want, outp.Stderr())
		}

		// Stdout still carries only the experimental-command warning.
		gotStdout := strings.TrimSpace(outp.Stdout())
		wantStdout := strings.TrimSpace(`
Warning: The "terraform test" command is experimental
We'd like to invite adventurous module authors to write integration tests for
their modules using this command, but all of the behaviors of this command
are currently experimental and may change based on feedback.
For more information on the testing experiment, including ongoing research
goals and avenues for feedback, see:
https://www.terraform.io/docs/language/modules/testing-experiment.html
`)
		if diff := cmp.Diff(wantStdout, gotStdout); diff != "" {
			t.Errorf("wrong stdout\n%s", diff)
		}

		// The failure details (got/want values) appear on stderr.
		gotStderr := strings.TrimSpace(outp.Stderr())
		wantStderr := strings.TrimSpace(`
Failed: hello.foo.output (output "foo" value)
wrong value
got: "foo value boop"
want: "foo not boop"
`)
		if diff := cmp.Diff(wantStderr, gotStderr); diff != "" {
			t.Errorf("wrong stderr\n%s", diff)
		}

		// The JUnit XML report records the failure, including the
		// failure message with XML-escaped newlines and quotes.
		gotXMLSrc, err := ioutil.ReadFile("junit.xml")
		if err != nil {
			t.Fatal(err)
		}
		gotXML := string(bytes.TrimSpace(gotXMLSrc))
		wantXML := strings.TrimSpace(`
<testsuites>
<errors>0</errors>
<failures>1</failures>
<tests>1</tests>
<testsuite>
<name>hello</name>
<tests>1</tests>
<skipped>0</skipped>
<errors>0</errors>
<failures>1</failures>
<testcase>
<name>output</name>
<classname>foo</classname>
<failure>
<message>wrong value&#xA; got: &#34;foo value boop&#34;&#xA; want: &#34;foo not boop&#34;&#xA;</message>
</failure>
</testcase>
</testsuite>
</testsuites>
`)
		if diff := cmp.Diff(wantXML, gotXML); diff != "" {
			t.Errorf("wrong JUnit XML\n%s", diff)
		}
	})
}

View File

@ -0,0 +1,7 @@
# Input variable set by the test suite configuration.
variable "input" {
  type = string
}

# Output whose value the test suite's assertion checks.
output "foo" {
  value = "foo value ${var.input}"
}

View File

@ -0,0 +1,23 @@
# Test suite configuration that intentionally asserts a wrong value, so
# that the "terraform test" command reports a failure.
terraform {
  required_providers {
    # The built-in "test" provider supplies the test_assertions resource.
    test = {
      source = "terraform.io/builtin/test"
    }
  }
}

# Instantiate the module under test with a known input value.
module "main" {
  source = "../.."
  input  = "boop"
}

resource "test_assertions" "foo" {
  component = "foo"

  equal "output" {
    description = "output \"foo\" value"
    # The module produces "foo value boop", so this assertion fails.
    got  = module.main.foo
    want = "foo not boop"
  }
}

View File

@ -0,0 +1,7 @@
# Input variable set by the test suite configuration.
variable "input" {
  type = string
}

# Output whose value the test suite's assertion checks.
output "foo" {
  value = "foo value ${var.input}"
}

View File

@ -0,0 +1,23 @@
# Test suite configuration whose assertion matches the module's actual
# output, so that the "terraform test" command reports success.
terraform {
  required_providers {
    # The built-in "test" provider supplies the test_assertions resource.
    test = {
      source = "terraform.io/builtin/test"
    }
  }
}

# Instantiate the module under test with a known input value.
module "main" {
  source = "../.."
  input  = "boop"
}

resource "test_assertions" "foo" {
  component = "foo"

  equal "output" {
    description = "output \"foo\" value"
    # The module produces exactly "foo value boop", so this passes.
    got  = module.main.foo
    want = "foo value boop"
  }
}

373
command/views/test.go Normal file
View File

@ -0,0 +1,373 @@
package views
import (
"encoding/xml"
"fmt"
"io/ioutil"
"sort"
"strings"
"github.com/hashicorp/terraform/command/arguments"
"github.com/hashicorp/terraform/command/format"
"github.com/hashicorp/terraform/internal/moduletest"
"github.com/hashicorp/terraform/internal/terminal"
"github.com/hashicorp/terraform/tfdiags"
"github.com/mitchellh/colorstring"
)
// Test is the view interface for the "terraform test" command.
type Test interface {
	// Results presents the given test results, keyed by suite name.
	// Any diagnostics returned describe problems with presenting the
	// results (e.g. writing a report file), not test failures themselves.
	Results(map[string]*moduletest.Suite) tfdiags.Diagnostics

	// Diagnostics is for reporting warnings or errors that occurred with the
	// mechanics of running tests. For this command in particular, some
	// errors are considered to be test failures rather than mechanism failures,
	// and so those will be reported via Results rather than via Diagnostics.
	Diagnostics(tfdiags.Diagnostics)
}
// NewTest returns an implementation of Test configured to respect the
// settings described in the given arguments.
//
// The only implementation currently is the human-oriented view, which
// optionally also writes a JUnit XML report when args.JUnitXMLFile is set.
func NewTest(base *View, args arguments.TestOutput) Test {
	return &testHuman{
		streams:         base.streams,
		showDiagnostics: base.Diagnostics,
		colorize:        base.colorize,
		junitXMLFile:    args.JUnitXMLFile,
	}
}
// testHuman is the human-oriented implementation of the Test view,
// writing results to the terminal streams and optionally also producing
// a JUnit XML report file.
type testHuman struct {
	// This is the subset of functionality we need from the base view.
	streams         *terminal.Streams
	showDiagnostics func(diags tfdiags.Diagnostics)
	colorize        *colorstring.Colorize

	// If junitXMLFile is not empty then results will be written to
	// the given file path in addition to the usual output.
	junitXMLFile string
}
// Results presents the given test results in the human-oriented format
// and, when configured, additionally records them as a JUnit XML file.
// The returned diagnostics describe problems with presenting the results
// (such as a failure to write the report file), not test failures.
func (v *testHuman) Results(results map[string]*moduletest.Suite) tfdiags.Diagnostics {
	var diags tfdiags.Diagnostics

	// FIXME: Due to how this prototype command evolved concurrently with
	// establishing the idea of command views, the handling of JUnit output
	// as part of the "human" view rather than as a separate view in its
	// own right is a little odd and awkward. We should refactor this
	// prior to making "terraform test" a real supported command to make
	// it be structured more like the other commands that use the views
	// package.

	v.humanResults(results)

	if filename := v.junitXMLFile; filename != "" {
		diags = diags.Append(v.junitXMLResults(results, filename))
	}

	return diags
}
// Diagnostics delegates to the base view's diagnostic renderer, doing
// nothing at all when there are no diagnostics to show.
func (v *testHuman) Diagnostics(diags tfdiags.Diagnostics) {
	if len(diags) > 0 {
		v.showDiagnostics(diags)
	}
}
// humanResults writes a human-oriented rendering of the given test results
// to the stderr stream, showing details only for failed or errored
// assertions followed by an overall summary message.
func (v *testHuman) humanResults(results map[string]*moduletest.Suite) {
	failCount := 0
	width := v.streams.Stderr.Columns()

	// We sort all of the name slices below so that the output is
	// deterministic between runs, rather than following Go's randomized
	// map iteration order.
	suiteNames := make([]string, 0, len(results))
	for suiteName := range results {
		suiteNames = append(suiteNames, suiteName)
	}
	sort.Strings(suiteNames)
	for _, suiteName := range suiteNames {
		suite := results[suiteName]

		componentNames := make([]string, 0, len(suite.Components))
		for componentName := range suite.Components {
			componentNames = append(componentNames, componentName)
		}
		// Previously this slice was left unsorted, making the relative
		// order of reported failures nondeterministic between runs.
		sort.Strings(componentNames)
		for _, componentName := range componentNames {
			component := suite.Components[componentName]

			assertionNames := make([]string, 0, len(component.Assertions))
			for assertionName := range component.Assertions {
				assertionNames = append(assertionNames, assertionName)
			}
			sort.Strings(assertionNames)
			for _, assertionName := range assertionNames {
				assertion := component.Assertions[assertionName]

				fullName := fmt.Sprintf("%s.%s.%s", suiteName, componentName, assertionName)
				if strings.HasPrefix(componentName, "(") {
					// parenthesis-prefixed components are placeholders that
					// the test harness generates to represent problems that
					// prevented checking any assertions at all, so we'll
					// just hide them and show the suite name.
					fullName = suiteName
				}
				headingExtra := fmt.Sprintf("%s (%s)", fullName, assertion.Description)

				switch assertion.Outcome {
				case moduletest.Failed:
					// Failed means that the assertion was successfully
					// executed but that the assertion condition didn't hold.
					v.eprintRuleHeading("yellow", "Failed", headingExtra)

				case moduletest.Error:
					// Error means that the system encountered an unexpected
					// error when trying to evaluate the assertion.
					v.eprintRuleHeading("red", "Error", headingExtra)

				default:
					// We don't do anything for moduletest.Passed or
					// moduletest.Skipped. Perhaps in future we'll offer a
					// -verbose option to include information about those.
					continue
				}
				failCount++

				if len(assertion.Message) > 0 {
					dispMsg := format.WordWrap(assertion.Message, width)
					v.streams.Eprintln(dispMsg)
				}
				if len(assertion.Diagnostics) > 0 {
					// We'll do our own writing of the diagnostics in this
					// case, rather than using v.Diagnostics, because we
					// specifically want all of these diagnostics to go to
					// Stderr along with all of the other output we've
					// generated.
					for _, diag := range assertion.Diagnostics {
						diagStr := format.Diagnostic(diag, nil, v.colorize, width)
						v.streams.Eprint(diagStr)
					}
				}
			}
		}
	}

	if failCount > 0 {
		// If we've printed at least one failure then we'll have printed at
		// least one horizontal rule across the terminal, and so we'll balance
		// that with another horizontal rule.
		if width > 1 {
			rule := strings.Repeat("─", width-1)
			v.streams.Eprintln(v.colorize.Color("[dark_gray]" + rule))
		}
	}

	if failCount == 0 {
		if len(results) > 0 {
			// This is not actually an error, but it's convenient if all of our
			// result output goes to the same stream for when this is running in
			// automation that might be gathering this output via a pipe.
			v.streams.Eprint(v.colorize.Color("[bold][green]Success![reset] All of the test assertions passed.\n\n"))
		} else {
			v.streams.Eprint(v.colorize.Color("[bold][yellow]No tests defined.[reset] This module doesn't have any test suites to run.\n\n"))
		}
	}

	// Try to flush any buffering that might be happening. (This isn't always
	// successful, depending on what sort of fd Stderr is connected to.)
	v.streams.Stderr.File.Sync()
}
// junitXMLResults writes the given test results to the given filename as a
// JUnit-style XML document, returning diagnostics describing any problem
// with producing or writing that file.
func (v *testHuman) junitXMLResults(results map[string]*moduletest.Suite, filename string) tfdiags.Diagnostics {
	var diags tfdiags.Diagnostics

	// "JUnit XML" is a file format that has become a de-facto standard for
	// test reporting tools but that is not formally specified anywhere, and
	// so each producer and consumer implementation unfortunately tends to
	// differ in certain ways from others.
	// With that in mind, this is a best effort sort of thing aimed at being
	// broadly compatible with various consumers, but it's likely that
	// some consumers will present these results better than others.
	// This implementation is based mainly on the pseudo-specification of the
	// format curated here, based on the Jenkins parser implementation:
	//    https://llg.cubic.org/docs/junit/

	// An "Outcome" represents one of the various XML elements allowed inside
	// a testcase element to indicate the test outcome.
	type Outcome struct {
		Message string `xml:"message,omitempty"`
	}

	// TestCase represents an individual test case as part of a suite. Note
	// that a JUnit XML incorporates both the "component" and "assertion"
	// levels of our model: we pretend that component is a class name and
	// assertion is a method name in order to match with the Java-flavored
	// expectations of JUnit XML, which are hopefully close enough to get
	// a test result rendering that's useful to humans.
	type TestCase struct {
		AssertionName string `xml:"name"`
		ComponentName string `xml:"classname"`

		// These fields represent the different outcomes of a TestCase. Only one
		// of these should be populated in each TestCase; this awkward
		// structure is just to make this play nicely with encoding/xml's
		// expectations.
		Skipped *Outcome `xml:"skipped,omitempty"`
		Error   *Outcome `xml:"error,omitempty"`
		Failure *Outcome `xml:"failure,omitempty"`

		Stderr string `xml:"system-out,omitempty"`
	}

	// TestSuite represents an individual test suite, of potentially many
	// in a JUnit XML document.
	type TestSuite struct {
		Name         string      `xml:"name"`
		TotalCount   int         `xml:"tests"`
		SkippedCount int         `xml:"skipped"`
		ErrorCount   int         `xml:"errors"`
		FailureCount int         `xml:"failures"`
		Cases        []*TestCase `xml:"testcase"`
	}

	// TestSuites represents the root element of the XML document.
	type TestSuites struct {
		XMLName      struct{}     `xml:"testsuites"`
		ErrorCount   int          `xml:"errors"`
		FailureCount int          `xml:"failures"`
		TotalCount   int          `xml:"tests"`
		Suites       []*TestSuite `xml:"testsuite"`
	}

	xmlSuites := TestSuites{}

	// Sort all of the name slices below so that the document content is
	// deterministic between runs.
	suiteNames := make([]string, 0, len(results))
	for suiteName := range results {
		suiteNames = append(suiteNames, suiteName)
	}
	sort.Strings(suiteNames)
	for _, suiteName := range suiteNames {
		suite := results[suiteName]

		xmlSuite := &TestSuite{
			Name: suiteName,
		}
		xmlSuites.Suites = append(xmlSuites.Suites, xmlSuite)

		componentNames := make([]string, 0, len(suite.Components))
		for componentName := range suite.Components {
			componentNames = append(componentNames, componentName)
		}
		// Previously this slice was left unsorted, making the testcase
		// order in the report nondeterministic between runs (and map
		// iteration order in Go is intentionally randomized).
		sort.Strings(componentNames)
		for _, componentName := range componentNames {
			component := suite.Components[componentName]

			assertionNames := make([]string, 0, len(component.Assertions))
			for assertionName := range component.Assertions {
				assertionNames = append(assertionNames, assertionName)
			}
			sort.Strings(assertionNames)
			for _, assertionName := range assertionNames {
				assertion := component.Assertions[assertionName]
				xmlSuites.TotalCount++
				xmlSuite.TotalCount++

				xmlCase := &TestCase{
					ComponentName: componentName,
					AssertionName: assertionName,
				}
				xmlSuite.Cases = append(xmlSuite.Cases, xmlCase)

				switch assertion.Outcome {
				case moduletest.Pending:
					// We represent "pending" cases -- cases blocked by
					// upstream errors -- as if they were "skipped" in JUnit
					// terms, because we didn't actually check them and so
					// can't say whether they succeeded or not.
					xmlSuite.SkippedCount++
					xmlCase.Skipped = &Outcome{
						Message: assertion.Message,
					}
				case moduletest.Failed:
					xmlSuites.FailureCount++
					xmlSuite.FailureCount++
					xmlCase.Failure = &Outcome{
						Message: assertion.Message,
					}
				case moduletest.Error:
					xmlSuites.ErrorCount++
					xmlSuite.ErrorCount++
					xmlCase.Error = &Outcome{
						Message: assertion.Message,
					}

					// We'll also include the diagnostics in the "stderr"
					// portion of the output, so they'll hopefully be visible
					// in a test log viewer in JUnit-XML-consuming CI systems.
					var buf strings.Builder
					for _, diag := range assertion.Diagnostics {
						diagStr := format.DiagnosticPlain(diag, nil, 68)
						buf.WriteString(diagStr)
					}
					xmlCase.Stderr = buf.String()
				}
			}
		}
	}

	xmlOut, err := xml.MarshalIndent(&xmlSuites, "", "  ")
	if err != nil {
		// If marshalling fails then that's a bug in the code above,
		// because we should always be producing a value that is
		// accepted by encoding/xml.
		panic(fmt.Sprintf("invalid values to marshal as JUnit XML: %s", err))
	}

	err = ioutil.WriteFile(filename, xmlOut, 0644)
	if err != nil {
		diags = diags.Append(tfdiags.Sourceless(
			tfdiags.Error,
			"Failed to write JUnit XML file",
			fmt.Sprintf(
				"Could not create %s to record the test results in JUnit XML format: %s.",
				filename,
				err,
			),
		))
	}

	return diags
}
// eprintRuleHeading writes a colored heading line to stderr, framed by a
// horizontal rule: a short fixed rule on the left, then "prefix: extra",
// then a rule filling the remaining terminal width where there's room.
func (v *testHuman) eprintRuleHeading(color, prefix, extra string) {
	const lineCell string = "─"
	width := v.streams.Stderr.Columns()

	// Work out how much of the line the left rule, the text, and the two
	// separating spaces will consume, then give whatever remains to the
	// right-hand rule.
	leftLineLen := 3
	occupied := len(prefix) + len(": ") + len(extra) + 2 + leftLineLen
	rightLineLen := 0
	if occupied < (width - 1) {
		// (we allow an extra column at the end because some terminals can't
		// print in the final column without wrapping to the next line)
		rightLineLen = width - occupied - 1
	}

	colorCode := "[" + color + "]"

	// Assemble the whole heading in memory first so we can emit it to
	// stderr in a single write, in case other programs are also
	// concurrently trying to write to the terminal for some reason.
	var sb strings.Builder
	sb.WriteString(v.colorize.Color(colorCode + strings.Repeat(lineCell, leftLineLen)))
	sb.WriteByte(' ')
	sb.WriteString(v.colorize.Color("[bold]" + colorCode + prefix + ":"))
	sb.WriteByte(' ')
	sb.WriteString(extra)
	if rightLineLen > 0 {
		sb.WriteByte(' ')
		sb.WriteString(v.colorize.Color(colorCode + strings.Repeat(lineCell, rightLineLen)))
	}
	v.streams.Eprintln(sb.String())
}

View File

@ -0,0 +1,32 @@
package views
import (
"strings"
"testing"
"github.com/hashicorp/terraform/command/arguments"
"github.com/hashicorp/terraform/internal/moduletest"
"github.com/hashicorp/terraform/internal/terminal"
)
// TestTest exercises the test view directly with an empty result set,
// which should produce the "No tests defined" notice.
func TestTest(t *testing.T) {
	streams, close := terminal.StreamsForTesting(t)
	baseView := NewView(streams)
	view := NewTest(baseView, arguments.TestOutput{
		// No JUnit XML report for this test.
		JUnitXMLFile: "",
	})

	results := map[string]*moduletest.Suite{}
	view.Results(results)

	output := close(t)
	gotOutput := strings.TrimSpace(output.All())
	wantOutput := `No tests defined. This module doesn't have any test suites to run.`
	if gotOutput != wantOutput {
		t.Errorf("wrong output\ngot:\n%s\nwant:\n%s", gotOutput, wantOutput)
	}

	// TODO: Test more at this layer. For now, the main UI output tests for
	// the "terraform test" command are in the command package as part of
	// the overall command tests.
}

View File

@ -266,6 +266,12 @@ func initCommands(
}, nil
},
"test": func() (cli.Command, error) {
return &command.TestCommand{
Meta: meta,
}, nil
},
"validate": func() (cli.Command, error) {
return &command.ValidateCommand{
Meta: meta,

1
go.mod
View File

@ -13,6 +13,7 @@ require (
github.com/aliyun/aliyun-tablestore-go-sdk v4.1.2+incompatible
github.com/apparentlymart/go-cidr v1.1.0
github.com/apparentlymart/go-dump v0.0.0-20190214190832-042adf3cf4a0
github.com/apparentlymart/go-shquot v0.0.1
github.com/apparentlymart/go-userdirs v0.0.0-20200915174352-b0c018a67c13
github.com/apparentlymart/go-versions v1.0.1
github.com/armon/circbuf v0.0.0-20190214190532-5111143e8da2

2
go.sum
View File

@ -109,6 +109,8 @@ github.com/apparentlymart/go-cidr v1.1.0/go.mod h1:EBcsNrHc3zQeuaeCeCtQruQm+n9/Y
github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM=
github.com/apparentlymart/go-dump v0.0.0-20190214190832-042adf3cf4a0 h1:MzVXffFUye+ZcSR6opIgz9Co7WcDx6ZcY+RjfFHoA0I=
github.com/apparentlymart/go-dump v0.0.0-20190214190832-042adf3cf4a0/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM=
github.com/apparentlymart/go-shquot v0.0.1 h1:MGV8lwxF4zw75lN7e0MGs7o6AFYn7L6AZaExUpLh0Mo=
github.com/apparentlymart/go-shquot v0.0.1/go.mod h1:lw58XsE5IgUXZ9h0cxnypdx31p9mPFIVEQ9P3c7MlrU=
github.com/apparentlymart/go-textseg v1.0.0 h1:rRmlIsPEEhUTIKQb7T++Nz/A5Q6C9IuX2wFoYVvnCs0=
github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk=
github.com/apparentlymart/go-textseg/v12 v12.0.0 h1:bNEQyAGak9tojivJNkoqWErVCQbjdL7GzRt3F8NvfJ0=

View File

@ -33,6 +33,8 @@ type Component struct {
// Status is an enumeration of possible outcomes of a test assertion.
type Status rune
//go:generate go run golang.org/x/tools/cmd/stringer -type=Status assertion.go
const (
// Pending indicates that the test was registered (during planning)
// but didn't register an outcome during apply, perhaps due to being
@ -50,3 +52,15 @@ const (
// test report failed in some other way.
Error Status = 'E'
)
// SuiteCanPass returns true if a suite containing an assertion with this
// status could possibly succeed. The suite as a whole succeeds only if all
// of its assertions have statuses where SuiteCanPass returns true.
func (s Status) SuiteCanPass() bool {
	// Only a failed or errored assertion dooms the suite; every other
	// status (including Pending and Passed) allows it to pass.
	return s != Failed && s != Error
}

View File

@ -2,6 +2,7 @@ package moduletest
import (
"fmt"
"log"
"sync"
"github.com/zclconf/go-cty/cty"
@ -67,29 +68,48 @@ func (p *Provider) TestResults() map[string]*Component {
return p.components
}
// GetSchema returns the complete schema for the provider.
func (p *Provider) GetSchema() providers.GetSchemaResponse {
return providers.GetSchemaResponse{
// Reset returns the receiving provider back to its original state, with no
// recorded test results.
//
// It additionally detaches the instance from any data structure previously
// returned by method TestResults, freeing the caller from the constraints
// in its documentation about mutability and storage.
//
// For convenience in the presumed common case of resetting as part of
// capturing the results for storage, this method also returns the result
// that method TestResults would've returned if called prior to the call
// to Reset.
func (p *Provider) Reset() map[string]*Component {
	p.mutex.Lock()
	defer p.mutex.Unlock()
	log.Print("[TRACE] moduletest.Provider: Reset")
	captured := p.components
	p.components = make(map[string]*Component)
	return captured
}
// GetProviderSchema returns the complete schema for the provider.
//
// This provider defines only the single managed resource type
// "test_assertions", whose schema is declared in testAssertionsSchema.
func (p *Provider) GetProviderSchema() providers.GetProviderSchemaResponse {
	return providers.GetProviderSchemaResponse{
		ResourceTypes: map[string]providers.Schema{
			"test_assertions": testAssertionsSchema,
		},
	}
}
// PrepareProviderConfig is used to tweak the configuration values.
func (p *Provider) PrepareProviderConfig(req providers.PrepareProviderConfigRequest) providers.PrepareProviderConfigResponse {
// This provider has no configurable settings.
var res providers.PrepareProviderConfigResponse
res.PreparedConfig = req.Config
// ValidateProviderConfig validates the provider configuration.
//
// NOTE(review): the response's prepared-config field (if this provider
// protocol version has one) is left unset here; confirm that callers
// tolerate that for a provider with no configurable settings.
func (p *Provider) ValidateProviderConfig(req providers.ValidateProviderConfigRequest) providers.ValidateProviderConfigResponse {
	// This provider has no configurable settings, so nothing to validate.
	var res providers.ValidateProviderConfigResponse
	return res
}
// Configure configures and initializes the provider.
func (p *Provider) Configure(providers.ConfigureRequest) providers.ConfigureResponse {
// ConfigureProvider configures and initializes the provider.
func (p *Provider) ConfigureProvider(providers.ConfigureProviderRequest) providers.ConfigureProviderResponse {
// This provider has no configurable settings, but we use the configure
// request as an opportunity to generate a warning about it being
// experimental.
var res providers.ConfigureResponse
var res providers.ConfigureProviderResponse
res.Diagnostics = res.Diagnostics.Append(tfdiags.AttributeValue(
tfdiags.Warning,
"The test provider is experimental",
@ -99,9 +119,11 @@ func (p *Provider) Configure(providers.ConfigureRequest) providers.ConfigureResp
return res
}
// ValidateResourceTypeConfig is used to validate configuration values for a resource.
func (p *Provider) ValidateResourceTypeConfig(req providers.ValidateResourceTypeConfigRequest) providers.ValidateResourceTypeConfigResponse {
var res providers.ValidateResourceTypeConfigResponse
// ValidateResourceConfig is used to validate configuration values for a resource.
func (p *Provider) ValidateResourceConfig(req providers.ValidateResourceConfigRequest) providers.ValidateResourceConfigResponse {
log.Print("[TRACE] moduletest.Provider: ValidateResourceConfig")
var res providers.ValidateResourceConfigResponse
if req.TypeName != "test_assertions" { // we only have one resource type
res.Diagnostics = res.Diagnostics.Append(fmt.Errorf("unsupported resource type %s", req.TypeName))
return res
@ -168,6 +190,8 @@ func (p *Provider) ValidateResourceTypeConfig(req providers.ValidateResourceType
// ReadResource refreshes a resource and returns its current state.
func (p *Provider) ReadResource(req providers.ReadResourceRequest) providers.ReadResourceResponse {
log.Print("[TRACE] moduletest.Provider: ReadResource")
var res providers.ReadResourceResponse
if req.TypeName != "test_assertions" { // we only have one resource type
res.Diagnostics = res.Diagnostics.Append(fmt.Errorf("unsupported resource type %s", req.TypeName))
@ -183,6 +207,8 @@ func (p *Provider) ReadResource(req providers.ReadResourceRequest) providers.Rea
// stored in the state in case the schema has changed since it was originally
// written.
func (p *Provider) UpgradeResourceState(req providers.UpgradeResourceStateRequest) providers.UpgradeResourceStateResponse {
log.Print("[TRACE] moduletest.Provider: UpgradeResourceState")
var res providers.UpgradeResourceStateResponse
if req.TypeName != "test_assertions" { // we only have one resource type
res.Diagnostics = res.Diagnostics.Append(fmt.Errorf("unsupported resource type %s", req.TypeName))
@ -215,6 +241,8 @@ func (p *Provider) UpgradeResourceState(req providers.UpgradeResourceStateReques
// PlanResourceChange takes the current state and proposed state of a
// resource, and returns the planned final state.
func (p *Provider) PlanResourceChange(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
log.Print("[TRACE] moduletest.Provider: PlanResourceChange")
var res providers.PlanResourceChangeResponse
if req.TypeName != "test_assertions" { // we only have one resource type
res.Diagnostics = res.Diagnostics.Append(fmt.Errorf("unsupported resource type %s", req.TypeName))
@ -239,15 +267,25 @@ func (p *Provider) PlanResourceChange(req providers.PlanResourceChangeRequest) p
componentName := proposed.GetAttr("component").AsString() // proven known during validate
p.mutex.Lock()
defer p.mutex.Unlock()
if _, exists := p.components[componentName]; exists {
res.Diagnostics = res.Diagnostics.Append(tfdiags.AttributeValue(
tfdiags.Error,
"Duplicate test component",
fmt.Sprintf("Another test_assertions resource already declared assertions for the component name %q.", componentName),
cty.GetAttrPath("component"),
))
return res
}
// NOTE: Ideally we'd do something here to verify if two assertions
// resources in the configuration attempt to declare the same component,
// but we can't actually do that because Terraform calls PlanResourceChange
// during both plan and apply, and so the second one would always fail.
// Since this is just providing a temporary pseudo-syntax for writing tests
// anyway, we'll live with this for now and aim to solve it with a future
// iteration of testing that's better integrated into the Terraform
// language.
/*
if _, exists := p.components[componentName]; exists {
res.Diagnostics = res.Diagnostics.Append(tfdiags.AttributeValue(
tfdiags.Error,
"Duplicate test component",
fmt.Sprintf("Another test_assertions resource already declared assertions for the component name %q.", componentName),
cty.GetAttrPath("component"),
))
return res
}
*/
component := Component{
Assertions: make(map[string]*Assertion),
@ -328,6 +366,8 @@ func (p *Provider) PlanResourceChange(req providers.PlanResourceChangeRequest) p
// yet contain unknown computed values, and applies the changes returning
// the final state.
func (p *Provider) ApplyResourceChange(req providers.ApplyResourceChangeRequest) providers.ApplyResourceChangeResponse {
log.Print("[TRACE] moduletest.Provider: ApplyResourceChange")
var res providers.ApplyResourceChangeResponse
if req.TypeName != "test_assertions" { // we only have one resource type
res.Diagnostics = res.Diagnostics.Append(fmt.Errorf("unsupported resource type %s", req.TypeName))
@ -343,6 +383,11 @@ func (p *Provider) ApplyResourceChange(req providers.ApplyResourceChangeRequest)
planned := req.PlannedState
res.NewState = planned
if res.NewState.IsNull() {
// If we're destroying then we'll just quickly return success to
// allow the test process to clean up after itself.
return res
}
componentName := planned.GetAttr("component").AsString() // proven known during validate
p.mutex.Lock()

View File

@ -48,7 +48,7 @@ func TestProvider(t *testing.T) {
p := NewProvider()
configureResp := p.Configure(providers.ConfigureRequest{
configureResp := p.ConfigureProvider(providers.ConfigureProviderRequest{
Config: cty.EmptyObjectVal,
})
if got, want := len(configureResp.Diagnostics), 1; got != want {
@ -58,7 +58,7 @@ func TestProvider(t *testing.T) {
t.Fatalf("wrong diagnostic message\ngot: %s\nwant: %s", got, want)
}
validateResp := p.ValidateResourceTypeConfig(providers.ValidateResourceTypeConfigRequest{
validateResp := p.ValidateResourceConfig(providers.ValidateResourceConfigRequest{
TypeName: "test_assertions",
Config: assertionConfig,
})

View File

@ -0,0 +1,39 @@
// Code generated by "stringer -type=Status assertion.go"; DO NOT EDIT.
package moduletest
import "strconv"
func _() {
// An "invalid array index" compiler error signifies that the constant values have changed.
// Re-run the stringer command to generate them again.
var x [1]struct{}
_ = x[Pending-63]
_ = x[Passed-80]
_ = x[Failed-70]
_ = x[Error-69]
}
const (
_Status_name_0 = "Pending"
_Status_name_1 = "ErrorFailed"
_Status_name_2 = "Passed"
)
var (
_Status_index_1 = [...]uint8{0, 5, 11}
)
// String returns the name of the status, e.g. "Pending" for 'ǀ'-coded
// values; unknown values render as "Status(n)". This function is generated
// by the stringer tool (see the header of this file); do not edit by hand.
func (i Status) String() string {
	switch {
	case i == 63:
		return _Status_name_0
	case 69 <= i && i <= 70:
		i -= 69
		return _Status_name_1[_Status_index_1[i]:_Status_index_1[i+1]]
	case i == 80:
		return _Status_name_2
	default:
		return "Status(" + strconv.FormatInt(int64(i), 10) + ")"
	}
}

View File

@ -0,0 +1,7 @@
package moduletest
// A Suite is a set of tests run together as a single Terraform configuration.
type Suite struct {
	// Name is the name of the suite.
	Name string

	// Components maps component names to the recorded results for each
	// component's assertions.
	Components map[string]*Component
}

View File

@ -65,6 +65,21 @@ func NewInstaller(targetDir *Dir, source getproviders.Source) *Installer {
}
}
// Clone returns a new Installer that installs into the given target
// directory but shares this installer's optional global cache directory,
// installation sources, and built-in/unmanaged provider settings. The
// result can then be mutated further using the various setter methods
// without affecting the original.
func (i *Installer) Clone(targetDir *Dir) *Installer {
	// A shallow copy is sufficient today because all of our setter methods
	// replace field values wholesale rather than mutating through the
	// shared pointers. We might need to be more careful here if in future
	// we add methods that allow deeper mutations through the stored
	// pointers.
	c := *i
	c.targetDir = targetDir
	return &c
}
// ProviderSource returns the getproviders.Source that the installer would
// use for installing any new providers.
func (i *Installer) ProviderSource() getproviders.Source {
@ -88,6 +103,12 @@ func (i *Installer) SetGlobalCacheDir(cacheDir *Dir) {
i.globalCacheDir = cacheDir
}
// HasGlobalCacheDir returns true if someone has previously called
// SetGlobalCacheDir to configure a global cache directory for this installer.
func (i *Installer) HasGlobalCacheDir() bool {
	return i.globalCacheDir != nil
}
// SetBuiltInProviderTypes tells the receiver to consider the type names in the
// given slice to be valid as providers in the special special
// terraform.io/builtin/... namespace that we use for providers that are