Merge branch 'master' into master

Commit 0c91d227fa by James Bardin, 2019-06-10 15:50:59 -04:00, committed by GitHub (GPG key ID: 4AEE18F83AFDEB23)
476 changed files with 58770 additions and 12130 deletions

View File

@@ -11,7 +11,7 @@ Here are some of the most common:
 * [AWS](https://github.com/terraform-providers/terraform-provider-aws)
 * [Azure](https://github.com/terraform-providers/terraform-provider-azurerm)
-* [Google](https://github.com/terraform-providers/terraform-provider-aws)
+* [Google](https://github.com/terraform-providers/terraform-provider-google)
 * [Oracle](https://github.com/terraform-providers/terraform-provider-oci)
 * [Kubernetes](https://github.com/terraform-providers/terraform-provider-kubernetes)

View File

@@ -1 +1 @@
-1.12.1
+1.12.4

.tfdev (new file, 8 lines)
View File

@@ -0,0 +1,8 @@
version_info {
commit_var = "main.GitCommit"
version_var = "github.com/hashicorp/terraform/version.Version"
prerelease_var = "github.com/hashicorp/terraform/version.Prerelease"
}
version_exec = false
disable_provider_requirements = true

View File

@@ -4,7 +4,7 @@ services:
 - docker
 language: go
 go:
-- "1.12.1"
+- "1.12.4"
 # add TF_CONSUL_TEST=1 to run consul tests
 # they were causing timouts in travis

File diff suppressed because it is too large.

Vagrantfile (vendored, 92 lines removed)
View File

@@ -1,92 +0,0 @@
# -*- mode: ruby -*-
# vi: set ft=ruby :
# Vagrantfile API/syntax version. Don't touch unless you know what you're doing!
VAGRANTFILE_API_VERSION = "2"
# Software version variables
GOVERSION = "1.11.5"
UBUNTUVERSION = "16.04"
# CPU and RAM can be adjusted depending on your system
CPUCOUNT = "2"
RAM = "4096"
$script = <<SCRIPT
GOVERSION="#{GOVERSION}"
SRCROOT="/opt/go"
SRCPATH="/opt/gopath"
# Get the ARCH
ARCH="$(uname -m | sed 's|i686|386|' | sed 's|x86_64|amd64|')"
# Install Prereq Packages
export DEBIAN_PRIORITY=critical
export DEBIAN_FRONTEND=noninteractive
export DEBCONF_NONINTERACTIVE_SEEN=true
APT_OPTS="--assume-yes --no-install-suggests --no-install-recommends -o Dpkg::Options::=\"--force-confdef\" -o Dpkg::Options::=\"--force-confold\""
echo "Upgrading packages ..."
apt-get update ${APT_OPTS} >/dev/null
apt-get upgrade ${APT_OPTS} >/dev/null
echo "Installing prerequisites ..."
apt-get install ${APT_OPTS} build-essential curl git-core libpcre3-dev mercurial pkg-config zip >/dev/null
# Install Go
echo "Downloading go (${GOVERSION}) ..."
wget -P /tmp --quiet "https://storage.googleapis.com/golang/go${GOVERSION}.linux-${ARCH}.tar.gz"
echo "Setting up go (${GOVERSION}) ..."
tar -C /opt -xf "/tmp/go${GOVERSION}.linux-${ARCH}.tar.gz"
chmod 775 "$SRCROOT"
chown vagrant:vagrant "$SRCROOT"
# Setup the GOPATH; even though the shared folder spec gives the working
# directory the right user/group, we need to set it properly on the
# parent path to allow subsequent "go get" commands to work.
mkdir -p "$SRCPATH"
chown -R vagrant:vagrant "$SRCPATH" 2>/dev/null || true
# ^^ silencing errors here because we expect this to fail for the shared folder
cat >/etc/profile.d/gopath.sh <<EOF
export GOPATH="$SRCPATH"
export GOROOT="$SRCROOT"
export PATH="$SRCROOT/bin:$SRCPATH/bin:\$PATH"
EOF
chmod 755 /etc/profile.d/gopath.sh
grep -q -F 'cd /opt/gopath/src/github.com/hashicorp/terraform' /home/vagrant/.bashrc || cat >>/home/vagrant/.bashrc <<EOF
## After login, change to terraform directory
cd /opt/gopath/src/github.com/hashicorp/terraform
EOF
SCRIPT
Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
config.vm.box = "bento/ubuntu-#{UBUNTUVERSION}"
config.vm.hostname = "terraform"
config.vm.provision "prepare-shell", type: "shell", inline: "sudo sed -i '/tty/!s/mesg n/tty -s \\&\\& mesg n/' /root/.profile", privileged: false
config.vm.provision "initial-setup", type: "shell", inline: $script
config.vm.synced_folder '.', '/opt/gopath/src/github.com/hashicorp/terraform'
config.vm.provider "docker" do |v, override|
override.vm.box = "tknerr/baseimage-ubuntu-#{UBUNTUVERSION}"
end
["vmware_fusion", "vmware_workstation"].each do |p|
config.vm.provider p do |v|
v.vmx["memsize"] = "#{RAM}"
v.vmx["numvcpus"] = "#{CPUCOUNT}"
end
end
config.vm.provider "virtualbox" do |v|
v.memory = "#{RAM}"
v.cpus = "#{CPUCOUNT}"
end
config.vm.provider "parallels" do |prl|
prl.memory = "#{RAM}"
prl.cpus = "#{CPUCOUNT}"
end
end

View File

@@ -260,9 +260,10 @@ func (b *Local) renderPlan(plan *plans.Plan, state *states.State, schemas *terra
 	// check if the change is due to a tainted resource
 	tainted := false
 	if !state.Empty() {
-		rs := state.ResourceInstance(rcs.Addr)
-		if rs != nil {
-			tainted = rs.Current.Status == states.ObjectTainted
+		if is := state.ResourceInstance(rcs.Addr); is != nil {
+			if obj := is.GetGeneration(rcs.DeposedKey.Generation()); obj != nil {
+				tainted = obj.Status == states.ObjectTainted
+			}
 		}
 	}

View File

@@ -193,6 +193,121 @@ Plan: 1 to add, 0 to change, 1 to destroy.`
 }
 }
func TestLocal_planDeposedOnly(t *testing.T) {
b, cleanup := TestLocal(t)
defer cleanup()
p := TestLocalProvider(t, b, "test", planFixtureSchema())
testStateFile(t, b.StatePath, states.BuildState(func(ss *states.SyncState) {
ss.SetResourceInstanceDeposed(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "foo",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
states.DeposedKey("00000000"),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{
"ami": "bar",
"network_interface": [{
"device_index": 0,
"description": "Main network interface"
}]
}`),
},
addrs.ProviderConfig{
Type: "test",
}.Absolute(addrs.RootModuleInstance),
)
}))
b.CLI = cli.NewMockUi()
outDir := testTempDir(t)
defer os.RemoveAll(outDir)
planPath := filepath.Join(outDir, "plan.tfplan")
op, configCleanup := testOperationPlan(t, "./test-fixtures/plan")
defer configCleanup()
op.PlanRefresh = true
op.PlanOutPath = planPath
cfg := cty.ObjectVal(map[string]cty.Value{
"path": cty.StringVal(b.StatePath),
})
cfgRaw, err := plans.NewDynamicValue(cfg, cfg.Type())
if err != nil {
t.Fatal(err)
}
op.PlanOutBackend = &plans.Backend{
// Just a placeholder so that we can generate a valid plan file.
Type: "local",
Config: cfgRaw,
}
run, err := b.Operation(context.Background(), op)
if err != nil {
t.Fatalf("bad: %s", err)
}
<-run.Done()
if run.Result != backend.OperationSuccess {
t.Fatalf("plan operation failed")
}
if !p.ReadResourceCalled {
t.Fatal("ReadResource should be called")
}
if run.PlanEmpty {
t.Fatal("plan should not be empty")
}
// The deposed object and the current object are distinct, so our
// plan includes separate actions for each of them. This strange situation
// is not common: it should arise only if Terraform fails during
// a create-before-destroy when the create hasn't completed yet but
// in a severe way that prevents the previous object from being restored
// as "current".
//
// However, that situation was more common in some earlier Terraform
// versions where deposed objects were not managed properly, so this
// can arise when upgrading from an older version with deposed objects
// already in the state.
//
// This is one of the few cases where we expose the idea of "deposed" in
// the UI, including the user-unfriendly "deposed key" (00000000 in this
// case) just so that users can correlate this with what they might
// see in `terraform show` and in the subsequent apply output, because
// it's also possible for there to be _multiple_ deposed objects, in the
// unlikely event that create_before_destroy _keeps_ crashing across
// subsequent runs.
expectedOutput := `An execution plan has been generated and is shown below.
Resource actions are indicated with the following symbols:
+ create
- destroy
Terraform will perform the following actions:
# test_instance.foo will be created
+ resource "test_instance" "foo" {
+ ami = "bar"
+ network_interface {
+ description = "Main network interface"
+ device_index = 0
}
}
# test_instance.foo (deposed object 00000000) will be destroyed
- resource "test_instance" "foo" {
- ami = "bar" -> null
- network_interface {
- description = "Main network interface" -> null
- device_index = 0 -> null
}
}
Plan: 1 to add, 0 to change, 1 to destroy.`
output := b.CLI.(*cli.MockUi).OutputWriter.String()
if !strings.Contains(output, expectedOutput) {
t.Fatalf("Unexpected output:\n%s\n\nwant output containing:\n%s", output, expectedOutput)
}
}
 func TestLocal_planTainted_createBeforeDestroy(t *testing.T) {
 	b, cleanup := TestLocal(t)
 	defer cleanup()

View File

@@ -110,7 +110,7 @@ func (c *remoteClient) Lock(info *state.LockInfo) (string, error) {
 func (c *remoteClient) Unlock(id string) error {
 	gen, err := strconv.ParseInt(id, 10, 64)
 	if err != nil {
-		return err
+		return fmt.Errorf("Lock ID should be numerical value, got '%s'", id)
 	}
 	if err := c.lockFile().If(storage.Conditions{GenerationMatch: gen}).Delete(c.storageContext); err != nil {

View File

@@ -6,8 +6,10 @@ import (
 	"fmt"
 	"net/http"
 	"net/url"
+	"time"
-	cleanhttp "github.com/hashicorp/go-cleanhttp"
+	"github.com/hashicorp/go-cleanhttp"
+	"github.com/hashicorp/go-retryablehttp"
 	"github.com/hashicorp/terraform/backend"
 	"github.com/hashicorp/terraform/helper/schema"
 	"github.com/hashicorp/terraform/state"
@@ -66,6 +68,24 @@
 			Default:     false,
 			Description: "Whether to skip TLS verification.",
 		},
+		"retry_max": &schema.Schema{
+			Type:        schema.TypeInt,
+			Optional:    true,
+			Default:     2,
+			Description: "The number of HTTP request retries.",
+		},
+		"retry_wait_min": &schema.Schema{
+			Type:        schema.TypeInt,
+			Optional:    true,
+			Default:     1,
+			Description: "The minimum time in seconds to wait between HTTP request attempts.",
+		},
+		"retry_wait_max": &schema.Schema{
+			Type:        schema.TypeInt,
+			Optional:    true,
+			Default:     30,
+			Description: "The maximum time in seconds to wait between HTTP request attempts.",
+		},
 	},
 }
@@ -131,6 +151,12 @@ func (b *Backend) configure(ctx context.Context) error {
 	}
 	}
+	rClient := retryablehttp.NewClient()
+	rClient.HTTPClient = client
+	rClient.RetryMax = data.Get("retry_max").(int)
+	rClient.RetryWaitMin = time.Duration(data.Get("retry_wait_min").(int)) * time.Second
+	rClient.RetryWaitMax = time.Duration(data.Get("retry_wait_max").(int)) * time.Second
 	b.client = &httpClient{
 		URL:          updateURL,
 		UpdateMethod: updateMethod,
@@ -144,7 +170,7 @@ func (b *Backend) configure(ctx context.Context) error {
 		Password: data.Get("password").(string),
 		// accessible only for testing use
-		Client: client,
+		Client: rClient,
 	}
 	return nil
 }

View File

@@ -2,6 +2,7 @@ package http
 import (
 	"testing"
+	"time"
 	"github.com/hashicorp/terraform/configs"
 	"github.com/zclconf/go-cty/cty"
@@ -51,6 +52,9 @@ func TestHTTPClientFactory(t *testing.T) {
 	"unlock_method":  cty.StringVal("BLOOP"),
 	"username":       cty.StringVal("user"),
 	"password":       cty.StringVal("pass"),
+	"retry_max":      cty.StringVal("999"),
+	"retry_wait_min": cty.StringVal("15"),
+	"retry_wait_max": cty.StringVal("150"),
 }
 b = backend.TestBackendConfig(t, New(), configs.SynthBody("synth", conf)).(*Backend)
@@ -74,4 +78,13 @@ func TestHTTPClientFactory(t *testing.T) {
 		t.Fatalf("Unexpected username \"%s\" vs \"%s\" or password \"%s\" vs \"%s\"", client.Username, conf["username"],
 			client.Password, conf["password"])
 	}
+	if client.Client.RetryMax != 999 {
+		t.Fatalf("Expected retry_max \"%d\", got \"%d\"", 999, client.Client.RetryMax)
+	}
+	if client.Client.RetryWaitMin != 15*time.Second {
+		t.Fatalf("Expected retry_wait_min \"%s\", got \"%s\"", 15*time.Second, client.Client.RetryWaitMin)
+	}
+	if client.Client.RetryWaitMax != 150*time.Second {
+		t.Fatalf("Expected retry_wait_max \"%s\", got \"%s\"", 150*time.Second, client.Client.RetryWaitMax)
+	}
 }

View File

@@ -11,6 +11,7 @@ import (
 	"net/http"
 	"net/url"
+	"github.com/hashicorp/go-retryablehttp"
 	"github.com/hashicorp/terraform/state"
 	"github.com/hashicorp/terraform/state/remote"
 )
@@ -28,7 +29,7 @@ type httpClient struct {
 	UnlockMethod string
 	// HTTP
-	Client   *http.Client
+	Client   *retryablehttp.Client
 	Username string
 	Password string
@@ -44,7 +45,7 @@ func (c *httpClient) httpRequest(method string, url *url.URL, data *[]byte, what
 	}
 	// Create the request
-	req, err := http.NewRequest(method, url.String(), reader)
+	req, err := retryablehttp.NewRequest(method, url.String(), reader)
 	if err != nil {
 		return nil, fmt.Errorf("Failed to make %s HTTP request: %s", what, err)
 	}

View File

@@ -10,7 +10,7 @@ import (
 	"reflect"
 	"testing"
-	cleanhttp "github.com/hashicorp/go-cleanhttp"
+	"github.com/hashicorp/go-retryablehttp"
 	"github.com/hashicorp/terraform/state/remote"
 )
@@ -30,14 +30,14 @@ func TestHTTPClient(t *testing.T) {
 	}
 	// Test basic get/update
-	client := &httpClient{URL: url, Client: cleanhttp.DefaultClient()}
+	client := &httpClient{URL: url, Client: retryablehttp.NewClient()}
 	remote.TestClient(t, client)
 	// test just a single PUT
 	p := &httpClient{
 		URL:          url,
 		UpdateMethod: "PUT",
-		Client:       cleanhttp.DefaultClient(),
+		Client:       retryablehttp.NewClient(),
 	}
 	remote.TestClient(t, p)
@@ -49,7 +49,7 @@ func TestHTTPClient(t *testing.T) {
 		LockMethod:   "LOCK",
 		UnlockURL:    url,
 		UnlockMethod: "UNLOCK",
-		Client:       cleanhttp.DefaultClient(),
+		Client:       retryablehttp.NewClient(),
 	}
 	b := &httpClient{
 		URL: url,
@@ -58,7 +58,7 @@ func TestHTTPClient(t *testing.T) {
 		LockMethod:   "LOCK",
 		UnlockURL:    url,
 		UnlockMethod: "UNLOCK",
-		Client:       cleanhttp.DefaultClient(),
+		Client:       retryablehttp.NewClient(),
 	}
 	remote.TestRemoteLocks(t, a, b)
@@ -68,13 +68,23 @@ func TestHTTPClient(t *testing.T) {
 	defer ts.Close()
 	url, err = url.Parse(ts.URL)
-	c := &httpClient{
+	client = &httpClient{
 		URL:          url,
 		UpdateMethod: "PUT",
-		Client:       cleanhttp.DefaultClient(),
+		Client:       retryablehttp.NewClient(),
 	}
-	remote.TestClient(t, c)      // first time through: 201
-	remote.TestClient(t, c)      // second time, with identical data: 204
+	remote.TestClient(t, client) // first time through: 201
+	remote.TestClient(t, client) // second time, with identical data: 204
// test a broken backend
brokenHandler := new(testBrokenHTTPHandler)
brokenHandler.handler = new(testHTTPHandler)
ts = httptest.NewServer(http.HandlerFunc(brokenHandler.Handle))
defer ts.Close()
url, err = url.Parse(ts.URL)
client = &httpClient{URL: url, Client: retryablehttp.NewClient()}
remote.TestClient(t, client)
} }
 func assertError(t *testing.T, err error, expected string) {
@@ -149,3 +159,18 @@ func (h *testHTTPHandler) HandleWebDAV(w http.ResponseWriter, r *http.Request) {
 		w.Write([]byte(fmt.Sprintf("Unknown method: %s", r.Method)))
 	}
 }
type testBrokenHTTPHandler struct {
lastRequestWasBroken bool
handler *testHTTPHandler
}
func (h *testBrokenHTTPHandler) Handle(w http.ResponseWriter, r *http.Request) {
if h.lastRequestWasBroken {
h.lastRequestWasBroken = false
h.handler.Handle(w, r)
} else {
h.lastRequestWasBroken = true
w.WriteHeader(500)
}
}
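For context, the switch from cleanhttp to go-retryablehttp means transient server errors are retried transparently by the client. The sketch below is not part of the change; the flaky handler and the concrete retry settings are illustrative, but the RetryMax/RetryWaitMin/RetryWaitMax knobs are the same ones the backend now exposes as retry_max, retry_wait_min and retry_wait_max.

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
	"time"

	"github.com/hashicorp/go-retryablehttp"
)

func main() {
	// A deliberately flaky server: every other request fails with a 500,
	// similar in spirit to testBrokenHTTPHandler above.
	failNext := true
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if failNext {
			failNext = false
			w.WriteHeader(http.StatusInternalServerError)
			return
		}
		failNext = true
		w.WriteHeader(http.StatusOK)
	}))
	defer ts.Close()

	client := retryablehttp.NewClient()
	client.RetryMax = 2
	client.RetryWaitMin = 1 * time.Second
	client.RetryWaitMax = 30 * time.Second

	resp, err := client.Get(ts.URL)
	if err != nil {
		fmt.Println("request failed after retries:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.StatusCode) // 200: the initial 500 was retried
}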

View File

@@ -65,7 +65,7 @@ func (b *Backend) configure(ctx context.Context) error {
 	// Prepare database schema, tables, & indexes.
 	var query string
 	query = `CREATE SCHEMA IF NOT EXISTS %s`
-	if _, err := db.Query(fmt.Sprintf(query, b.schemaName)); err != nil {
+	if _, err := db.Exec(fmt.Sprintf(query, b.schemaName)); err != nil {
 		return err
 	}
 	query = `CREATE TABLE IF NOT EXISTS %s.%s (
@@ -73,11 +73,11 @@ func (b *Backend) configure(ctx context.Context) error {
 		name TEXT,
 		data TEXT
 	)`
-	if _, err := db.Query(fmt.Sprintf(query, b.schemaName, statesTableName)); err != nil {
+	if _, err := db.Exec(fmt.Sprintf(query, b.schemaName, statesTableName)); err != nil {
 		return err
 	}
 	query = `CREATE UNIQUE INDEX IF NOT EXISTS %s ON %s.%s (name)`
-	if _, err := db.Query(fmt.Sprintf(query, statesIndexName, b.schemaName, statesTableName)); err != nil {
+	if _, err := db.Exec(fmt.Sprintf(query, statesIndexName, b.schemaName, statesTableName)); err != nil {
 		return err
 	}
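The Query-to-Exec change above matters because database/sql's Query returns a *sql.Rows that holds a connection until it is closed; for DDL statements that return no rows, Exec is the appropriate call. A minimal sketch of the pattern, assuming the lib/pq driver this backend relies on; the DSN and schema name here are placeholders, not the backend's real configuration:

package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/lib/pq" // registers the "postgres" driver
)

func main() {
	// Placeholder connection string; the real backend takes conn_str from its config.
	db, err := sql.Open("postgres", "postgres://localhost/terraform_backend?sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Exec is used for statements that return no rows. Using Query here would
	// hand back a *sql.Rows that keeps a connection checked out until Close is
	// called, which is easy to leak for DDL.
	if _, err := db.Exec(`CREATE SCHEMA IF NOT EXISTS terraform_remote_state`); err != nil {
		log.Fatal(err)
	}
	fmt.Println("schema ensured")
}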

View File

@ -227,6 +227,57 @@ func (b *Remote) parseVariableValues(op *backend.Operation) (terraform.InputValu
return result, diags return result, diags
} }
func (b *Remote) costEstimation(stopCtx, cancelCtx context.Context, op *backend.Operation, r *tfe.Run) error {
if r.CostEstimation == nil {
return nil
}
if b.CLI != nil {
b.CLI.Output("\n------------------------------------------------------------------------\n")
}
logs, err := b.client.CostEstimations.Logs(stopCtx, r.CostEstimation.ID)
if err != nil {
return generalError("Failed to retrieve cost estimation logs", err)
}
scanner := bufio.NewScanner(logs)
// Retrieve the cost estimation to get its current status.
ce, err := b.client.CostEstimations.Read(stopCtx, r.CostEstimation.ID)
if err != nil {
return generalError("Failed to retrieve cost estimation", err)
}
msgPrefix := "Cost estimation"
if b.CLI != nil {
b.CLI.Output(b.Colorize().Color(msgPrefix + ":\n"))
}
for scanner.Scan() {
if b.CLI != nil {
b.CLI.Output(b.Colorize().Color(scanner.Text()))
}
}
if err := scanner.Err(); err != nil {
return generalError("Failed to read logs", err)
}
switch ce.Status {
case tfe.CostEstimationFinished:
if len(r.PolicyChecks) == 0 && r.HasChanges && op.Type == backend.OperationTypeApply && b.CLI != nil {
b.CLI.Output("\n------------------------------------------------------------------------")
}
return nil
case tfe.CostEstimationErrored:
return fmt.Errorf(msgPrefix + " errored.")
case tfe.CostEstimationCanceled:
return fmt.Errorf(msgPrefix + " canceled.")
default:
return fmt.Errorf("Unknown or unexpected cost estimation state: %s", ce.Status)
}
}
 func (b *Remote) checkPolicy(stopCtx, cancelCtx context.Context, op *backend.Operation, r *tfe.Run) error {
 	if b.CLI != nil {
 		b.CLI.Output("\n------------------------------------------------------------------------\n")
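The costEstimation helper above streams the estimation log line by line with bufio.Scanner before checking the final status. A small standalone sketch of that read loop over any io.Reader; the reader below is a stub, not the real log stream returned by CostEstimations.Logs:

package main

import (
	"bufio"
	"fmt"
	"log"
	"strings"
)

func main() {
	// Stand-in for the io.Reader returned by the cost estimation logs endpoint.
	logs := strings.NewReader("Cost estimation line 1\nCost estimation line 2\n")

	scanner := bufio.NewScanner(logs)
	for scanner.Scan() {
		fmt.Println(scanner.Text()) // the backend writes each line to b.CLI.Output
	}
	if err := scanner.Err(); err != nil {
		log.Fatalf("failed to read logs: %s", err)
	}
}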

View File

@@ -89,6 +89,9 @@ func (b *Remote) Context(op *backend.Operation) (*terraform.Context, statemgr.Fu
 	}
 	if tfeVariables != nil {
+		if op.Variables == nil {
+			op.Variables = make(map[string]backend.UnparsedVariableValue)
+		}
 		for _, v := range tfeVariables.Items {
 			if v.Sensitive {
 				v.Value = "<sensitive>"
@@ -100,12 +103,12 @@ func (b *Remote) Context(op *backend.Operation) (*terraform.Context, statemgr.Fu
 		}
 	}
-	variables, varDiags := backend.ParseVariableValues(op.Variables, config.Module.Variables)
-	diags = diags.Append(varDiags)
-	if diags.HasErrors() {
-		return nil, nil, diags
-	}
 	if op.Variables != nil {
+		variables, varDiags := backend.ParseVariableValues(op.Variables, config.Module.Variables)
+		diags = diags.Append(varDiags)
+		if diags.HasErrors() {
+			return nil, nil, diags
+		}
 		opts.Variables = variables
 	}
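The op.Variables == nil guard added above exists because assigning into a nil Go map panics; the map has to be allocated before the workspace variables can be merged into it. A tiny self-contained illustration of the general pattern, using generic names rather than the backend's types:

package main

import "fmt"

func main() {
	var vars map[string]string // nil: reads are fine, writes panic

	// vars["region"] = "us-east-1" // would panic: assignment to entry in nil map

	if vars == nil {
		vars = make(map[string]string)
	}
	vars["region"] = "us-east-1" // safe after allocation
	fmt.Println(vars)
}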

View File

@ -21,6 +21,7 @@ import (
type mockClient struct { type mockClient struct {
Applies *mockApplies Applies *mockApplies
ConfigurationVersions *mockConfigurationVersions ConfigurationVersions *mockConfigurationVersions
CostEstimations *mockCostEstimations
Organizations *mockOrganizations Organizations *mockOrganizations
Plans *mockPlans Plans *mockPlans
PolicyChecks *mockPolicyChecks PolicyChecks *mockPolicyChecks
@ -33,6 +34,7 @@ func newMockClient() *mockClient {
c := &mockClient{} c := &mockClient{}
c.Applies = newMockApplies(c) c.Applies = newMockApplies(c)
c.ConfigurationVersions = newMockConfigurationVersions(c) c.ConfigurationVersions = newMockConfigurationVersions(c)
c.CostEstimations = newMockCostEstimations(c)
c.Organizations = newMockOrganizations(c) c.Organizations = newMockOrganizations(c)
c.Plans = newMockPlans(c) c.Plans = newMockPlans(c)
c.PolicyChecks = newMockPolicyChecks(c) c.PolicyChecks = newMockPolicyChecks(c)
@ -212,6 +214,84 @@ func (m *mockConfigurationVersions) Upload(ctx context.Context, url, path string
return nil return nil
} }
type mockCostEstimations struct {
client *mockClient
estimations map[string]*tfe.CostEstimation
logs map[string]string
}
func newMockCostEstimations(client *mockClient) *mockCostEstimations {
return &mockCostEstimations{
client: client,
estimations: make(map[string]*tfe.CostEstimation),
logs: make(map[string]string),
}
}
// create is a helper function to create a mock cost estimation that uses the
// configured working directory to find the logfile.
func (m *mockCostEstimations) create(cvID, workspaceID string) (*tfe.CostEstimation, error) {
id := generateID("ce-")
ce := &tfe.CostEstimation{
ID: id,
Status: tfe.CostEstimationQueued,
}
w, ok := m.client.Workspaces.workspaceIDs[workspaceID]
if !ok {
return nil, tfe.ErrResourceNotFound
}
logfile := filepath.Join(
m.client.ConfigurationVersions.uploadPaths[cvID],
w.WorkingDirectory,
"ce.log",
)
if _, err := os.Stat(logfile); os.IsNotExist(err) {
return nil, nil
}
m.logs[ce.ID] = logfile
m.estimations[ce.ID] = ce
return ce, nil
}
func (m *mockCostEstimations) Read(ctx context.Context, costEstimationID string) (*tfe.CostEstimation, error) {
ce, ok := m.estimations[costEstimationID]
if !ok {
return nil, tfe.ErrResourceNotFound
}
return ce, nil
}
func (m *mockCostEstimations) Logs(ctx context.Context, costEstimationID string) (io.Reader, error) {
ce, ok := m.estimations[costEstimationID]
if !ok {
return nil, tfe.ErrResourceNotFound
}
logfile, ok := m.logs[ce.ID]
if !ok {
return nil, tfe.ErrResourceNotFound
}
if _, err := os.Stat(logfile); os.IsNotExist(err) {
return bytes.NewBufferString("logfile does not exist"), nil
}
logs, err := ioutil.ReadFile(logfile)
if err != nil {
return nil, err
}
ce.Status = tfe.CostEstimationFinished
return bytes.NewBuffer(logs), nil
}
// mockInput is a mock implementation of terraform.UIInput. // mockInput is a mock implementation of terraform.UIInput.
type mockInput struct { type mockInput struct {
answers map[string]string answers map[string]string
@ -652,19 +732,25 @@ func (m *mockRuns) Create(ctx context.Context, options tfe.RunCreateOptions) (*t
return nil, err return nil, err
} }
ce, err := m.client.CostEstimations.create(options.ConfigurationVersion.ID, options.Workspace.ID)
if err != nil {
return nil, err
}
pc, err := m.client.PolicyChecks.create(options.ConfigurationVersion.ID, options.Workspace.ID) pc, err := m.client.PolicyChecks.create(options.ConfigurationVersion.ID, options.Workspace.ID)
if err != nil { if err != nil {
return nil, err return nil, err
} }
 	r := &tfe.Run{
 		ID:      generateID("run-"),
 		Actions: &tfe.RunActions{IsCancelable: true},
 		Apply:   a,
-		HasChanges:  false,
-		Permissions: &tfe.RunPermissions{},
-		Plan:        p,
-		Status:      tfe.RunPending,
+		CostEstimation: ce,
+		HasChanges:     false,
+		Permissions:    &tfe.RunPermissions{},
+		Plan:           p,
+		Status:         tfe.RunPending,
 	}
if pc != nil { if pc != nil {

View File

@ -290,6 +290,14 @@ func (b *Remote) plan(stopCtx, cancelCtx context.Context, op *backend.Operation,
return r, nil return r, nil
} }
// Show any cost estimation output.
if r.CostEstimation != nil {
err = b.costEstimation(stopCtx, cancelCtx, op, r)
if err != nil {
return r, err
}
}
// Check any configured sentinel policies. // Check any configured sentinel policies.
if len(r.PolicyChecks) > 0 { if len(r.PolicyChecks) > 0 {
err = b.checkPolicy(stopCtx, cancelCtx, op, r) err = b.checkPolicy(stopCtx, cancelCtx, op, r)

View File

@ -655,6 +655,40 @@ func TestRemote_planWithWorkingDirectory(t *testing.T) {
} }
} }
func TestRemote_costEstimation(t *testing.T) {
b, bCleanup := testBackendDefault(t)
defer bCleanup()
op, configCleanup := testOperationPlan(t, "./test-fixtures/plan-cost-estimation")
defer configCleanup()
op.Workspace = backend.DefaultStateName
run, err := b.Operation(context.Background(), op)
if err != nil {
t.Fatalf("error starting operation: %v", err)
}
<-run.Done()
if run.Result != backend.OperationSuccess {
t.Fatalf("operation failed: %s", b.CLI.(*cli.MockUi).ErrorWriter.String())
}
if run.PlanEmpty {
t.Fatalf("expected a non-empty plan")
}
output := b.CLI.(*cli.MockUi).OutputWriter.String()
if !strings.Contains(output, "Running plan in the remote backend") {
t.Fatalf("expected remote backend header in output: %s", output)
}
if !strings.Contains(output, "SKU") {
t.Fatalf("expected cost estimation result in output: %s", output)
}
if !strings.Contains(output, "1 to add, 0 to change, 0 to destroy") {
t.Fatalf("expected plan summary in output: %s", output)
}
}
func TestRemote_planPolicyPass(t *testing.T) { func TestRemote_planPolicyPass(t *testing.T) {
b, bCleanup := testBackendDefault(t) b, bCleanup := testBackendDefault(t)
defer bCleanup() defer bCleanup()
@ -681,12 +715,12 @@ func TestRemote_planPolicyPass(t *testing.T) {
if !strings.Contains(output, "Running plan in the remote backend") { if !strings.Contains(output, "Running plan in the remote backend") {
t.Fatalf("expected remote backend header in output: %s", output) t.Fatalf("expected remote backend header in output: %s", output)
} }
if !strings.Contains(output, "1 to add, 0 to change, 0 to destroy") {
t.Fatalf("expected plan summery in output: %s", output)
}
if !strings.Contains(output, "Sentinel Result: true") { if !strings.Contains(output, "Sentinel Result: true") {
t.Fatalf("expected policy check result in output: %s", output) t.Fatalf("expected policy check result in output: %s", output)
} }
if !strings.Contains(output, "1 to add, 0 to change, 0 to destroy") {
t.Fatalf("expected plan summery in output: %s", output)
}
} }
func TestRemote_planPolicyHardFail(t *testing.T) { func TestRemote_planPolicyHardFail(t *testing.T) {
@ -720,12 +754,12 @@ func TestRemote_planPolicyHardFail(t *testing.T) {
if !strings.Contains(output, "Running plan in the remote backend") { if !strings.Contains(output, "Running plan in the remote backend") {
t.Fatalf("expected remote backend header in output: %s", output) t.Fatalf("expected remote backend header in output: %s", output)
} }
if !strings.Contains(output, "1 to add, 0 to change, 0 to destroy") {
t.Fatalf("expected plan summery in output: %s", output)
}
if !strings.Contains(output, "Sentinel Result: false") { if !strings.Contains(output, "Sentinel Result: false") {
t.Fatalf("expected policy check result in output: %s", output) t.Fatalf("expected policy check result in output: %s", output)
} }
if !strings.Contains(output, "1 to add, 0 to change, 0 to destroy") {
t.Fatalf("expected plan summery in output: %s", output)
}
} }
func TestRemote_planPolicySoftFail(t *testing.T) { func TestRemote_planPolicySoftFail(t *testing.T) {
@ -759,12 +793,12 @@ func TestRemote_planPolicySoftFail(t *testing.T) {
if !strings.Contains(output, "Running plan in the remote backend") { if !strings.Contains(output, "Running plan in the remote backend") {
t.Fatalf("expected remote backend header in output: %s", output) t.Fatalf("expected remote backend header in output: %s", output)
} }
if !strings.Contains(output, "1 to add, 0 to change, 0 to destroy") {
t.Fatalf("expected plan summery in output: %s", output)
}
if !strings.Contains(output, "Sentinel Result: false") { if !strings.Contains(output, "Sentinel Result: false") {
t.Fatalf("expected policy check result in output: %s", output) t.Fatalf("expected policy check result in output: %s", output)
} }
if !strings.Contains(output, "1 to add, 0 to change, 0 to destroy") {
t.Fatalf("expected plan summery in output: %s", output)
}
} }
func TestRemote_planWithRemoteError(t *testing.T) { func TestRemote_planWithRemoteError(t *testing.T) {

View File

@@ -14,11 +14,12 @@ import (
 )
 type remoteClient struct {
 	client       *tfe.Client
 	lockInfo     *state.LockInfo
 	organization string
 	runID        string
-	workspace    *tfe.Workspace
+	stateUploadErr bool
+	workspace      *tfe.Workspace
 }
 // Get the remote state.
@@ -31,12 +32,12 @@ func (r *remoteClient) Get() (*remote.Payload, error) {
 			// If no state exists, then return nil.
 			return nil, nil
 		}
-		return nil, fmt.Errorf("Error retrieving remote state: %v", err)
+		return nil, fmt.Errorf("Error retrieving state: %v", err)
 	}
 	state, err := r.client.StateVersions.Download(ctx, sv.DownloadURL)
 	if err != nil {
-		return nil, fmt.Errorf("Error downloading remote state: %v", err)
+		return nil, fmt.Errorf("Error downloading state: %v", err)
 	}
 	// If the state is empty, then return nil.
@@ -79,7 +80,8 @@ func (r *remoteClient) Put(state []byte) error {
 	// Create the new state.
 	_, err = r.client.StateVersions.Create(ctx, r.workspace.ID, options)
 	if err != nil {
-		return fmt.Errorf("Error creating remote state: %v", err)
+		r.stateUploadErr = true
+		return fmt.Errorf("Error uploading state: %v", err)
 	}
 	return nil
@@ -106,6 +108,9 @@ func (r *remoteClient) Lock(info *state.LockInfo) (string, error) {
 		Reason: tfe.String("Locked by Terraform"),
 	})
 	if err != nil {
+		if err == tfe.ErrWorkspaceLocked {
+			err = fmt.Errorf("%s (lock ID: \"%s/%s\")", err, r.organization, r.workspace.Name)
+		}
 		lockErr.Err = err
 		return "", lockErr
 	}
@@ -119,6 +124,13 @@ func (r *remoteClient) Lock(info *state.LockInfo) (string, error) {
 func (r *remoteClient) Unlock(id string) error {
 	ctx := context.Background()
+	// We first check if there was an error while uploading the latest
+	// state. If so, we will not unlock the workspace to prevent any
+	// changes from being applied until the correct state is uploaded.
+	if r.stateUploadErr {
+		return nil
+	}
 	lockErr := &state.LockError{Info: r.lockInfo}
 	// With lock info this should be treated as a normal unlock.
@@ -141,7 +153,12 @@ func (r *remoteClient) Unlock(id string) error {
 	// Verify the optional force-unlock lock ID.
 	if r.organization+"/"+r.workspace.Name != id {
-		lockErr.Err = fmt.Errorf("lock ID does not match existing lock")
+		lockErr.Err = fmt.Errorf(
+			"lock ID %q does not match existing lock ID \"%s/%s\"",
+			id,
+			r.organization,
+			r.workspace.Name,
+		)
 		return lockErr
 	}

View File

@ -0,0 +1,6 @@
+---------+------+-----+-------------+----------------------+
| PRODUCT | NAME | SKU | DESCRIPTION | DELTA |
+---------+------+-----+-------------+----------------------+
+---------+------+-----+-------------+----------------------+
| TOTAL | $0.000 USD / 720 HRS |
+---------+------+-----+-------------+----------------------+

View File

@ -0,0 +1 @@
resource "null_resource" "foo" {}

View File

@ -0,0 +1,21 @@
Terraform v0.11.7
Configuring remote state backend...
Initializing Terraform configuration...
Refreshing Terraform state in-memory prior to plan...
The refreshed state will be used to calculate this plan, but will not be
persisted to local or remote state storage.
------------------------------------------------------------------------
An execution plan has been generated and is shown below.
Resource actions are indicated with the following symbols:
+ create
Terraform will perform the following actions:
+ null_resource.foo
id: <computed>
Plan: 1 to add, 0 to change, 0 to destroy.

View File

@ -115,6 +115,7 @@ func testBackend(t *testing.T, obj cty.Value) (*Remote, func()) {
b.CLI = cli.NewMockUi() b.CLI = cli.NewMockUi()
b.client.Applies = mc.Applies b.client.Applies = mc.Applies
b.client.ConfigurationVersions = mc.ConfigurationVersions b.client.ConfigurationVersions = mc.ConfigurationVersions
b.client.CostEstimations = mc.CostEstimations
b.client.Organizations = mc.Organizations b.client.Organizations = mc.Organizations
b.client.Plans = mc.Plans b.client.Plans = mc.Plans
b.client.PolicyChecks = mc.PolicyChecks b.client.PolicyChecks = mc.PolicyChecks

View File

@@ -42,79 +42,65 @@ func dataSourceRemoteStateGetSchema() providers.Schema {
 	}
 }
-func dataSourceRemoteStateRead(d *cty.Value) (cty.Value, tfdiags.Diagnostics) {
+func dataSourceRemoteStateValidate(cfg cty.Value) tfdiags.Diagnostics {
 	var diags tfdiags.Diagnostics
// Getting the backend implicitly validates the configuration for it,
// but we can only do that if it's all known already.
if cfg.GetAttr("config").IsWhollyKnown() && cfg.GetAttr("backend").IsKnown() {
_, moreDiags := getBackend(cfg)
diags = diags.Append(moreDiags)
} else {
// Otherwise we'll just type-check the config object itself.
configTy := cfg.GetAttr("config").Type()
if configTy != cty.DynamicPseudoType && !(configTy.IsObjectType() || configTy.IsMapType()) {
diags = diags.Append(tfdiags.AttributeValue(
tfdiags.Error,
"Invalid backend configuration",
"The configuration must be an object value.",
cty.GetAttrPath("config"),
))
}
}
{
defaultsTy := cfg.GetAttr("defaults").Type()
if defaultsTy != cty.DynamicPseudoType && !(defaultsTy.IsObjectType() || defaultsTy.IsMapType()) {
diags = diags.Append(tfdiags.AttributeValue(
tfdiags.Error,
"Invalid default values",
"Defaults must be given in an object value.",
cty.GetAttrPath("defaults"),
))
}
}
return diags
}
func dataSourceRemoteStateRead(d cty.Value) (cty.Value, tfdiags.Diagnostics) {
var diags tfdiags.Diagnostics
b, moreDiags := getBackend(d)
diags = diags.Append(moreDiags)
if diags.HasErrors() {
return cty.NilVal, diags
}
 	newState := make(map[string]cty.Value)
 	newState["backend"] = d.GetAttr("backend")
+	newState["config"] = d.GetAttr("config")
-	backendType := d.GetAttr("backend").AsString()
+	workspaceName := backend.DefaultStateName
// Don't break people using the old _local syntax - but note warning above
if backendType == "_local" {
log.Println(`[INFO] Switching old (unsupported) backend "_local" to "local"`)
backendType = "local"
}
// Create the client to access our remote state
log.Printf("[DEBUG] Initializing remote state backend: %s", backendType)
f := backendInit.Backend(backendType)
if f == nil {
diags = diags.Append(tfdiags.AttributeValue(
tfdiags.Error,
"Invalid backend configuration",
fmt.Sprintf("Unknown backend type: %s", backendType),
cty.Path(nil).GetAttr("backend"),
))
return cty.NilVal, diags
}
b := f()
config := d.GetAttr("config")
if config.IsNull() {
// We'll treat this as an empty configuration and see if the backend's
// schema and validation code will accept it.
config = cty.EmptyObjectVal
}
newState["config"] = config
schema := b.ConfigSchema()
// Try to coerce the provided value into the desired configuration type.
configVal, err := schema.CoerceValue(config)
if err != nil {
diags = diags.Append(tfdiags.AttributeValue(
tfdiags.Error,
"Invalid backend configuration",
fmt.Sprintf("The given configuration is not valid for backend %q: %s.", backendType,
tfdiags.FormatError(err)),
cty.Path(nil).GetAttr("config"),
))
return cty.NilVal, diags
}
newVal, validateDiags := b.PrepareConfig(configVal)
diags = diags.Append(validateDiags)
if validateDiags.HasErrors() {
return cty.NilVal, diags
}
configVal = newVal
configureDiags := b.Configure(configVal)
if configureDiags.HasErrors() {
diags = diags.Append(configureDiags.Err())
return cty.NilVal, diags
}
-	name := backend.DefaultStateName
 	if workspaceVal := d.GetAttr("workspace"); !workspaceVal.IsNull() {
 		newState["workspace"] = workspaceVal
-		name = workspaceVal.AsString()
+		workspaceName = workspaceVal.AsString()
 	}
-	newState["workspace"] = cty.StringVal(name)
-	state, err := b.StateMgr(name)
+	newState["workspace"] = cty.StringVal(workspaceName)
+	state, err := b.StateMgr(workspaceName)
 	if err != nil {
 		diags = diags.Append(tfdiags.AttributeValue(
 			tfdiags.Error,
@@ -165,3 +151,69 @@ func dataSourceRemoteStateRead(d *cty.Value) (cty.Value, tfdiags.Diagnostics) {
 	return cty.ObjectVal(newState), diags
 }
func getBackend(cfg cty.Value) (backend.Backend, tfdiags.Diagnostics) {
var diags tfdiags.Diagnostics
backendType := cfg.GetAttr("backend").AsString()
// Don't break people using the old _local syntax - but note warning above
if backendType == "_local" {
log.Println(`[INFO] Switching old (unsupported) backend "_local" to "local"`)
backendType = "local"
}
// Create the client to access our remote state
log.Printf("[DEBUG] Initializing remote state backend: %s", backendType)
f := backendInit.Backend(backendType)
if f == nil {
diags = diags.Append(tfdiags.AttributeValue(
tfdiags.Error,
"Invalid backend configuration",
fmt.Sprintf("There is no backend type named %q.", backendType),
cty.Path(nil).GetAttr("backend"),
))
return nil, diags
}
b := f()
config := cfg.GetAttr("config")
if config.IsNull() {
// We'll treat this as an empty configuration and see if the backend's
// schema and validation code will accept it.
config = cty.EmptyObjectVal
}
if config.Type().IsMapType() { // The code below expects an object type, so we'll convert
config = cty.ObjectVal(config.AsValueMap())
}
schema := b.ConfigSchema()
// Try to coerce the provided value into the desired configuration type.
configVal, err := schema.CoerceValue(config)
if err != nil {
diags = diags.Append(tfdiags.AttributeValue(
tfdiags.Error,
"Invalid backend configuration",
fmt.Sprintf("The given configuration is not valid for backend %q: %s.", backendType,
tfdiags.FormatError(err)),
cty.Path(nil).GetAttr("config"),
))
return nil, diags
}
newVal, validateDiags := b.PrepareConfig(configVal)
diags = diags.Append(validateDiags)
if validateDiags.HasErrors() {
return nil, diags
}
configVal = newVal
configureDiags := b.Configure(configVal)
if configureDiags.HasErrors() {
diags = diags.Append(configureDiags.Err())
return nil, diags
}
return b, diags
}
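dataSourceRemoteStateValidate only instantiates the backend when the whole configuration is known, and getBackend converts a map-typed config to an object before coercing it against the backend schema. A small sketch of those two go-cty behaviors; the attribute values below are arbitrary examples, not fixtures from this repository:

package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	// An object whose nested "config" value is not yet known (for example, it
	// depends on another resource). The object itself is known, but not wholly
	// known, which is why validation defers full backend initialization.
	cfg := cty.ObjectVal(map[string]cty.Value{
		"backend": cty.StringVal("local"),
		"config":  cty.UnknownVal(cty.String),
	})
	fmt.Println(cfg.IsKnown())       // true
	fmt.Println(cfg.IsWhollyKnown()) // false

	// A map-typed config can be converted to an object type, as getBackend
	// does before schema coercion.
	m := cty.MapVal(map[string]cty.Value{
		"path": cty.StringVal("./test-fixtures/basic.tfstate"),
	})
	obj := cty.ObjectVal(m.AsValueMap())
	fmt.Println(obj.Type().IsObjectType()) // true
}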

View File

@@ -1,6 +1,7 @@
 package terraform
 import (
+	"github.com/hashicorp/terraform/tfdiags"
 	"testing"
 	"github.com/apparentlymart/go-dump/dump"
@@ -138,6 +139,80 @@ func TestState_basic(t *testing.T) {
 	}),
 	true,
 },
"wrong type for config": {
cty.ObjectVal(map[string]cty.Value{
"backend": cty.StringVal("local"),
"config": cty.StringVal("nope"),
}),
cty.NilVal,
true,
},
"wrong type for config with unknown backend": {
cty.ObjectVal(map[string]cty.Value{
"backend": cty.UnknownVal(cty.String),
"config": cty.StringVal("nope"),
}),
cty.NilVal,
true,
},
"wrong type for config with unknown config": {
cty.ObjectVal(map[string]cty.Value{
"backend": cty.StringVal("local"),
"config": cty.UnknownVal(cty.String),
}),
cty.NilVal,
true,
},
"wrong type for defaults": {
cty.ObjectVal(map[string]cty.Value{
"backend": cty.StringVal("local"),
"config": cty.ObjectVal(map[string]cty.Value{
"path": cty.StringVal("./test-fixtures/basic.tfstate"),
}),
"defaults": cty.StringVal("nope"),
}),
cty.NilVal,
true,
},
"config as map": {
cty.ObjectVal(map[string]cty.Value{
"backend": cty.StringVal("local"),
"config": cty.MapVal(map[string]cty.Value{
"path": cty.StringVal("./test-fixtures/empty.tfstate"),
}),
}),
cty.ObjectVal(map[string]cty.Value{
"backend": cty.StringVal("local"),
"config": cty.MapVal(map[string]cty.Value{
"path": cty.StringVal("./test-fixtures/empty.tfstate"),
}),
"defaults": cty.NullVal(cty.DynamicPseudoType),
"outputs": cty.EmptyObjectVal,
"workspace": cty.StringVal(backend.DefaultStateName),
}),
false,
},
"defaults as map": {
cty.ObjectVal(map[string]cty.Value{
"backend": cty.StringVal("local"),
"config": cty.ObjectVal(map[string]cty.Value{
"path": cty.StringVal("./test-fixtures/basic.tfstate"),
}),
"defaults": cty.MapValEmpty(cty.String),
}),
cty.ObjectVal(map[string]cty.Value{
"backend": cty.StringVal("local"),
"config": cty.ObjectVal(map[string]cty.Value{
"path": cty.StringVal("./test-fixtures/basic.tfstate"),
}),
"defaults": cty.MapValEmpty(cty.String),
"outputs": cty.ObjectVal(map[string]cty.Value{
"foo": cty.StringVal("bar"),
}),
"workspace": cty.StringVal(backend.DefaultStateName),
}),
false,
},
} }
for name, test := range tests { for name, test := range tests {
t.Run(name, func(t *testing.T) { t.Run(name, func(t *testing.T) {
@@ -146,7 +221,15 @@ func TestState_basic(t *testing.T) {
 	if err != nil {
 		t.Fatalf("unexpected error: %s", err)
 	}
-	got, diags := dataSourceRemoteStateRead(&config)
+	diags := dataSourceRemoteStateValidate(config)
+	var got cty.Value
+	if !diags.HasErrors() && config.IsWhollyKnown() {
+		var moreDiags tfdiags.Diagnostics
+		got, moreDiags = dataSourceRemoteStateRead(config)
+		diags = diags.Append(moreDiags)
+	}
if test.Err { if test.Err {
if !diags.HasErrors() { if !diags.HasErrors() {
@ -156,8 +239,8 @@ func TestState_basic(t *testing.T) {
t.Fatalf("unexpected errors: %s", diags.Err()) t.Fatalf("unexpected errors: %s", diags.Err())
} }
-		if !test.Want.RawEquals(got) {
+		if test.Want != cty.NilVal && !test.Want.RawEquals(got) {
 			t.Errorf("wrong result\nconfig: %sgot: %swant: %s", dump.Value(config), dump.Value(got), dump.Value(test.Want))
} }
}) })
} }

View File

@@ -40,11 +40,21 @@ func (p *Provider) PrepareProviderConfig(req providers.PrepareProviderConfigRequ
 }
 // ValidateDataSourceConfig is used to validate the data source configuration values.
-func (p *Provider) ValidateDataSourceConfig(providers.ValidateDataSourceConfigRequest) providers.ValidateDataSourceConfigResponse {
+func (p *Provider) ValidateDataSourceConfig(req providers.ValidateDataSourceConfigRequest) providers.ValidateDataSourceConfigResponse {
 	// FIXME: move the backend configuration validate call that's currently
 	// inside the read method into here so that we can catch provider configuration
 	// errors in terraform validate as well as during terraform plan.
 	var res providers.ValidateDataSourceConfigResponse
// This should not happen
if req.TypeName != "terraform_remote_state" {
res.Diagnostics.Append(fmt.Errorf("Error: unsupported data source %s", req.TypeName))
return res
}
diags := dataSourceRemoteStateValidate(req.Config)
res.Diagnostics = diags
return res return res
} }
@@ -67,7 +77,7 @@ func (p *Provider) ReadDataSource(req providers.ReadDataSourceRequest) providers
 		return res
 	}
-	newState, diags := dataSourceRemoteStateRead(&req.Config)
+	newState, diags := dataSourceRemoteStateRead(req.Config)
 	res.State = newState
 	res.Diagnostics = diags

View File

@ -239,3 +239,53 @@ data "test_data_source" "two" {
}, },
}) })
} }
func TestDataSource_planUpdate(t *testing.T) {
resource.UnitTest(t, resource.TestCase{
Providers: testAccProviders,
Steps: []resource.TestStep{
{
Config: strings.TrimSpace(`
resource "test_resource" "a" {
required = "first"
required_map = {
key = "1"
}
optional_force_new = "first"
}
data "test_data_source" "a" {
input = "${test_resource.a.computed_from_required}"
}
output "out" {
value = "${data.test_data_source.a.output}"
}
`),
},
{
Config: strings.TrimSpace(`
resource "test_resource" "a" {
required = "second"
required_map = {
key = "1"
}
optional_force_new = "second"
}
data "test_data_source" "a" {
input = "${test_resource.a.computed_from_required}"
}
output "out" {
value = "${data.test_data_source.a.output}"
}
`),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr("data.test_data_source.a", "output", "second"),
resource.TestCheckOutput("out", "second"),
),
},
},
})
}

View File

@@ -4,6 +4,7 @@ import (
 	"reflect"
 	"testing"
+	"github.com/davecgh/go-spew/spew"
 	"github.com/hashicorp/terraform/helper/schema"
 	"github.com/hashicorp/terraform/terraform"
 )
@@ -130,12 +131,12 @@ func TestDiffApply_set(t *testing.T) {
 		"id": "testID",
 	}
-	attrs, err := diff.Apply(priorAttrs, schema.LegacyResourceSchema(&schema.Resource{Schema: resSchema}).CoreConfigSchema())
+	attrs, err := diff.Apply(priorAttrs, (&schema.Resource{Schema: resSchema}).CoreConfigSchema())
 	if err != nil {
 		t.Fatal(err)
 	}
 	if !reflect.DeepEqual(attrs, expected) {
-		t.Fatalf("\nexpected: %#v\ngot: %#v\n", expected, attrs)
+		t.Fatalf("wrong result\ngot: %s\nwant: %s\n", spew.Sdump(attrs), spew.Sdump(expected))
 	}
 }

View File

@@ -20,6 +20,7 @@ func Provider() terraform.ResourceProvider {
 			"test_resource":                testResource(),
 			"test_resource_gh12183":        testResourceGH12183(),
 			"test_resource_import_other":   testResourceImportOther(),
+			"test_resource_import_removed": testResourceImportRemoved(),
 			"test_resource_with_custom_diff": testResourceCustomDiff(),
 			"test_resource_timeout":          testResourceTimeout(),
 			"test_resource_diff_suppress":    testResourceDiffSuppress(),
@@ -35,6 +36,7 @@ func Provider() terraform.ResourceProvider {
 			"test_resource_computed_set": testResourceComputedSet(),
 			"test_resource_config_mode":  testResourceConfigMode(),
 			"test_resource_nested_id":    testResourceNestedId(),
+			"test_undeleteable":          testResourceUndeleteable(),
 		},
 		DataSourcesMap: map[string]*schema.Resource{
 			"test_data_source": testDataSource(),

View File

@ -54,6 +54,11 @@ func testResource() *schema.Resource {
Computed: true, Computed: true,
ForceNew: true, ForceNew: true,
}, },
"optional_computed": {
Type: schema.TypeString,
Optional: true,
Computed: true,
},
"computed_read_only": { "computed_read_only": {
Type: schema.TypeString, Type: schema.TypeString,
Computed: true, Computed: true,
@ -141,6 +146,17 @@ func testResource() *schema.Resource {
Computed: true, Computed: true,
Description: "copied the required field during apply, and plans computed when changed", Description: "copied the required field during apply, and plans computed when changed",
}, },
// this should return unset from GetOkExists
"get_ok_exists_false": {
Type: schema.TypeBool,
Computed: true,
Optional: true,
Description: "do not set in config",
},
"int": {
Type: schema.TypeInt,
Optional: true,
},
}, },
} }
} }
@@ -186,6 +202,12 @@ func testResourceRead(d *schema.ResourceData, meta interface{}) error {
 		d.Set("set", []interface{}{})
 	}
+	// This should not show as set unless it's set in the config
+	_, ok := d.GetOkExists("get_ok_exists_false")
+	if ok {
+		return errors.New("get_ok_exists_false should not be set")
+	}
 	return nil
 }
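The get_ok_exists_false attribute and the check above exercise schema.ResourceData.GetOkExists. Roughly, GetOk treats a zero value as "not set", while GetOkExists is meant to report whether the attribute was set at all, even to a zero value. A sketch of the distinction using a hypothetical boolean attribute named "enabled", which is not part of the provider above:

package test

import (
	"github.com/hashicorp/terraform/helper/schema"
)

// checkEnabled sketches the difference for a hypothetical "enabled" attribute.
func checkEnabled(d *schema.ResourceData) (viaGetOk, viaGetOkExists bool) {
	if _, ok := d.GetOk("enabled"); ok {
		// GetOk reports false when the attribute is unset OR set to its zero
		// value (false for a bool), so it cannot distinguish the two cases.
		viaGetOk = true
	}
	if _, ok := d.GetOkExists("enabled"); ok {
		// GetOkExists is intended to report true whenever the attribute was
		// actually set in the configuration, even when set to false.
		viaGetOkExists = true
	}
	return viaGetOk, viaGetOkExists
}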

View File

@@ -18,6 +18,7 @@ func testResourceConfigMode() *schema.Resource {
 				Type:       schema.TypeList,
 				ConfigMode: schema.SchemaConfigModeAttr,
 				Optional:   true,
+				Computed:   true,
 				Elem: &schema.Resource{
 					Schema: map[string]*schema.Schema{
 						"foo": {
@@ -27,17 +28,20 @@ func testResourceConfigMode() *schema.Resource {
 					},
 				},
 			},
-			"resource_as_attr_dynamic": {
-				Type:              schema.TypeList,
-				ConfigMode:        schema.SchemaConfigModeAttr,
-				SkipCoreTypeCheck: true,
-				Optional:          true,
+			"nested_set": {
+				Type:       schema.TypeSet,
+				Optional:   true,
+				ConfigMode: schema.SchemaConfigModeAttr,
 				Elem: &schema.Resource{
 					Schema: map[string]*schema.Schema{
-						"foo": {
+						"value": {
 							Type:     schema.TypeString,
 							Optional: true,
-							Default:  "default",
+						},
+						"set": {
+							Type:     schema.TypeSet,
+							Optional: true,
+							Elem:     &schema.Schema{Type: schema.TypeString},
 						},
 					},
 				},
@@ -52,14 +56,12 @@ func testResourceConfigModeCreate(d *schema.ResourceData, meta interface{}) erro
 }
 func testResourceConfigModeRead(d *schema.ResourceData, meta interface{}) error {
-	for _, k := range []string{"resource_as_attr", "resource_as_attr_dynamic"} {
-		if l, ok := d.Get(k).([]interface{}); !ok {
-			return fmt.Errorf("%s should appear as []interface{}, not %T", k, l)
-		} else {
-			for i, item := range l {
-				if _, ok := item.(map[string]interface{}); !ok {
-					return fmt.Errorf("%s[%d] should appear as map[string]interface{}, not %T", k, i, item)
-				}
+	if l, ok := d.Get("resource_as_attr").([]interface{}); !ok {
+		return fmt.Errorf("resource_as_attr should appear as []interface{}, not %T", l)
+	} else {
+		for i, item := range l {
+			if _, ok := item.(map[string]interface{}); !ok {
+				return fmt.Errorf("resource_as_attr[%d] should appear as map[string]interface{}, not %T", i, item)
 			}
 		}
 	}

View File

@ -23,22 +23,12 @@ resource "test_resource_config_mode" "foo" {
foo = "resource_as_attr 1" foo = "resource_as_attr 1"
}, },
] ]
resource_as_attr_dynamic = [
{
foo = "resource_as_attr_dynamic 0"
},
{
},
]
} }
`), `),
Check: resource.ComposeTestCheckFunc( Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.#", "2"), resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.#", "2"),
resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.0.foo", "resource_as_attr 0"), resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.0.foo", "resource_as_attr 0"),
resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.1.foo", "resource_as_attr 1"), resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.1.foo", "resource_as_attr 1"),
resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr_dynamic.#", "2"),
resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr_dynamic.0.foo", "resource_as_attr_dynamic 0"),
resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr_dynamic.1.foo", "default"),
), ),
}, },
resource.TestStep{ resource.TestStep{
@ -58,22 +48,12 @@ resource "test_resource_config_mode" "foo" {
resource_as_attr { resource_as_attr {
foo = "resource_as_attr 1" foo = "resource_as_attr 1"
} }
resource_as_attr_dynamic = [
{
foo = "resource_as_attr_dynamic 0"
},
{
},
]
} }
`), `),
Check: resource.ComposeTestCheckFunc( Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.#", "2"), resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.#", "2"),
resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.0.foo", "resource_as_attr 0"), resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.0.foo", "resource_as_attr 0"),
resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.1.foo", "resource_as_attr 1"), resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.1.foo", "resource_as_attr 1"),
resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr_dynamic.#", "2"),
resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr_dynamic.0.foo", "resource_as_attr_dynamic 0"),
resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr_dynamic.1.foo", "default"),
), ),
}, },
resource.TestStep{ resource.TestStep{
@ -84,29 +64,21 @@ resource "test_resource_config_mode" "foo" {
foo = "resource_as_attr 0 updated" foo = "resource_as_attr 0 updated"
}, },
] ]
resource_as_attr_dynamic = [
{
},
]
} }
`), `),
Check: resource.ComposeTestCheckFunc( Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.#", "1"), resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.#", "1"),
resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.0.foo", "resource_as_attr 0 updated"), resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.0.foo", "resource_as_attr 0 updated"),
resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr_dynamic.#", "1"),
resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr_dynamic.0.foo", "default"),
), ),
}, },
resource.TestStep{ resource.TestStep{
Config: strings.TrimSpace(` Config: strings.TrimSpace(`
resource "test_resource_config_mode" "foo" { resource "test_resource_config_mode" "foo" {
resource_as_attr = [] resource_as_attr = []
resource_as_attr_dynamic = []
} }
`), `),
Check: resource.ComposeTestCheckFunc( Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.#", "0"), resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.#", "0"),
resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr_dynamic.#", "0"),
), ),
}, },
resource.TestStep{ resource.TestStep{
@ -116,9 +88,33 @@ resource "test_resource_config_mode" "foo" {
`), `),
Check: resource.ComposeTestCheckFunc( Check: resource.ComposeTestCheckFunc(
resource.TestCheckNoResourceAttr("test_resource_config_mode.foo", "resource_as_attr.#"), resource.TestCheckNoResourceAttr("test_resource_config_mode.foo", "resource_as_attr.#"),
resource.TestCheckNoResourceAttr("test_resource_config_mode.foo", "resource_as_attr_dynamic.#"),
), ),
}, },
}, },
}) })
} }
func TestResourceConfigMode_nestedSet(t *testing.T) {
resource.UnitTest(t, resource.TestCase{
Providers: testAccProviders,
CheckDestroy: testAccCheckResourceDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: strings.TrimSpace(`
resource "test_resource_config_mode" "foo" {
resource_as_attr = []
nested_set {
value = "a"
}
nested_set {
value = "b"
set = []
}
}
`),
Check: resource.ComposeTestCheckFunc(),
},
},
})
}

View File

@ -0,0 +1,60 @@
package test
import (
"github.com/hashicorp/terraform/helper/schema"
)
func testResourceImportRemoved() *schema.Resource {
return &schema.Resource{
Create: testResourceImportRemovedCreate,
Read: testResourceImportRemovedRead,
Delete: testResourceImportRemovedDelete,
Update: testResourceImportRemovedUpdate,
Importer: &schema.ResourceImporter{
State: testResourceImportRemovedImportState,
},
Schema: map[string]*schema.Schema{
"removed": {
Type: schema.TypeInt,
Optional: true,
Computed: true,
Removed: "do not use",
},
},
}
}
func testResourceImportRemovedImportState(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
var results []*schema.ResourceData
results = append(results, d)
{
other := testResourceDefaults()
od := other.Data(nil)
od.SetType("test_resource_import_removed")
od.SetId("foo")
results = append(results, od)
}
return results, nil
}
func testResourceImportRemovedCreate(d *schema.ResourceData, meta interface{}) error {
d.SetId("foo")
return testResourceImportRemovedRead(d, meta)
}
func testResourceImportRemovedUpdate(d *schema.ResourceData, meta interface{}) error {
return testResourceImportRemovedRead(d, meta)
}
func testResourceImportRemovedRead(d *schema.ResourceData, meta interface{}) error {
return nil
}
func testResourceImportRemovedDelete(d *schema.ResourceData, meta interface{}) error {
return nil
}

View File

@ -0,0 +1,41 @@
package test
import (
"strings"
"testing"
"github.com/hashicorp/terraform/helper/resource"
)
func TestResourceImportRemoved(t *testing.T) {
resource.UnitTest(t, resource.TestCase{
Providers: testAccProviders,
CheckDestroy: testAccCheckResourceDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: strings.TrimSpace(`
resource "test_resource_import_removed" "foo" {
}
`),
},
{
ImportState: true,
ResourceName: "test_resource_import_removed.foo",
// This is attempting to guard against regressions of:
// https://github.com/hashicorp/terraform/issues/20985
//
// Removed attributes are generally not populated during Create,
// Update, Read, or Import by provider code but due to our
// legacy diff format being lossy they end up getting populated
// with zero values during shimming in all cases except Import,
// which doesn't go through a diff.
//
// This is testing that the shimming inconsistency won't cause
// ImportStateVerify failures for these, since we now ignore
// attributes marked as Removed when comparing.
ImportStateVerify: true,
},
},
})
}
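A minimal sketch, not part of the diff above, assuming the same test fixtures and package imports (strings, testing, helper/resource): before the Removed-aware comparison, a step could exclude such attributes from the verify diff by hand via TestStep.ImportStateVerifyIgnore.
func TestResourceImportRemoved_verifyIgnoreSketch(t *testing.T) {
    resource.UnitTest(t, resource.TestCase{
        Providers:    testAccProviders,
        CheckDestroy: testAccCheckResourceDestroy,
        Steps: []resource.TestStep{
            {
                Config: strings.TrimSpace(`
resource "test_resource_import_removed" "foo" {
}
`),
            },
            {
                ImportState:       true,
                ResourceName:      "test_resource_import_removed.foo",
                ImportStateVerify: true,
                // Skip the Removed attribute explicitly instead of relying
                // on the Removed-aware comparison exercised above.
                ImportStateVerifyIgnore: []string{"removed"},
            },
        },
    })
}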

View File

@ -95,7 +95,19 @@ func testResourceList() *schema.Resource {
Computed: true, Computed: true,
Elem: &schema.Schema{Type: schema.TypeString}, Elem: &schema.Schema{Type: schema.TypeString},
}, },
"min_items": {
Type: schema.TypeList,
Optional: true,
MinItems: 2,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"val": {
Type: schema.TypeString,
Required: true,
},
},
},
},
"never_set": { "never_set": {
Type: schema.TypeList, Type: schema.TypeList,
MaxItems: 1, MaxItems: 1,

View File

@ -1,6 +1,7 @@
package test package test
import ( import (
"regexp"
"strings" "strings"
"testing" "testing"
@ -447,3 +448,88 @@ resource "test_resource_list" "bar" {
}, },
}) })
} }
func TestResourceList_dynamicList(t *testing.T) {
resource.UnitTest(t, resource.TestCase{
Providers: testAccProviders,
CheckDestroy: testAccCheckResourceDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: strings.TrimSpace(`
resource "test_resource_list" "a" {
dependent_list {
val = "a"
}
dependent_list {
val = "b"
}
}
resource "test_resource_list" "b" {
list_block {
string = "constant"
}
dynamic "list_block" {
for_each = test_resource_list.a.computed_list
content {
string = list_block.value
}
}
}
`),
Check: resource.ComposeTestCheckFunc(),
},
},
})
}
func TestResourceList_dynamicMinItems(t *testing.T) {
resource.UnitTest(t, resource.TestCase{
Providers: testAccProviders,
CheckDestroy: testAccCheckResourceDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: strings.TrimSpace(`
variable "a" {
type = list(number)
default = [1]
}
resource "test_resource_list" "b" {
dynamic "min_items" {
for_each = var.a
content {
val = "foo"
}
}
}
`),
ExpectError: regexp.MustCompile(`attribute supports 2`),
},
resource.TestStep{
Config: strings.TrimSpace(`
resource "test_resource_list" "a" {
dependent_list {
val = "a"
}
dependent_list {
val = "b"
}
}
resource "test_resource_list" "b" {
list_block {
string = "constant"
}
dynamic "min_items" {
for_each = test_resource_list.a.computed_list
content {
val = min_items.value
}
}
}
`),
},
},
})
}

View File

@ -3,6 +3,7 @@ package test
import ( import (
"fmt" "fmt"
"github.com/hashicorp/terraform/config"
"github.com/hashicorp/terraform/helper/schema" "github.com/hashicorp/terraform/helper/schema"
) )
@ -22,6 +23,25 @@ func testResourceMap() *schema.Resource {
Type: schema.TypeMap, Type: schema.TypeMap,
Optional: true, Optional: true,
Elem: &schema.Schema{Type: schema.TypeString}, Elem: &schema.Schema{Type: schema.TypeString},
ValidateFunc: func(v interface{}, _ string) ([]string, []error) {
errs := []error{}
for k, v := range v.(map[string]interface{}) {
if v == config.UnknownVariableValue {
errs = append(errs, fmt.Errorf("unknown value in ValidateFunc: %q=%q", k, v))
}
}
return nil, errs
},
},
"map_values": {
Type: schema.TypeMap,
Optional: true,
Elem: &schema.Schema{Type: schema.TypeString},
},
"computed_map": {
Type: schema.TypeMap,
Computed: true,
Elem: &schema.Schema{Type: schema.TypeString},
}, },
}, },
} }
@ -35,15 +55,20 @@ func testResourceMapCreate(d *schema.ResourceData, meta interface{}) error {
} }
d.SetId("testId") d.SetId("testId")
return nil return testResourceMapRead(d, meta)
} }
func testResourceMapRead(d *schema.ResourceData, meta interface{}) error { func testResourceMapRead(d *schema.ResourceData, meta interface{}) error {
var computedMap map[string]interface{}
if v, ok := d.GetOk("map_values"); ok {
computedMap = v.(map[string]interface{})
}
d.Set("computed_map", computedMap)
return nil return nil
} }
func testResourceMapUpdate(d *schema.ResourceData, meta interface{}) error { func testResourceMapUpdate(d *schema.ResourceData, meta interface{}) error {
return nil return testResourceMapRead(d, meta)
} }
func testResourceMapDelete(d *schema.ResourceData, meta interface{}) error { func testResourceMapDelete(d *schema.ResourceData, meta interface{}) error {

View File

@ -30,3 +30,109 @@ resource "test_resource_map" "foobar" {
}, },
}) })
} }
func TestResourceMap_basicWithVars(t *testing.T) {
resource.UnitTest(t, resource.TestCase{
Providers: testAccProviders,
CheckDestroy: testAccCheckResourceDestroy,
Steps: []resource.TestStep{
{
Config: `
variable "a" {
default = "a"
}
variable "b" {
default = "b"
}
resource "test_resource_map" "foobar" {
name = "test"
map_of_three = {
one = var.a
two = var.b
empty = ""
}
}`,
Check: resource.ComposeTestCheckFunc(),
},
},
})
}
func TestResourceMap_computedMap(t *testing.T) {
resource.UnitTest(t, resource.TestCase{
Providers: testAccProviders,
CheckDestroy: testAccCheckResourceDestroy,
Steps: []resource.TestStep{
{
Config: `
resource "test_resource_map" "foobar" {
name = "test"
map_of_three = {
one = "one"
two = "two"
empty = ""
}
map_values = {
a = "1"
b = "2"
}
}`,
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr(
"test_resource_map.foobar", "computed_map.a", "1",
),
resource.TestCheckResourceAttr(
"test_resource_map.foobar", "computed_map.b", "2",
),
),
},
{
Config: `
resource "test_resource_map" "foobar" {
name = "test"
map_of_three = {
one = "one"
two = "two"
empty = ""
}
map_values = {
a = "3"
b = "4"
}
}`,
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr(
"test_resource_map.foobar", "computed_map.a", "3",
),
resource.TestCheckResourceAttr(
"test_resource_map.foobar", "computed_map.b", "4",
),
),
},
{
Config: `
resource "test_resource_map" "foobar" {
name = "test"
map_of_three = {
one = "one"
two = "two"
empty = ""
}
map_values = {
a = "3"
}
}`,
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr(
"test_resource_map.foobar", "computed_map.a", "3",
),
resource.TestCheckNoResourceAttr(
"test_resource_map.foobar", "computed_map.b",
),
),
},
},
})
}

View File

@ -973,3 +973,80 @@ resource "test_resource" "bar" {
}, },
}) })
} }
func TestResource_optionalComputedBool(t *testing.T) {
resource.UnitTest(t, resource.TestCase{
Providers: testAccProviders,
CheckDestroy: testAccCheckResourceDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: strings.TrimSpace(`
resource "test_resource" "foo" {
required = "yep"
required_map = {
key = "value"
}
}
`),
},
},
})
}
func TestResource_replacedOptionalComputed(t *testing.T) {
resource.UnitTest(t, resource.TestCase{
Providers: testAccProviders,
CheckDestroy: testAccCheckResourceDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: strings.TrimSpace(`
resource "test_resource_nested" "a" {
}
resource "test_resource" "foo" {
required = "yep"
required_map = {
key = "value"
}
optional_computed = test_resource_nested.a.id
}
`),
},
resource.TestStep{
Config: strings.TrimSpace(`
resource "test_resource_nested" "b" {
}
resource "test_resource" "foo" {
required = "yep"
required_map = {
key = "value"
}
optional_computed = test_resource_nested.b.id
}
`),
},
},
})
}
func TestResource_floatInIntAttr(t *testing.T) {
resource.UnitTest(t, resource.TestCase{
Providers: testAccProviders,
CheckDestroy: testAccCheckResourceDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: strings.TrimSpace(`
resource "test_resource" "foo" {
required = "yep"
required_map = {
key = "value"
}
int = 40.2
}
`),
ExpectError: regexp.MustCompile(`must be a whole number, got 40.2`),
},
},
})
}

View File

@ -27,6 +27,27 @@ resource "test_resource_timeout" "foo" {
}, },
}) })
} }
func TestResourceTimeout_delete(t *testing.T) {
// If the delete timeout isn't saved in state for use during destroy, the cleanup here will
// fail because the default is only 20m.
resource.UnitTest(t, resource.TestCase{
Providers: testAccProviders,
CheckDestroy: testAccCheckResourceDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: strings.TrimSpace(`
resource "test_resource_timeout" "foo" {
delete_delay = "25m"
timeouts {
delete = "30m"
}
}
`),
},
},
})
}
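A minimal sketch of the provider side this test leans on, assuming the helper/schema API (time and helper/schema imports omitted; the fixture's real implementation may differ): the configured delete timeout has to be read back through d.Timeout at destroy time.
func testResourceTimeoutSketch() *schema.Resource {
    return &schema.Resource{
        Create: func(d *schema.ResourceData, meta interface{}) error {
            d.SetId("sketch")
            return nil
        },
        Read: func(d *schema.ResourceData, meta interface{}) error { return nil },
        Delete: func(d *schema.ResourceData, meta interface{}) error {
            // The effective value combines the schema default with any
            // "timeouts { delete = ... }" block persisted in state, which is
            // what lets the 25m delete_delay above finish under a 30m limit.
            wait := d.Timeout(schema.TimeoutDelete)
            _ = wait // e.g. feed into a resource.StateChangeConf Timeout
            return nil
        },
        Timeouts: &schema.ResourceTimeout{
            Delete: schema.DefaultTimeout(20 * time.Minute),
        },
        Schema: map[string]*schema.Schema{},
    }
}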
func TestResourceTimeout_update(t *testing.T) { func TestResourceTimeout_update(t *testing.T) {
resource.UnitTest(t, resource.TestCase{ resource.UnitTest(t, resource.TestCase{
Providers: testAccProviders, Providers: testAccProviders,

View File

@ -0,0 +1,30 @@
package test
import (
"fmt"
"github.com/hashicorp/terraform/helper/schema"
)
func testResourceUndeleteable() *schema.Resource {
return &schema.Resource{
Create: testResourceUndeleteableCreate,
Read: testResourceUndeleteableRead,
Delete: testResourceUndeleteableDelete,
Schema: map[string]*schema.Schema{},
}
}
func testResourceUndeleteableCreate(d *schema.ResourceData, meta interface{}) error {
d.SetId("placeholder")
return testResourceUndeleteableRead(d, meta)
}
func testResourceUndeleteableRead(d *schema.ResourceData, meta interface{}) error {
return nil
}
func testResourceUndeleteableDelete(d *schema.ResourceData, meta interface{}) error {
return fmt.Errorf("test_undeleteable always fails deletion (use terraform state rm if you really want to delete it)")
}

View File

@ -15,6 +15,7 @@ import (
"time" "time"
"github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/mitchellh/cli" "github.com/mitchellh/cli"
"github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty"
@ -677,6 +678,9 @@ func TestApply_plan_remoteState(t *testing.T) {
"username": cty.NullVal(cty.String), "username": cty.NullVal(cty.String),
"password": cty.NullVal(cty.String), "password": cty.NullVal(cty.String),
"skip_cert_verification": cty.NullVal(cty.Bool), "skip_cert_verification": cty.NullVal(cty.Bool),
"retry_max": cty.NullVal(cty.String),
"retry_wait_min": cty.NullVal(cty.String),
"retry_wait_max": cty.NullVal(cty.String),
}) })
backendConfigRaw, err := plans.NewDynamicValue(backendConfig, backendConfig.Type()) backendConfigRaw, err := plans.NewDynamicValue(backendConfig, backendConfig.Type())
if err != nil { if err != nil {
@ -1389,7 +1393,7 @@ func TestApply_backup(t *testing.T) {
actual := backupState.RootModule().Resources["test_instance.foo"] actual := backupState.RootModule().Resources["test_instance.foo"]
expected := originalState.RootModule().Resources["test_instance.foo"] expected := originalState.RootModule().Resources["test_instance.foo"]
if !cmp.Equal(actual, expected) { if !cmp.Equal(actual, expected, cmpopts.EquateEmpty()) {
t.Fatalf( t.Fatalf(
"wrong aws_instance.foo state\n%s", "wrong aws_instance.foo state\n%s",
cmp.Diff(expected, actual, cmp.Transformer("bytesAsString", func(b []byte) string { cmp.Diff(expected, actual, cmp.Transformer("bytesAsString", func(b []byte) string {
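The switch to cmpopts.EquateEmpty above makes nil and empty collections compare as equal; a small standalone sketch of the behavior being relied on:
package main
import (
    "fmt"
    "github.com/google/go-cmp/cmp"
    "github.com/google/go-cmp/cmp/cmpopts"
)
func main() {
    var a []string  // nil slice, as a round-tripped state can produce
    b := []string{} // empty but non-nil slice
    fmt.Println(cmp.Equal(a, b))                        // false: nil != empty by default
    fmt.Println(cmp.Equal(a, b, cmpopts.EquateEmpty())) // true: both have length zero
}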

View File

@ -7,8 +7,6 @@ import (
"encoding/json" "encoding/json"
"flag" "flag"
"fmt" "fmt"
"github.com/hashicorp/terraform/internal/initwd"
"github.com/hashicorp/terraform/registry"
"io" "io"
"io/ioutil" "io/ioutil"
"log" "log"
@ -21,6 +19,9 @@ import (
"syscall" "syscall"
"testing" "testing"
"github.com/hashicorp/terraform/internal/initwd"
"github.com/hashicorp/terraform/registry"
"github.com/hashicorp/terraform/addrs" "github.com/hashicorp/terraform/addrs"
"github.com/hashicorp/terraform/configs" "github.com/hashicorp/terraform/configs"
"github.com/hashicorp/terraform/configs/configload" "github.com/hashicorp/terraform/configs/configload"
@ -266,7 +267,10 @@ func testState() *states.State {
Type: "test", Type: "test",
}.Absolute(addrs.RootModuleInstance), }.Absolute(addrs.RootModuleInstance),
) )
}) // DeepCopy is used here to ensure our synthetic state exactly matches
// a state that will have been copied during the command
// operation, and all fields have been copied correctly.
}).DeepCopy()
} }
// writeStateForTesting is a helper that writes the given naked state to the // writeStateForTesting is a helper that writes the given naked state to the
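A minimal sketch of why the DeepCopy above matters, assuming the states and addrs packages used elsewhere in this commit (imports omitted); the child-module call is purely illustrative.
original := states.NewState()
copied := original.DeepCopy()
// DeepCopy walks the whole value tree, so mutating the copy leaves the
// original untouched; comparing tests against a copied state therefore
// matches a state that a command has already run through its own copies.
copied.EnsureModule(addrs.RootModuleInstance.Child("child", addrs.NoKey))
fmt.Println(len(original.Modules), len(copied.Modules)) // 1 2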

View File

@ -7,7 +7,6 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/davecgh/go-spew/spew"
"github.com/hashicorp/terraform/e2e" "github.com/hashicorp/terraform/e2e"
) )
@ -41,11 +40,11 @@ func TestPlanApplyInAutomation(t *testing.T) {
// Make sure we actually downloaded the plugins, rather than picking up // Make sure we actually downloaded the plugins, rather than picking up
// copies that might be already installed globally on the system. // copies that might be already installed globally on the system.
if !strings.Contains(stdout, "- Downloading plugin for provider \"template\"") { if !strings.Contains(stdout, "- Downloading plugin for provider \"template") {
t.Errorf("template provider download message is missing from init output:\n%s", stdout) t.Errorf("template provider download message is missing from init output:\n%s", stdout)
t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)") t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)")
} }
if !strings.Contains(stdout, "- Downloading plugin for provider \"null\"") { if !strings.Contains(stdout, "- Downloading plugin for provider \"null") {
t.Errorf("null provider download message is missing from init output:\n%s", stdout) t.Errorf("null provider download message is missing from init output:\n%s", stdout)
t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)") t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)")
} }
@ -71,14 +70,11 @@ func TestPlanApplyInAutomation(t *testing.T) {
t.Fatalf("failed to read plan file: %s", err) t.Fatalf("failed to read plan file: %s", err)
} }
stateResources := plan.State.RootModule().Resources // stateResources := plan.Changes.Resources
diffResources := plan.Diff.RootModule().Resources diffResources := plan.Changes.Resources
if len(stateResources) != 1 || stateResources["data.template_file.test"] == nil { if len(diffResources) != 1 || diffResources[0].Addr.String() != "null_resource.test" {
t.Errorf("incorrect state in plan; want just data.template_file.test to have been rendered, but have:\n%s", spew.Sdump(stateResources)) t.Errorf("incorrect number of resources in plan")
}
if len(diffResources) != 1 || diffResources["null_resource.test"] == nil {
t.Errorf("incorrect diff in plan; want just null_resource.test to have been rendered, but have:\n%s", spew.Sdump(diffResources))
} }
//// APPLY //// APPLY
@ -96,9 +92,9 @@ func TestPlanApplyInAutomation(t *testing.T) {
t.Fatalf("failed to read state file: %s", err) t.Fatalf("failed to read state file: %s", err)
} }
stateResources = state.RootModule().Resources stateResources := state.RootModule().Resources
var gotResources []string var gotResources []string
for n := range stateResources { for n, _ := range stateResources {
gotResources = append(gotResources, n) gotResources = append(gotResources, n)
} }
sort.Strings(gotResources) sort.Strings(gotResources)
@ -139,11 +135,11 @@ func TestAutoApplyInAutomation(t *testing.T) {
// Make sure we actually downloaded the plugins, rather than picking up // Make sure we actually downloaded the plugins, rather than picking up
// copies that might be already installed globally on the system. // copies that might be already installed globally on the system.
if !strings.Contains(stdout, "- Downloading plugin for provider \"template\"") { if !strings.Contains(stdout, "- Downloading plugin for provider \"template") {
t.Errorf("template provider download message is missing from init output:\n%s", stdout) t.Errorf("template provider download message is missing from init output:\n%s", stdout)
t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)") t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)")
} }
if !strings.Contains(stdout, "- Downloading plugin for provider \"null\"") { if !strings.Contains(stdout, "- Downloading plugin for provider \"null") {
t.Errorf("null provider download message is missing from init output:\n%s", stdout) t.Errorf("null provider download message is missing from init output:\n%s", stdout)
t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)") t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)")
} }
@ -206,11 +202,11 @@ func TestPlanOnlyInAutomation(t *testing.T) {
// Make sure we actually downloaded the plugins, rather than picking up // Make sure we actually downloaded the plugins, rather than picking up
// copies that might be already installed globally on the system. // copies that might be already installed globally on the system.
if !strings.Contains(stdout, "- Downloading plugin for provider \"template\"") { if !strings.Contains(stdout, "- Downloading plugin for provider \"template") {
t.Errorf("template provider download message is missing from init output:\n%s", stdout) t.Errorf("template provider download message is missing from init output:\n%s", stdout)
t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)") t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)")
} }
if !strings.Contains(stdout, "- Downloading plugin for provider \"null\"") { if !strings.Contains(stdout, "- Downloading plugin for provider \"null") {
t.Errorf("null provider download message is missing from init output:\n%s", stdout) t.Errorf("null provider download message is missing from init output:\n%s", stdout)
t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)") t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)")
} }

View File

@ -39,7 +39,7 @@ func TestInitProviders(t *testing.T) {
t.Errorf("success message is missing from output:\n%s", stdout) t.Errorf("success message is missing from output:\n%s", stdout)
} }
if !strings.Contains(stdout, "- Downloading plugin for provider \"template\"") { if !strings.Contains(stdout, "- Downloading plugin for provider \"template\" (terraform-providers/template)") {
t.Errorf("provider download message is missing from output:\n%s", stdout) t.Errorf("provider download message is missing from output:\n%s", stdout)
t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)") t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)")
} }
@ -112,10 +112,10 @@ func TestInitProviders_pluginCache(t *testing.T) {
stderr := cmd.Stderr.(*bytes.Buffer).String() stderr := cmd.Stderr.(*bytes.Buffer).String()
if stderr != "" { if stderr != "" {
t.Errorf("unexpected stderr output:\n%s", stderr) t.Errorf("unexpected stderr output:\n%s\n", stderr)
} }
path := fmt.Sprintf(".terraform/plugins/%s_%s/terraform-provider-template_v0.1.0_x4", runtime.GOOS, runtime.GOARCH) path := fmt.Sprintf(".terraform/plugins/%s_%s/terraform-provider-template_v2.1.0_x4", runtime.GOOS, runtime.GOARCH)
content, err := tf.ReadFile(path) content, err := tf.ReadFile(path)
if err != nil { if err != nil {
t.Fatalf("failed to read installed plugin from %s: %s", path, err) t.Fatalf("failed to read installed plugin from %s: %s", path, err)
@ -124,11 +124,11 @@ func TestInitProviders_pluginCache(t *testing.T) {
t.Errorf("template plugin was not installed from local cache") t.Errorf("template plugin was not installed from local cache")
} }
if !tf.FileExists(fmt.Sprintf(".terraform/plugins/%s_%s/terraform-provider-null_v0.1.0_x4", runtime.GOOS, runtime.GOARCH)) { if !tf.FileExists(fmt.Sprintf(".terraform/plugins/%s_%s/terraform-provider-null_v2.1.0_x4", runtime.GOOS, runtime.GOARCH)) {
t.Errorf("null plugin was not installed") t.Errorf("null plugin was not installed")
} }
if !tf.FileExists(fmt.Sprintf("cache/%s_%s/terraform-provider-null_v0.1.0_x4", runtime.GOOS, runtime.GOARCH)) { if !tf.FileExists(fmt.Sprintf("cache/%s_%s/terraform-provider-null_v2.1.0_x4", runtime.GOOS, runtime.GOARCH)) {
t.Errorf("null plugin is not in cache after install") t.Errorf("null plugin is not in cache after install")
} }
} }

View File

@ -56,10 +56,4 @@ func skipIfCannotAccessNetwork(t *testing.T) {
if !canAccessNetwork() { if !canAccessNetwork() {
t.Skip("network access not allowed; use TF_ACC=1 to enable") t.Skip("network access not allowed; use TF_ACC=1 to enable")
} }
// During the early part of the Terraform v0.12 release process, certain
// upstream resources are not yet ready to support it and so these
// tests cannot be run. These will be re-enabled prior to Terraform v0.12.0
// final.
t.Skip("all tests with external network access are temporarily disabled until upstream services are updated")
} }

View File

@ -38,11 +38,11 @@ func TestPrimarySeparatePlan(t *testing.T) {
// Make sure we actually downloaded the plugins, rather than picking up // Make sure we actually downloaded the plugins, rather than picking up
// copies that might be already installed globally on the system. // copies that might be already installed globally on the system.
if !strings.Contains(stdout, "- Downloading plugin for provider \"template\"") { if !strings.Contains(stdout, "- Downloading plugin for provider \"template") {
t.Errorf("template provider download message is missing from init output:\n%s", stdout) t.Errorf("template provider download message is missing from init output:\n%s", stdout)
t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)") t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)")
} }
if !strings.Contains(stdout, "- Downloading plugin for provider \"null\"") { if !strings.Contains(stdout, "- Downloading plugin for provider \"null") {
t.Errorf("null provider download message is missing from init output:\n%s", stdout) t.Errorf("null provider download message is missing from init output:\n%s", stdout)
t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)") t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)")
} }
@ -69,13 +69,8 @@ func TestPrimarySeparatePlan(t *testing.T) {
t.Fatalf("failed to read plan file: %s", err) t.Fatalf("failed to read plan file: %s", err)
} }
stateResources := plan.State.RootModule().Resources diffResources := plan.Changes.Resources
diffResources := plan.Diff.RootModule().Resources if len(diffResources) != 1 || diffResources[0].Addr.String() != "null_resource.test" {
if len(stateResources) != 1 || stateResources["data.template_file.test"] == nil {
t.Errorf("incorrect state in plan; want just data.template_file.test to have been rendered, but have:\n%s", spew.Sdump(stateResources))
}
if len(diffResources) != 1 || diffResources["null_resource.test"] == nil {
t.Errorf("incorrect diff in plan; want just null_resource.test to have been rendered, but have:\n%s", spew.Sdump(diffResources)) t.Errorf("incorrect diff in plan; want just null_resource.test to have been rendered, but have:\n%s", spew.Sdump(diffResources))
} }
@ -94,9 +89,9 @@ func TestPrimarySeparatePlan(t *testing.T) {
t.Fatalf("failed to read state file: %s", err) t.Fatalf("failed to read state file: %s", err)
} }
stateResources = state.RootModule().Resources stateResources := state.RootModule().Resources
var gotResources []string var gotResources []string
for n := range stateResources { for n, _ := range stateResources {
gotResources = append(gotResources, n) gotResources = append(gotResources, n)
} }
sort.Strings(gotResources) sort.Strings(gotResources)
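These end-to-end assertions now read resource changes from the 0.12 plan format; a minimal sketch of that traversal, assuming a *plans.Plan value named plan as read above and a fmt import:
for _, rc := range plan.Changes.Resources {
    // Addr is the absolute instance address (e.g. "null_resource.test") and
    // Action is the planned operation for it (Create, Update, Delete, ...).
    fmt.Printf("%s: %s\n", rc.Addr.String(), rc.Action)
}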

View File

@ -1,7 +1,7 @@
provider "template" { provider "template" {
version = "0.1.0" version = "2.1.0"
} }
provider "null" { provider "null" {
version = "0.1.0" version = "2.1.0"
} }

View File

@ -1101,8 +1101,8 @@ func ctySequenceDiff(old, new []cty.Value) []*plans.Change {
if lcsI < len(lcs) { if lcsI < len(lcs) {
ret = append(ret, &plans.Change{ ret = append(ret, &plans.Change{
Action: plans.NoOp, Action: plans.NoOp,
Before: new[newI], Before: lcs[lcsI],
After: new[newI], After: lcs[lcsI],
}) })
// All of our indexes advance together now, since the line // All of our indexes advance together now, since the line

View File

@ -2564,13 +2564,13 @@ func TestResourceChange_nestedSet(t *testing.T) {
~ ami = "ami-BEFORE" -> "ami-AFTER" ~ ami = "ami-BEFORE" -> "ami-AFTER"
id = "i-02ae66f368e8518a9" id = "i-02ae66f368e8518a9"
- root_block_device {
- volume_type = "gp2" -> null
}
+ root_block_device { + root_block_device {
+ new_field = "new_value" + new_field = "new_value"
+ volume_type = "gp2" + volume_type = "gp2"
} }
- root_block_device {
- volume_type = "gp2" -> null
}
} }
`, `,
}, },
@ -2624,12 +2624,12 @@ func TestResourceChange_nestedSet(t *testing.T) {
~ ami = "ami-BEFORE" -> "ami-AFTER" ~ ami = "ami-BEFORE" -> "ami-AFTER"
id = "i-02ae66f368e8518a9" id = "i-02ae66f368e8518a9"
- root_block_device { # forces replacement
- volume_type = "gp2" -> null
}
+ root_block_device { # forces replacement + root_block_device { # forces replacement
+ volume_type = "different" + volume_type = "different"
} }
- root_block_device { # forces replacement
- volume_type = "gp2" -> null
}
} }
`, `,
}, },
@ -3006,6 +3006,49 @@ func TestResourceChange_nestedMap(t *testing.T) {
- volume_type = "gp2" -> null - volume_type = "gp2" -> null
} }
} }
`,
},
"in-place sequence update - deletion": {
Action: plans.Update,
Mode: addrs.ManagedResourceMode,
Before: cty.ObjectVal(map[string]cty.Value{
"list": cty.ListVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{"attr": cty.StringVal("x")}),
cty.ObjectVal(map[string]cty.Value{"attr": cty.StringVal("y")}),
}),
}),
After: cty.ObjectVal(map[string]cty.Value{
"list": cty.ListVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{"attr": cty.StringVal("y")}),
cty.ObjectVal(map[string]cty.Value{"attr": cty.StringVal("z")}),
}),
}),
RequiredReplace: cty.NewPathSet(),
Tainted: false,
Schema: &configschema.Block{
BlockTypes: map[string]*configschema.NestedBlock{
"list": {
Block: configschema.Block{
Attributes: map[string]*configschema.Attribute{
"attr": {
Type: cty.String,
Required: true,
},
},
},
Nesting: configschema.NestingList,
},
},
},
ExpectedOutput: ` # test_instance.example will be updated in-place
~ resource "test_instance" "example" {
~ list {
~ attr = "x" -> "y"
}
~ list {
~ attr = "y" -> "z"
}
}
`, `,
}, },
} }

View File

@ -45,6 +45,12 @@ func (c *GraphCommand) Run(args []string) int {
return 1 return 1
} }
// Check for user-supplied plugin path
if c.pluginPath, err = c.loadPluginPath(); err != nil {
c.Ui.Error(fmt.Sprintf("Error loading plugin path: %s", err))
return 1
}
// Check if the path is a plan // Check if the path is a plan
var plan *plans.Plan var plan *plans.Plan
planFile, err := c.PlanFile(configPath) planFile, err := c.PlanFile(configPath)

View File

@ -299,7 +299,7 @@ func (c *InitCommand) Run(args []string) int {
if back == nil { if back == nil {
// If we didn't initialize a backend then we'll try to at least // If we didn't initialize a backend then we'll try to at least
// instantiate one. This might fail if it wasn't already initalized // instantiate one. This might fail if it wasn't already initialized
// by a previous run, so we must still expect that "back" may be nil // by a previous run, so we must still expect that "back" may be nil
// in code that follows. // in code that follows.
var backDiags tfdiags.Diagnostics var backDiags tfdiags.Diagnostics
@ -675,6 +675,12 @@ func (c *InitCommand) backendConfigOverrideBody(flags rawFlags, schema *configsc
synthVals = make(map[string]cty.Value) synthVals = make(map[string]cty.Value)
} }
if len(items) == 1 && items[0].Value == "" {
// Explicitly remove all -backend-config options.
// We do this by setting an empty but non-nil ConfigOverrides.
return configs.SynthBody("-backend-config=''", synthVals), diags
}
for _, item := range items { for _, item := range items {
eq := strings.Index(item.Value, "=") eq := strings.Index(item.Value, "=")

View File

@ -1,6 +1,7 @@
package command package command
import ( import (
"encoding/json"
"fmt" "fmt"
"io/ioutil" "io/ioutil"
"log" "log"
@ -410,6 +411,113 @@ func TestInit_backendConfigKV(t *testing.T) {
} }
} }
func TestInit_backendConfigKVReInit(t *testing.T) {
// Create a temporary working directory that is empty
td := tempDir(t)
copy.CopyDir(testFixturePath("init-backend-config-kv"), td)
defer os.RemoveAll(td)
defer testChdir(t, td)()
ui := new(cli.MockUi)
c := &InitCommand{
Meta: Meta{
testingOverrides: metaOverridesForProvider(testProvider()),
Ui: ui,
},
}
args := []string{"-backend-config", "path=test"}
if code := c.Run(args); code != 0 {
t.Fatalf("bad: \n%s", ui.ErrorWriter.String())
}
ui = new(cli.MockUi)
c = &InitCommand{
Meta: Meta{
testingOverrides: metaOverridesForProvider(testProvider()),
Ui: ui,
},
}
// a second init should require no changes, nor should it change the backend.
args = []string{"-input=false"}
if code := c.Run(args); code != 0 {
t.Fatalf("bad: \n%s", ui.ErrorWriter.String())
}
// make sure the backend is configured how we expect
configState := testDataStateRead(t, filepath.Join(DefaultDataDir, DefaultStateFilename))
cfg := map[string]interface{}{}
if err := json.Unmarshal(configState.Backend.ConfigRaw, &cfg); err != nil {
t.Fatal(err)
}
if cfg["path"] != "test" {
t.Fatalf(`expected backend path="test", got path="%v"`, cfg["path"])
}
// override the -backend-config options by setting an empty value
args = []string{"-input=false", "-backend-config", ""}
if code := c.Run(args); code != 0 {
t.Fatalf("bad: \n%s", ui.ErrorWriter.String())
}
// make sure the backend is configured how we expect
configState = testDataStateRead(t, filepath.Join(DefaultDataDir, DefaultStateFilename))
cfg = map[string]interface{}{}
if err := json.Unmarshal(configState.Backend.ConfigRaw, &cfg); err != nil {
t.Fatal(err)
}
if cfg["path"] != nil {
t.Fatalf(`expected backend path="<nil>", got path="%v"`, cfg["path"])
}
}
func TestInit_backendConfigKVReInitWithConfigDiff(t *testing.T) {
// Create a temporary working directory that is empty
td := tempDir(t)
copy.CopyDir(testFixturePath("init-backend"), td)
defer os.RemoveAll(td)
defer testChdir(t, td)()
ui := new(cli.MockUi)
c := &InitCommand{
Meta: Meta{
testingOverrides: metaOverridesForProvider(testProvider()),
Ui: ui,
},
}
args := []string{"-input=false"}
if code := c.Run(args); code != 0 {
t.Fatalf("bad: \n%s", ui.ErrorWriter.String())
}
ui = new(cli.MockUi)
c = &InitCommand{
Meta: Meta{
testingOverrides: metaOverridesForProvider(testProvider()),
Ui: ui,
},
}
// a second init with identical config should require no changes, nor
// should it change the backend.
args = []string{"-input=false", "-backend-config", "path=foo"}
if code := c.Run(args); code != 0 {
t.Fatalf("bad: \n%s", ui.ErrorWriter.String())
}
// make sure the backend is configured how we expect
configState := testDataStateRead(t, filepath.Join(DefaultDataDir, DefaultStateFilename))
cfg := map[string]interface{}{}
if err := json.Unmarshal(configState.Backend.ConfigRaw, &cfg); err != nil {
t.Fatal(err)
}
if cfg["path"] != "foo" {
t.Fatalf(`expected backend path="foo", got path="%v"`, cfg["path"])
}
}
func TestInit_targetSubdir(t *testing.T) { func TestInit_targetSubdir(t *testing.T) {
// Create a temporary working directory that is empty // Create a temporary working directory that is empty
td := tempDir(t) td := tempDir(t)
@ -625,7 +733,7 @@ func TestInit_inputFalse(t *testing.T) {
} }
// A missing input=false should abort rather than loop infinitely // A missing input=false should abort rather than loop infinitely
args = []string{"-backend-config=path=bar"} args = []string{"-backend-config=path=baz"}
if code := c.Run(args); code == 0 { if code := c.Run(args); code == 0 {
t.Fatal("init should have failed", ui.OutputWriter) t.Fatal("init should have failed", ui.OutputWriter)
} }
@ -1310,3 +1418,33 @@ func TestInit_012UpgradeNeededInAutomation(t *testing.T) {
t.Errorf("looks like we incorrectly gave an upgrade command to run:\n%s", output) t.Errorf("looks like we incorrectly gave an upgrade command to run:\n%s", output)
} }
} }
func TestInit_syntaxErrorVersionSniff(t *testing.T) {
// Create a temporary working directory that is empty
td := tempDir(t)
copy.CopyDir(testFixturePath("init-sniff-version-error"), td)
defer os.RemoveAll(td)
defer testChdir(t, td)()
ui := new(cli.MockUi)
c := &InitCommand{
Meta: Meta{
testingOverrides: metaOverridesForProvider(testProvider()),
Ui: ui,
},
}
args := []string{}
if code := c.Run(args); code != 0 {
t.Fatalf("bad: \n%s", ui.ErrorWriter.String())
}
// Check output.
// Currently, this lands in the "upgrade may be needed" codepath, because
// the intentional syntax error in our test fixture is something that
// "terraform 0.12upgrade" could fix.
output := ui.OutputWriter.String()
if got, want := output, "Terraform has initialized, but configuration upgrades may be needed"; !strings.Contains(got, want) {
t.Fatalf("wrong output\ngot:\n%s\n\nwant: message containing %q", got, want)
}
}

View File

@ -238,6 +238,11 @@ func marshalModuleCalls(c *configs.Config, schemas *terraform.Schemas) map[strin
} }
func marshalModuleCall(c *configs.Config, mc *configs.ModuleCall, schemas *terraform.Schemas) moduleCall { func marshalModuleCall(c *configs.Config, mc *configs.ModuleCall, schemas *terraform.Schemas) moduleCall {
// It is possible to have a module call with a nil config.
if c == nil {
return moduleCall{}
}
ret := moduleCall{ ret := moduleCall{
Source: mc.SourceAddr, Source: mc.SourceAddr,
VersionConstraint: mc.Version.Required.String(), VersionConstraint: mc.Version.Required.String(),

View File

@ -91,12 +91,7 @@ func Marshal(
p *plans.Plan, p *plans.Plan,
sf *statefile.File, sf *statefile.File,
schemas *terraform.Schemas, schemas *terraform.Schemas,
stateSchemas *terraform.Schemas,
) ([]byte, error) { ) ([]byte, error) {
if stateSchemas == nil {
stateSchemas = schemas
}
output := newPlan() output := newPlan()
output.TerraformVersion = version.String() output.TerraformVersion = version.String()
@ -125,7 +120,7 @@ func Marshal(
// output.PriorState // output.PriorState
if sf != nil && !sf.State.Empty() { if sf != nil && !sf.State.Empty() {
output.PriorState, err = jsonstate.Marshal(sf, stateSchemas) output.PriorState, err = jsonstate.Marshal(sf, schemas)
if err != nil { if err != nil {
return nil, fmt.Errorf("error marshaling prior state: %s", err) return nil, fmt.Errorf("error marshaling prior state: %s", err)
} }
@ -210,21 +205,7 @@ func (p *plan) marshalResourceChanges(changes *plans.Changes, schemas *terraform
if err != nil { if err != nil {
return err return err
} }
afterUnknown, _ = cty.Transform(changeV.After, func(path cty.Path, val cty.Value) (cty.Value, error) { afterUnknown = cty.EmptyObjectVal
if val.IsNull() {
return cty.False, nil
}
if !val.Type().IsPrimitiveType() {
return val, nil // just pass through non-primitives; they already contain our transform results
}
if val.IsKnown() {
return cty.False, nil
}
return cty.True, nil
})
} else { } else {
filteredAfter := omitUnknowns(changeV.After) filteredAfter := omitUnknowns(changeV.After)
if filteredAfter.IsNull() { if filteredAfter.IsNull() {
@ -351,22 +332,21 @@ func (p *plan) marshalPlannedValues(changes *plans.Changes, schemas *terraform.S
// omitUnknowns recursively walks the src cty.Value and returns a new cty.Value, // omitUnknowns recursively walks the src cty.Value and returns a new cty.Value,
// omitting any unknowns. // omitting any unknowns.
//
// The result also normalizes some types: all sequence types are turned into
// tuple types and all mapping types are converted to object types, since we
// assume the result of this is just going to be serialized as JSON (and thus
// lose those distinctions) anyway.
func omitUnknowns(val cty.Value) cty.Value { func omitUnknowns(val cty.Value) cty.Value {
if val.IsWhollyKnown() {
return val
}
ty := val.Type() ty := val.Type()
switch { switch {
case val.IsNull(): case val.IsNull():
return val return val
case !val.IsKnown(): case !val.IsKnown():
return cty.NilVal return cty.NilVal
case ty.IsPrimitiveType():
return val
case ty.IsListType() || ty.IsTupleType() || ty.IsSetType(): case ty.IsListType() || ty.IsTupleType() || ty.IsSetType():
if val.LengthInt() == 0 {
return val
}
var vals []cty.Value var vals []cty.Value
it := val.ElementIterator() it := val.ElementIterator()
for it.Next() { for it.Next() {
@ -379,29 +359,12 @@ func omitUnknowns(val cty.Value) cty.Value {
vals = append(vals, cty.NullVal(v.Type())) vals = append(vals, cty.NullVal(v.Type()))
} }
} }
if len(vals) == 0 { // We use tuple types always here, because the work we did above
return cty.NilVal // may have caused the individual elements to have different types,
} // and we're doing this work to produce JSON anyway and JSON marshalling
switch { // represents all of these sequence types as an array.
case ty.IsListType(): return cty.TupleVal(vals)
return cty.ListVal(vals)
case ty.IsTupleType():
return cty.TupleVal(vals)
default:
return cty.SetVal(vals)
}
case ty.IsMapType() || ty.IsObjectType(): case ty.IsMapType() || ty.IsObjectType():
var length int
switch {
case ty.IsMapType():
length = val.LengthInt()
default:
length = len(val.Type().AttributeTypes())
}
if length == 0 {
// If there are no elements then we can't have unknowns
return val
}
vals := make(map[string]cty.Value) vals := make(map[string]cty.Value)
it := val.ElementIterator() it := val.ElementIterator()
for it.Next() { for it.Next() {
@ -411,29 +374,24 @@ func omitUnknowns(val cty.Value) cty.Value {
vals[k.AsString()] = newVal vals[k.AsString()] = newVal
} }
} }
// We use object types always here, because the work we did above
if len(vals) == 0 { // may have caused the individual elements to have different types,
return cty.NilVal // and we're doing this work to produce JSON anyway and JSON marshalling
} // represents both of these mapping types as an object.
return cty.ObjectVal(vals)
switch { default:
case ty.IsMapType(): // Should never happen, since the above should cover all types
return cty.MapVal(vals) panic(fmt.Sprintf("omitUnknowns cannot handle %#v", val))
default:
return cty.ObjectVal(vals)
}
} }
return val
} }
// recursively iterate through a cty.Value, replacing known values (including // recursively iterate through a cty.Value, replacing unknown values with
// null) with cty.True and unknown values with cty.False. // cty.True and known values (including null) with cty.False.
// //
// TODO: // The result also normalizes some types: all sequence types are turned into
// In the future, we may choose to only return unknown values. At that point, // tuple types and all mapping types are converted to object types, since we
// this will need to convert lists/sets into tuples and maps into objects, so // assume the result of this is just going to be serialized as JSON (and thus
// that the result will have a valid type. // lose those distinctions) anyway.
func unknownAsBool(val cty.Value) cty.Value { func unknownAsBool(val cty.Value) cty.Value {
ty := val.Type() ty := val.Type()
switch { switch {
@ -450,7 +408,7 @@ func unknownAsBool(val cty.Value) cty.Value {
length := val.LengthInt() length := val.LengthInt()
if length == 0 { if length == 0 {
// If there are no elements then we can't have unknowns // If there are no elements then we can't have unknowns
return cty.False return cty.EmptyTupleVal
} }
vals := make([]cty.Value, 0, length) vals := make([]cty.Value, 0, length)
it := val.ElementIterator() it := val.ElementIterator()
@ -458,14 +416,12 @@ func unknownAsBool(val cty.Value) cty.Value {
_, v := it.Element() _, v := it.Element()
vals = append(vals, unknownAsBool(v)) vals = append(vals, unknownAsBool(v))
} }
switch { // The above transform may have changed the types of some of the
case ty.IsListType(): // elements, so we'll always use a tuple here in case we've now made
return cty.ListVal(vals) // different elements have different types. Our ultimate goal is to
case ty.IsTupleType(): // marshal to JSON anyway, and all of these sequence types are
return cty.TupleVal(vals) // indistinguishable in JSON.
default: return cty.TupleVal(vals)
return cty.SetVal(vals)
}
case ty.IsMapType() || ty.IsObjectType(): case ty.IsMapType() || ty.IsObjectType():
var length int var length int
switch { switch {
@ -476,23 +432,27 @@ func unknownAsBool(val cty.Value) cty.Value {
} }
if length == 0 { if length == 0 {
// If there are no elements then we can't have unknowns // If there are no elements then we can't have unknowns
return cty.False return cty.EmptyObjectVal
} }
vals := make(map[string]cty.Value) vals := make(map[string]cty.Value)
it := val.ElementIterator() it := val.ElementIterator()
for it.Next() { for it.Next() {
k, v := it.Element() k, v := it.Element()
vals[k.AsString()] = unknownAsBool(v) vAsBool := unknownAsBool(v)
} if !vAsBool.RawEquals(cty.False) { // all of the "false"s for known values for more compact serialization
switch { vals[k.AsString()] = unknownAsBool(v)
case ty.IsMapType(): }
return cty.MapVal(vals)
default:
return cty.ObjectVal(vals)
} }
// The above transform may have changed the types of some of the
// elements, so we'll always use an object here in case we've now made
// different elements have different types. Our ultimate goal is to
// marshal to JSON anyway, and all of these mapping types are
// indistinguishable in JSON.
return cty.ObjectVal(vals)
default:
// Should never happen, since the above should cover all types
panic(fmt.Sprintf("unknownAsBool cannot handle %#v", val))
} }
return val
} }
func actionString(action string) []string { func actionString(action string) []string {
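A small standalone sketch of why the helpers above return tuples and objects rather than lists and maps, assuming only the go-cty API (the jsonplan behavior itself is as implemented above):
package main
import (
    "fmt"
    "github.com/zclconf/go-cty/cty"
    ctyjson "github.com/zclconf/go-cty/cty/json"
)
func main() {
    // After unknown leaves are dropped or replaced with booleans, two former
    // set elements can end up with different object types, so they can no
    // longer share a single cty list or map type.
    a := cty.EmptyObjectVal                                 // element whose attributes were all known
    b := cty.ObjectVal(map[string]cty.Value{"a": cty.True}) // element with one unknown attribute
    // cty.ListVal([]cty.Value{a, b}) would panic on inconsistent element
    // types; a tuple accepts them, and JSON renders both forms identically.
    v := cty.TupleVal([]cty.Value{a, b})
    j, err := ctyjson.Marshal(v, v.Type())
    if err != nil {
        panic(err)
    }
    fmt.Println(string(j)) // [{},{"a":true}]
}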

View File

@ -26,30 +26,30 @@ func TestOmitUnknowns(t *testing.T) {
}, },
{ {
cty.ListValEmpty(cty.String), cty.ListValEmpty(cty.String),
cty.ListValEmpty(cty.String), cty.EmptyTupleVal,
}, },
{ {
cty.ListVal([]cty.Value{cty.StringVal("hello")}), cty.ListVal([]cty.Value{cty.StringVal("hello")}),
cty.ListVal([]cty.Value{cty.StringVal("hello")}), cty.TupleVal([]cty.Value{cty.StringVal("hello")}),
}, },
{ {
cty.ListVal([]cty.Value{cty.NullVal(cty.String)}), cty.ListVal([]cty.Value{cty.NullVal(cty.String)}),
cty.ListVal([]cty.Value{cty.NullVal(cty.String)}), cty.TupleVal([]cty.Value{cty.NullVal(cty.String)}),
}, },
{ {
cty.ListVal([]cty.Value{cty.UnknownVal(cty.String)}), cty.ListVal([]cty.Value{cty.UnknownVal(cty.String)}),
cty.ListVal([]cty.Value{cty.NullVal(cty.String)}), cty.TupleVal([]cty.Value{cty.NullVal(cty.String)}),
}, },
{ {
cty.ListVal([]cty.Value{cty.StringVal("hello")}), cty.ListVal([]cty.Value{cty.StringVal("hello")}),
cty.ListVal([]cty.Value{cty.StringVal("hello")}), cty.TupleVal([]cty.Value{cty.StringVal("hello")}),
}, },
// //
{ {
cty.ListVal([]cty.Value{ cty.ListVal([]cty.Value{
cty.StringVal("hello"), cty.StringVal("hello"),
cty.UnknownVal(cty.String)}), cty.UnknownVal(cty.String)}),
cty.ListVal([]cty.Value{ cty.TupleVal([]cty.Value{
cty.StringVal("hello"), cty.StringVal("hello"),
cty.NullVal(cty.String), cty.NullVal(cty.String),
}), }),
@ -59,7 +59,7 @@ func TestOmitUnknowns(t *testing.T) {
"hello": cty.True, "hello": cty.True,
"world": cty.UnknownVal(cty.Bool), "world": cty.UnknownVal(cty.Bool),
}), }),
cty.MapVal(map[string]cty.Value{ cty.ObjectVal(map[string]cty.Value{
"hello": cty.True, "hello": cty.True,
}), }),
}, },
@ -70,12 +70,28 @@ func TestOmitUnknowns(t *testing.T) {
cty.StringVal("stg"), cty.StringVal("stg"),
cty.UnknownVal(cty.String), cty.UnknownVal(cty.String),
}), }),
cty.SetVal([]cty.Value{ cty.TupleVal([]cty.Value{
cty.StringVal("dev"), cty.StringVal("dev"),
cty.StringVal("foo"), cty.StringVal("foo"),
cty.StringVal("stg"), cty.StringVal("stg"),
}), }),
}, },
{
cty.SetVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{
"a": cty.UnknownVal(cty.String),
}),
cty.ObjectVal(map[string]cty.Value{
"a": cty.StringVal("known"),
}),
}),
cty.TupleVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{
"a": cty.StringVal("known"),
}),
cty.EmptyObjectVal,
}),
},
} }
for _, test := range tests { for _, test := range tests {
@ -122,39 +138,39 @@ func TestUnknownAsBool(t *testing.T) {
{ {
cty.ListValEmpty(cty.String), cty.ListValEmpty(cty.String),
cty.False, cty.EmptyTupleVal,
}, },
{ {
cty.ListVal([]cty.Value{cty.StringVal("hello")}), cty.ListVal([]cty.Value{cty.StringVal("hello")}),
cty.ListVal([]cty.Value{cty.False}), cty.TupleVal([]cty.Value{cty.False}),
}, },
{ {
cty.ListVal([]cty.Value{cty.NullVal(cty.String)}), cty.ListVal([]cty.Value{cty.NullVal(cty.String)}),
cty.ListVal([]cty.Value{cty.False}), cty.TupleVal([]cty.Value{cty.False}),
}, },
{ {
cty.ListVal([]cty.Value{cty.UnknownVal(cty.String)}), cty.ListVal([]cty.Value{cty.UnknownVal(cty.String)}),
cty.ListVal([]cty.Value{cty.True}), cty.TupleVal([]cty.Value{cty.True}),
}, },
{ {
cty.SetValEmpty(cty.String), cty.SetValEmpty(cty.String),
cty.False, cty.EmptyTupleVal,
}, },
{ {
cty.SetVal([]cty.Value{cty.StringVal("hello")}), cty.SetVal([]cty.Value{cty.StringVal("hello")}),
cty.SetVal([]cty.Value{cty.False}), cty.TupleVal([]cty.Value{cty.False}),
}, },
{ {
cty.SetVal([]cty.Value{cty.NullVal(cty.String)}), cty.SetVal([]cty.Value{cty.NullVal(cty.String)}),
cty.SetVal([]cty.Value{cty.False}), cty.TupleVal([]cty.Value{cty.False}),
}, },
{ {
cty.SetVal([]cty.Value{cty.UnknownVal(cty.String)}), cty.SetVal([]cty.Value{cty.UnknownVal(cty.String)}),
cty.SetVal([]cty.Value{cty.True}), cty.TupleVal([]cty.Value{cty.True}),
}, },
{ {
cty.EmptyTupleVal, cty.EmptyTupleVal,
cty.False, cty.EmptyTupleVal,
}, },
{ {
cty.TupleVal([]cty.Value{cty.StringVal("hello")}), cty.TupleVal([]cty.Value{cty.StringVal("hello")}),
@ -170,36 +186,70 @@ func TestUnknownAsBool(t *testing.T) {
}, },
{ {
cty.MapValEmpty(cty.String), cty.MapValEmpty(cty.String),
cty.False, cty.EmptyObjectVal,
}, },
{ {
cty.MapVal(map[string]cty.Value{"greeting": cty.StringVal("hello")}), cty.MapVal(map[string]cty.Value{"greeting": cty.StringVal("hello")}),
cty.MapVal(map[string]cty.Value{"greeting": cty.False}), cty.EmptyObjectVal,
}, },
{ {
cty.MapVal(map[string]cty.Value{"greeting": cty.NullVal(cty.String)}), cty.MapVal(map[string]cty.Value{"greeting": cty.NullVal(cty.String)}),
cty.MapVal(map[string]cty.Value{"greeting": cty.False}), cty.EmptyObjectVal,
}, },
{ {
cty.MapVal(map[string]cty.Value{"greeting": cty.UnknownVal(cty.String)}), cty.MapVal(map[string]cty.Value{"greeting": cty.UnknownVal(cty.String)}),
cty.MapVal(map[string]cty.Value{"greeting": cty.True}), cty.ObjectVal(map[string]cty.Value{"greeting": cty.True}),
}, },
{ {
cty.EmptyObjectVal, cty.EmptyObjectVal,
cty.False, cty.EmptyObjectVal,
}, },
{ {
cty.ObjectVal(map[string]cty.Value{"greeting": cty.StringVal("hello")}), cty.ObjectVal(map[string]cty.Value{"greeting": cty.StringVal("hello")}),
cty.ObjectVal(map[string]cty.Value{"greeting": cty.False}), cty.EmptyObjectVal,
}, },
{ {
cty.ObjectVal(map[string]cty.Value{"greeting": cty.NullVal(cty.String)}), cty.ObjectVal(map[string]cty.Value{"greeting": cty.NullVal(cty.String)}),
cty.ObjectVal(map[string]cty.Value{"greeting": cty.False}), cty.EmptyObjectVal,
}, },
{ {
cty.ObjectVal(map[string]cty.Value{"greeting": cty.UnknownVal(cty.String)}), cty.ObjectVal(map[string]cty.Value{"greeting": cty.UnknownVal(cty.String)}),
cty.ObjectVal(map[string]cty.Value{"greeting": cty.True}), cty.ObjectVal(map[string]cty.Value{"greeting": cty.True}),
}, },
{
cty.SetVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{
"a": cty.UnknownVal(cty.String),
}),
cty.ObjectVal(map[string]cty.Value{
"a": cty.StringVal("known"),
}),
}),
cty.TupleVal([]cty.Value{
cty.EmptyObjectVal,
cty.ObjectVal(map[string]cty.Value{
"a": cty.True,
}),
}),
},
{
cty.SetVal([]cty.Value{
cty.MapValEmpty(cty.String),
cty.MapVal(map[string]cty.Value{
"a": cty.StringVal("known"),
}),
cty.MapVal(map[string]cty.Value{
"a": cty.UnknownVal(cty.String),
}),
}),
cty.TupleVal([]cty.Value{
cty.EmptyObjectVal,
cty.ObjectVal(map[string]cty.Value{
"a": cty.True,
}),
cty.EmptyObjectVal,
}),
},
} }
for _, test := range tests { for _, test := range tests {

View File

@ -166,14 +166,14 @@ func TestMarshalPlannedOutputs(t *testing.T) {
} }
func TestMarshalPlanResources(t *testing.T) { func TestMarshalPlanResources(t *testing.T) {
tests := []struct { tests := map[string]struct {
Action plans.Action Action plans.Action
Before cty.Value Before cty.Value
After cty.Value After cty.Value
Want []resource Want []resource
Err bool Err bool
}{ }{
{ "create with unknowns": {
Action: plans.Create, Action: plans.Create,
Before: cty.NullVal(cty.EmptyObject), Before: cty.NullVal(cty.EmptyObject),
After: cty.ObjectVal(map[string]cty.Value{ After: cty.ObjectVal(map[string]cty.Value{
@ -188,18 +188,18 @@ func TestMarshalPlanResources(t *testing.T) {
Index: addrs.InstanceKey(nil), Index: addrs.InstanceKey(nil),
ProviderName: "test", ProviderName: "test",
SchemaVersion: 1, SchemaVersion: 1,
AttributeValues: attributeValues(nil), AttributeValues: attributeValues{},
}}, }},
Err: false, Err: false,
}, },
{ "delete": {
Action: plans.Delete, Action: plans.Delete,
Before: cty.NullVal(cty.EmptyObject), Before: cty.NullVal(cty.EmptyObject),
After: cty.NilVal, After: cty.NilVal,
Want: nil, Want: nil,
Err: false, Err: false,
}, },
{ "update without unknowns": {
Action: plans.Update, Action: plans.Update,
Before: cty.ObjectVal(map[string]cty.Value{ Before: cty.ObjectVal(map[string]cty.Value{
"woozles": cty.StringVal("foo"), "woozles": cty.StringVal("foo"),
@ -227,50 +227,52 @@ func TestMarshalPlanResources(t *testing.T) {
}, },
} }
for _, test := range tests { for name, test := range tests {
before, err := plans.NewDynamicValue(test.Before, test.Before.Type()) t.Run(name, func(t *testing.T) {
if err != nil { before, err := plans.NewDynamicValue(test.Before, test.Before.Type())
t.Fatal(err) if err != nil {
} t.Fatal(err)
}
after, err := plans.NewDynamicValue(test.After, test.After.Type()) after, err := plans.NewDynamicValue(test.After, test.After.Type())
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
testChange := &plans.Changes{ testChange := &plans.Changes{
Resources: []*plans.ResourceInstanceChangeSrc{ Resources: []*plans.ResourceInstanceChangeSrc{
{ {
Addr: addrs.Resource{ Addr: addrs.Resource{
Mode: addrs.ManagedResourceMode, Mode: addrs.ManagedResourceMode,
Type: "test_thing", Type: "test_thing",
Name: "example", Name: "example",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance), }.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
ProviderAddr: addrs.ProviderConfig{Type: "test"}.Absolute(addrs.RootModuleInstance), ProviderAddr: addrs.ProviderConfig{Type: "test"}.Absolute(addrs.RootModuleInstance),
ChangeSrc: plans.ChangeSrc{ ChangeSrc: plans.ChangeSrc{
Action: test.Action, Action: test.Action,
Before: before, Before: before,
After: after, After: after,
},
}, },
}, },
},
}
ris := testResourceAddrs()
got, err := marshalPlanResources(testChange, ris, testSchemas())
if test.Err {
if err == nil {
t.Fatal("succeeded; want error")
} }
return
} else if err != nil {
t.Fatalf("unexpected error: %s", err)
}
eq := reflect.DeepEqual(got, test.Want) ris := testResourceAddrs()
if !eq {
t.Fatalf("wrong result:\nGot: %#v\nWant: %#v\n", got, test.Want) got, err := marshalPlanResources(testChange, ris, testSchemas())
} if test.Err {
if err == nil {
t.Fatal("succeeded; want error")
}
return
} else if err != nil {
t.Fatalf("unexpected error: %s", err)
}
eq := reflect.DeepEqual(got, test.Want)
if !eq {
t.Fatalf("wrong result:\nGot: %#v\nWant: %#v\n", got, test.Want)
}
})
} }
} }

View File

@ -85,6 +85,14 @@ func (m *Meta) Backend(opts *BackendOpts) (backend.Enhanced, tfdiags.Diagnostics
b, backendDiags = m.backendFromConfig(opts) b, backendDiags = m.backendFromConfig(opts)
diags = diags.Append(backendDiags) diags = diags.Append(backendDiags)
if opts.Init && b != nil && !diags.HasErrors() {
// It's possible that the currently selected workspace doesn't exist, so
// we call selectWorkspace to ensure an existing workspace is selected.
if err := m.selectWorkspace(b); err != nil {
diags = diags.Append(err)
}
}
if diags.HasErrors() { if diags.HasErrors() {
return nil, diags return nil, diags
} }
@ -156,6 +164,56 @@ func (m *Meta) Backend(opts *BackendOpts) (backend.Enhanced, tfdiags.Diagnostics
return local, nil return local, nil
} }
// selectWorkspace gets a list of existing workspaces and then checks
// if the currently selected workspace is valid. If not, it will ask
// the user to select a workspace from the list.
func (m *Meta) selectWorkspace(b backend.Backend) error {
workspaces, err := b.Workspaces()
if err == backend.ErrWorkspacesNotSupported {
return nil
}
if err != nil {
return fmt.Errorf("Failed to get existing workspaces: %s", err)
}
if len(workspaces) == 0 {
return fmt.Errorf(strings.TrimSpace(errBackendNoExistingWorkspaces))
}
// Get the currently selected workspace.
workspace := m.Workspace()
// Check if any of the existing workspaces matches the selected
// workspace and create a numbered list of existing workspaces.
var list strings.Builder
for i, w := range workspaces {
if w == workspace {
return nil
}
fmt.Fprintf(&list, "%d. %s\n", i+1, w)
}
// If the selected workspace doesn't exist, ask the user to select
// a workspace from the list of existing workspaces.
v, err := m.UIInput().Input(context.Background(), &terraform.InputOpts{
Id: "select-workspace",
Query: fmt.Sprintf(
"\n[reset][bold][yellow]The currently selected workspace (%s) does not exist.[reset]",
workspace),
Description: fmt.Sprintf(
strings.TrimSpace(inputBackendSelectWorkspace), list.String()),
})
if err != nil {
return fmt.Errorf("Failed to select workspace: %s", err)
}
idx, err := strconv.Atoi(v)
if err != nil || (idx < 1 || idx > len(workspaces)) {
return fmt.Errorf("Failed to select workspace: input not a valid number")
}
return m.SetWorkspace(workspaces[idx-1])
}
// BackendForPlan is similar to Backend, but uses backend settings that were // BackendForPlan is similar to Backend, but uses backend settings that were
// stored in a plan. // stored in a plan.
// //
@ -185,9 +243,8 @@ func (m *Meta) BackendForPlan(settings plans.Backend) (backend.Enhanced, tfdiags
if validateDiags.HasErrors() { if validateDiags.HasErrors() {
return nil, diags return nil, diags
} }
configVal = newVal
configureDiags := b.Configure(configVal) configureDiags := b.Configure(newVal)
diags = diags.Append(configureDiags) diags = diags.Append(configureDiags)
// If the backend supports CLI initialization, do it. // If the backend supports CLI initialization, do it.
@ -463,12 +520,11 @@ func (m *Meta) backendFromConfig(opts *BackendOpts) (backend.Backend, tfdiags.Di
// Potentially changing a backend configuration // Potentially changing a backend configuration
case c != nil && !s.Backend.Empty():
// We are not going to migrate if we're not initializing and the hashes
// match, indicating that the stored config is valid. If we are
// initializing, then we also assume the backend config is OK if
// the hashes match, as long as we're not providing any new overrides.
if (uint64(cHash) == s.Backend.Hash) && (!opts.Init || opts.ConfigOverride == nil) {
log.Printf("[TRACE] Meta.Backend: using already-initialized, unchanged %q backend configuration", c.Type) log.Printf("[TRACE] Meta.Backend: using already-initialized, unchanged %q backend configuration", c.Type)
return m.backend_C_r_S_unchanged(c, cHash, sMgr) return m.backend_C_r_S_unchanged(c, cHash, sMgr)
} }
@ -731,68 +787,9 @@ func (m *Meta) backend_C_r_s(c *configs.Backend, cHash int, sMgr *state.LocalSta
m.Ui.Output(m.Colorize().Color(fmt.Sprintf( m.Ui.Output(m.Colorize().Color(fmt.Sprintf(
"[reset][green]\n"+strings.TrimSpace(successBackendSet), s.Backend.Type))) "[reset][green]\n"+strings.TrimSpace(successBackendSet), s.Backend.Type)))
// It's possible that the currently selected workspace is not migrated,
// so we call selectWorkspace to ensure a valid workspace is selected.
if err := m.selectWorkspace(b); err != nil {
diags = diags.Append(err)
return nil, diags
}
// Return the backend
return b, diags return b, diags
} }
// selectWorkspace gets a list of migrated workspaces and then checks
// if the currently selected workspace is valid. If not, it will ask
// the user to select a workspace from the list.
func (m *Meta) selectWorkspace(b backend.Backend) error {
workspaces, err := b.Workspaces()
if err != nil {
if err == backend.ErrWorkspacesNotSupported {
return nil
}
return fmt.Errorf("Failed to get migrated workspaces: %s", err)
}
if len(workspaces) == 0 {
return fmt.Errorf(strings.TrimSpace(errBackendNoMigratedWorkspaces))
}
// Get the currently selected workspace.
workspace := m.Workspace()
// Check if any of the migrated workspaces match the selected workspace
// and create a numbered list with migrated workspaces.
var list strings.Builder
for i, w := range workspaces {
if w == workspace {
return nil
}
fmt.Fprintf(&list, "%d. %s\n", i+1, w)
}
// If the selected workspace is not migrated, ask the user to select
// a workspace from the list of migrated workspaces.
v, err := m.UIInput().Input(context.Background(), &terraform.InputOpts{
Id: "select-workspace",
Query: fmt.Sprintf(
"\n[reset][bold][yellow]The currently selected workspace (%s) is not migrated.[reset]",
workspace),
Description: fmt.Sprintf(
strings.TrimSpace(inputBackendSelectWorkspace), list.String()),
})
if err != nil {
return fmt.Errorf("Failed to select workspace: %s", err)
}
idx, err := strconv.Atoi(v)
if err != nil || (idx < 1 || idx > len(workspaces)) {
return fmt.Errorf("Failed to select workspace: input not a valid number")
}
return m.SetWorkspace(workspaces[idx-1])
}
// Changing a previously saved backend. // Changing a previously saved backend.
func (m *Meta) backend_C_r_S_changed(c *configs.Backend, cHash int, sMgr *state.LocalState, output bool) (backend.Backend, tfdiags.Diagnostics) { func (m *Meta) backend_C_r_S_changed(c *configs.Backend, cHash int, sMgr *state.LocalState, output bool) (backend.Backend, tfdiags.Diagnostics) {
if output { if output {
@ -923,9 +920,8 @@ func (m *Meta) backend_C_r_S_unchanged(c *configs.Backend, cHash int, sMgr *stat
if validDiags.HasErrors() { if validDiags.HasErrors() {
return nil, diags return nil, diags
} }
configVal = newVal
configDiags := b.Configure(configVal) configDiags := b.Configure(newVal)
diags = diags.Append(configDiags) diags = diags.Append(configDiags)
if configDiags.HasErrors() { if configDiags.HasErrors() {
return nil, diags return nil, diags
@ -1052,9 +1048,8 @@ func (m *Meta) backendInitFromConfig(c *configs.Backend) (backend.Backend, cty.V
if validateDiags.HasErrors() { if validateDiags.HasErrors() {
return nil, cty.NilVal, diags return nil, cty.NilVal, diags
} }
configVal = newVal
configureDiags := b.Configure(configVal) configureDiags := b.Configure(newVal)
diags = diags.Append(configureDiags.InConfigBody(c.Config)) diags = diags.Append(configureDiags.InConfigBody(c.Config))
return b, configVal, diags return b, configVal, diags
@ -1083,9 +1078,8 @@ func (m *Meta) backendInitFromSaved(s *terraform.BackendState) (backend.Backend,
if validateDiags.HasErrors() { if validateDiags.HasErrors() {
return nil, diags return nil, diags
} }
configVal = newVal
configureDiags := b.Configure(configVal) configureDiags := b.Configure(newVal)
diags = diags.Append(configureDiags) diags = diags.Append(configureDiags)
return b, diags return b, diags
@ -1183,8 +1177,8 @@ If you'd like to run Terraform and store state locally, you can fix this
error by removing the backend configuration from your configuration. error by removing the backend configuration from your configuration.
` `
const errBackendNoMigratedWorkspaces = ` const errBackendNoExistingWorkspaces = `
No workspaces are migrated. No existing workspaces.
Use the "terraform workspace" command to create and select a new workspace. Use the "terraform workspace" command to create and select a new workspace.
If the backend already contains existing workspaces, you may need to update If the backend already contains existing workspaces, you may need to update

View File

@ -28,11 +28,11 @@ func (c *OutputCommand) Run(args []string) int {
return 1 return 1
} }
var module string var module, statePath string
var jsonOutput bool var jsonOutput bool
cmdFlags := c.Meta.defaultFlagSet("output") cmdFlags := c.Meta.defaultFlagSet("output")
cmdFlags.BoolVar(&jsonOutput, "json", false, "json") cmdFlags.BoolVar(&jsonOutput, "json", false, "json")
cmdFlags.StringVar(&c.Meta.statePath, "state", DefaultStateFilename, "path") cmdFlags.StringVar(&statePath, "state", "", "path")
cmdFlags.StringVar(&module, "module", "", "module") cmdFlags.StringVar(&module, "module", "", "module")
cmdFlags.Usage = func() { c.Ui.Error(c.Help()) } cmdFlags.Usage = func() { c.Ui.Error(c.Help()) }
if err := cmdFlags.Parse(args); err != nil { if err := cmdFlags.Parse(args); err != nil {
@ -53,6 +53,10 @@ func (c *OutputCommand) Run(args []string) int {
name = args[0] name = args[0]
} }
if statePath != "" {
c.Meta.statePath = statePath
}
var diags tfdiags.Diagnostics var diags tfdiags.Diagnostics
// Load the backend // Load the backend

View File

@ -12,6 +12,8 @@ import (
"testing" "testing"
"github.com/davecgh/go-spew/spew" "github.com/davecgh/go-spew/spew"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/mitchellh/cli" "github.com/mitchellh/cli"
"github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty"
@ -557,8 +559,8 @@ func TestRefresh_backup(t *testing.T) {
} }
newState := testStateRead(t, statePath) newState := testStateRead(t, statePath)
if !reflect.DeepEqual(newState, state) { if !cmp.Equal(newState, state, cmpopts.EquateEmpty()) {
t.Fatalf("bad: %#v", newState) t.Fatalf("got:\n%s\nexpected:\n%s\n", newState, state)
} }
newState = testStateRead(t, outPath) newState = testStateRead(t, outPath)
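The switch from reflect.DeepEqual to cmp.Equal with cmpopts.EquateEmpty matters because state round-trips can turn nil slices or maps into empty ones, which DeepEqual treats as different. A small standalone illustration of the difference (not Terraform-specific):

package main

import (
    "fmt"
    "reflect"

    "github.com/google/go-cmp/cmp"
    "github.com/google/go-cmp/cmp/cmpopts"
)

type state struct {
    Resources []string
}

func main() {
    a := state{Resources: nil}
    b := state{Resources: []string{}}

    fmt.Println(reflect.DeepEqual(a, b))                // false: nil != empty slice
    fmt.Println(cmp.Equal(a, b, cmpopts.EquateEmpty())) // true: treated as equivalent
    fmt.Println(cmp.Diff(a, b, cmpopts.EquateEmpty()))  // "" (no diff to report)
}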

View File

@ -115,7 +115,7 @@ func (c *ShowCommand) Run(args []string) int {
// if that fails, try to read the cli argument as a path to a statefile // if that fails, try to read the cli argument as a path to a statefile
if len(args) > 0 { if len(args) > 0 {
path := args[0] path := args[0]
plan, planErr = getPlanFromPath(path) plan, stateFile, planErr = getPlanFromPath(path)
if planErr != nil { if planErr != nil {
stateFile, stateErr = getStateFromPath(path) stateFile, stateErr = getStateFromPath(path)
if stateErr != nil { if stateErr != nil {
@ -129,9 +129,7 @@ func (c *ShowCommand) Run(args []string) int {
return 1 return 1
} }
} }
} } else {
if stateFile == nil {
env := c.Workspace() env := c.Workspace()
stateFile, stateErr = getStateFromEnv(b, env) stateFile, stateErr = getStateFromEnv(b, env)
if err != nil { if err != nil {
@ -143,29 +141,7 @@ func (c *ShowCommand) Run(args []string) int {
if plan != nil { if plan != nil {
if jsonOutput == true { if jsonOutput == true {
config := ctx.Config() config := ctx.Config()
jsonPlan, err := jsonplan.Marshal(config, plan, stateFile, schemas)
var err error
var jsonPlan []byte
// If there is no prior state, we have all the schemas needed.
if stateFile == nil {
jsonPlan, err = jsonplan.Marshal(config, plan, stateFile, schemas, nil)
} else {
// If there is state, we need the state-specific schemas, which
// may differ from the schemas loaded from the plan.
// This occurs if there is a data_source in the state that was
// removed from the configuration, because terraform core does
// not need to load the schema to remove a data source.
opReq.PlanFile = nil
ctx, _, ctxDiags := local.Context(opReq)
diags = diags.Append(ctxDiags)
if ctxDiags.HasErrors() {
c.showDiagnostics(diags)
return 1
}
stateSchemas := ctx.Schemas()
jsonPlan, err = jsonplan.Marshal(config, plan, stateFile, schemas, stateSchemas)
}
if err != nil { if err != nil {
c.Ui.Error(fmt.Sprintf("Failed to marshal plan to json: %s", err)) c.Ui.Error(fmt.Sprintf("Failed to marshal plan to json: %s", err))
@ -224,19 +200,21 @@ func (c *ShowCommand) Synopsis() string {
return "Inspect Terraform state or plan" return "Inspect Terraform state or plan"
} }
// getPlanFromPath returns a plan and statefile if the user-supplied path points
// to a planfile. If both plan and error are nil, the path is likely a
// directory. An error could suggest that the given path points to a statefile.
func getPlanFromPath(path string) (*plans.Plan, *statefile.File, error) {
pr, err := planfile.Open(path) pr, err := planfile.Open(path)
if err != nil { if err != nil {
return nil, err return nil, nil, err
} }
plan, err := pr.ReadPlan() plan, err := pr.ReadPlan()
if err != nil { if err != nil {
return nil, err return nil, nil, err
} }
return plan, nil
stateFile, err := pr.ReadStateFile()
return plan, stateFile, nil
} }
// getStateFromPath returns a statefile if the user-supplied path points to a statefile. // getStateFromPath returns a statefile if the user-supplied path points to a statefile.

View File

@ -2,7 +2,6 @@ package command
import ( import (
"encoding/json" "encoding/json"
"fmt"
"io/ioutil" "io/ioutil"
"os" "os"
"path/filepath" "path/filepath"
@ -221,7 +220,6 @@ func TestShow_json_output(t *testing.T) {
json.Unmarshal([]byte(byteValue), &want) json.Unmarshal([]byte(byteValue), &want)
if !cmp.Equal(got, want) { if !cmp.Equal(got, want) {
fmt.Println(ui.OutputWriter.String())
t.Fatalf("wrong result:\n %v\n", cmp.Diff(got, want)) t.Fatalf("wrong result:\n %v\n", cmp.Diff(got, want))
} }
@ -414,6 +412,11 @@ type plan struct {
PlannedValues map[string]interface{} `json:"planned_values,omitempty"` PlannedValues map[string]interface{} `json:"planned_values,omitempty"`
ResourceChanges []interface{} `json:"resource_changes,omitempty"` ResourceChanges []interface{} `json:"resource_changes,omitempty"`
OutputChanges map[string]interface{} `json:"output_changes,omitempty"` OutputChanges map[string]interface{} `json:"output_changes,omitempty"`
PriorState map[string]interface{} `json:"prior_state,omitempty"` PriorState priorState `json:"prior_state,omitempty"`
Config map[string]interface{} `json:"configuration,omitempty"` Config map[string]interface{} `json:"configuration,omitempty"`
} }
type priorState struct {
FormatVersion string `json:"format_version,omitempty"`
Values map[string]interface{} `json:"values,omitempty"`
}

View File

@ -23,14 +23,19 @@ func (c *StateListCommand) Run(args []string) int {
return 1 return 1
} }
var statePath string
cmdFlags := c.Meta.defaultFlagSet("state list") cmdFlags := c.Meta.defaultFlagSet("state list")
cmdFlags.StringVar(&c.Meta.statePath, "state", "", "path") cmdFlags.StringVar(&statePath, "state", "", "path")
lookupId := cmdFlags.String("id", "", "Restrict output to paths with a resource having the specified ID.") lookupId := cmdFlags.String("id", "", "Restrict output to paths with a resource having the specified ID.")
if err := cmdFlags.Parse(args); err != nil { if err := cmdFlags.Parse(args); err != nil {
return cli.RunResultHelp return cli.RunResultHelp
} }
args = cmdFlags.Args() args = cmdFlags.Args()
if statePath != "" {
c.Meta.statePath = statePath
}
// Load the backend // Load the backend
b, backendDiags := c.Backend(nil) b, backendDiags := c.Backend(nil)
if backendDiags.HasErrors() { if backendDiags.HasErrors() {
@ -46,7 +51,7 @@ func (c *StateListCommand) Run(args []string) int {
return 1 return 1
} }
if err := stateMgr.RefreshState(); err != nil { if err := stateMgr.RefreshState(); err != nil {
c.Ui.Error(fmt.Sprintf("Failed to refresh state: %s", err)) c.Ui.Error(fmt.Sprintf("Failed to load state: %s", err))
return 1 return 1
} }

View File

@ -0,0 +1,9 @@
# The following is invalid because we don't permit multiple nested blocks
# all on one line. Instead, we require the backend block to be on a line
# of its own.
# The purpose of this test case is to see that HCL still produces a valid-enough
# AST that we can try to sniff in this block for a terraform_version argument
# without crashing, since we do that during init to try to give a better
# error message if we detect that the configuration is for a newer Terraform
# version.
terraform { backend "local" {} }

View File

@ -53,6 +53,18 @@
] ]
} }
}, },
"prior_state": {
"format_version": "0.1",
"values": {
"outputs": {
"test": {
"sensitive": false,
"value": "bar"
}
},
"root_module": {}
}
},
"resource_changes": [ "resource_changes": [
{ {
"address": "test_instance.test[0]", "address": "test_instance.test[0]",
@ -67,7 +79,6 @@
], ],
"before": null, "before": null,
"after_unknown": { "after_unknown": {
"ami": false,
"id": true "id": true
}, },
"after": { "after": {
@ -88,7 +99,6 @@
], ],
"before": null, "before": null,
"after_unknown": { "after_unknown": {
"ami": false,
"id": true "id": true
}, },
"after": { "after": {
@ -109,7 +119,6 @@
], ],
"before": null, "before": null,
"after_unknown": { "after_unknown": {
"ami": false,
"id": true "id": true
}, },
"after": { "after": {

View File

@ -48,10 +48,7 @@
"ami": "bar", "ami": "bar",
"id": "placeholder" "id": "placeholder"
}, },
"after_unknown": { "after_unknown": {}
"ami": false,
"id": false
}
} }
}, },
{ {
@ -69,7 +66,7 @@
"id": "placeholder" "id": "placeholder"
}, },
"after": null, "after": null,
"after_unknown": false "after_unknown": {}
} }
} }
], ],
@ -85,8 +82,13 @@
}, },
"prior_state": { "prior_state": {
"format_version": "0.1", "format_version": "0.1",
"terraform_version": "0.12.0",
"values": { "values": {
"outputs": {
"test": {
"sensitive": false,
"value": "bar"
}
},
"root_module": { "root_module": {
"resources": [ "resources": [
{ {

View File

@ -48,10 +48,7 @@
"ami": "bar", "ami": "bar",
"id": "placeholder" "id": "placeholder"
}, },
"after_unknown": { "after_unknown": {}
"ami": false,
"id": false
}
} }
} }
], ],
@ -67,8 +64,13 @@
}, },
"prior_state": { "prior_state": {
"format_version": "0.1", "format_version": "0.1",
"terraform_version": "0.12.0",
"values": { "values": {
"outputs": {
"test": {
"sensitive": false,
"value": "bar"
}
},
"root_module": { "root_module": {
"resources": [ "resources": [
{ {

View File

@ -69,6 +69,18 @@
] ]
} }
}, },
"prior_state": {
"format_version": "0.1",
"values": {
"outputs": {
"test": {
"sensitive": false,
"value": "baz"
}
},
"root_module": {}
}
},
"resource_changes": [ "resource_changes": [
{ {
"address": "module.module_test_bar.test_instance.test", "address": "module.module_test_bar.test_instance.test",
@ -86,7 +98,6 @@
"ami": "bar-var" "ami": "bar-var"
}, },
"after_unknown": { "after_unknown": {
"ami": false,
"id": true "id": true
} }
} }
@ -108,7 +119,6 @@
"ami": "baz" "ami": "baz"
}, },
"after_unknown": { "after_unknown": {
"ami": false,
"id": true "id": true
} }
} }
@ -130,7 +140,6 @@
"ami": "baz" "ami": "baz"
}, },
"after_unknown": { "after_unknown": {
"ami": false,
"id": true "id": true
} }
} }
@ -152,7 +161,6 @@
"ami": "baz" "ami": "baz"
}, },
"after_unknown": { "after_unknown": {
"ami": false,
"id": true "id": true
} }
} }

View File

@ -0,0 +1,3 @@
module "my_module" {
source = "./modules"
}

View File

@ -0,0 +1,3 @@
module "more" {
source = "./more-modules"
}

View File

@ -0,0 +1,4 @@
variable "misspelled" {
default = "ehllo"
descriptoni = "I am a misspelled attribute"
}

View File

@ -0,0 +1,23 @@
{
"format_version": "0.1",
"terraform_version": "0.12.1-dev",
"planned_values": {
"root_module": {}
},
"configuration": {
"root_module": {
"module_calls": {
"my_module": {
"source": "./modules",
"module": {
"module_calls": {
"more": {
"module": {}
}
}
}
}
}
}
}
}

View File

@ -12,6 +12,7 @@ import (
"os/signal" "os/signal"
"strings" "strings"
"sync" "sync"
"sync/atomic"
"unicode" "unicode"
"github.com/hashicorp/terraform/terraform" "github.com/hashicorp/terraform/terraform"
@ -30,10 +31,13 @@ type UIInput struct {
Colorize *colorstring.Colorize Colorize *colorstring.Colorize
// Reader and Writer for IO. If these aren't set, they will default to // Reader and Writer for IO. If these aren't set, they will default to
// Stdin and Stdout respectively.
Reader io.Reader Reader io.Reader
Writer io.Writer Writer io.Writer
listening int32
result chan string
interrupted bool interrupted bool
l sync.Mutex l sync.Mutex
once sync.Once once sync.Once
@ -117,20 +121,24 @@ func (i *UIInput) Input(ctx context.Context, opts *terraform.InputOpts) (string,
} }
// Listen for the input in a goroutine. This will allow us to // Listen for the input in a goroutine. This will allow us to
// interrupt this if we are interrupted (SIGINT) // interrupt this if we are interrupted (SIGINT).
result := make(chan string, 1)
go func() { go func() {
if !atomic.CompareAndSwapInt32(&i.listening, 0, 1) {
return // We are already listening for input.
}
defer atomic.CompareAndSwapInt32(&i.listening, 1, 0)
buf := bufio.NewReader(r) buf := bufio.NewReader(r)
line, err := buf.ReadString('\n') line, err := buf.ReadString('\n')
if err != nil { if err != nil {
log.Printf("[ERR] UIInput scan err: %s", err) log.Printf("[ERR] UIInput scan err: %s", err)
} }
result <- strings.TrimRightFunc(line, unicode.IsSpace) i.result <- strings.TrimRightFunc(line, unicode.IsSpace)
}() }()
select { select {
case line := <-result: case line := <-i.result:
fmt.Fprint(w, "\n") fmt.Fprint(w, "\n")
if line == "" { if line == "" {
@ -157,6 +165,8 @@ func (i *UIInput) Input(ctx context.Context, opts *terraform.InputOpts) (string,
} }
func (i *UIInput) init() { func (i *UIInput) init() {
i.result = make(chan string)
if i.Colorize == nil { if i.Colorize == nil {
i.Colorize = &colorstring.Colorize{ i.Colorize = &colorstring.Colorize{
Colors: colorstring.DefaultColors, Colors: colorstring.DefaultColors,

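The listening flag above exists because a canceled Input call leaves its reader goroutine blocked on the input stream; a later call must not start a second reader, or the two would compete for the same stream. Below is a minimal, standalone sketch of that guard (an atomic compare-and-swap plus a shared result channel); the prompter type and ask method are hypothetical, standard-library-only stand-ins, not the Terraform implementation.

package main

import (
    "bufio"
    "context"
    "fmt"
    "io"
    "strings"
    "sync/atomic"
)

type prompter struct {
    listening int32       // 1 while a reader goroutine is active
    result    chan string // shared by all reader goroutines
    in        io.Reader
}

// ask reads one line, unless the context is canceled first. Only one
// reader goroutine is ever active at a time: if a previous read was
// abandoned by cancellation, we simply wait on the same result channel.
func (p *prompter) ask(ctx context.Context) (string, error) {
    go func() {
        if !atomic.CompareAndSwapInt32(&p.listening, 0, 1) {
            return // a previous goroutine is still reading
        }
        defer atomic.StoreInt32(&p.listening, 0)
        line, _ := bufio.NewReader(p.in).ReadString('\n')
        p.result <- strings.TrimSpace(line)
    }()

    select {
    case line := <-p.result:
        return line, nil
    case <-ctx.Done():
        return "", ctx.Err()
    }
}

func main() {
    p := &prompter{result: make(chan string), in: strings.NewReader("yes\n")}
    v, err := p.ask(context.Background())
    fmt.Println(v, err) // yes <nil>
}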
View File

@ -3,7 +3,11 @@ package command
import ( import (
"bytes" "bytes"
"context" "context"
"fmt"
"io"
"sync/atomic"
"testing" "testing"
"time"
"github.com/hashicorp/terraform/terraform" "github.com/hashicorp/terraform/terraform"
) )
@ -20,11 +24,61 @@ func TestUIInputInput(t *testing.T) {
v, err := i.Input(context.Background(), &terraform.InputOpts{}) v, err := i.Input(context.Background(), &terraform.InputOpts{})
if err != nil { if err != nil {
t.Fatalf("err: %s", err) t.Fatalf("unexpected error: %v", err)
} }
if v != "foo" { if v != "foo" {
t.Fatalf("bad: %#v", v) t.Fatalf("unexpected input: %s", v)
}
}
func TestUIInputInput_canceled(t *testing.T) {
r, w := io.Pipe()
i := &UIInput{
Reader: r,
Writer: bytes.NewBuffer(nil),
}
// Make a context that can be canceled.
ctx, cancel := context.WithCancel(context.Background())
go func() {
// Cancel the context after 2 seconds.
time.Sleep(2 * time.Second)
cancel()
}()
// Get input until the context is canceled.
v, err := i.Input(ctx, &terraform.InputOpts{})
if err != context.Canceled {
t.Fatalf("expected a context.Canceled error, got: %v", err)
}
// As the context was canceled v should be empty.
if v != "" {
t.Fatalf("unexpected input: %s", v)
}
// As the context was canceled we should still be listening.
listening := atomic.LoadInt32(&i.listening)
if listening != 1 {
t.Fatalf("expected listening to be 1, got: %d", listening)
}
go func() {
// Fake input is given after 1 second.
time.Sleep(time.Second)
fmt.Fprint(w, "foo\n")
w.Close()
}()
v, err = i.Input(context.Background(), &terraform.InputOpts{})
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
if v != "foo" {
t.Fatalf("unexpected input: %s", v)
} }
} }
@ -36,10 +90,10 @@ func TestUIInputInput_spaces(t *testing.T) {
v, err := i.Input(context.Background(), &terraform.InputOpts{}) v, err := i.Input(context.Background(), &terraform.InputOpts{})
if err != nil { if err != nil {
t.Fatalf("err: %s", err) t.Fatalf("unexpected error: %v", err)
} }
if v != "foo bar" { if v != "foo bar" {
t.Fatalf("bad: %#v", v) t.Fatalf("unexpected input: %s", v)
} }
} }

View File

@ -3,7 +3,9 @@ package module
import ( import (
"errors" "errors"
"fmt" "fmt"
"regexp"
"sort" "sort"
"strings"
version "github.com/hashicorp/go-version" version "github.com/hashicorp/go-version"
"github.com/hashicorp/terraform/registry/response" "github.com/hashicorp/terraform/registry/response"
@ -11,6 +13,8 @@ import (
const anyVersion = ">=0.0.0" const anyVersion = ">=0.0.0"
var explicitEqualityConstraint = regexp.MustCompile("^=[0-9]")
// return the newest version that satisfies the provided constraint // return the newest version that satisfies the provided constraint
func newest(versions []string, constraint string) (string, error) { func newest(versions []string, constraint string) (string, error) {
if constraint == "" { if constraint == "" {
@ -21,6 +25,30 @@ func newest(versions []string, constraint string) (string, error) {
return "", err return "", err
} }
// Find any build metadata in the constraints, and
// record whether the constraint is an explicit equality that
// carries a build metadata requirement, so that we can return the
// specific build-metadata version if one was requested.
var constraintMetas []string
var equalsConstraint bool
for i := range cs {
constraintMeta := strings.SplitAfterN(cs[i].String(), "+", 2)
if len(constraintMeta) > 1 {
constraintMetas = append(constraintMetas, constraintMeta[1])
}
}
if len(cs) == 1 {
equalsConstraint = explicitEqualityConstraint.MatchString(cs.String())
}
// If the version string includes metadata, go-version will accept it,
// but the expected behavior is ambiguous, so return an error and let
// the user state the constraint more precisely.
if (len(cs) > 1 || !equalsConstraint) && len(constraintMetas) > 0 {
return "", fmt.Errorf("Constraints including build metadata must have explicit equality, or are otherwise too ambiguous: %s", cs.String())
}
switch len(versions) { switch len(versions) {
case 0: case 0:
return "", errors.New("no versions found") return "", errors.New("no versions found")
@ -58,6 +86,12 @@ func newest(versions []string, constraint string) (string, error) {
continue continue
} }
if cs.Check(v) { if cs.Check(v) {
// Constraint has metadata and is explicit equality
if equalsConstraint && len(constraintMetas) > 0 {
if constraintMetas[0] != v.Metadata() {
continue
}
}
return versions[i], nil return versions[i], nil
} }
} }
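For context on why newest compares Metadata() by hand: the hashicorp/go-version library ignores build metadata (the part after "+") when checking constraints, so an explicit-equality constraint alone cannot pin a particular build. A rough illustration, assuming go-version behaves as described here:

package main

import (
    "fmt"

    version "github.com/hashicorp/go-version"
)

func main() {
    v, _ := version.NewVersion("0.9.0+def")
    cs, _ := version.NewConstraint("=0.9.0")

    // The constraint check alone ignores build metadata...
    fmt.Println(cs.Check(v)) // true

    // ...so selecting "=0.9.0+def" specifically requires comparing the
    // metadata separately, as newest() does above.
    fmt.Println(v.Metadata() == "def") // true
}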

View File

@ -58,3 +58,33 @@ func TestNewestInvalidModuleVersion(t *testing.T) {
t.Fatalf("expected version %q, got %q", expected, m.Version) t.Fatalf("expected version %q, got %q", expected, m.Version)
} }
} }
func TestNewestModulesWithMetadata(t *testing.T) {
mpv := &response.ModuleProviderVersions{
Source: "registry/test/module",
Versions: []*response.ModuleVersion{
{Version: "0.9.0"},
{Version: "0.9.0+def"},
{Version: "0.9.0+abc"},
{Version: "0.9.0+xyz"},
},
}
// with metadata and explicit version request
expected := "0.9.0+def"
m, _ := newestVersion(mpv.Versions, "=0.9.0+def")
if m.Version != expected {
t.Fatalf("expected version %q, got %q", expected, m.Version)
}
// explicit equality is respected, but range operators (>, <, ~>) or metadata in multiple constraints give an error
_, err := newestVersion(mpv.Versions, "~>0.9.0+abc")
if err == nil {
t.Fatalf("expected an error, but did not get one")
}
_, err = newestVersion(mpv.Versions, ">0.8.0+abc, <1.0.0")
if err == nil {
t.Fatalf("expected an error, but did not get one")
}
}

View File

@ -64,7 +64,15 @@ func (l *Loader) moduleWalkerLoad(req *configs.ModuleRequest) (*configs.Module,
Subject: &req.SourceAddrRange, Subject: &req.SourceAddrRange,
}) })
} }
if !req.VersionConstraint.Required.Check(record.Version) { if len(req.VersionConstraint.Required) > 0 && record.Version == nil {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Module version requirements have changed",
Detail: "The version requirements have changed since this module was installed and the installed version is no longer acceptable. Run \"terraform init\" to install all modules required by this configuration.",
Subject: &req.SourceAddrRange,
})
}
if record.Version != nil && !req.VersionConstraint.Required.Check(record.Version) {
diags = append(diags, &hcl.Diagnostic{ diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError, Severity: hcl.DiagError,
Summary: "Module version requirements have changed", Summary: "Module version requirements have changed",

View File

@ -58,3 +58,25 @@ func TestLoaderLoadConfig_okay(t *testing.T) {
assertResultCtyEqual(t, got, cty.StringVal("Hello from child_d")) assertResultCtyEqual(t, got, cty.StringVal("Hello from child_d"))
}) })
} }
func TestLoaderLoadConfig_addVersion(t *testing.T) {
// This test is for what happens when there is a version constraint added
// to a module that previously didn't have one.
fixtureDir := filepath.Clean("test-fixtures/add-version-constraint")
loader, err := NewLoader(&Config{
ModulesDir: filepath.Join(fixtureDir, ".terraform/modules"),
})
if err != nil {
t.Fatalf("unexpected error from NewLoader: %s", err)
}
_, diags := loader.LoadConfig(fixtureDir)
if !diags.HasErrors() {
t.Fatalf("success; want error")
}
got := diags.Error()
want := "Module requirements have changed"
if strings.Contains(got, want) {
t.Fatalf("wrong error\ngot:\n%s\n\nwant: containing %q", got, want)
}
}

View File

@ -0,0 +1,14 @@
{
"Modules": [
{
"Key": "",
"Source": "",
"Dir": "test-fixtures/add-version-constraint"
},
{
"Key": "child",
"Source": "hashicorp/module-installer-acctest/aws",
"Dir": "test-fixtures/add-version-constraint/.terraform/modules/child"
}
]
}

View File

@ -0,0 +1,10 @@
# This fixture depends on a registry module, which indirectly refers to the
# following github repository:
#
# However, the test that uses it is testing for an error, so in practice the
# registry does not need to be accessed when this test is successful.
module "child" {
source = "hashicorp/module-installer-acctest/aws"
version = "0.0.1"
}

View File

@ -232,9 +232,13 @@ func (u *Upgrader) analyze(ms ModuleSources) (*analysis, error) {
} }
} }
providerFactories, errs := u.Providers.ResolveProviders(m.PluginRequirements())
if len(errs) > 0 {
var errorsMsg string
for _, err := range errs {
errorsMsg += fmt.Sprintf("\n- %s", err)
}
return nil, fmt.Errorf("error resolving providers:\n%s", errorsMsg)
}
for name, fn := range providerFactories { for name, fn := range providerFactories {

View File

@ -0,0 +1,16 @@
resource "test_instance" "first_many" {
count = 2
}
resource "test_instance" "one" {
image = "${test_instance.first_many.*.id[0]}"
}
resource "test_instance" "splat_of_one" {
image = "${test_instance.one.*.id[0]}"
}
resource "test_instance" "second_many" {
count = "${length(test_instance.first_many)}"
security_groups = "${test_instance.first_many.*.id[count.index]}"
}

View File

@ -0,0 +1,16 @@
resource "test_instance" "first_many" {
count = 2
}
resource "test_instance" "one" {
image = test_instance.first_many[0].id
}
resource "test_instance" "splat_of_one" {
image = test_instance.one.*.id[0]
}
resource "test_instance" "second_many" {
count = length(test_instance.first_many)
security_groups = test_instance.first_many[count.index].id
}

View File

@ -0,0 +1,3 @@
terraform {
required_version = ">= 0.12"
}

View File

@ -0,0 +1,25 @@
variable "list" {
type = "list"
default = [
"foo", # I am a comment
"bar", # I am also a comment
"baz",
]
}
variable "list2" {
type = "list"
default = [
"foo",
"bar",
"baz",
]
}
variable "list_the_third" {
type = "list"
default = ["foo", "bar", "baz"]
}

View File

@ -0,0 +1,25 @@
variable "list" {
type = list(string)
default = [
"foo", # I am a comment
"bar", # I am also a comment
"baz",
]
}
variable "list2" {
type = list(string)
default = [
"foo",
"bar",
"baz",
]
}
variable "list_the_third" {
type = list(string)
default = ["foo", "bar", "baz"]
}

View File

@ -0,0 +1,3 @@
terraform {
required_version = ">= 0.12"
}

View File

@ -1,5 +1,10 @@
resource "test_instance" "example" { resource "test_instance" "example" {
tags = {
# Thingy thing
name = "foo bar baz" # this is a terrible name
}
connection { connection {
host = "127.0.0.1" host = "127.0.0.1"
} }

View File

@ -1,5 +1,10 @@
resource "test_instance" "example" { resource "test_instance" "example" {
tags = {
# Thingy thing
name = "foo bar baz" # this is a terrible name
}
connection { connection {
host = "127.0.0.1" host = "127.0.0.1"
} }

View File

@ -4,8 +4,12 @@ variable "s" {
variable "l" { variable "l" {
type = "list" type = "list"
default = []
} }
variable "m" { variable "m" {
type = "map" type = "map"
default = {}
} }

View File

@ -4,8 +4,12 @@ variable "s" {
variable "l" { variable "l" {
type = list(string) type = list(string)
default = []
} }
variable "m" { variable "m" {
type = map(string) type = map(string)
default = {}
} }

View File

@ -35,11 +35,11 @@ func (u *Upgrader) Upgrade(input ModuleSources, dir string) (ModuleSources, tfdi
var diags tfdiags.Diagnostics var diags tfdiags.Diagnostics
an, err := u.analyze(input) an, err := u.analyze(input)
an.ModuleDir = dir
if err != nil { if err != nil {
diags = diags.Append(err) diags = diags.Append(err)
return ret, diags return ret, diags
} }
an.ModuleDir = dir
for name, src := range input { for name, src := range input {
ext := fileExt(name) ext := fileExt(name)

View File

@ -168,15 +168,26 @@ Value:
src, moreDiags := upgradeExpr(node, filename, interp, an) src, moreDiags := upgradeExpr(node, filename, interp, an)
diags = diags.Append(moreDiags) diags = diags.Append(moreDiags)
buf.Write(src) buf.Write(src)
if lit, ok := node.(*hcl1ast.LiteralType); ok && lit.LineComment != nil {
for _, comment := range lit.LineComment.List {
buf.WriteString(", " + comment.Text)
buf.WriteString("\n")
}
} else {
if multiline {
buf.WriteString(",\n")
} else if i < len(tv.List)-1 {
buf.WriteString(", ")
}
}
} }
buf.WriteString("]") buf.WriteString("]")
case *hcl1ast.ObjectType: case *hcl1ast.ObjectType:
if len(tv.List.Items) == 0 {
buf.WriteString("{}")
break
}
buf.WriteString("{\n") buf.WriteString("{\n")
for _, item := range tv.List.Items { for _, item := range tv.List.Items {
if len(item.Keys) != 1 { if len(item.Keys) != 1 {
@ -192,9 +203,22 @@ Value:
diags = diags.Append(moreDiags) diags = diags.Append(moreDiags)
valueSrc, moreDiags := upgradeExpr(item.Val, filename, interp, an) valueSrc, moreDiags := upgradeExpr(item.Val, filename, interp, an)
diags = diags.Append(moreDiags) diags = diags.Append(moreDiags)
if item.LeadComment != nil {
for _, c := range item.LeadComment.List {
buf.WriteString(c.Text)
buf.WriteByte('\n')
}
}
buf.Write(keySrc) buf.Write(keySrc)
buf.WriteString(" = ") buf.WriteString(" = ")
buf.Write(valueSrc) buf.Write(valueSrc)
if item.LineComment != nil {
for _, c := range item.LineComment.List {
buf.WriteByte(' ')
buf.WriteString(c.Text)
}
}
buf.WriteString("\n") buf.WriteString("\n")
} }
buf.WriteString("}") buf.WriteString("}")
@ -235,81 +259,21 @@ Value:
// safe to do so. // safe to do so.
parts := strings.Split(tv.Name, ".") parts := strings.Split(tv.Name, ".")
transformed := transformCountPseudoAttribute(&buf, parts, an)
if transformed {
break Value
}
if len(parts) > 0 {
var rAddr addrs.Resource
switch parts[0] {
case "data":
if len(parts) == 4 && parts[3] == "count" {
rAddr.Mode = addrs.DataResourceMode
rAddr.Type = parts[1]
rAddr.Name = parts[2]
}
default:
if len(parts) == 3 && parts[2] == "count" {
rAddr.Mode = addrs.ManagedResourceMode
rAddr.Type = parts[0]
rAddr.Name = parts[1]
}
}
// We need to check if the thing being referenced is actually an
// existing resource, because other three-part traversals might
// coincidentally end with "count".
if hasCount, exists := an.ResourceHasCount[rAddr]; exists {
if hasCount {
buf.WriteString("length(")
buf.WriteString(rAddr.String())
buf.WriteString(")")
} else {
// If the resource does not have count, the .count
// attr would've always returned 1 before.
buf.WriteString("1")
}
break Value
}
} }
parts = upgradeTraversalParts(parts, an) // might add/remove/change parts parts = upgradeTraversalParts(parts, an) // might add/remove/change parts
first, remain := parts[0], parts[1:]
buf.WriteString(first)
seenSplat := false
for _, part := range remain {
if part == "*" {
seenSplat = true
buf.WriteString(".*")
continue
}
vDiags := validateHilAddress(tv.Name, filename)
if len(vDiags) > 0 {
diags = diags.Append(vDiags)
break
}
if v, err := strconv.Atoi(part); err == nil {
// Looks like it's old-style index traversal syntax foo.0.bar
// so we'll replace with canonical index syntax foo[0].bar.
fmt.Fprintf(&buf, "[%d]", v)
continue
}
if !hcl2syntax.ValidIdentifier(part) {
// This should be rare since HIL's identifier syntax is _close_
// to HCL2's, but we'll get here if one of the intervening
// parts is not a valid identifier in isolation, since HIL
// did not consider these to be separate identifiers.
// e.g. foo.1bar would be invalid in HCL2; must instead be foo["1bar"].
buf.WriteByte('[')
printQuotedString(&buf, part)
buf.WriteByte(']')
continue
}
}
buf.WriteByte('.')
buf.WriteString(part)
} }
printHilTraversalPartsAsHcl2(&buf, parts)
case *hilast.Arithmetic: case *hilast.Arithmetic:
op, exists := hilArithmeticOpSyms[tv.Op] op, exists := hilArithmeticOpSyms[tv.Op]
if !exists { if !exists {
@ -540,14 +504,74 @@ Value:
buf.Write(falseSrc) buf.Write(falseSrc)
case *hilast.Index: case *hilast.Index:
target, ok := tv.Target.(*hilast.VariableAccess)
if !ok {
panic(fmt.Sprintf("Index node with unsupported target type (%T)", tv.Target))
}
parts := strings.Split(target.Name, ".")
keySrc, exprDiags := upgradeExpr(tv.Key, filename, true, an)
diags = diags.Append(exprDiags)
transformed := transformCountPseudoAttribute(&buf, parts, an)
if transformed {
break Value
}
parts = upgradeTraversalParts(parts, an) // might add/remove/change parts
vDiags := validateHilAddress(target.Name, filename)
if len(vDiags) > 0 {
diags = diags.Append(vDiags)
break
}
first, remain := parts[0], parts[1:]
var rAddr addrs.Resource
switch parts[0] {
case "data":
if len(parts) == 5 && parts[3] == "*" {
rAddr.Mode = addrs.DataResourceMode
rAddr.Type = parts[1]
rAddr.Name = parts[2]
}
default:
if len(parts) == 4 && parts[2] == "*" {
rAddr.Mode = addrs.ManagedResourceMode
rAddr.Type = parts[0]
rAddr.Name = parts[1]
}
}
// We need to check if the thing being referenced has count
// to retain backward compatibility
hasCount := false
if v, exists := an.ResourceHasCount[rAddr]; exists {
hasCount = v
}
hasSplat := false
buf.WriteString(first)
for _, part := range remain {
// Attempt to convert old-style splat indexing to new one
// e.g. res.label.*.attr[idx] to res.label[idx].attr
if part == "*" && hasCount {
hasSplat = true
buf.WriteString(fmt.Sprintf("[%s]", keySrc))
continue
}
buf.WriteByte('.')
buf.WriteString(part)
}
if !hasSplat {
buf.WriteString("[")
buf.Write(keySrc)
buf.WriteString("]")
}
case *hilast.Output: case *hilast.Output:
if len(tv.Exprs) == 1 { if len(tv.Exprs) == 1 {
@ -601,6 +625,122 @@ Value:
return buf.Bytes(), diags return buf.Bytes(), diags
} }
func validateHilAddress(address, filename string) tfdiags.Diagnostics {
parts := strings.Split(address, ".")
var diags tfdiags.Diagnostics
label, ok := getResourceLabel(parts)
if ok && !hcl2syntax.ValidIdentifier(label) {
// We can't get any useful source location out of HIL unfortunately
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
fmt.Sprintf("Invalid address (%s) in ./%s", address, filename),
// The label could be invalid for another reason
// but this is the most likely, so we add it as hint
"Names of objects (resources, modules, etc) may no longer start with digits."))
}
return diags
}
func getResourceLabel(parts []string) (string, bool) {
if len(parts) < 1 {
return "", false
}
if parts[0] == "data" {
if len(parts) < 3 {
return "", false
}
return parts[2], true
}
if len(parts) < 2 {
return "", false
}
return parts[1], true
}
// transformCountPseudoAttribute deals with the .count pseudo-attributes
// that 0.11 and prior allowed for resources. These no longer exist,
// because they don't do anything we can't do with the length(...) function.
func transformCountPseudoAttribute(buf *bytes.Buffer, parts []string, an *analysis) (transformed bool) {
if len(parts) > 0 {
var rAddr addrs.Resource
switch parts[0] {
case "data":
if len(parts) == 4 && parts[3] == "count" {
rAddr.Mode = addrs.DataResourceMode
rAddr.Type = parts[1]
rAddr.Name = parts[2]
}
default:
if len(parts) == 3 && parts[2] == "count" {
rAddr.Mode = addrs.ManagedResourceMode
rAddr.Type = parts[0]
rAddr.Name = parts[1]
}
}
// We need to check if the thing being referenced is actually an
// existing resource, because other three-part traversals might
// coincidentally end with "count".
if hasCount, exists := an.ResourceHasCount[rAddr]; exists {
if hasCount {
buf.WriteString("length(")
buf.WriteString(rAddr.String())
buf.WriteString(")")
} else {
// If the resource does not have count, the .count
// attr would've always returned 1 before.
buf.WriteString("1")
}
transformed = true
return
}
}
return
}
func printHilTraversalPartsAsHcl2(buf *bytes.Buffer, parts []string) {
first, remain := parts[0], parts[1:]
buf.WriteString(first)
seenSplat := false
for _, part := range remain {
if part == "*" {
seenSplat = true
buf.WriteString(".*")
continue
}
// Other special cases apply only if we've not previously
// seen a splat expression marker, since attribute vs. index
// syntax have different interpretations after a simple splat.
if !seenSplat {
if v, err := strconv.Atoi(part); err == nil {
// Looks like it's old-style index traversal syntax foo.0.bar
// so we'll replace with canonical index syntax foo[0].bar.
fmt.Fprintf(buf, "[%d]", v)
continue
}
if !hcl2syntax.ValidIdentifier(part) {
// This should be rare since HIL's identifier syntax is _close_
// to HCL2's, but we'll get here if one of the intervening
// parts is not a valid identifier in isolation, since HIL
// did not consider these to be separate identifiers.
// e.g. foo.1bar would be invalid in HCL2; must instead be foo["1bar"].
buf.WriteByte('[')
printQuotedString(buf, part)
buf.WriteByte(']')
continue
}
}
buf.WriteByte('.')
buf.WriteString(part)
}
}
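The traversal rewriting in printHilTraversalPartsAsHcl2 boils down to a simple rule: numeric parts become index brackets, everything else stays in attribute form, with extra handling for splats and non-identifier parts. A standalone sketch of just that core rule on plain strings; upgradeTraversal is a hypothetical helper for illustration, not the upgrade tool itself:

package main

import (
    "fmt"
    "strconv"
    "strings"
)

// upgradeTraversal rewrites an HCL1/HIL-style dotted reference like
// "aws_instance.web.0.id" into HCL2 index syntax "aws_instance.web[0].id".
func upgradeTraversal(parts []string) string {
    var buf strings.Builder
    buf.WriteString(parts[0])
    for _, part := range parts[1:] {
        if n, err := strconv.Atoi(part); err == nil {
            // Old-style numeric attribute access becomes an index.
            fmt.Fprintf(&buf, "[%d]", n)
            continue
        }
        buf.WriteByte('.')
        buf.WriteString(part)
    }
    return buf.String()
}

func main() {
    fmt.Println(upgradeTraversal(strings.Split("aws_instance.web.0.id", ".")))
    // aws_instance.web[0].id
}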
func upgradeHeredocBody(buf *bytes.Buffer, val *hilast.Output, filename string, an *analysis) tfdiags.Diagnostics { func upgradeHeredocBody(buf *bytes.Buffer, val *hilast.Output, filename string, an *analysis) tfdiags.Diagnostics {
var diags tfdiags.Diagnostics var diags tfdiags.Diagnostics

View File

@ -2,6 +2,7 @@ package configupgrade
import ( import (
"bytes" "bytes"
"flag"
"io" "io"
"io/ioutil" "io/ioutil"
"log" "log"
@ -286,6 +287,7 @@ func init() {
} }
func TestMain(m *testing.M) { func TestMain(m *testing.M) {
flag.Parse()
if testing.Verbose() { if testing.Verbose() {
// if we're verbose, use the logging requested by TF_LOG // if we're verbose, use the logging requested by TF_LOG
logging.SetOutput() logging.SetOutput()

View File

@ -40,11 +40,12 @@ var _ hcl.Body = mergeBody{}
func (b mergeBody) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) { func (b mergeBody) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) {
var diags hcl.Diagnostics var diags hcl.Diagnostics
oSchema := schemaForOverrides(schema) baseSchema := schemaWithDynamic(schema)
overrideSchema := schemaWithDynamic(schemaForOverrides(schema))
baseContent, cDiags := b.Base.Content(schema) baseContent, _, cDiags := b.Base.PartialContent(baseSchema)
diags = append(diags, cDiags...) diags = append(diags, cDiags...)
overrideContent, cDiags := b.Override.Content(oSchema) overrideContent, _, cDiags := b.Override.PartialContent(overrideSchema)
diags = append(diags, cDiags...) diags = append(diags, cDiags...)
content := b.prepareContent(baseContent, overrideContent) content := b.prepareContent(baseContent, overrideContent)
@ -54,11 +55,12 @@ func (b mergeBody) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagno
func (b mergeBody) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) { func (b mergeBody) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) {
var diags hcl.Diagnostics var diags hcl.Diagnostics
oSchema := schemaForOverrides(schema) baseSchema := schemaWithDynamic(schema)
overrideSchema := schemaWithDynamic(schemaForOverrides(schema))
baseContent, baseRemain, cDiags := b.Base.PartialContent(schema) baseContent, baseRemain, cDiags := b.Base.PartialContent(baseSchema)
diags = append(diags, cDiags...) diags = append(diags, cDiags...)
overrideContent, overrideRemain, cDiags := b.Override.PartialContent(oSchema) overrideContent, overrideRemain, cDiags := b.Override.PartialContent(overrideSchema)
diags = append(diags, cDiags...) diags = append(diags, cDiags...)
content := b.prepareContent(baseContent, overrideContent) content := b.prepareContent(baseContent, overrideContent)
@ -90,9 +92,21 @@ func (b mergeBody) prepareContent(base *hcl.BodyContent, override *hcl.BodyConte
overriddenBlockTypes := make(map[string]bool) overriddenBlockTypes := make(map[string]bool)
for _, block := range override.Blocks { for _, block := range override.Blocks {
if block.Type == "dynamic" {
overriddenBlockTypes[block.Labels[0]] = true
continue
}
overriddenBlockTypes[block.Type] = true overriddenBlockTypes[block.Type] = true
} }
for _, block := range base.Blocks { for _, block := range base.Blocks {
// We skip over dynamic blocks whose type label is an overridden type
// but note that below we do still leave them as dynamic blocks in
// the result because expanding the dynamic blocks that are left is
// done much later during the core graph walks, where we can safely
// evaluate the expressions.
if block.Type == "dynamic" && overriddenBlockTypes[block.Labels[0]] {
continue
}
if overriddenBlockTypes[block.Type] { if overriddenBlockTypes[block.Type] {
continue continue
} }

View File

@ -136,3 +136,66 @@ func TestModuleOverrideModule(t *testing.T) {
assertResultDeepEqual(t, gotArgs, wantArgs) assertResultDeepEqual(t, gotArgs, wantArgs)
} }
func TestModuleOverrideDynamic(t *testing.T) {
schema := &hcl.BodySchema{
Blocks: []hcl.BlockHeaderSchema{
{Type: "foo"},
{Type: "dynamic", LabelNames: []string{"type"}},
},
}
t.Run("base is dynamic", func(t *testing.T) {
mod, diags := testModuleFromDir("test-fixtures/valid-modules/override-dynamic-block-base")
assertNoDiagnostics(t, diags)
if mod == nil {
t.Fatalf("module is nil")
}
if _, exists := mod.ManagedResources["test.foo"]; !exists {
t.Fatalf("no module 'example'")
}
if len(mod.ManagedResources) != 1 {
t.Fatalf("wrong number of managed resources in result %d; want 1", len(mod.ManagedResources))
}
body := mod.ManagedResources["test.foo"].Config
content, diags := body.Content(schema)
assertNoDiagnostics(t, diags)
if len(content.Blocks) != 1 {
t.Fatalf("wrong number of blocks in result %d; want 1", len(content.Blocks))
}
if got, want := content.Blocks[0].Type, "foo"; got != want {
t.Fatalf("wrong block type %q; want %q", got, want)
}
})
t.Run("override is dynamic", func(t *testing.T) {
mod, diags := testModuleFromDir("test-fixtures/valid-modules/override-dynamic-block-override")
assertNoDiagnostics(t, diags)
if mod == nil {
t.Fatalf("module is nil")
}
if _, exists := mod.ManagedResources["test.foo"]; !exists {
t.Fatalf("no module 'example'")
}
if len(mod.ManagedResources) != 1 {
t.Fatalf("wrong number of managed resources in result %d; want 1", len(mod.ManagedResources))
}
body := mod.ManagedResources["test.foo"].Config
content, diags := body.Content(schema)
assertNoDiagnostics(t, diags)
if len(content.Blocks) != 1 {
t.Fatalf("wrong number of blocks in result %d; want 1", len(content.Blocks))
}
if got, want := content.Blocks[0].Type, "dynamic"; got != want {
t.Fatalf("wrong block type %q; want %q", got, want)
}
if got, want := content.Blocks[0].Labels[0], "foo"; got != want {
t.Fatalf("wrong dynamic block label %q; want %q", got, want)
}
})
}

View File

@ -0,0 +1,6 @@
resource "test" "foo" {
foo {
from = "override"
}
}

Some files were not shown because too many files have changed in this diff.