Merge pull request #30199 from hashicorp/jbardin/apply-failure-diags

Apply graph failure handling
James Bardin 2021-12-17 14:08:02 -05:00 committed by GitHub
commit 2c8edfb259
3 changed files with 68 additions and 3 deletions

@@ -168,6 +168,14 @@ func (b *Local) opApply(
 	}
 	diags = diags.Append(applyDiags)
 
+	// Even on error with an empty state, the state value should not be nil.
+	// Return early here to prevent corrupting any existing state.
+	if diags.HasErrors() && applyState == nil {
+		log.Printf("[ERROR] backend/local: apply returned nil state")
+		op.ReportResult(runningOp, diags)
+		return
+	}
+
 	// Store the final state
 	runningOp.State = applyState
 	err := statemgr.WriteAndPersist(opState, applyState)
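The guard above matters because the very next statement persists applyState unconditionally; if a graph-build failure left it nil, the backend would overwrite the state already on disk with nothing. A minimal, self-contained sketch of that pattern (toy types only; persist and finishApply are hypothetical stand-ins, not Terraform's statemgr API):

package main

import (
	"errors"
	"fmt"
)

type state struct{ resources []string }

// persist stands in for the backend's state manager write: whatever state
// is passed in replaces the previously stored one.
func persist(stored **state, next *state) {
	*stored = next
}

// finishApply mirrors the new guard: on error with a nil result, return
// early instead of persisting, so the stored state survives intact.
func finishApply(stored **state, applied *state, applyErr error) error {
	if applyErr != nil && applied == nil {
		return fmt.Errorf("apply returned nil state, keeping stored state: %w", applyErr)
	}
	persist(stored, applied)
	return applyErr
}

func main() {
	stored := &state{resources: []string{"test_object.a"}}
	err := finishApply(&stored, nil, errors.New("graph build failed"))
	fmt.Printf("%v; stored still tracks %d resource(s)\n", err, len(stored.resources))
}

Running the sketch prints the wrapped error while the stored state still tracks its one resource, which is the behavior the early return buys.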

@@ -22,12 +22,11 @@ import (
 // resulting state which is likely to have been partially-updated.
 func (c *Context) Apply(plan *plans.Plan, config *configs.Config) (*states.State, tfdiags.Diagnostics) {
 	defer c.acquireRun("apply")()
-	var diags tfdiags.Diagnostics
 
 	log.Printf("[DEBUG] Building and walking apply graph for %s plan", plan.UIMode)
-	graph, operation, moreDiags := c.applyGraph(plan, config, true)
-	if moreDiags.HasErrors() {
+	graph, operation, diags := c.applyGraph(plan, config, true)
+	if diags.HasErrors() {
 		return nil, diags
 	}
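This hunk fixes a silent-failure path: graph-build errors were collected into moreDiags, but the error branch returned the separate, still-empty diags value, so a failure such as a graph cycle produced a nil state with no reported errors (the case the backend guard above now defends against). A toy reproduction of the pitfall, using illustrative types rather than Terraform's tfdiags:

package main

import "fmt"

type diagnostics []string

func (d diagnostics) HasErrors() bool { return len(d) > 0 }

func buildGraph() (graph string, diags diagnostics) {
	return "", diagnostics{"Cycle: test_object.a, test_object.b"}
}

// applyBefore reproduces the bug: errors land in moreDiags, but the empty
// diags declared earlier is what gets returned, so the caller sees a nil
// result with no explanation.
func applyBefore() (string, diagnostics) {
	var diags diagnostics
	_, moreDiags := buildGraph()
	if moreDiags.HasErrors() {
		return "", diags
	}
	return "ok", diags
}

// applyAfter mirrors the fix: return the same diagnostics value that was
// checked, so graph-build errors actually reach the caller.
func applyAfter() (string, diagnostics) {
	_, diags := buildGraph()
	if diags.HasErrors() {
		return "", diags
	}
	return "ok", diags
}

func main() {
	_, before := applyBefore()
	_, after := applyAfter()
	fmt.Println("before fix, caller sees errors:", before.HasErrors()) // false
	fmt.Println("after fix, caller sees errors:", after.HasErrors())   // true
}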

@@ -678,3 +678,61 @@ resource "test_object" "s" {
 	})
 	assertNoErrors(t, diags)
 }
+
+func TestContext2Apply_graphError(t *testing.T) {
+	m := testModuleInline(t, map[string]string{
+		"main.tf": `
+resource "test_object" "a" {
+  test_string = "ok"
+}
+
+resource "test_object" "b" {
+  test_string = test_object.a.test_string
+}
+`,
+	})
+
+	p := simpleMockProvider()
+
+	state := states.NewState()
+	root := state.EnsureModule(addrs.RootModuleInstance)
+	root.SetResourceInstanceCurrent(
+		mustResourceInstanceAddr("test_object.a").Resource,
+		&states.ResourceInstanceObjectSrc{
+			Status:    states.ObjectTainted,
+			AttrsJSON: []byte(`{"test_string":"ok"}`),
+		},
+		mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`),
+	)
+	root.SetResourceInstanceCurrent(
+		mustResourceInstanceAddr("test_object.b").Resource,
+		&states.ResourceInstanceObjectSrc{
+			Status:    states.ObjectTainted,
+			AttrsJSON: []byte(`{"test_string":"ok"}`),
+		},
+		mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`),
+	)
+
+	ctx := testContext2(t, &ContextOpts{
+		Providers: map[addrs.Provider]providers.Factory{
+			addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
+		},
+	})
+
+	plan, diags := ctx.Plan(m, state, &PlanOpts{
+		Mode: plans.DestroyMode,
+	})
+	if diags.HasErrors() {
+		t.Fatalf("plan: %s", diags.Err())
+	}
+
+	// We're going to corrupt the stored state so that the dependencies will
+	// cause a cycle when building the apply graph.
+	testObjA := plan.PriorState.Modules[""].Resources["test_object.a"].Instances[addrs.NoKey].Current
+	testObjA.Dependencies = append(testObjA.Dependencies, mustResourceInstanceAddr("test_object.b").ContainingResource().Config())
+
+	_, diags = ctx.Apply(plan, m)
+	if !diags.HasErrors() {
+		t.Fatal("expected cycle error from apply")
+	}
+}
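The corruption step works because test_object.b already depends on test_object.a through its configuration; recording the reverse dependency (a on b) in the prior state hands the apply graph builder two opposing ordering constraints, which it must reject as a cycle. A generic sketch of that kind of check over a toy dependency map (hasCycle is illustrative, not Terraform's dag package):

package main

import "fmt"

// hasCycle runs a depth-first search with three node colors; reaching a
// node that is still in progress means we followed a back edge, so the
// dependency map contains a cycle.
func hasCycle(deps map[string][]string) bool {
	const (
		unvisited = iota // zero value: node not yet reached
		inProgress
		done
	)
	color := map[string]int{}
	var visit func(string) bool
	visit = func(n string) bool {
		switch color[n] {
		case inProgress:
			return true
		case done:
			return false
		}
		color[n] = inProgress
		for _, d := range deps[n] {
			if visit(d) {
				return true
			}
		}
		color[n] = done
		return false
	}
	for n := range deps {
		if visit(n) {
			return true
		}
	}
	return false
}

func main() {
	// test_object.b depends on test_object.a via its configuration; the
	// corrupted state adds the reverse edge, a -> b, closing the loop.
	deps := map[string][]string{
		"test_object.a": {"test_object.b"},
		"test_object.b": {"test_object.a"},
	}
	fmt.Println("cycle:", hasCycle(deps)) // cycle: true
}

With both edges present the walk finds a back edge and reports the cycle, which is the error this test expects Apply to surface instead of silently returning a nil state.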