terraform/states/statefile/version4.go

package statefile
import (
"encoding/json"
"fmt"
"io"
"sort"
version "github.com/hashicorp/go-version"
"github.com/zclconf/go-cty/cty"
ctyjson "github.com/zclconf/go-cty/cty/json"
"github.com/hashicorp/terraform/addrs"
"github.com/hashicorp/terraform/states"
"github.com/hashicorp/terraform/tfdiags"
)
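// readStateV4 decodes src as a version 4 state file. It unmarshals the raw
// JSON into the stateV4 wire struct and then delegates to prepareStateV4 to
// build the in-memory *File representation.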
func readStateV4(src []byte) (*File, tfdiags.Diagnostics) {
var diags tfdiags.Diagnostics
sV4 := &stateV4{}
err := json.Unmarshal(src, sV4)
if err != nil {
diags = diags.Append(jsonUnmarshalDiags(err))
return nil, diags
}
file, prepDiags := prepareStateV4(sV4)
diags = diags.Append(prepDiags)
return file, diags
}
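// prepareStateV4 converts an already-decoded stateV4 value into the in-memory
// *File model, re-validating the addresses stored as strings in the wire
// format and rebuilding the module/resource/instance hierarchy that the flat
// v4 format records inline on each object.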
func prepareStateV4(sV4 *stateV4) (*File, tfdiags.Diagnostics) {
var diags tfdiags.Diagnostics
var tfVersion *version.Version
if sV4.TerraformVersion != "" {
var err error
tfVersion, err = version.NewVersion(sV4.TerraformVersion)
if err != nil {
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Invalid Terraform version string",
fmt.Sprintf("State file claims to have been written by Terraform version %q, which is not a valid version string.", sV4.TerraformVersion),
))
}
}
file := &File{
TerraformVersion: tfVersion,
Serial: sV4.Serial,
Lineage: sV4.Lineage,
}
state := states.NewState()
for _, rsV4 := range sV4.Resources {
rAddr := addrs.Resource{
Type: rsV4.Type,
Name: rsV4.Name,
}
switch rsV4.Mode {
case "managed":
rAddr.Mode = addrs.ManagedResourceMode
case "data":
rAddr.Mode = addrs.DataResourceMode
default:
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Invalid resource mode in state",
fmt.Sprintf("State contains a resource with mode %q (%q %q) which is not supported.", rsV4.Mode, rAddr.Type, rAddr.Name),
))
continue
}
moduleAddr := addrs.RootModuleInstance
if rsV4.Module != "" {
var addrDiags tfdiags.Diagnostics
moduleAddr, addrDiags = addrs.ParseModuleInstanceStr(rsV4.Module)
diags = diags.Append(addrDiags)
if addrDiags.HasErrors() {
continue
}
}
providerAddr, addrDiags := addrs.ParseAbsProviderConfigStr(rsV4.ProviderConfig)
diags.Append(addrDiags)
if addrDiags.HasErrors() {
// If ParseAbsProviderConfigStr returns an error, the state may have
// been written before Provider FQNs were introduced and the
// AbsProviderConfig string format will need normalization. If so,
// we treat it like a legacy provider (namespace "-") and let the
// provider installer handle detecting the FQN.
var legacyAddrDiags tfdiags.Diagnostics
providerAddr, legacyAddrDiags = addrs.ParseLegacyAbsProviderConfigStr(rsV4.ProviderConfig)
if legacyAddrDiags.HasErrors() {
continue
}
}
ms := state.EnsureModule(moduleAddr)
// Ensure the resource container object is present in the state.
ms.SetResourceProvider(rAddr, providerAddr)
for _, isV4 := range rsV4.Instances {
keyRaw := isV4.IndexKey
var key addrs.InstanceKey
switch tk := keyRaw.(type) {
case int:
key = addrs.IntKey(tk)
case float64:
// Since JSON only has one number type, reading from encoding/json
// gives us a float64 here even if the number is whole.
// float64 has a smaller integer range than int, but in practice
// we rarely have more than a few tens of instances and so
// it's unlikely that we'll exhaust the 52 bits in a float64.
key = addrs.IntKey(int(tk))
case string:
key = addrs.StringKey(tk)
default:
if keyRaw != nil {
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Invalid resource instance metadata in state",
fmt.Sprintf("Resource %s has an instance with the invalid instance key %#v.", rAddr.Absolute(moduleAddr), keyRaw),
))
continue
}
key = addrs.NoKey
}
instAddr := rAddr.Instance(key)
obj := &states.ResourceInstanceObjectSrc{
SchemaVersion: isV4.SchemaVersion,
CreateBeforeDestroy: isV4.CreateBeforeDestroy,
}
{
// Instance attributes
switch {
case isV4.AttributesRaw != nil:
obj.AttrsJSON = isV4.AttributesRaw
case isV4.AttributesFlat != nil:
obj.AttrsFlat = isV4.AttributesFlat
default:
// This is odd, but we'll accept it and just treat the
// object as being empty. In practice this should arise
// only from the contrived sort of state objects we tend
// to hand-write inline in tests.
obj.AttrsJSON = []byte{'{', '}'}
}
}
// Sensitive paths
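// These are stored as a JSON array of paths, each path itself an array
// of steps of the form {"type":"get_attr","value":"foo"} or
// {"type":"index","value":<cty-encoded value>}. For example, the
// attribute path foo[2] is recorded as:
// [[{"type":"get_attr","value":"foo"},
//   {"type":"index","value":{"type":"number","value":2}}]]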
if isV4.AttributeSensitivePaths != nil {
paths, pathsDiags := unmarshalPaths([]byte(isV4.AttributeSensitivePaths))
diags = diags.Append(pathsDiags)
if pathsDiags.HasErrors() {
continue
}
var pvm []cty.PathValueMarks
for _, path := range paths {
pvm = append(pvm, cty.PathValueMarks{
Path: path,
Marks: cty.NewValueMarks("sensitive"),
})
}
obj.AttrSensitivePaths = pvm
}
{
// Status
raw := isV4.Status
switch raw {
case "":
obj.Status = states.ObjectReady
case "tainted":
obj.Status = states.ObjectTainted
default:
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Invalid resource instance metadata in state",
fmt.Sprintf("Instance %s has invalid status %q.", instAddr.Absolute(moduleAddr), raw),
))
continue
}
}
if raw := isV4.PrivateRaw; len(raw) > 0 {
obj.Private = raw
}
{
depsRaw := isV4.Dependencies
deps := make([]addrs.ConfigResource, 0, len(depsRaw))
for _, depRaw := range depsRaw {
addr, addrDiags := addrs.ParseAbsResourceStr(depRaw)
diags = diags.Append(addrDiags)
if addrDiags.HasErrors() {
continue
}
deps = append(deps, addr.Config())
}
obj.Dependencies = deps
}
switch {
case isV4.Deposed != "":
dk := states.DeposedKey(isV4.Deposed)
if len(dk) != 8 {
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Invalid resource instance metadata in state",
fmt.Sprintf("Instance %s has an object with deposed key %q, which is not correctly formatted.", instAddr.Absolute(moduleAddr), isV4.Deposed),
))
continue
}
is := ms.ResourceInstance(instAddr)
if is.HasDeposed(dk) {
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Duplicate resource instance in state",
fmt.Sprintf("Instance %s deposed object %q appears multiple times in the state file.", instAddr.Absolute(moduleAddr), dk),
))
continue
}
ms.SetResourceInstanceDeposed(instAddr, dk, obj, providerAddr)
default:
is := ms.ResourceInstance(instAddr)
if is.HasCurrent() {
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Duplicate resource instance in state",
fmt.Sprintf("Instance %s appears multiple times in the state file.", instAddr.Absolute(moduleAddr)),
))
continue
}
ms.SetResourceInstanceCurrent(instAddr, obj, providerAddr)
}
}
// We repeat this after creating the instances because
// SetResourceInstanceCurrent automatically resets this metadata based
// on the incoming objects. That behavior is useful when we're making
// piecemeal updates to the state during an apply, but when we're
// reading the state file we want to reflect its contents exactly.
ms.SetResourceProvider(rAddr, providerAddr)
}
// The root module is special in that we persist its attributes and thus
// need to reload them now. (For descendent modules we just re-calculate
// them based on the latest configuration on each run.)
{
rootModule := state.RootModule()
for name, fos := range sV4.RootOutputs {
os := &states.OutputValue{
Addr: addrs.AbsOutputValue{
OutputValue: addrs.OutputValue{
Name: name,
},
},
}
os.Sensitive = fos.Sensitive
ty, err := ctyjson.UnmarshalType([]byte(fos.ValueTypeRaw))
if err != nil {
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Invalid output value type in state",
fmt.Sprintf("The state file has an invalid type specification for output %q: %s.", name, err),
))
continue
}
val, err := ctyjson.Unmarshal([]byte(fos.ValueRaw), ty)
if err != nil {
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Invalid output value saved in state",
fmt.Sprintf("The state file has an invalid value for output %q: %s.", name, err),
))
continue
}
os.Value = val
rootModule.OutputValues[name] = os
}
}
file.State = state
return file, diags
}
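// writeStateV4 converts the in-memory *File representation back into the flat
// stateV4 wire struct, normalizes it for deterministic ordering, and writes it
// to w as indented JSON terminated by a newline.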
func writeStateV4(file *File, w io.Writer) tfdiags.Diagnostics {
// Here we'll convert back from the "File" representation to our
// stateV4 struct representation and write that.
//
// While we support legacy state formats for reading, we only support the
// latest for writing and so if a V5 is added in future then this function
// should be deleted and replaced with a writeStateV5, even though the
// read/prepare V4 functions above would stick around.
var diags tfdiags.Diagnostics
if file == nil || file.State == nil {
panic("attempt to write nil state to file")
}
var terraformVersion string
if file.TerraformVersion != nil {
terraformVersion = file.TerraformVersion.String()
}
sV4 := &stateV4{
TerraformVersion: terraformVersion,
Serial: file.Serial,
Lineage: file.Lineage,
RootOutputs: map[string]outputStateV4{},
Resources: []resourceStateV4{},
}
for name, os := range file.State.RootModule().OutputValues {
src, err := ctyjson.Marshal(os.Value, os.Value.Type())
if err != nil {
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Failed to serialize output value in state",
fmt.Sprintf("An error occured while serializing output value %q: %s.", name, err),
))
continue
}
typeSrc, err := ctyjson.MarshalType(os.Value.Type())
if err != nil {
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Failed to serialize output value in state",
fmt.Sprintf("An error occured while serializing the type of output value %q: %s.", name, err),
))
continue
}
sV4.RootOutputs[name] = outputStateV4{
Sensitive: os.Sensitive,
ValueRaw: json.RawMessage(src),
ValueTypeRaw: json.RawMessage(typeSrc),
}
}
for _, ms := range file.State.Modules {
moduleAddr := ms.Addr
for _, rs := range ms.Resources {
resourceAddr := rs.Addr.Resource
var mode string
switch resourceAddr.Mode {
case addrs.ManagedResourceMode:
mode = "managed"
case addrs.DataResourceMode:
mode = "data"
default:
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Failed to serialize resource in state",
fmt.Sprintf("Resource %s has mode %s, which cannot be serialized in state", resourceAddr.Absolute(moduleAddr), resourceAddr.Mode),
))
continue
}
sV4.Resources = append(sV4.Resources, resourceStateV4{
Module: moduleAddr.String(),
Mode: mode,
Type: resourceAddr.Type,
Name: resourceAddr.Name,
ProviderConfig: rs.ProviderConfig.String(),
Instances: []instanceObjectStateV4{},
})
rsV4 := &(sV4.Resources[len(sV4.Resources)-1])
for key, is := range rs.Instances {
if is.HasCurrent() {
var objDiags tfdiags.Diagnostics
rsV4.Instances, objDiags = appendInstanceObjectStateV4(
rs, is, key, is.Current, states.NotDeposed,
rsV4.Instances,
)
diags = diags.Append(objDiags)
}
for dk, obj := range is.Deposed {
var objDiags tfdiags.Diagnostics
rsV4.Instances, objDiags = appendInstanceObjectStateV4(
rs, is, key, obj, dk,
rsV4.Instances,
)
diags = diags.Append(objDiags)
}
}
}
}
sV4.normalize()
src, err := json.MarshalIndent(sV4, "", " ")
if err != nil {
// Shouldn't happen if we do our conversion to *stateV4 correctly above.
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Failed to serialize state",
fmt.Sprintf("An error occured while serializing the state to save it. This is a bug in Terraform and should be reported: %s.", err),
))
return diags
}
src = append(src, '\n')
_, err = w.Write(src)
if err != nil {
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Failed to write state",
fmt.Sprintf("An error occured while writing the serialized state: %s.", err),
))
return diags
}
return diags
}
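// appendInstanceObjectStateV4 converts a single resource instance object
// (either the current object or a deposed one, identified by deposed) into its
// v4 wire representation and appends it to isV4s, returning the extended slice
// along with any diagnostics produced during the conversion.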
func appendInstanceObjectStateV4(rs *states.Resource, is *states.ResourceInstance, key addrs.InstanceKey, obj *states.ResourceInstanceObjectSrc, deposed states.DeposedKey, isV4s []instanceObjectStateV4) ([]instanceObjectStateV4, tfdiags.Diagnostics) {
var diags tfdiags.Diagnostics
var status string
switch obj.Status {
case states.ObjectReady:
status = ""
case states.ObjectTainted:
status = "tainted"
default:
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Failed to serialize resource instance in state",
fmt.Sprintf("Instance %s has status %s, which cannot be saved in state.", rs.Addr.Instance(key), obj.Status),
))
}
var privateRaw []byte
if len(obj.Private) > 0 {
privateRaw = obj.Private
}
deps := make([]string, len(obj.Dependencies))
for i, depAddr := range obj.Dependencies {
deps[i] = depAddr.String()
}
var rawKey interface{}
switch tk := key.(type) {
case addrs.IntKey:
rawKey = int(tk)
case addrs.StringKey:
rawKey = string(tk)
default:
if key != addrs.NoKey {
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Failed to serialize resource instance in state",
fmt.Sprintf("Instance %s has an unsupported instance key: %#v.", rs.Addr.Instance(key), key),
))
}
}
// Extract paths from path value marks
var paths []cty.Path
for _, vm := range obj.AttrSensitivePaths {
paths = append(paths, vm.Path)
}
// Marshal paths to JSON
attributeSensitivePaths, pathsDiags := marshalPaths(paths)
diags = diags.Append(pathsDiags)
return append(isV4s, instanceObjectStateV4{
IndexKey: rawKey,
Deposed: string(deposed),
Status: status,
SchemaVersion: obj.SchemaVersion,
AttributesFlat: obj.AttrsFlat,
AttributesRaw: obj.AttrsJSON,
AttributeSensitivePaths: attributeSensitivePaths,
PrivateRaw: privateRaw,
Dependencies: deps,
CreateBeforeDestroy: obj.CreateBeforeDestroy,
}), diags
}
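// stateV4 is the top-level wire representation of a version 4 state file.
// An illustrative (not authoritative) empty snapshot, following the field
// tags below, has this shape:
//
//   {
//     "version": 4,
//     "terraform_version": "...",
//     "serial": 0,
//     "lineage": "...",
//     "outputs": {},
//     "resources": []
//   }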
type stateV4 struct {
Version stateVersionV4 `json:"version"`
TerraformVersion string `json:"terraform_version"`
Serial uint64 `json:"serial"`
Lineage string `json:"lineage"`
RootOutputs map[string]outputStateV4 `json:"outputs"`
Resources []resourceStateV4 `json:"resources"`
}
// normalize makes some in-place changes to normalize the way items are
// stored to ensure that two functionally-equivalent states will be stored
// identically.
func (s *stateV4) normalize() {
sort.Stable(sortResourcesV4(s.Resources))
for _, rs := range s.Resources {
sort.Stable(sortInstancesV4(rs.Instances))
}
}
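// outputStateV4 is the wire representation of a single root module output
// value; ValueRaw and ValueTypeRaw hold the ctyjson encodings of the value
// and of its type, respectively.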
type outputStateV4 struct {
ValueRaw json.RawMessage `json:"value"`
ValueTypeRaw json.RawMessage `json:"type"`
Sensitive bool `json:"sensitive,omitempty"`
}
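// resourceStateV4 is the wire representation of a resource, carrying the
// module instance and provider configuration addresses as strings rather than
// in the surrounding hierarchy used by the in-memory model.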
type resourceStateV4 struct {
Module string `json:"module,omitempty"`
Mode string `json:"mode"`
Type string `json:"type"`
Name string `json:"name"`
EachMode string `json:"each,omitempty"`
ProviderConfig string `json:"provider"`
Instances []instanceObjectStateV4 `json:"instances"`
}
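// instanceObjectStateV4 is the wire representation of a single resource
// instance object: one current or deposed object belonging to a particular
// instance key of a resource.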
type instanceObjectStateV4 struct {
IndexKey interface{} `json:"index_key,omitempty"`
Status string `json:"status,omitempty"`
Deposed string `json:"deposed,omitempty"`
SchemaVersion uint64 `json:"schema_version"`
AttributesRaw json.RawMessage `json:"attributes,omitempty"`
AttributesFlat map[string]string `json:"attributes_flat,omitempty"`
AttributeSensitivePaths json.RawMessage `json:"sensitive_attributes,omitempty"`
PrivateRaw []byte `json:"private,omitempty"`
Dependencies []string `json:"dependencies,omitempty"`
CreateBeforeDestroy bool `json:"create_before_destroy,omitempty"`
}
// stateVersionV4 is a weird special type we use to produce our hard-coded
// "version": 4 in the JSON serialization.
type stateVersionV4 struct{}
func (sv stateVersionV4) MarshalJSON() ([]byte, error) {
return []byte{'4'}, nil
}
func (sv stateVersionV4) UnmarshalJSON([]byte) error {
// Nothing to do: we already know we're version 4
return nil
}
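// sortResourcesV4 implements sort.Interface for []resourceStateV4, ordering
// resources by module, then mode, then type, then name so that resources are
// written to the state file in a deterministic order.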
type sortResourcesV4 []resourceStateV4
func (sr sortResourcesV4) Len() int { return len(sr) }
func (sr sortResourcesV4) Swap(i, j int) { sr[i], sr[j] = sr[j], sr[i] }
func (sr sortResourcesV4) Less(i, j int) bool {
switch {
case sr[i].Module != sr[j].Module:
return sr[i].Module < sr[j].Module
case sr[i].Mode != sr[j].Mode:
return sr[i].Mode < sr[j].Mode
case sr[i].Type != sr[j].Type:
return sr[i].Type < sr[j].Type
case sr[i].Name != sr[j].Name:
return sr[i].Name < sr[j].Name
default:
return false
}
}
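// sortInstancesV4 implements sort.Interface for []instanceObjectStateV4,
// ordering instances by index key (keyless instances first, integer keys
// compared numerically, string keys compared lexically) and breaking ties
// using the deposed key, again to keep the serialized output deterministic.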
type sortInstancesV4 []instanceObjectStateV4
func (si sortInstancesV4) Len() int { return len(si) }
func (si sortInstancesV4) Swap(i, j int) { si[i], si[j] = si[j], si[i] }
func (si sortInstancesV4) Less(i, j int) bool {
ki := si[i].IndexKey
kj := si[j].IndexKey
if ki != kj {
if (ki == nil) != (kj == nil) {
return ki == nil
}
if kii, isInt := ki.(int); isInt {
if kji, isInt := kj.(int); isInt {
return kii < kji
}
return true
}
if kis, isStr := ki.(string); isStr {
if kjs, isStr := kj.(string); isStr {
return kis < kjs
}
return true
}
}
if si[i].Deposed != si[j].Deposed {
return si[i].Deposed < si[j].Deposed
}
return false
}
// pathStep is an intermediate representation of a cty.PathStep to facilitate
// consistent JSON serialization. The Value field can either be a cty.Value of
// dynamic type (for index steps), or a string (for get attr steps).
type pathStep struct {
Type string `json:"type"`
Value json.RawMessage `json:"value"`
}
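// The step type markers used in the JSON representation of a path step.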
const (
indexPathStepType = "index"
getAttrPathStepType = "get_attr"
)
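// unmarshalPaths decodes the nested JSON array produced by marshalPaths back
// into a slice of cty.Path values. Any path containing a step that cannot be
// decoded is skipped, with a diagnostic recorded for the failure.
//
// As a rough sketch of the expected input (the exact cty JSON encoding of an
// index key may differ in detail), a path like bar["x"] would appear as:
//
//	[[{"type":"get_attr","value":"bar"},{"type":"index","value":{"value":"x","type":"string"}}]]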
func unmarshalPaths(buf []byte) ([]cty.Path, tfdiags.Diagnostics) {
var diags tfdiags.Diagnostics
var jsonPaths [][]pathStep
err := json.Unmarshal(buf, &jsonPaths)
if err != nil {
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Error unmarshaling path steps",
err.Error(),
))
}
paths := make([]cty.Path, 0, len(jsonPaths))
unmarshalOuter:
for _, jsonPath := range jsonPaths {
var path cty.Path
for _, jsonStep := range jsonPath {
switch jsonStep.Type {
case indexPathStepType:
key, err := ctyjson.Unmarshal(jsonStep.Value, cty.DynamicPseudoType)
if err != nil {
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Error unmarshaling path step",
fmt.Sprintf("Failed to unmarshal index step key: %s", err),
))
continue unmarshalOuter
}
path = append(path, cty.IndexStep{Key: key})
case getAttrPathStepType:
var name string
if err := json.Unmarshal(jsonStep.Value, &name); err != nil {
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Error unmarshaling path step",
fmt.Sprintf("Failed to unmarshal get attr step name: %s", err),
))
continue unmarshalOuter
}
path = append(path, cty.GetAttrStep{Name: name})
default:
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Unsupported path step",
fmt.Sprintf("Unsupported path step type %q", jsonStep.Type),
))
continue unmarshalOuter
}
}
paths = append(paths, path)
}
return paths, diags
}
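// marshalPaths encodes a slice of cty.Path values as a nested JSON array of
// pathStep objects, suitable for decoding again with unmarshalPaths. Any path
// containing a step that cannot be encoded is skipped, with a diagnostic
// recorded for the failure.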
func marshalPaths(paths []cty.Path) ([]byte, tfdiags.Diagnostics) {
var diags tfdiags.Diagnostics
// cty.Path is a slice of cty.PathSteps, so our representation of a slice
// of paths is a nested slice of our intermediate pathStep struct
jsonPaths := make([][]pathStep, 0, len(paths))
marshalOuter:
for _, path := range paths {
jsonPath := make([]pathStep, 0, len(path))
for _, step := range path {
var jsonStep pathStep
switch s := step.(type) {
case cty.IndexStep:
key, err := ctyjson.Marshal(s.Key, cty.DynamicPseudoType)
if err != nil {
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Error marshaling path step",
fmt.Sprintf("Failed to marshal index step key %#v: %s", s.Key, err),
))
continue marshalOuter
}
jsonStep.Type = indexPathStepType
jsonStep.Value = key
case cty.GetAttrStep:
name, err := json.Marshal(s.Name)
if err != nil {
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Error marshaling path step",
fmt.Sprintf("Failed to marshal get attr step name %s: %s", s.Name, err),
))
continue marshalOuter
}
jsonStep.Type = getAttrPathStepType
jsonStep.Value = name
default:
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Unsupported path step",
fmt.Sprintf("Unsupported path step %#v (%t)", step, step),
))
continue marshalOuter
}
jsonPath = append(jsonPath, jsonStep)
}
jsonPaths = append(jsonPaths, jsonPath)
}
buf, err := json.Marshal(jsonPaths)
if err != nil {
diags = diags.Append(tfdiags.Sourceless(
tfdiags.Error,
"Error marshaling path steps",
fmt.Sprintf("Failed to marshal path steps: %s", err),
))
}
return buf, diags
}