cli: Use standard JSON diagnostics for validate

Now that we have a comprehensive JSON diagnostic structure, we can use
it in the `validate -json` output instead of the inline version. Note
that this changes the output of `validate -json` in two ways:

1. We fix some off-by-one errors caused by zero-width highlight ranges.
   This aligns the JSON diagnostic output with the text output seen by
   most Terraform users, so I consider this a bug fix.
2. We add the `snippet` field to the JSON diagnostics where available.
   This is purely additive and is permitted under our JSON format
   stability guarantees.
Authored by Alisdair McDiarmid on 2021-03-11 12:57:07 -05:00, committed by Alisdair McDiarmid
parent d9d88b9243
commit c92826c14d
10 changed files with 149 additions and 55 deletions

View File

@@ -16,9 +16,17 @@
},
"end": {
"line": 1,
"column": 23,
"byte": 22
"column": 24,
"byte": 23
}
},
"snippet": {
"context": "module \"super#module\"",
"code": "module \"super#module\" {",
"start_line": 1,
"highlight_start_offset": 22,
"highlight_end_offset": 23,
"values": []
}
},
{
@@ -37,6 +45,14 @@
"column": 22,
"byte": 21
}
},
"snippet": {
"context": "module \"super#module\"",
"code": "module \"super#module\" {",
"start_line": 1,
"highlight_start_offset": 7,
"highlight_end_offset": 21,
"values": []
}
},
{
@@ -55,6 +71,14 @@
"column": 15,
"byte": 58
}
},
"snippet": {
"context": "module \"super\"",
"code": " source = var.modulename",
"start_line": 5,
"highlight_start_offset": 11,
"highlight_end_offset": 14,
"values": []
}
},
{
@@ -73,6 +97,14 @@
"column": 26,
"byte": 69
}
},
"snippet": {
"context": "module \"super\"",
"code": " source = var.modulename",
"start_line": 5,
"highlight_start_offset": 11,
"highlight_end_offset": 25,
"values": []
}
},
{
@@ -91,6 +123,14 @@
"column": 15,
"byte": 41
}
},
"snippet": {
"context": null,
"code": "module \"super\" {",
"start_line": 4,
"highlight_start_offset": 0,
"highlight_end_offset": 14,
"values": []
}
},
{
@@ -109,6 +149,14 @@
"column": 22,
"byte": 21
}
},
"snippet": {
"context": null,
"code": "module \"super#module\" {",
"start_line": 1,
"highlight_start_offset": 0,
"highlight_end_offset": 21,
"values": []
}
}
]

View File

@@ -19,6 +19,14 @@
"column": 19,
"byte": 125
}
},
"snippet": {
"context": "variable \"vairable_with_interpolation\"",
"code": " default = \"${var.otherresourcename}\"",
"start_line": 6,
"highlight_start_offset": 15,
"highlight_end_offset": 18,
"values": []
}
},
{
@@ -37,6 +45,14 @@
"column": 44,
"byte": 224
}
},
"snippet": {
"context": "resource \"aws_instance\" \"web\"",
"code": " depends_on = [\"${var.otherresourcename}}\"]",
"start_line": 10,
"highlight_start_offset": 16,
"highlight_end_offset": 43,
"values": []
}
}
]

View File

@@ -19,6 +19,14 @@
"column": 18,
"byte": 114
}
},
"snippet": {
"context": "resource \"test_instance\" \"foo\"",
"code": " name = test",
"start_line": 6,
"highlight_start_offset": 13,
"highlight_end_offset": 17,
"values": []
}
}
]

View File

@@ -19,6 +19,14 @@
"column": 36,
"byte": 132
}
},
"snippet": {
"context": "resource \"test_instance\" \"foo\"",
"code": " description = var.description",
"start_line": 6,
"highlight_start_offset": 20,
"highlight_end_offset": 35,
"values": []
}
}
]

View File

@@ -19,6 +19,14 @@
"column": 22,
"byte": 67
}
},
"snippet": {
"context": null,
"code": "module \"multi_module\" {",
"start_line": 5,
"highlight_start_offset": 0,
"highlight_end_offset": 21,
"values": []
}
},
{
@@ -37,6 +45,14 @@
"column": 22,
"byte": 67
}
},
"snippet": {
"context": null,
"code": "module \"multi_module\" {",
"start_line": 5,
"highlight_start_offset": 0,
"highlight_end_offset": 21,
"values": []
}
}
]

View File

@@ -19,6 +19,14 @@
"column": 15,
"byte": 99
}
},
"snippet": {
"context": null,
"code": "provider \"aws\" {",
"start_line": 7,
"highlight_start_offset": 0,
"highlight_end_offset": 14,
"values": []
}
}
]

View File

@@ -19,6 +19,14 @@
"column": 30,
"byte": 64
}
},
"snippet": {
"context": null,
"code": "resource \"aws_instance\" \"web\" {",
"start_line": 4,
"highlight_start_offset": 0,
"highlight_end_offset": 29,
"values": []
}
}
]

View File

@@ -19,6 +19,14 @@
"column": 8,
"byte": 7
}
},
"snippet": {
"context": null,
"code": "resorce \"test_instance\" \"foo\" { # Intentional typo to test error reporting",
"start_line": 1,
"highlight_start_offset": 0,
"highlight_end_offset": 7,
"values": []
}
}
]

View File

@@ -16,9 +16,17 @@
},
"end": {
"line": 1,
"column": 18,
"byte": 17
"column": 19,
"byte": 18
}
},
"snippet": {
"context": "output \"myvalue\"",
"code": "output \"myvalue\" {",
"start_line": 1,
"highlight_start_offset": 17,
"highlight_end_offset": 18,
"values": []
}
},
{
@@ -37,6 +45,14 @@
"column": 9,
"byte": 27
}
},
"snippet": {
"context": "output \"myvalue\"",
"code": " values = \"Some value\"",
"start_line": 2,
"highlight_start_offset": 2,
"highlight_end_offset": 8,
"values": []
}
}
]

View File

@@ -8,6 +8,7 @@ import (
"github.com/zclconf/go-cty/cty"
viewsjson "github.com/hashicorp/terraform/command/views/json"
"github.com/hashicorp/terraform/terraform"
"github.com/hashicorp/terraform/tfdiags"
)
@@ -118,77 +119,34 @@ func (c *ValidateCommand) validate(dir string) tfdiags.Diagnostics {
func (c *ValidateCommand) showResults(diags tfdiags.Diagnostics, jsonOutput bool) int {
switch {
case jsonOutput:
// FIXME: Eventually we'll probably want to factor this out somewhere
// to support machine-readable outputs for other commands too, but for
// now it's simplest to do this inline here.
type Pos struct {
Line int `json:"line"`
Column int `json:"column"`
Byte int `json:"byte"`
}
type Range struct {
Filename string `json:"filename"`
Start Pos `json:"start"`
End Pos `json:"end"`
}
type Diagnostic struct {
Severity string `json:"severity,omitempty"`
Summary string `json:"summary,omitempty"`
Detail string `json:"detail,omitempty"`
Range *Range `json:"range,omitempty"`
}
type Output struct {
// We include some summary information that is actually redundant
// with the detailed diagnostics, but avoids the need for callers
// to re-implement our logic for deciding these.
Valid bool `json:"valid"`
ErrorCount int `json:"error_count"`
WarningCount int `json:"warning_count"`
Diagnostics []Diagnostic `json:"diagnostics"`
Valid bool `json:"valid"`
ErrorCount int `json:"error_count"`
WarningCount int `json:"warning_count"`
Diagnostics []*viewsjson.Diagnostic `json:"diagnostics"`
}
var output Output
output.Valid = true // until proven otherwise
configSources := c.configSources()
for _, diag := range diags {
var jsonDiag Diagnostic
output.Diagnostics = append(output.Diagnostics, viewsjson.NewDiagnostic(diag, configSources))
switch diag.Severity() {
case tfdiags.Error:
jsonDiag.Severity = "error"
output.ErrorCount++
output.Valid = false
case tfdiags.Warning:
jsonDiag.Severity = "warning"
output.WarningCount++
}
desc := diag.Description()
jsonDiag.Summary = desc.Summary
jsonDiag.Detail = desc.Detail
ranges := diag.Source()
if ranges.Subject != nil {
subj := ranges.Subject
jsonDiag.Range = &Range{
Filename: subj.Filename,
Start: Pos{
Line: subj.Start.Line,
Column: subj.Start.Column,
Byte: subj.Start.Byte,
},
End: Pos{
Line: subj.End.Line,
Column: subj.End.Column,
Byte: subj.End.Byte,
},
}
}
output.Diagnostics = append(output.Diagnostics, jsonDiag)
}
if output.Diagnostics == nil {
// Make sure this always appears as an array in our output, since
// this is easier to consume for dynamically-typed languages.
output.Diagnostics = []Diagnostic{}
output.Diagnostics = []*viewsjson.Diagnostic{}
}
j, err := json.MarshalIndent(&output, "", "  ")