feat: show Terraform error details (#6643)

This commit is contained in:
Marcin Tojek 2023-03-20 14:23:40 +01:00 committed by GitHub
parent a4d86e9d78
commit de83723310
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 137 additions and 10 deletions

View File

@ -0,0 +1,68 @@
package terraform
import (
"bytes"
"fmt"
"sort"
"strings"
tfjson "github.com/hashicorp/terraform-json"
)
// FormatDiagnostic renders a Terraform JSON diagnostic as a multi-line,
// human-readable message: the source snippet (when available) followed by
// the diagnostic detail.
//
// This implementation is based on the original Terraform formatter, which unfortunately is internal:
// https://github.com/hashicorp/terraform/blob/6b35927cf0988262739a5f0acea4790ae58a16d3/internal/command/format/diagnostic.go#L125
func FormatDiagnostic(diag *tfjson.Diagnostic) string {
	var out bytes.Buffer
	appendSourceSnippets(&out, diag)
	_, _ = out.WriteString(diag.Detail)
	return out.String()
}
// appendSourceSnippets writes the "on <file> line <n>" header, the offending
// source lines, and any captured expression values for diag into buf. It
// writes nothing when the diagnostic carries no range information, and always
// terminates its output with a blank line otherwise.
func appendSourceSnippets(buf *bytes.Buffer, diag *tfjson.Diagnostic) {
	// No range means there is no source location to point at.
	if diag.Range == nil {
		return
	}

	snippet := diag.Snippet
	if snippet == nil {
		// This should generally not happen, as long as sources are always
		// loaded through the main loader. We may load things in other
		// ways in weird cases, so we'll tolerate it at the expense of
		// a not-so-helpful error message.
		_, _ = fmt.Fprintf(buf, "on %s line %d:\n (source code not available)\n", diag.Range.Filename, diag.Range.Start.Line)
		_ = buf.WriteByte('\n')
		return
	}

	// Optional context, e.g. the enclosing resource block.
	var context string
	if snippet.Context != nil {
		context = fmt.Sprintf(", in %s", *snippet.Context)
	}
	_, _ = fmt.Fprintf(buf, "on %s line %d%s:\n", diag.Range.Filename, diag.Range.Start.Line, context)

	// Render the snippet one numbered source line at a time.
	for offset, codeLine := range strings.Split(snippet.Code, "\n") {
		_, _ = fmt.Fprintf(buf, " %d: %s\n", snippet.StartLine+offset, codeLine)
	}

	if len(snippet.Values) > 0 {
		// The diagnostic may also have information about the dynamic
		// values of relevant variables at the point of evaluation.
		// This is particularly useful for expressions that get evaluated
		// multiple times with different values, such as blocks using
		// "count" and "for_each", or within "for" expressions.
		// Sort a copy so the output is deterministic without mutating diag.
		sortedValues := make([]tfjson.DiagnosticExpressionValue, len(snippet.Values))
		copy(sortedValues, snippet.Values)
		sort.Slice(sortedValues, func(a, b int) bool {
			return sortedValues[a].Traversal < sortedValues[b].Traversal
		})

		_, _ = buf.WriteString(" ├────────────────\n")
		for _, val := range sortedValues {
			_, _ = fmt.Fprintf(buf, " │ %s %s\n", val.Traversal, val.Statement)
		}
	}
	_ = buf.WriteByte('\n')
}

View File

@ -0,0 +1,63 @@
package terraform_test
import (
"encoding/json"
"strings"
"testing"
tfjson "github.com/hashicorp/terraform-json"
"github.com/stretchr/testify/require"
"github.com/coder/coder/provisioner/terraform"
)
// hasDiagnostic unwraps the "diagnostic" field of a Terraform machine-readable
// UI log line so test fixtures can be decoded straight into *tfjson.Diagnostic.
type hasDiagnostic struct {
	Diagnostic *tfjson.Diagnostic `json:"diagnostic"`
}
// TestFormatDiagnostic verifies that raw Terraform UI diagnostic JSON is
// rendered into the expected human-readable lines, including the source
// snippet header and dynamic expression values.
func TestFormatDiagnostic(t *testing.T) {
	t.Parallel()

	cases := map[string]struct {
		input    string
		expected []string
	}{
		"Expression": {
			input: `{"@level":"error","@message":"Error: Unsupported attribute","@module":"terraform.ui","@timestamp":"2023-03-17T10:33:38.761493+01:00","diagnostic":{"severity":"error","summary":"Unsupported attribute","detail":"This object has no argument, nested block, or exported attribute named \"foobar\".","range":{"filename":"main.tf","start":{"line":230,"column":81,"byte":5648},"end":{"line":230,"column":88,"byte":5655}},"snippet":{"context":"resource \"docker_container\" \"workspace\"","code":" name = \"coder-${data.coder_workspace.me.owner}-${lower(data.coder_workspace.me.foobar)}\"","start_line":230,"highlight_start_offset":80,"highlight_end_offset":87,"values":[]}},"type":"diagnostic"}`,
			expected: []string{
				"on main.tf line 230, in resource \"docker_container\" \"workspace\":",
				" 230: name = \"coder-${data.coder_workspace.me.owner}-${lower(data.coder_workspace.me.foobar)}\"",
				"",
				"This object has no argument, nested block, or exported attribute named \"foobar\".",
			},
		},
		"DynamicValues": {
			input: `{"@level":"error","@message":"Error: Invalid value for variable","@module":"terraform.ui","@timestamp":"2023-03-17T12:25:37.864793+01:00","diagnostic":{"severity":"error","summary":"Invalid value for variable","detail":"Invalid Digital Ocean Project ID.\n\nThis was checked by the validation rule at main.tf:27,3-13.","range":{"filename":"main.tf","start":{"line":18,"column":1,"byte":277},"end":{"line":18,"column":31,"byte":307}},"snippet":{"context":null,"code":"variable \"step1_do_project_id\" {","start_line":18,"highlight_start_offset":0,"highlight_end_offset":30,"values":[{"traversal":"var.step1_do_project_id","statement":"is \"magic-project-id\""}]}},"type":"diagnostic"}`,
			expected: []string{
				"on main.tf line 18:",
				" 18: variable \"step1_do_project_id\" {",
				" ├────────────────",
				" │ var.step1_do_project_id is \"magic-project-id\"",
				"",
				"Invalid Digital Ocean Project ID.",
				"",
				"This was checked by the validation rule at main.tf:27,3-13.",
			},
		},
	}

	for name, tt := range cases {
		tt := tt
		t.Run(name, func(t *testing.T) {
			t.Parallel()

			// Decode the raw UI log line, then format its diagnostic.
			var parsed hasDiagnostic
			require.NoError(t, json.Unmarshal([]byte(tt.input), &parsed))

			got := terraform.FormatDiagnostic(parsed.Diagnostic)
			require.Equal(t, tt.expected, strings.Split(got, "\n"))
		})
	}
}

View File

@ -496,8 +496,10 @@ func provisionReadAndLog(sink logSink, r io.Reader, done chan<- any) {
if log.Diagnostic == nil {
continue
}
logLevel = convertTerraformLogLevel(log.Diagnostic.Severity, sink)
sink.Log(&proto.Log{Level: logLevel, Output: log.Diagnostic.Detail})
logLevel = convertTerraformLogLevel(string(log.Diagnostic.Severity), sink)
for _, diagLine := range strings.Split(FormatDiagnostic(log.Diagnostic), "\n") {
sink.Log(&proto.Log{Level: logLevel, Output: diagLine})
}
}
}
@ -509,7 +511,7 @@ func convertTerraformLogLevel(logLevel string, sink logSink) proto.LogLevel {
return proto.LogLevel_DEBUG
case "info":
return proto.LogLevel_INFO
case "warn":
case "warn", "warning":
return proto.LogLevel_WARN
case "error":
return proto.LogLevel_ERROR
@ -526,13 +528,7 @@ type terraformProvisionLog struct {
Level string `json:"@level"`
Message string `json:"@message"`
Diagnostic *terraformProvisionLogDiagnostic `json:"diagnostic"`
}
type terraformProvisionLogDiagnostic struct {
Severity string `json:"severity"`
Summary string `json:"summary"`
Detail string `json:"detail"`
Diagnostic *tfjson.Diagnostic `json:"diagnostic,omitempty"`
}
// syncWriter wraps an io.Writer in a sync.Mutex.