coder/provisioner/terraform/provision_test.go


//go:build linux || darwin

package terraform_test

import (
	"archive/tar"
	"bytes"
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"os"
	"path/filepath"
	"runtime"
	"sort"
	"strings"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"cdr.dev/slog"
	"cdr.dev/slog/sloggers/slogtest"

	"github.com/coder/coder/v2/codersdk/drpc"
	"github.com/coder/coder/v2/provisioner/terraform"
	"github.com/coder/coder/v2/provisionersdk"
	"github.com/coder/coder/v2/provisionersdk/proto"
)
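
// provisionerServeOptions customizes the in-process terraform provisioner
// started by setupProvisioner.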
type provisionerServeOptions struct {
	binaryPath  string
	exitTimeout time.Duration
}
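
// setupProvisioner starts a terraform provisioner on an in-memory DRPC
// transport and returns a client connected to it. The provisioner is shut
// down via t.Cleanup when the test finishes.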
func setupProvisioner(t *testing.T, opts *provisionerServeOptions) (context.Context, proto.DRPCProvisionerClient) {
	if opts == nil {
		opts = &provisionerServeOptions{}
	}
	cachePath := t.TempDir()
	workDir := t.TempDir()
	client, server := drpc.MemTransportPipe()
	ctx, cancelFunc := context.WithCancel(context.Background())
	serverErr := make(chan error, 1)
	t.Cleanup(func() {
		_ = client.Close()
		_ = server.Close()
		cancelFunc()
		err := <-serverErr
		if !errors.Is(err, context.Canceled) {
			assert.NoError(t, err)
		}
	})
	go func() {
		serverErr <- terraform.Serve(ctx, &terraform.ServeOptions{
			ServeOptions: &provisionersdk.ServeOptions{
				Listener:      server,
				Logger:        slogtest.Make(t, nil).Leveled(slog.LevelDebug),
				WorkDirectory: workDir,
			},
			BinaryPath:  opts.binaryPath,
			CachePath:   cachePath,
			ExitTimeout: opts.exitTimeout,
		})
	}()
	api := proto.NewDRPCProvisionerClient(client)
	return ctx, api
}
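
// makeTar builds an in-memory tar archive from a map of file name to file
// contents, which is how template source is delivered to the provisioner.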
func makeTar(t *testing.T, files map[string]string) []byte {
	t.Helper()
	var buffer bytes.Buffer
	writer := tar.NewWriter(&buffer)
	for name, content := range files {
		err := writer.WriteHeader(&tar.Header{
			Name: name,
			Size: int64(len(content)),
			Mode: 0o644,
		})
		require.NoError(t, err)
		_, err = writer.Write([]byte(content))
		require.NoError(t, err)
	}
	err := writer.Flush()
	require.NoError(t, err)
	return buffer.Bytes()
}
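
// configure opens a provisioner session and sends the initial Config request
// containing the template source archive.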
func configure(ctx context.Context, t *testing.T, client proto.DRPCProvisionerClient, config *proto.Config) proto.DRPCProvisioner_SessionClient {
	t.Helper()
	sess, err := client.Session(ctx)
	require.NoError(t, err)
	err = sess.Send(&proto.Request{Type: &proto.Request_Config{Config: config}})
	require.NoError(t, err)
	return sess
}
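
// readProvisionLog drains log messages from the session until the first
// non-log response arrives and returns the concatenated log output.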
func readProvisionLog(t *testing.T, response proto.DRPCProvisioner_SessionClient) string {
	var logBuf strings.Builder
	for {
		msg, err := response.Recv()
		require.NoError(t, err)
		if log := msg.GetLog(); log != nil {
			t.Log(log.Level.String(), log.Output)
			_, err = logBuf.WriteString(log.Output)
			require.NoError(t, err)
			continue
		}
		break
	}
	return logBuf.String()
}
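
// sendPlan sends a PlanRequest for the given workspace transition.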
func sendPlan(sess proto.DRPCProvisioner_SessionClient, transition proto.WorkspaceTransition) error {
	return sess.Send(&proto.Request{Type: &proto.Request_Plan{Plan: &proto.PlanRequest{
		Metadata: &proto.Metadata{WorkspaceTransition: transition},
	}}})
}
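
// sendApply sends an ApplyRequest for the given workspace transition.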
func sendApply(sess proto.DRPCProvisioner_SessionClient, transition proto.WorkspaceTransition) error {
	return sess.Send(&proto.Request{Type: &proto.Request_Apply{Apply: &proto.ApplyRequest{
		Metadata: &proto.Metadata{WorkspaceTransition: transition},
	}}})
}
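
// TestProvision_Cancel verifies that canceling a session mid-init or mid-plan
// interrupts the (fake) terraform process and surfaces its exit error in the
// plan response.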
// below we exec fake_cancel.sh, which causes the kernel to execute it, and if more than
// one process tries to do this simultaneously, it can cause "text file busy"
// nolint: paralleltest
func TestProvision_Cancel(t *testing.T) {
	if runtime.GOOS == "windows" {
		t.Skip("This test uses interrupts and is not supported on Windows")
	}
	cwd, err := os.Getwd()
	require.NoError(t, err)
	fakeBin := filepath.Join(cwd, "testdata", "fake_cancel.sh")

	tests := []struct {
		name          string
		mode          string
		startSequence []string
		wantLog       []string
	}{
		{
			name:          "Cancel init",
			mode:          "init",
			startSequence: []string{"init_start"},
			wantLog:       []string{"interrupt", "exit"},
		},
		{
			// Provisioner requires a plan before an apply, so test cancel with plan.
			name:          "Cancel plan",
			mode:          "plan",
			startSequence: []string{"init", "plan_start"},
			wantLog:       []string{"interrupt", "exit"},
		},
	}
	for _, tt := range tests {
		tt := tt
		// below we exec fake_cancel.sh, which causes the kernel to execute it, and if more than
		// one process tries to do this, it can cause "text file busy"
		// nolint: paralleltest
		t.Run(tt.name, func(t *testing.T) {
			dir := t.TempDir()
			binPath := filepath.Join(dir, "terraform")
			// Example: exec /path/to/fake_cancel.sh 1.2.1 plan "$@"
			content := fmt.Sprintf("#!/bin/sh\nexec %q %s %s \"$@\"\n", fakeBin, terraform.TerraformVersion.String(), tt.mode)
			err := os.WriteFile(binPath, []byte(content), 0o755) //#nosec
			require.NoError(t, err)
			t.Logf("wrote fake terraform script to %s", binPath)

			ctx, api := setupProvisioner(t, &provisionerServeOptions{
				binaryPath: binPath,
			})
			sess := configure(ctx, t, api, &proto.Config{
				TemplateSourceArchive: makeTar(t, nil),
			})

			err = sendPlan(sess, proto.WorkspaceTransition_START)
			require.NoError(t, err)

			for _, line := range tt.startSequence {
			LoopStart:
				msg, err := sess.Recv()
				require.NoError(t, err)
				t.Log(msg.Type)
				log := msg.GetLog()
				if log == nil {
					goto LoopStart
				}
				require.Equal(t, line, log.Output)
			}
			err = sess.Send(&proto.Request{
				Type: &proto.Request_Cancel{
					Cancel: &proto.CancelRequest{},
				},
			})
			require.NoError(t, err)
			var gotLog []string
			for {
				msg, err := sess.Recv()
				require.NoError(t, err)
				if log := msg.GetLog(); log != nil {
					gotLog = append(gotLog, log.Output)
				}
				if c := msg.GetPlan(); c != nil {
					require.Contains(t, c.Error, "exit status 1")
					break
				}
			}
			require.Equal(t, tt.wantLog, gotLog)
		})
	}
}
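
// TestProvision_CancelTimeout verifies that a terraform process which keeps
// running after cancellation is killed once the provisioner's exit timeout
// (one second here) elapses.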
// below we exec fake_cancel_hang.sh, which causes the kernel to execute it, and if more than
// one process tries to do this, it can cause "text file busy"
// nolint: paralleltest
func TestProvision_CancelTimeout(t *testing.T) {
	if runtime.GOOS == "windows" {
		t.Skip("This test uses interrupts and is not supported on Windows")
	}
	cwd, err := os.Getwd()
	require.NoError(t, err)
	fakeBin := filepath.Join(cwd, "testdata", "fake_cancel_hang.sh")

	dir := t.TempDir()
	binPath := filepath.Join(dir, "terraform")
	// Example: exec /path/to/fake_cancel_hang.sh 1.2.1 "$@"
	content := fmt.Sprintf("#!/bin/sh\nexec %q %s \"$@\"\n", fakeBin, terraform.TerraformVersion.String())
	err = os.WriteFile(binPath, []byte(content), 0o755) //#nosec
	require.NoError(t, err)

	ctx, api := setupProvisioner(t, &provisionerServeOptions{
		binaryPath:  binPath,
		exitTimeout: time.Second,
	})
	sess := configure(ctx, t, api, &proto.Config{
		TemplateSourceArchive: makeTar(t, nil),
	})

	// provisioner requires plan before apply, so test cancel with plan.
	err = sendPlan(sess, proto.WorkspaceTransition_START)
	require.NoError(t, err)

	for _, line := range []string{"init", "plan_start"} {
	LoopStart:
		msg, err := sess.Recv()
		require.NoError(t, err)
		t.Log(msg.Type)
		log := msg.GetLog()
		if log == nil {
			goto LoopStart
		}
		require.Equal(t, line, log.Output)
	}
	err = sess.Send(&proto.Request{Type: &proto.Request_Cancel{Cancel: &proto.CancelRequest{}}})
	require.NoError(t, err)

	for {
		msg, err := sess.Recv()
		require.NoError(t, err)
		if c := msg.GetPlan(); c != nil {
			require.Contains(t, c.Error, "killed")
			break
		}
	}
}
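
// TestProvision runs table-driven plan (and optionally apply) requests against
// a real terraform binary and checks the resources, parameters, errors, and
// logs returned by the provisioner.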
func TestProvision(t *testing.T) {
	t.Parallel()

	testCases := []struct {
		Name     string
		Files    map[string]string
		Metadata *proto.Metadata
		Request  *proto.PlanRequest
		// Response may be nil to not check the response.
		Response *proto.PlanComplete
		// If ErrorContains is not empty, PlanComplete should have an Error containing the given string
		ErrorContains string
		// If ExpectLogContains is not empty, then the logs should contain it.
		ExpectLogContains string
		// If Apply is true, then send an Apply request and check we get the same Resources as in Response.
		Apply bool
	}{
		{
			Name: "missing-variable",
			Files: map[string]string{
				"main.tf": `variable "A" {
				}`,
			},
			ErrorContains:     "terraform plan:",
			ExpectLogContains: "No value for required variable",
		},
		{
			Name: "missing-variable-dry-run",
			Files: map[string]string{
				"main.tf": `variable "A" {
				}`,
			},
			ErrorContains:     "terraform plan:",
			ExpectLogContains: "No value for required variable",
		},
		{
			Name: "single-resource-dry-run",
			Files: map[string]string{
				"main.tf": `resource "null_resource" "A" {}`,
			},
			Response: &proto.PlanComplete{
				Resources: []*proto.Resource{{
					Name: "A",
					Type: "null_resource",
				}},
			},
		},
		{
			Name: "single-resource",
			Files: map[string]string{
				"main.tf": `resource "null_resource" "A" {}`,
			},
			Response: &proto.PlanComplete{
				Resources: []*proto.Resource{{
					Name: "A",
					Type: "null_resource",
				}},
			},
			Apply: true,
		},
		{
			Name: "single-resource-json",
			Files: map[string]string{
				"main.tf.json": `{
					"resource": {
						"null_resource": {
							"A": [
								{}
							]
						}
					}
				}`,
			},
			Response: &proto.PlanComplete{
				Resources: []*proto.Resource{{
					Name: "A",
					Type: "null_resource",
				}},
			},
			Apply: true,
		},
		{
			Name: "bad-syntax-1",
			Files: map[string]string{
				"main.tf": `a`,
			},
			ErrorContains:     "initialize terraform",
			ExpectLogContains: "Argument or block definition required",
		},
		{
			Name: "bad-syntax-2",
			Files: map[string]string{
				"main.tf": `;asdf;`,
			},
			ErrorContains:     "initialize terraform",
			ExpectLogContains: `The ";" character is not valid.`,
		},
		{
			Name: "destroy-no-state",
			Files: map[string]string{
				"main.tf": `resource "null_resource" "A" {}`,
			},
			Metadata: &proto.Metadata{
				WorkspaceTransition: proto.WorkspaceTransition_DESTROY,
			},
			ExpectLogContains: "nothing to do",
		},
		{
			Name: "rich-parameter-with-value",
			Files: map[string]string{
				"main.tf": `terraform {
					required_providers {
						coder = {
							source = "coder/coder"
							version = "0.6.20"
						}
					}
				}
				data "coder_parameter" "sample" {
					name = "Sample"
					type = "string"
					default = "foobaz"
				}
				data "coder_parameter" "example" {
					name = "Example"
					type = "string"
					default = "foobar"
				}
				resource "null_resource" "example" {
					triggers = {
						misc = "${data.coder_parameter.example.value}"
					}
				}`,
			},
			Request: &proto.PlanRequest{
				RichParameterValues: []*proto.RichParameterValue{
					{
						Name:  "Example",
						Value: "foobaz",
					},
					{
						Name:  "Sample",
						Value: "foofoo",
					},
				},
			},
			Response: &proto.PlanComplete{
				Parameters: []*proto.RichParameter{
					{
						Name:         "Example",
						Type:         "string",
						DefaultValue: "foobar",
					},
					{
						Name:         "Sample",
						Type:         "string",
						DefaultValue: "foobaz",
					},
				},
				Resources: []*proto.Resource{{
					Name: "example",
					Type: "null_resource",
				}},
			},
		},
		{
			Name: "rich-parameter-with-value-json",
			Files: map[string]string{
				"main.tf.json": `{
					"data": {
						"coder_parameter": {
							"example": [
								{
									"default": "foobar",
									"name": "Example",
									"type": "string"
								}
							],
							"sample": [
								{
									"default": "foobaz",
									"name": "Sample",
									"type": "string"
								}
							]
						}
					},
					"resource": {
						"null_resource": {
							"example": [
								{
									"triggers": {
										"misc": "${data.coder_parameter.example.value}"
									}
								}
							]
						}
					},
					"terraform": [
						{
							"required_providers": [
								{
									"coder": {
										"source": "coder/coder",
										"version": "0.6.20"
									}
								}
							]
						}
					]
				}`,
			},
			Request: &proto.PlanRequest{
				RichParameterValues: []*proto.RichParameterValue{
					{
						Name:  "Example",
						Value: "foobaz",
					},
					{
						Name:  "Sample",
						Value: "foofoo",
					},
				},
			},
			Response: &proto.PlanComplete{
				Parameters: []*proto.RichParameter{
					{
						Name:         "Example",
						Type:         "string",
						DefaultValue: "foobar",
					},
					{
						Name:         "Sample",
						Type:         "string",
						DefaultValue: "foobaz",
					},
				},
				Resources: []*proto.Resource{{
					Name: "example",
					Type: "null_resource",
				}},
			},
		},
		{
			Name: "git-auth",
			Files: map[string]string{
				"main.tf": `terraform {
					required_providers {
						coder = {
							source = "coder/coder"
							version = "0.6.20"
						}
					}
				}
				data "coder_git_auth" "github" {
					id = "github"
				}
				resource "null_resource" "example" {}
				resource "coder_metadata" "example" {
					resource_id = null_resource.example.id
					item {
						key = "token"
						value = data.coder_git_auth.github.access_token
					}
				}
				`,
			},
			Request: &proto.PlanRequest{
				ExternalAuthProviders: []*proto.ExternalAuthProvider{{
					Id:          "github",
					AccessToken: "some-value",
				}},
			},
			Response: &proto.PlanComplete{
				Resources: []*proto.Resource{{
					Name: "example",
					Type: "null_resource",
					Metadata: []*proto.Resource_Metadata{{
						Key:   "token",
						Value: "some-value",
					}},
				}},
			},
		},
	}

	for _, testCase := range testCases {
		testCase := testCase
		t.Run(testCase.Name, func(t *testing.T) {
			t.Parallel()
			ctx, api := setupProvisioner(t, nil)
			sess := configure(ctx, t, api, &proto.Config{
				TemplateSourceArchive: makeTar(t, testCase.Files),
			})

			planRequest := &proto.Request{Type: &proto.Request_Plan{Plan: &proto.PlanRequest{
				Metadata: testCase.Metadata,
			}}}
			if testCase.Request != nil {
				planRequest = &proto.Request{Type: &proto.Request_Plan{Plan: testCase.Request}}
			}

			gotExpectedLog := testCase.ExpectLogContains == ""
			provision := func(req *proto.Request) *proto.Response {
				err := sess.Send(req)
				require.NoError(t, err)
				for {
					msg, err := sess.Recv()
					require.NoError(t, err)
					if msg.GetLog() != nil {
						if testCase.ExpectLogContains != "" && strings.Contains(msg.GetLog().Output, testCase.ExpectLogContains) {
							gotExpectedLog = true
						}
						t.Logf("log: [%s] %s", msg.GetLog().Level, msg.GetLog().Output)
						continue
					}
					return msg
				}
			}
			resp := provision(planRequest)
			planComplete := resp.GetPlan()
			require.NotNil(t, planComplete)

			if testCase.ErrorContains != "" {
				require.Contains(t, planComplete.GetError(), testCase.ErrorContains)
			}

			if testCase.Response != nil {
				require.Equal(t, testCase.Response.Error, planComplete.Error)

				// Remove randomly generated data.
				normalizeResources(planComplete.Resources)
				resourcesGot, err := json.Marshal(planComplete.Resources)
				require.NoError(t, err)
				resourcesWant, err := json.Marshal(testCase.Response.Resources)
				require.NoError(t, err)
				require.Equal(t, string(resourcesWant), string(resourcesGot))

				parametersGot, err := json.Marshal(planComplete.Parameters)
				require.NoError(t, err)
				parametersWant, err := json.Marshal(testCase.Response.Parameters)
				require.NoError(t, err)
				require.Equal(t, string(parametersWant), string(parametersGot))
			}

			if testCase.Apply {
				resp = provision(&proto.Request{Type: &proto.Request_Apply{Apply: &proto.ApplyRequest{
					Metadata: &proto.Metadata{WorkspaceTransition: proto.WorkspaceTransition_START},
				}}})
				applyComplete := resp.GetApply()
				require.NotNil(t, applyComplete)

				if testCase.Response != nil {
					normalizeResources(applyComplete.Resources)
					resourcesGot, err := json.Marshal(applyComplete.Resources)
					require.NoError(t, err)
					resourcesWant, err := json.Marshal(testCase.Response.Resources)
					require.NoError(t, err)
					require.Equal(t, string(resourcesWant), string(resourcesGot))
				}
			}

			if !gotExpectedLog {
				t.Fatalf("expected log string %q but never saw it", testCase.ExpectLogContains)
			}
		})
	}
}
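
// normalizeResources strips randomly generated agent data (IDs and auth
// tokens) and sorts agents by name so resources can be compared as JSON.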
func normalizeResources(resources []*proto.Resource) {
	for _, resource := range resources {
		sort.Slice(resource.Agents, func(i, j int) bool {
			return resource.Agents[i].Name < resource.Agents[j].Name
		})
		for _, agent := range resource.Agents {
			agent.Id = ""
			if agent.GetToken() == "" {
				continue
			}
			agent.Auth = &proto.Agent_Token{}
		}
	}
}
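
// TestProvision_ExtraEnv checks that TF_* variables set on the provisioner are
// passed through to terraform (TF_LOG shows up in its output) and that secret
// values are never echoed into the provision logs.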
// nolint:paralleltest
func TestProvision_ExtraEnv(t *testing.T) {
	// #nosec
	const secretValue = "oinae3uinxase"
	t.Setenv("TF_LOG", "INFO")
	t.Setenv("TF_SUPERSECRET", secretValue)

	ctx, api := setupProvisioner(t, nil)
	sess := configure(ctx, t, api, &proto.Config{
		TemplateSourceArchive: makeTar(t, map[string]string{"main.tf": `resource "null_resource" "A" {}`}),
	})

	err := sendPlan(sess, proto.WorkspaceTransition_START)
	require.NoError(t, err)
	found := false
	for {
		msg, err := sess.Recv()
		require.NoError(t, err)
		if log := msg.GetLog(); log != nil {
			t.Log(log.Level.String(), log.Output)
			if strings.Contains(log.Output, "TF_LOG") {
				found = true
			}
			require.NotContains(t, log.Output, secretValue)
		}
		if c := msg.GetPlan(); c != nil {
			require.Empty(t, c.Error)
			break
		}
	}
	require.True(t, found)
}
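
// TestProvision_SafeEnv runs a local-exec provisioner that dumps its
// environment and asserts that the user's variable value is visible, the
// CODER_SECRET value is not, and at least one CODER_-prefixed variable name is
// still present.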
// nolint:paralleltest
func TestProvision_SafeEnv(t *testing.T) {
	// #nosec
	const (
		passedValue = "superautopets"
		secretValue = "oinae3uinxase"
	)

	t.Setenv("VALID_USER_ENV", passedValue)

	// We ensure random CODER_ variables aren't passed through to avoid leaking
	// control plane secrets (e.g. PG URL).
	t.Setenv("CODER_SECRET", secretValue)

	const echoResource = `
	resource "null_resource" "a" {
		provisioner "local-exec" {
			command = "env"
		}
	}
	`

	ctx, api := setupProvisioner(t, nil)
	sess := configure(ctx, t, api, &proto.Config{
		TemplateSourceArchive: makeTar(t, map[string]string{"main.tf": echoResource}),
	})

	err := sendPlan(sess, proto.WorkspaceTransition_START)
	require.NoError(t, err)

	_ = readProvisionLog(t, sess)

	err = sendApply(sess, proto.WorkspaceTransition_START)
	require.NoError(t, err)

	log := readProvisionLog(t, sess)
	require.Contains(t, log, passedValue)
	require.NotContains(t, log, secretValue)
	require.Contains(t, log, "CODER_")
}