fix(cli)!: protect client Logger and refactor cli scaletest tests (#8317)

- (breaking) Protects Logger and LogBodies fields of codersdk.Client with its mutex. This addresses a data race in cli/scaletest.
- Fillets the existing cli/createworkspaces unit test and moves the testing logic there into the tests under scaletest/createworkspaces.
- Adds testutil.RaceEnabled bool const and conditionally skips previously-skipped tests under scaletest/ if the race detector is enabled. This is unfortunate and sad, but I would prefer to have these tests at least running without the race detector than not running at all.
- Adds IgnoreErrors option to fake in-memory agent loggers; having the agents fail the test immediately when they encounter any sort of error isn't really helpful.
This commit is contained in:
Cian Johnston 2023-07-06 09:43:39 +01:00 committed by GitHub
parent 1d746b901b
commit 7fcf319e01
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
20 changed files with 309 additions and 254 deletions

View File

@ -167,7 +167,7 @@ func (r *RootCmd) workspaceAgent() *clibase.Cmd {
slog.F("version", version),
)
client := agentsdk.New(r.agentURL)
client.SDK.Logger = logger
client.SDK.SetLogger(logger)
// Set a reasonable timeout so requests can't hang forever!
// The timeout needs to be reasonably long, because requests
// with large payloads can take a bit. e.g. startup scripts

View File

@ -543,7 +543,7 @@ func (r *RootCmd) InitClient(client *codersdk.Client) clibase.MiddlewareFunc {
if r.debugHTTP {
client.PlainLogger = os.Stderr
client.LogBodies = true
client.SetLogBodies(true)
}
client.DisableDirectConnections = r.disableDirect

View File

@ -3,201 +3,53 @@ package cli_test
import (
"bytes"
"context"
"encoding/json"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/coder/coder/cli/clitest"
"github.com/coder/coder/coderd/coderdtest"
"github.com/coder/coder/codersdk"
"github.com/coder/coder/pty/ptytest"
"github.com/coder/coder/scaletest/harness"
"github.com/coder/coder/testutil"
)
func TestScaleTestCreateWorkspaces(t *testing.T) {
t.Skipf("This test is flakey. See https://github.com/coder/coder/issues/4942")
t.Parallel()
// This test does a create-workspaces scale test with --no-cleanup, checks
// that the created resources are OK, and then runs a cleanup.
t.Run("WorkspaceBuildNoCleanup", func(t *testing.T) {
t.Parallel()
// This test only validates that the CLI command accepts known arguments.
// More thorough testing is done in scaletest/createworkspaces/run_test.go.
ctx, cancelFunc := context.WithTimeout(context.Background(), testutil.WaitLong)
defer cancelFunc()
ctx, cancelFunc := context.WithTimeout(context.Background(), testutil.WaitLong)
defer cancelFunc()
client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true})
_ = coderdtest.CreateFirstUser(t, client)
client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true})
user := coderdtest.CreateFirstUser(t, client)
// Write a parameters file.
tDir := t.TempDir()
outputFile := filepath.Join(tDir, "output.json")
version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil)
coderdtest.AwaitTemplateVersionJob(t, client, version.ID)
template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
inv, root := clitest.New(t, "scaletest", "create-workspaces",
"--count", "2",
"--template", "doesnotexist",
"--no-cleanup",
"--no-wait-for-agents",
"--concurrency", "2",
"--timeout", "30s",
"--job-timeout", "15s",
"--cleanup-concurrency", "1",
"--cleanup-timeout", "30s",
"--cleanup-job-timeout", "15s",
"--output", "text",
"--output", "json:"+outputFile,
)
clitest.SetupConfig(t, client, root)
pty := ptytest.New(t)
inv.Stdout = pty.Output()
inv.Stderr = pty.Output()
// Write a parameters file.
tDir := t.TempDir()
paramsFile := filepath.Join(tDir, "params.yaml")
outputFile := filepath.Join(tDir, "output.json")
f, err := os.Create(paramsFile)
require.NoError(t, err)
defer f.Close()
_, err = f.WriteString(`---
param1: foo
param2: true
param3: 1
`)
require.NoError(t, err)
err = f.Close()
require.NoError(t, err)
inv, root := clitest.New(t, "scaletest", "create-workspaces",
"--count", "2",
"--template", template.Name,
"--parameters-file", paramsFile,
"--parameter", "param1=bar",
"--parameter", "param4=baz",
"--no-cleanup",
// This flag is important for tests because agents will never be
// started.
"--no-wait-for-agents",
// Run and connect flags cannot be tested because they require an
// agent.
"--concurrency", "2",
"--timeout", "30s",
"--job-timeout", "15s",
"--cleanup-concurrency", "1",
"--cleanup-timeout", "30s",
"--cleanup-job-timeout", "15s",
"--output", "text",
"--output", "json:"+outputFile,
)
clitest.SetupConfig(t, client, root)
pty := ptytest.New(t)
inv.Stdout = pty.Output()
inv.Stderr = pty.Output()
done := make(chan any)
go func() {
err := inv.WithContext(ctx).Run()
assert.NoError(t, err)
close(done)
}()
pty.ExpectMatch("Test results:")
pty.ExpectMatch("Pass: 2")
select {
case <-done:
case <-ctx.Done():
}
cancelFunc()
<-done
// Recreate the context.
ctx, cancelFunc = context.WithTimeout(context.Background(), testutil.WaitLong)
defer cancelFunc()
// Verify the output file.
f, err = os.Open(outputFile)
require.NoError(t, err)
defer f.Close()
var res harness.Results
err = json.NewDecoder(f).Decode(&res)
require.NoError(t, err)
require.EqualValues(t, 2, res.TotalRuns)
require.EqualValues(t, 2, res.TotalPass)
// Find the workspaces and users and check that they are what we expect.
workspaces, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{
Offset: 0,
Limit: 100,
})
require.NoError(t, err)
require.Len(t, workspaces.Workspaces, 2)
seenUsers := map[string]struct{}{}
for _, w := range workspaces.Workspaces {
// Sadly we can't verify params as the API doesn't seem to return
// them.
// Verify that the user is a unique scaletest user.
u, err := client.User(ctx, w.OwnerID.String())
require.NoError(t, err)
_, ok := seenUsers[u.ID.String()]
require.False(t, ok, "user has more than one workspace")
seenUsers[u.ID.String()] = struct{}{}
require.Contains(t, u.Username, "scaletest-")
require.Contains(t, u.Email, "scaletest")
}
require.Len(t, seenUsers, len(workspaces.Workspaces))
// Check that there are exactly 3 users.
users, err := client.Users(ctx, codersdk.UsersRequest{
Pagination: codersdk.Pagination{
Offset: 0,
Limit: 100,
},
})
require.NoError(t, err)
require.Len(t, users.Users, len(seenUsers)+1)
// Cleanup.
inv, root = clitest.New(t, "scaletest", "cleanup",
"--cleanup-concurrency", "1",
"--cleanup-timeout", "30s",
"--cleanup-job-timeout", "15s",
)
clitest.SetupConfig(t, client, root)
pty = ptytest.New(t)
inv.Stdout = pty.Output()
inv.Stderr = pty.Output()
done = make(chan any)
go func() {
err := inv.WithContext(ctx).Run()
assert.NoError(t, err)
close(done)
}()
pty.ExpectMatch("Test results:")
pty.ExpectMatch("Pass: 2")
pty.ExpectMatch("Test results:")
pty.ExpectMatch("Pass: 2")
select {
case <-done:
case <-ctx.Done():
}
cancelFunc()
<-done
// Recreate the context (again).
ctx, cancelFunc = context.WithTimeout(context.Background(), testutil.WaitLong)
defer cancelFunc()
// Verify that the workspaces are gone.
workspaces, err = client.Workspaces(ctx, codersdk.WorkspaceFilter{
Offset: 0,
Limit: 100,
})
require.NoError(t, err)
require.Len(t, workspaces.Workspaces, 0)
// Verify that the users are gone.
users, err = client.Users(ctx, codersdk.UsersRequest{
Pagination: codersdk.Pagination{
Offset: 0,
Limit: 100,
},
})
require.NoError(t, err)
require.Len(t, users.Users, 1)
})
err := inv.WithContext(ctx).Run()
require.ErrorContains(t, err, "could not find template \"doesnotexist\" in any organization")
}
// This test just validates that the CLI command accepts its known arguments.

View File

@ -119,7 +119,7 @@ func (r *RootCmd) ssh() *clibase.Cmd {
}
// log HTTP requests
client.Logger = logger
client.SetLogger(logger)
}
workspace, workspaceAgent, err := getWorkspaceAndAgent(ctx, inv, client, codersdk.Me, inv.Args[0])

View File

@ -49,7 +49,7 @@ func setupWorkspaceForAgent(t *testing.T, mutate func([]*proto.Agent) []*proto.A
}
}
client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true})
client.Logger = slogtest.Make(t, nil).Named("client").Leveled(slog.LevelDebug)
client.SetLogger(slogtest.Make(t, nil).Named("client").Leveled(slog.LevelDebug))
user := coderdtest.CreateFirstUser(t, client)
agentToken := uuid.NewString()
version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{

View File

@ -4,6 +4,7 @@ import (
"context"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/coder/coder/coderd/coderdtest"
@ -47,10 +48,13 @@ func TestDeploymentValues(t *testing.T) {
func TestDeploymentStats(t *testing.T) {
t.Parallel()
t.Log("This test is time-sensitive. It may fail if the deployment is not ready in time.")
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
defer cancel()
client := coderdtest.New(t, &coderdtest.Options{})
_ = coderdtest.CreateFirstUser(t, client)
_, err := client.DeploymentStats(ctx)
require.NoError(t, err)
assert.True(t, testutil.Eventually(ctx, t, func(tctx context.Context) bool {
_, err := client.DeploymentStats(tctx)
return err == nil
}, testutil.IntervalMedium), "failed to get deployment stats in time")
}

View File

@ -194,7 +194,7 @@ func (c *Client) Listen(ctx context.Context) (net.Conn, error) {
ticker := time.NewTicker(tick)
defer ticker.Stop()
defer func() {
c.SDK.Logger.Debug(ctx, "coordinate pinger exited")
c.SDK.Logger().Debug(ctx, "coordinate pinger exited")
}()
for {
select {
@ -205,18 +205,18 @@ func (c *Client) Listen(ctx context.Context) (net.Conn, error) {
err := conn.Ping(ctx)
if err != nil {
c.SDK.Logger.Error(ctx, "workspace agent coordinate ping", slog.Error(err))
c.SDK.Logger().Error(ctx, "workspace agent coordinate ping", slog.Error(err))
err := conn.Close(websocket.StatusGoingAway, "Ping failed")
if err != nil {
c.SDK.Logger.Error(ctx, "close workspace agent coordinate websocket", slog.Error(err))
c.SDK.Logger().Error(ctx, "close workspace agent coordinate websocket", slog.Error(err))
}
cancel()
return
}
c.SDK.Logger.Debug(ctx, "got coordinate pong", slog.F("took", time.Since(start)))
c.SDK.Logger().Debug(ctx, "got coordinate pong", slog.F("took", time.Since(start)))
cancel()
}
}

View File

@ -93,8 +93,12 @@ func New(serverURL *url.URL) *Client {
// Client is an HTTP caller for methods to the Coder API.
// @typescript-ignore Client
type Client struct {
mu sync.RWMutex // Protects following.
// mu protects the fields sessionToken, logger, and logBodies. These
// need to be safe for concurrent access.
mu sync.RWMutex
sessionToken string
logger slog.Logger
logBodies bool
HTTPClient *http.Client
URL *url.URL
@ -103,13 +107,6 @@ type Client struct {
// default 'Coder-Session-Token' is used.
SessionTokenHeader string
// Logger is optionally provided to log requests.
// Method, URL, and response code will be logged by default.
Logger slog.Logger
// LogBodies can be enabled to print request and response bodies to the logger.
LogBodies bool
// PlainLogger may be set to log HTTP traffic in a human-readable form.
// It uses the LogBodies option.
PlainLogger io.Writer
@ -124,6 +121,34 @@ type Client struct {
DisableDirectConnections bool
}
// Logger returns the client's current logger. Safe for concurrent use.
func (c *Client) Logger() slog.Logger {
	c.mu.RLock()
	logger := c.logger
	c.mu.RUnlock()
	return logger
}
// SetLogger replaces the client's logger. Safe for concurrent use.
func (c *Client) SetLogger(logger slog.Logger) {
	c.mu.Lock()
	c.logger = logger
	c.mu.Unlock()
}
// LogBodies reports whether request and response bodies are written to the
// logger. Safe for concurrent use.
func (c *Client) LogBodies() bool {
	c.mu.RLock()
	logBodies := c.logBodies
	c.mu.RUnlock()
	return logBodies
}
// SetLogBodies toggles logging of request and response bodies. Safe for
// concurrent use.
func (c *Client) SetLogBodies(logBodies bool) {
	c.mu.Lock()
	c.logBodies = logBodies
	c.mu.Unlock()
}
// SessionToken returns the currently set token for the client.
func (c *Client) SessionToken() string {
c.mu.RLock()
@ -181,7 +206,10 @@ func (c *Client) Request(ctx context.Context, method, path string, body interfac
// Copy the request body so we can log it.
var reqBody []byte
if r != nil && c.LogBodies {
c.mu.RLock()
logBodies := c.logBodies
c.mu.RUnlock()
if r != nil && logBodies {
reqBody, err = io.ReadAll(r)
if err != nil {
return nil, xerrors.Errorf("read request body: %w", err)
@ -223,7 +251,7 @@ func (c *Client) Request(ctx context.Context, method, path string, body interfac
slog.F("url", req.URL.String()),
)
tracing.RunWithoutSpan(ctx, func(ctx context.Context) {
c.Logger.Debug(ctx, "sdk request", slog.F("body", string(reqBody)))
c.Logger().Debug(ctx, "sdk request", slog.F("body", string(reqBody)))
})
resp, err := c.HTTPClient.Do(req)
@ -231,7 +259,7 @@ func (c *Client) Request(ctx context.Context, method, path string, body interfac
// We log after sending the request because the HTTP Transport may modify
// the request within Do, e.g. by adding headers.
if resp != nil && c.PlainLogger != nil {
out, err := httputil.DumpRequest(resp.Request, c.LogBodies)
out, err := httputil.DumpRequest(resp.Request, logBodies)
if err != nil {
return nil, xerrors.Errorf("dump request: %w", err)
}
@ -244,7 +272,7 @@ func (c *Client) Request(ctx context.Context, method, path string, body interfac
}
if c.PlainLogger != nil {
out, err := httputil.DumpResponse(resp, c.LogBodies)
out, err := httputil.DumpResponse(resp, logBodies)
if err != nil {
return nil, xerrors.Errorf("dump response: %w", err)
}
@ -257,7 +285,7 @@ func (c *Client) Request(ctx context.Context, method, path string, body interfac
// Copy the response body so we can log it if it's a loggable mime type.
var respBody []byte
if resp.Body != nil && c.LogBodies {
if resp.Body != nil && logBodies {
mimeType := parseMimeType(resp.Header.Get("Content-Type"))
if _, ok := loggableMimeTypes[mimeType]; ok {
respBody, err = io.ReadAll(resp.Body)
@ -274,7 +302,7 @@ func (c *Client) Request(ctx context.Context, method, path string, body interfac
// See above for why this is not logged to the span.
tracing.RunWithoutSpan(ctx, func(ctx context.Context) {
c.Logger.Debug(ctx, "sdk response",
c.Logger().Debug(ctx, "sdk response",
slog.F("status", resp.StatusCode),
slog.F("body", string(respBody)),
slog.F("trace_id", resp.Header.Get("X-Trace-Id")),

View File

@ -114,8 +114,8 @@ func Test_Client(t *testing.T) {
client.SetSessionToken(token)
logBuf := bytes.NewBuffer(nil)
client.Logger = slog.Make(sloghuman.Sink(logBuf)).Leveled(slog.LevelDebug)
client.LogBodies = true
client.SetLogger(slog.Make(sloghuman.Sink(logBuf)).Leveled(slog.LevelDebug))
client.SetLogBodies(true)
// Setup tracing.
res := resource.NewWithAttributes(

View File

@ -8,6 +8,8 @@ import (
"github.com/stretchr/testify/require"
"github.com/google/uuid"
"github.com/coder/coder/cli/clibase"
"github.com/coder/coder/coderd/coderdtest"
"github.com/coder/coder/codersdk"
@ -17,7 +19,6 @@ import (
"github.com/coder/coder/enterprise/coderd/license"
"github.com/coder/coder/provisioner/echo"
"github.com/coder/coder/testutil"
"github.com/google/uuid"
)
func TestServiceBanners(t *testing.T) {

View File

@ -52,8 +52,8 @@ func (r *Runner) Run(ctx context.Context, _ string, w io.Writer) error {
logs := loadtestutil.NewSyncWriter(w)
defer logs.Close()
logger := slog.Make(sloghuman.Sink(logs)).Leveled(slog.LevelDebug)
r.client.Logger = logger
r.client.LogBodies = true
r.client.SetLogger(logger)
r.client.SetLogBodies(true)
_, _ = fmt.Fprintln(logs, "Opening connection to workspace agent")
switch r.cfg.ConnectionMode {

View File

@ -49,8 +49,8 @@ func (r *Runner) Run(ctx context.Context, id string, logs io.Writer) error {
logs = loadtestutil.NewSyncWriter(logs)
logger := slog.Make(sloghuman.Sink(logs)).Leveled(slog.LevelDebug)
r.client.Logger = logger
r.client.LogBodies = true
r.client.SetLogger(logger)
r.client.SetLogBodies(true)
var (
client = r.client

View File

@ -3,6 +3,7 @@ package createworkspaces_test
import (
"bytes"
"context"
"io"
"testing"
"time"
@ -28,7 +29,9 @@ import (
func Test_Runner(t *testing.T) {
t.Parallel()
t.Skip("Flake seen here: https://github.com/coder/coder/actions/runs/3436164958/jobs/5729513320")
if testutil.RaceEnabled() {
t.Skip("Race detector enabled, skipping time-sensitive test.")
}
t.Run("OK", func(t *testing.T) {
t.Parallel()
@ -79,43 +82,11 @@ func Test_Runner(t *testing.T) {
},
})
version = coderdtest.AwaitTemplateVersionJob(t, client, version.ID)
template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
coderdtest.AwaitTemplateVersionJob(t, client, version.ID)
// Since the runner creates the workspace on it's own, we have to keep
// listing workspaces until we find it, then wait for the build to
// finish, then start the agents.
go func() {
var workspace codersdk.Workspace
for {
res, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{})
if !assert.NoError(t, err) {
return
}
workspaces := res.Workspaces
if len(workspaces) == 1 {
workspace = workspaces[0]
break
}
time.Sleep(100 * time.Millisecond)
}
coderdtest.AwaitWorkspaceBuildJob(t, client, workspace.LatestBuild.ID)
agentClient := agentsdk.New(client.URL)
agentClient.SetSessionToken(authToken)
agentCloser := agent.New(agent.Options{
Client: agentClient,
Logger: slogtest.Make(t, nil).Named("agent").Leveled(slog.LevelWarn),
})
t.Cleanup(func() {
_ = agentCloser.Close()
})
coderdtest.AwaitWorkspaceAgents(t, client, workspace.ID)
}()
closer := goEventuallyStartFakeAgent(ctx, t, client, authToken)
t.Cleanup(closer)
const (
username = "scaletest-user"
@ -185,7 +156,7 @@ func Test_Runner(t *testing.T) {
require.Len(t, workspaces.Workspaces, 0)
})
t.Run("FailedBuild", func(t *testing.T) {
t.Run("NoCleanup", func(t *testing.T) {
t.Parallel()
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
@ -196,6 +167,132 @@ func Test_Runner(t *testing.T) {
})
user := coderdtest.CreateFirstUser(t, client)
authToken := uuid.NewString()
version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{
Parse: echo.ParseComplete,
ProvisionPlan: echo.ProvisionComplete,
ProvisionApply: []*proto.Provision_Response{
{
Type: &proto.Provision_Response_Log{
Log: &proto.Log{
Level: proto.LogLevel_INFO,
Output: "hello from logs",
},
},
},
{
Type: &proto.Provision_Response_Complete{
Complete: &proto.Provision_Complete{
Resources: []*proto.Resource{
{
Name: "example",
Type: "aws_instance",
Agents: []*proto.Agent{
{
Id: uuid.NewString(),
Name: "agent",
Auth: &proto.Agent_Token{
Token: authToken,
},
Apps: []*proto.App{},
},
},
},
},
},
},
},
},
})
version = coderdtest.AwaitTemplateVersionJob(t, client, version.ID)
template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
closer := goEventuallyStartFakeAgent(ctx, t, client, authToken)
t.Cleanup(closer)
const (
username = "scaletest-user"
email = "scaletest@test.coder.com"
)
runner := createworkspaces.NewRunner(client, createworkspaces.Config{
NoCleanup: true,
User: createworkspaces.UserConfig{
OrganizationID: user.OrganizationID,
Username: username,
Email: email,
},
Workspace: workspacebuild.Config{
OrganizationID: user.OrganizationID,
Request: codersdk.CreateWorkspaceRequest{
TemplateID: template.ID,
},
},
ReconnectingPTY: &reconnectingpty.Config{
Init: codersdk.WorkspaceAgentReconnectingPTYInit{
Height: 24,
Width: 80,
Command: "echo hello",
},
Timeout: httpapi.Duration(testutil.WaitLong),
},
AgentConn: &agentconn.Config{
ConnectionMode: agentconn.ConnectionModeDerp,
HoldDuration: 0,
},
})
logs := bytes.NewBuffer(nil)
err := runner.Run(ctx, "1", logs)
logsStr := logs.String()
t.Log("Runner logs:\n\n" + logsStr)
require.NoError(t, err)
// Ensure a user and workspace were created.
users, err := client.Users(ctx, codersdk.UsersRequest{})
require.NoError(t, err)
require.Len(t, users.Users, 2) // 1 user already exists
workspaces, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{})
require.NoError(t, err)
require.Len(t, workspaces.Workspaces, 1)
// Look for strings in the logs.
require.Contains(t, logsStr, "Generating user password...")
require.Contains(t, logsStr, "Creating user:")
require.Contains(t, logsStr, "Org ID: "+user.OrganizationID.String())
require.Contains(t, logsStr, "Username: "+username)
require.Contains(t, logsStr, "Email: "+email)
require.Contains(t, logsStr, "Logging in as new user...")
require.Contains(t, logsStr, "Creating workspace...")
require.Contains(t, logsStr, `"agent" is connected`)
require.Contains(t, logsStr, "Opening reconnecting PTY connection to agent")
require.Contains(t, logsStr, "Opening connection to workspace agent")
err = runner.Cleanup(ctx, "1")
require.NoError(t, err)
// Ensure the user and workspace were not deleted.
users, err = client.Users(ctx, codersdk.UsersRequest{})
require.NoError(t, err)
require.Len(t, users.Users, 2)
workspaces, err = client.Workspaces(ctx, codersdk.WorkspaceFilter{})
require.NoError(t, err)
require.Len(t, workspaces.Workspaces, 1)
})
t.Run("FailedBuild", func(t *testing.T) {
t.Parallel()
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
defer cancel()
logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true})
client := coderdtest.New(t, &coderdtest.Options{
IncludeProvisionerDaemon: true,
Logger: &logger,
})
user := coderdtest.CreateFirstUser(t, client)
version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{
Parse: echo.ParseComplete,
ProvisionPlan: echo.ProvisionComplete,
@ -210,8 +307,8 @@ func Test_Runner(t *testing.T) {
},
})
version = coderdtest.AwaitTemplateVersionJob(t, client, version.ID)
template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
coderdtest.AwaitTemplateVersionJob(t, client, version.ID)
runner := createworkspaces.NewRunner(client, createworkspaces.Config{
User: createworkspaces.UserConfig{
@ -235,3 +332,48 @@ func Test_Runner(t *testing.T) {
require.ErrorContains(t, err, "test error")
})
}
// goEventuallyStartFakeAgent polls in the background until the runner has
// created its workspace (the runner creates the workspace on its own, so we
// have to keep listing workspaces until we find it), waits for the build to
// finish, and then starts a fake agent connected to that workspace. It is the
// caller's responsibility to call the returned function to stop the agent.
func goEventuallyStartFakeAgent(ctx context.Context, t *testing.T, client *codersdk.Client, agentToken string) func() {
	t.Helper()
	// Buffered so the goroutine never blocks sending, even if the caller
	// forgets to invoke the returned close function.
	ch := make(chan io.Closer, 1)
	go func() {
		defer close(ch)

		// Poll until exactly one workspace appears or the context expires.
		var workspace codersdk.Workspace
		ticker := time.NewTicker(100 * time.Millisecond)
		defer ticker.Stop()
		for {
			res, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{})
			if !assert.NoError(t, err) {
				return
			}
			if workspaces := res.Workspaces; len(workspaces) == 1 {
				workspace = workspaces[0]
				break
			}
			select {
			case <-ctx.Done():
				// Fail loudly instead of spinning past the deadline.
				assert.NoError(t, ctx.Err(), "timed out waiting for workspace to be created")
				return
			case <-ticker.C:
			}
		}
		coderdtest.AwaitWorkspaceBuildJob(t, client, workspace.LatestBuild.ID)

		// Start a fake agent for the workspace. Agent errors are ignored
		// because failing the whole test on transient agent errors is
		// unhelpful.
		agentClient := agentsdk.New(client.URL)
		agentClient.SetSessionToken(agentToken)
		agentCloser := agent.New(agent.Options{
			Client: agentClient,
			Logger: slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).
				Named("agent").Leveled(slog.LevelWarn),
		})
		coderdtest.AwaitWorkspaceAgents(t, client, workspace.ID)
		ch <- agentCloser
	}()
	closeFunc := func() {
		// ok is false when the goroutine bailed out before an agent started.
		if closer, ok := <-ch; ok {
			_ = closer.Close()
		}
	}
	return closeFunc
}

View File

@ -40,8 +40,8 @@ func (r *Runner) Run(ctx context.Context, _ string, logs io.Writer) error {
logs = loadtestutil.NewSyncWriter(logs)
logger := slog.Make(sloghuman.Sink(logs)).Leveled(slog.LevelDebug)
r.client.Logger = logger
r.client.LogBodies = true
r.client.SetLogger(logger)
r.client.SetLogBodies(true)
var (
id = r.cfg.Init.ID

View File

@ -45,8 +45,8 @@ func (r *Runner) Run(ctx context.Context, _ string, logs io.Writer) error {
logs = loadtestutil.NewSyncWriter(logs)
logger := slog.Make(sloghuman.Sink(logs)).Leveled(slog.LevelDebug)
r.client.Logger = logger
r.client.LogBodies = true
r.client.SetLogger(logger)
r.client.SetLogBodies(true)
req := r.cfg.Request
if req.Name == "" {

View File

@ -25,7 +25,9 @@ import (
func Test_Runner(t *testing.T) {
t.Parallel()
t.Skip("Flake seen here: https://github.com/coder/coder/actions/runs/3436164958/jobs/5729513320")
if testutil.RaceEnabled() {
t.Skip("Race detector enabled, skipping time-sensitive test.")
}
t.Run("OK", func(t *testing.T) {
t.Parallel()
@ -135,7 +137,7 @@ func Test_Runner(t *testing.T) {
agentClient.SetSessionToken(authToken)
agentCloser := agent.New(agent.Options{
Client: agentClient,
Logger: slogtest.Make(t, nil).
Logger: slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).
Named(fmt.Sprintf("agent%d", i)).
Leveled(slog.LevelWarn),
})
@ -188,8 +190,10 @@ func Test_Runner(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
defer cancel()
logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true})
client := coderdtest.New(t, &coderdtest.Options{
IncludeProvisionerDaemon: true,
Logger: &logger,
})
user := coderdtest.CreateFirstUser(t, client)

View File

@ -46,8 +46,8 @@ func (r *Runner) Run(ctx context.Context, _ string, logs io.Writer) error {
logs = loadtestutil.NewSyncWriter(logs)
logger := slog.Make(sloghuman.Sink(logs)).Leveled(slog.LevelDebug)
r.client.Logger = logger
r.client.LogBodies = true
r.client.SetLogger(logger)
r.client.SetLogBodies(true)
// Initialize our metrics eagerly. This is mainly so that we can test for the
// presence of a zero-valued metric as opposed to the absence of a metric.

10
testutil/race.go Normal file
View File

@ -0,0 +1,10 @@
package testutil

// RaceEnabled returns whether the race detector is enabled.
// This is resolved at compile time via build tags (see race_on.go and
// race_off.go). It should be used to conditionally skip tests that are
// known to be sensitive to being run with the race detector enabled.
// Please use sparingly and as a last resort.
func RaceEnabled() bool {
	return raceEnabled
}

7
testutil/race_off.go Normal file
View File

@ -0,0 +1,7 @@
//go:build !race

package testutil

const (
	// raceEnabled is false because this file is compiled only when the
	// -race flag is absent (see the build constraint above).
	raceEnabled = false
)

7
testutil/race_on.go Normal file
View File

@ -0,0 +1,7 @@
//go:build race

package testutil

const (
	// raceEnabled is true because this file is compiled only when the
	// -race flag is present (see the build constraint above).
	raceEnabled = true
)