fix: Fix cleanup in test helpers, prefer `defer` in tests (#3113)

* fix: Change uses of t.Cleanup -> defer in test bodies

Mixing t.Cleanup and defer can lead to unexpected order of execution.

* fix: Ensure t.Cleanup is not aborted by require

* chore: Add helper annotations
This commit is contained in:
Mathias Fredriksson 2022-07-25 19:22:02 +03:00 committed by GitHub
parent c2cd51d8b8
commit 6916d34458
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
14 changed files with 60 additions and 53 deletions

View File

@@ -109,9 +109,9 @@ func TestConfigSSH(t *testing.T) {
agentCloser := agent.New(agentClient.ListenWorkspaceAgent, &agent.Options{
Logger: slogtest.Make(t, nil),
})
t.Cleanup(func() {
defer func() {
_ = agentCloser.Close()
})
}()
resources := coderdtest.AwaitWorkspaceAgents(t, client, workspace.LatestBuild.ID)
agentConn, err := client.DialWorkspaceAgent(context.Background(), resources[0].Agents[0].ID, nil)
require.NoError(t, err)
@@ -119,9 +119,9 @@ func TestConfigSSH(t *testing.T) {
listener, err := net.Listen("tcp", "127.0.0.1:0")
require.NoError(t, err)
t.Cleanup(func() {
defer func() {
_ = listener.Close()
})
}()
go func() {
for {
conn, err := listener.Accept()
@@ -134,9 +134,6 @@ func TestConfigSSH(t *testing.T) {
go io.Copy(ssh, conn)
}
}()
t.Cleanup(func() {
_ = listener.Close()
})
sshConfigFile, _ := sshConfigFileNames(t)

View File

@@ -152,19 +152,19 @@ func TestLogout(t *testing.T) {
err = os.Chmod(string(config), 0500)
require.NoError(t, err)
}
t.Cleanup(func() {
defer func() {
if runtime.GOOS == "windows" {
// Closing the opened files for cleanup.
err = urlFile.Close()
require.NoError(t, err)
assert.NoError(t, err)
err = sessionFile.Close()
require.NoError(t, err)
assert.NoError(t, err)
} else {
// Setting the permissions back for cleanup.
err = os.Chmod(string(config), 0700)
require.NoError(t, err)
err = os.Chmod(string(config), 0o700)
assert.NoError(t, err)
}
})
}()
logoutChan := make(chan struct{})
logout, _ := clitest.New(t, "logout", "--global-config", string(config))

View File

@@ -249,7 +249,7 @@ func TestServer(t *testing.T) {
snapshot <- ss
})
server := httptest.NewServer(r)
t.Cleanup(server.Close)
defer server.Close()
root, _ := clitest.New(t, "server", "--in-memory", "--address", ":0", "--telemetry", "--telemetry-url", server.URL)
errC := make(chan error)

View File

@@ -94,6 +94,8 @@ func TestAuthorizeAllEndpoints(t *testing.T) {
t.Cleanup(func() { close(tickerCh) })
ctx, cancelFunc := context.WithCancel(context.Background())
defer t.Cleanup(cancelFunc) // Defer to ensure cancelFunc is executed first.
lifecycleExecutor := executor.New(
ctx,
db,
@@ -107,11 +109,15 @@ func TestAuthorizeAllEndpoints(t *testing.T) {
return ctx
}
srv.Start()
t.Cleanup(srv.Close)
serverURL, err := url.Parse(srv.URL)
require.NoError(t, err)
turnServer, err := turnconn.New(nil)
require.NoError(t, err)
t.Cleanup(func() {
_ = turnServer.Close()
})
validator, err := idtoken.NewValidator(ctx, option.WithoutAuthentication())
require.NoError(t, err)
@@ -138,9 +144,6 @@ func TestAuthorizeAllEndpoints(t *testing.T) {
_ = coderdtest.NewProvisionerDaemon(t, coderAPI)
t.Cleanup(func() {
cancelFunc()
_ = turnServer.Close()
srv.Close()
_ = coderAPI.Close()
})

View File

@@ -143,6 +143,8 @@ func newWithCloser(t *testing.T, options *Options) (*codersdk.Client, io.Closer)
}
ctx, cancelFunc := context.WithCancel(context.Background())
defer t.Cleanup(cancelFunc) // Defer to ensure cancelFunc is executed first.
lifecycleExecutor := executor.New(
ctx,
db,
@@ -156,6 +158,8 @@ func newWithCloser(t *testing.T, options *Options) (*codersdk.Client, io.Closer)
return ctx
}
srv.Start()
t.Cleanup(srv.Close)
serverURL, err := url.Parse(srv.URL)
require.NoError(t, err)
@@ -166,6 +170,9 @@ func newWithCloser(t *testing.T, options *Options) (*codersdk.Client, io.Closer)
turnServer, err := turnconn.New(nil)
require.NoError(t, err)
t.Cleanup(func() {
_ = turnServer.Close()
})
// We set the handler after server creation for the access URL.
coderAPI := coderd.New(&coderd.Options{
@@ -188,18 +195,16 @@ func newWithCloser(t *testing.T, options *Options) (*codersdk.Client, io.Closer)
Authorizer: options.Authorizer,
Telemetry: telemetry.NewNoop(),
})
t.Cleanup(func() {
_ = coderAPI.Close()
})
srv.Config.Handler = coderAPI.Handler
var provisionerCloser io.Closer = nopcloser{}
if options.IncludeProvisionerD {
provisionerCloser = NewProvisionerDaemon(t, coderAPI)
}
t.Cleanup(func() {
cancelFunc()
_ = turnServer.Close()
srv.Close()
_ = coderAPI.Close()
_ = provisionerCloser.Close()
})

View File

@@ -38,7 +38,7 @@ func TestProvisionerJobLogs_Unit(t *testing.T) {
}
api := New(&opts)
server := httptest.NewServer(api.Handler)
t.Cleanup(server.Close)
defer server.Close()
userID := uuid.New()
keyID, keySecret, err := generateAPIKeyIDSecret()
require.NoError(t, err)

View File

@@ -41,7 +41,7 @@ func TestProvisionerJobLogs(t *testing.T) {
coderdtest.AwaitWorkspaceBuildJob(t, client, workspace.LatestBuild.ID)
ctx, cancelFunc := context.WithCancel(context.Background())
t.Cleanup(cancelFunc)
defer cancelFunc()
logs, err := client.WorkspaceBuildLogsAfter(ctx, workspace.LatestBuild.ID, before)
require.NoError(t, err)
for {
@@ -77,7 +77,7 @@ func TestProvisionerJobLogs(t *testing.T) {
workspace := coderdtest.CreateWorkspace(t, client, user.OrganizationID, template.ID)
before := database.Now()
ctx, cancelFunc := context.WithCancel(context.Background())
t.Cleanup(cancelFunc)
defer cancelFunc()
logs, err := client.WorkspaceBuildLogsAfter(ctx, workspace.LatestBuild.ID, before)
require.NoError(t, err)
for {

View File

@@ -380,7 +380,7 @@ func TestTemplateVersionLogs(t *testing.T) {
}},
})
ctx, cancelFunc := context.WithCancel(context.Background())
t.Cleanup(cancelFunc)
defer cancelFunc()
logs, err := client.TemplateVersionLogsAfter(ctx, version.ID, before)
require.NoError(t, err)
for {

View File

@@ -108,15 +108,15 @@ func TestWorkspaceAgentListen(t *testing.T) {
agentCloser := agent.New(agentClient.ListenWorkspaceAgent, &agent.Options{
Logger: slogtest.Make(t, nil).Named("agent").Leveled(slog.LevelDebug),
})
t.Cleanup(func() {
defer func() {
_ = agentCloser.Close()
})
}()
resources := coderdtest.AwaitWorkspaceAgents(t, client, workspace.LatestBuild.ID)
conn, err := client.DialWorkspaceAgent(context.Background(), resources[0].Agents[0].ID, nil)
require.NoError(t, err)
t.Cleanup(func() {
defer func() {
_ = conn.Close()
})
}()
_, err = conn.Ping()
require.NoError(t, err)
})
@@ -233,9 +233,9 @@ func TestWorkspaceAgentTURN(t *testing.T) {
agentCloser := agent.New(agentClient.ListenWorkspaceAgent, &agent.Options{
Logger: slogtest.Make(t, nil),
})
t.Cleanup(func() {
defer func() {
_ = agentCloser.Close()
})
}()
resources := coderdtest.AwaitWorkspaceAgents(t, client, workspace.LatestBuild.ID)
opts := &peer.ConnOptions{
Logger: slogtest.Make(t, nil).Named("client"),
@@ -244,9 +244,9 @@ func TestWorkspaceAgentTURN(t *testing.T) {
opts.SettingEngine.SetNetworkTypes([]webrtc.NetworkType{webrtc.NetworkTypeTCP4})
conn, err := client.DialWorkspaceAgent(context.Background(), resources[0].Agents[0].ID, opts)
require.NoError(t, err)
t.Cleanup(func() {
defer func() {
_ = conn.Close()
})
}()
_, err = conn.Ping()
require.NoError(t, err)
}
@@ -294,9 +294,9 @@ func TestWorkspaceAgentPTY(t *testing.T) {
agentCloser := agent.New(agentClient.ListenWorkspaceAgent, &agent.Options{
Logger: slogtest.Make(t, nil),
})
t.Cleanup(func() {
defer func() {
_ = agentCloser.Close()
})
}()
resources := coderdtest.AwaitWorkspaceAgents(t, client, workspace.LatestBuild.ID)
conn, err := client.WorkspaceAgentReconnectingPTY(context.Background(), resources[0].Agents[0].ID, uuid.New(), 80, 80, "/bin/bash")

View File

@@ -327,7 +327,7 @@ func TestWorkspaceBuildLogs(t *testing.T) {
template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
workspace := coderdtest.CreateWorkspace(t, client, user.OrganizationID, template.ID)
ctx, cancelFunc := context.WithCancel(context.Background())
t.Cleanup(cancelFunc)
defer cancelFunc()
logs, err := client.WorkspaceBuildLogsAfter(ctx, workspace.LatestBuild.ID, before.Add(-time.Hour))
require.NoError(t, err)
for {

View File

@@ -40,9 +40,9 @@ func TestCache(t *testing.T) {
cache := wsconncache.New(func(r *http.Request, id uuid.UUID) (*agent.Conn, error) {
return setupAgent(t, agent.Metadata{}, 0), nil
}, 0)
t.Cleanup(func() {
defer func() {
_ = cache.Close()
})
}()
conn1, _, err := cache.Acquire(httptest.NewRequest(http.MethodGet, "/", nil), uuid.Nil)
require.NoError(t, err)
conn2, _, err := cache.Acquire(httptest.NewRequest(http.MethodGet, "/", nil), uuid.Nil)
@@ -56,9 +56,9 @@ func TestCache(t *testing.T) {
called.Add(1)
return setupAgent(t, agent.Metadata{}, 0), nil
}, time.Microsecond)
t.Cleanup(func() {
defer func() {
_ = cache.Close()
})
}()
conn, release, err := cache.Acquire(httptest.NewRequest(http.MethodGet, "/", nil), uuid.Nil)
require.NoError(t, err)
release()
@@ -74,9 +74,9 @@ func TestCache(t *testing.T) {
cache := wsconncache.New(func(r *http.Request, id uuid.UUID) (*agent.Conn, error) {
return setupAgent(t, agent.Metadata{}, 0), nil
}, time.Microsecond)
t.Cleanup(func() {
defer func() {
_ = cache.Close()
})
}()
conn, release, err := cache.Acquire(httptest.NewRequest(http.MethodGet, "/", nil), uuid.Nil)
require.NoError(t, err)
time.Sleep(time.Millisecond)
@@ -87,9 +87,9 @@ func TestCache(t *testing.T) {
t.Parallel()
random, err := net.Listen("tcp", "127.0.0.1:0")
require.NoError(t, err)
t.Cleanup(func() {
defer func() {
_ = random.Close()
})
}()
tcpAddr, valid := random.Addr().(*net.TCPAddr)
require.True(t, valid)
@@ -98,17 +98,17 @@ func TestCache(t *testing.T) {
w.WriteHeader(http.StatusOK)
}),
}
t.Cleanup(func() {
defer func() {
_ = server.Close()
})
}()
go server.Serve(random)
cache := wsconncache.New(func(r *http.Request, id uuid.UUID) (*agent.Conn, error) {
return setupAgent(t, agent.Metadata{}, 0), nil
}, time.Microsecond)
t.Cleanup(func() {
defer func() {
_ = cache.Close()
})
}()
var wg sync.WaitGroup
// Perform many requests in parallel to simulate
@@ -132,7 +132,7 @@ func TestCache(t *testing.T) {
res := httptest.NewRecorder()
proxy.ServeHTTP(res, req)
res.Result().Body.Close()
require.Equal(t, http.StatusOK, res.Result().StatusCode)
assert.Equal(t, http.StatusOK, res.Result().StatusCode)
}()
}
wg.Wait()

View File

@@ -43,9 +43,9 @@ func TestProxy(t *testing.T) {
Pubsub: pubsub,
})
require.NoError(t, err)
t.Cleanup(func() {
defer func() {
_ = proxyCloser.Close()
})
}()
var wg sync.WaitGroup
wg.Add(1)

View File

@@ -962,6 +962,7 @@ func createProvisionerd(t *testing.T, dialer provisionerd.Dialer, provisioners p
// Creates a provisionerd protobuf client that's connected
// to the server implementation provided.
func createProvisionerDaemonClient(t *testing.T, server provisionerDaemonTestServer) proto.DRPCProvisionerDaemonClient {
t.Helper()
if server.failJob == nil {
// Default to asserting the error from the failure, otherwise
// it can be lost in tests!
@@ -990,6 +991,7 @@ func createProvisionerDaemonClient(t *testing.T, server provisionerDaemonTestSer
// Creates a provisioner protobuf client that's connected
// to the server implementation provided.
func createProvisionerClient(t *testing.T, server provisionerTestServer) sdkproto.DRPCProvisionerClient {
t.Helper()
clientPipe, serverPipe := provisionersdk.TransportPipe()
t.Cleanup(func() {
_ = clientPipe.Close()

View File

@@ -35,7 +35,7 @@ func TestAgentScript(t *testing.T) {
render.Status(r, http.StatusOK)
render.Data(rw, r, content)
}))
t.Cleanup(srv.Close)
defer srv.Close()
srvURL, err := url.Parse(srv.URL)
require.NoError(t, err)