Daily Active User Metrics (#3735)

* agent: add StatsReporter

* Stabilize protoc
Ammar Bandukwala 2022-09-01 14:58:23 -05:00 committed by GitHub
parent e0cb52ceea
commit 30f8fd9b95
47 changed files with 2006 additions and 279 deletions


@ -194,9 +194,15 @@ jobs:
run: ./scripts/yarn_install.sh
- name: Install Protoc
uses: arduino/setup-protoc@v1
with:
version: "3.20.0"
run: |
# protoc must be kept in lockstep with our dogfood Dockerfile, or the
# protoc version recorded in the generated file headers will drift.
set -x
cd dogfood
DOCKER_BUILDKIT=1 docker build . --target proto -t protoc
protoc_dir=/usr/local/bin/protoc
docker run --rm --entrypoint cat protoc /tmp/bin/protoc > $protoc_dir
chmod +x $protoc_dir
- uses: actions/setup-go@v3
with:
go-version: "~1.19"

.gitignore vendored

@ -42,3 +42,4 @@ site/out/
.vscode/*.log
**/*.swp
.coderv2/*
**/__debug_bin


@ -30,7 +30,9 @@ bin: $(shell find . -not -path './vendor/*' -type f -name '*.go') go.mod go.sum
darwin:amd64,arm64
.PHONY: bin
build: site/out/index.html $(shell find . -not -path './vendor/*' -type f -name '*.go') go.mod go.sum $(shell find ./examples/templates)
GO_FILES=$(shell find . -not -path './vendor/*' -type f -name '*.go') go.mod go.sum $(shell find ./examples/templates)
build: site/out/index.html $(GO_FILES)
rm -rf ./dist
mkdir -p ./dist
rm -f ./site/out/bin/coder*
@ -55,6 +57,30 @@ build: site/out/index.html $(shell find . -not -path './vendor/*' -type f -name
darwin:amd64,arm64
.PHONY: build
# Builds a test binary for just Linux
build-linux-test: site/out/index.html $(GO_FILES)
rm -rf ./dist
mkdir -p ./dist
rm -f ./site/out/bin/coder*
# build slim artifacts and copy them to the site output directory
./scripts/build_go_slim.sh \
--version "$(VERSION)" \
--compress 6 \
--output ./dist/ \
linux:amd64,armv7,arm64 \
windows:amd64,arm64 \
darwin:amd64,arm64
# build not-so-slim artifacts with the default name format
./scripts/build_go_matrix.sh \
--version "$(VERSION)" \
--output ./dist/ \
--archive \
--package-linux \
linux:amd64
.PHONY: build-linux-test
# Runs migrations to output a dump of the database.
coderd/database/dump.sql: coderd/database/gen/dump/main.go $(wildcard coderd/database/migrations/*.sql)
go run coderd/database/gen/dump/main.go


@ -65,6 +65,7 @@ type Options struct {
WebRTCDialer WebRTCDialer
FetchMetadata FetchMetadata
StatsReporter StatsReporter
ReconnectingPTYTimeout time.Duration
EnvironmentVariables map[string]string
Logger slog.Logger
@ -100,6 +101,8 @@ func New(options Options) io.Closer {
envVars: options.EnvironmentVariables,
coordinatorDialer: options.CoordinatorDialer,
fetchMetadata: options.FetchMetadata,
stats: &Stats{},
statsReporter: options.StatsReporter,
}
server.init(ctx)
return server
@ -125,6 +128,8 @@ type agent struct {
network *tailnet.Conn
coordinatorDialer CoordinatorDialer
stats *Stats
statsReporter StatsReporter
}
func (a *agent) run(ctx context.Context) {
@ -194,6 +199,13 @@ func (a *agent) runTailnet(ctx context.Context, derpMap *tailcfg.DERPMap) {
a.logger.Critical(ctx, "create tailnet", slog.Error(err))
return
}
a.network.SetForwardTCPCallback(func(conn net.Conn, listenerExists bool) net.Conn {
if listenerExists {
// If a listener already exists, we would double-wrap the conn.
return conn
}
return a.stats.wrapConn(conn)
})
go a.runCoordinator(ctx)
sshListener, err := a.network.Listen("tcp", ":"+strconv.Itoa(tailnetSSHPort))
@ -207,7 +219,7 @@ func (a *agent) runTailnet(ctx context.Context, derpMap *tailcfg.DERPMap) {
if err != nil {
return
}
go a.sshServer.HandleConn(conn)
a.sshServer.HandleConn(a.stats.wrapConn(conn))
}
}()
reconnectingPTYListener, err := a.network.Listen("tcp", ":"+strconv.Itoa(tailnetReconnectingPTYPort))
@ -219,8 +231,10 @@ func (a *agent) runTailnet(ctx context.Context, derpMap *tailcfg.DERPMap) {
for {
conn, err := reconnectingPTYListener.Accept()
if err != nil {
a.logger.Debug(ctx, "accept pty failed", slog.Error(err))
return
}
conn = a.stats.wrapConn(conn)
// This cannot use a JSON decoder, since that can
// buffer additional data that is required for the PTY.
rawLen := make([]byte, 2)
@ -364,17 +378,17 @@ func (a *agent) runStartupScript(ctx context.Context, script string) error {
return nil
}
func (a *agent) handlePeerConn(ctx context.Context, conn *peer.Conn) {
func (a *agent) handlePeerConn(ctx context.Context, peerConn *peer.Conn) {
go func() {
select {
case <-a.closed:
case <-conn.Closed():
case <-peerConn.Closed():
}
_ = conn.Close()
_ = peerConn.Close()
a.connCloseWait.Done()
}()
for {
channel, err := conn.Accept(ctx)
channel, err := peerConn.Accept(ctx)
if err != nil {
if errors.Is(err, peer.ErrClosed) || a.isClosed() {
return
@ -383,9 +397,11 @@ func (a *agent) handlePeerConn(ctx context.Context, conn *peer.Conn) {
return
}
conn := channel.NetConn()
switch channel.Protocol() {
case ProtocolSSH:
go a.sshServer.HandleConn(channel.NetConn())
go a.sshServer.HandleConn(a.stats.wrapConn(conn))
case ProtocolReconnectingPTY:
rawID := channel.Label()
// The ID format is referenced in conn.go.
@ -418,9 +434,9 @@ func (a *agent) handlePeerConn(ctx context.Context, conn *peer.Conn) {
Height: uint16(height),
Width: uint16(width),
Command: idParts[3],
}, channel.NetConn())
}, a.stats.wrapConn(conn))
case ProtocolDial:
go a.handleDial(ctx, channel.Label(), channel.NetConn())
go a.handleDial(ctx, channel.Label(), a.stats.wrapConn(conn))
default:
a.logger.Warn(ctx, "unhandled protocol from channel",
slog.F("protocol", channel.Protocol()),
@ -514,6 +530,21 @@ func (a *agent) init(ctx context.Context) {
}
go a.run(ctx)
if a.statsReporter != nil {
cl, err := a.statsReporter(ctx, a.logger, func() *Stats {
return a.stats.Copy()
})
if err != nil {
a.logger.Error(ctx, "report stats", slog.Error(err))
return
}
a.connCloseWait.Add(1)
go func() {
defer a.connCloseWait.Done()
<-a.closed
cl.Close()
}()
}
}
// createCommand processes raw command input with OpenSSH-like behavior.


@ -19,6 +19,7 @@ import (
"time"
"golang.org/x/xerrors"
"tailscale.com/tailcfg"
scp "github.com/bramvdbogaerde/go-scp"
"github.com/google/uuid"
@ -51,6 +52,67 @@ func TestMain(m *testing.M) {
func TestAgent(t *testing.T) {
t.Parallel()
t.Run("Stats", func(t *testing.T) {
for _, tailscale := range []bool{true, false} {
t.Run(fmt.Sprintf("tailscale=%v", tailscale), func(t *testing.T) {
t.Parallel()
setupAgent := func(t *testing.T) (agent.Conn, <-chan *agent.Stats) {
var derpMap *tailcfg.DERPMap
if tailscale {
derpMap = tailnettest.RunDERPAndSTUN(t)
}
conn, stats := setupAgent(t, agent.Metadata{
DERPMap: derpMap,
}, 0)
assert.Empty(t, <-stats)
return conn, stats
}
t.Run("SSH", func(t *testing.T) {
t.Parallel()
conn, stats := setupAgent(t)
sshClient, err := conn.SSHClient()
require.NoError(t, err)
session, err := sshClient.NewSession()
require.NoError(t, err)
defer session.Close()
assert.EqualValues(t, 1, (<-stats).NumConns)
assert.Greater(t, (<-stats).RxBytes, int64(0))
assert.Greater(t, (<-stats).TxBytes, int64(0))
})
t.Run("ReconnectingPTY", func(t *testing.T) {
t.Parallel()
conn, stats := setupAgent(t)
ptyConn, err := conn.ReconnectingPTY(uuid.NewString(), 128, 128, "/bin/bash")
require.NoError(t, err)
defer ptyConn.Close()
data, err := json.Marshal(agent.ReconnectingPTYRequest{
Data: "echo test\r\n",
})
require.NoError(t, err)
_, err = ptyConn.Write(data)
require.NoError(t, err)
var s *agent.Stats
require.Eventuallyf(t, func() bool {
var ok bool
s, ok = (<-stats)
return ok && s.NumConns > 0 && s.RxBytes > 0 && s.TxBytes > 0
}, testutil.WaitShort, testutil.IntervalFast,
"never saw stats: %+v", s,
)
})
})
}
})
t.Run("SessionExec", func(t *testing.T) {
t.Parallel()
session := setupSSHSession(t, agent.Metadata{})
@ -169,7 +231,8 @@ func TestAgent(t *testing.T) {
t.Run("SFTP", func(t *testing.T) {
t.Parallel()
sshClient, err := setupAgent(t, agent.Metadata{}, 0).SSHClient()
conn, _ := setupAgent(t, agent.Metadata{}, 0)
sshClient, err := conn.SSHClient()
require.NoError(t, err)
client, err := sftp.NewClient(sshClient)
require.NoError(t, err)
@ -184,7 +247,9 @@ func TestAgent(t *testing.T) {
t.Run("SCP", func(t *testing.T) {
t.Parallel()
sshClient, err := setupAgent(t, agent.Metadata{}, 0).SSHClient()
conn, _ := setupAgent(t, agent.Metadata{}, 0)
sshClient, err := conn.SSHClient()
require.NoError(t, err)
scpClient, err := scp.NewClientBySSH(sshClient)
require.NoError(t, err)
@ -318,7 +383,7 @@ func TestAgent(t *testing.T) {
t.Skip("ConPTY appears to be inconsistent on Windows.")
}
conn := setupAgent(t, agent.Metadata{
conn, _ := setupAgent(t, agent.Metadata{
DERPMap: tailnettest.RunDERPAndSTUN(t),
}, 0)
id := uuid.NewString()
@ -431,7 +496,7 @@ func TestAgent(t *testing.T) {
}()
// Dial the listener over WebRTC twice and test out of order
conn := setupAgent(t, agent.Metadata{}, 0)
conn, _ := setupAgent(t, agent.Metadata{}, 0)
conn1, err := conn.DialContext(context.Background(), l.Addr().Network(), l.Addr().String())
require.NoError(t, err)
defer conn1.Close()
@ -462,7 +527,7 @@ func TestAgent(t *testing.T) {
})
// Try to dial the non-existent Unix socket over WebRTC
conn := setupAgent(t, agent.Metadata{}, 0)
conn, _ := setupAgent(t, agent.Metadata{}, 0)
netConn, err := conn.DialContext(context.Background(), "unix", filepath.Join(tmpDir, "test.sock"))
require.Error(t, err)
require.ErrorContains(t, err, "remote dial error")
@ -473,7 +538,7 @@ func TestAgent(t *testing.T) {
t.Run("Tailnet", func(t *testing.T) {
t.Parallel()
derpMap := tailnettest.RunDERPAndSTUN(t)
conn := setupAgent(t, agent.Metadata{
conn, _ := setupAgent(t, agent.Metadata{
DERPMap: derpMap,
}, 0)
defer conn.Close()
@ -485,7 +550,7 @@ func TestAgent(t *testing.T) {
}
func setupSSHCommand(t *testing.T, beforeArgs []string, afterArgs []string) *exec.Cmd {
agentConn := setupAgent(t, agent.Metadata{}, 0)
agentConn, _ := setupAgent(t, agent.Metadata{}, 0)
listener, err := net.Listen("tcp", "127.0.0.1:0")
require.NoError(t, err)
waitGroup := sync.WaitGroup{}
@ -523,7 +588,8 @@ func setupSSHCommand(t *testing.T, beforeArgs []string, afterArgs []string) *exe
}
func setupSSHSession(t *testing.T, options agent.Metadata) *ssh.Session {
sshClient, err := setupAgent(t, options, 0).SSHClient()
conn, _ := setupAgent(t, options, 0)
sshClient, err := conn.SSHClient()
require.NoError(t, err)
t.Cleanup(func() {
_ = sshClient.Close()
@ -533,11 +599,21 @@ func setupSSHSession(t *testing.T, options agent.Metadata) *ssh.Session {
return session
}
func setupAgent(t *testing.T, metadata agent.Metadata, ptyTimeout time.Duration) agent.Conn {
type closeFunc func() error
func (c closeFunc) Close() error {
return c()
}
func setupAgent(t *testing.T, metadata agent.Metadata, ptyTimeout time.Duration) (
agent.Conn,
<-chan *agent.Stats,
) {
client, server := provisionersdk.TransportPipe()
tailscale := metadata.DERPMap != nil
coordinator := tailnet.NewCoordinator()
agentID := uuid.New()
statsCh := make(chan *agent.Stats)
closer := agent.New(agent.Options{
FetchMetadata: func(ctx context.Context) (agent.Metadata, error) {
return metadata, nil
@ -557,6 +633,38 @@ func setupAgent(t *testing.T, metadata agent.Metadata, ptyTimeout time.Duration)
},
Logger: slogtest.Make(t, nil).Leveled(slog.LevelDebug),
ReconnectingPTYTimeout: ptyTimeout,
StatsReporter: func(ctx context.Context, log slog.Logger, statsFn func() *agent.Stats) (io.Closer, error) {
doneCh := make(chan struct{})
ctx, cancel := context.WithCancel(ctx)
go func() {
defer close(doneCh)
t := time.NewTicker(time.Millisecond * 100)
defer t.Stop()
for {
select {
case <-ctx.Done():
return
case <-t.C:
}
select {
case statsCh <- statsFn():
case <-ctx.Done():
return
default:
// We don't want to send old stats.
continue
}
}
}()
return closeFunc(func() error {
cancel()
<-doneCh
close(statsCh)
return nil
}), nil
},
})
t.Cleanup(func() {
_ = client.Close()
@ -586,7 +694,7 @@ func setupAgent(t *testing.T, metadata agent.Metadata, ptyTimeout time.Duration)
conn.SetNodeCallback(sendNode)
return &agent.TailnetConn{
Conn: conn,
}
}, statsCh
}
conn, err := peerbroker.Dial(stream, []webrtc.ICEServer{}, &peer.ConnOptions{
Logger: slogtest.Make(t, nil),
@ -599,7 +707,7 @@ func setupAgent(t *testing.T, metadata agent.Metadata, ptyTimeout time.Duration)
return &agent.WebRTCConn{
Negotiator: api,
Conn: conn,
}
}, statsCh
}
var dialTestPayload = []byte("dean-was-here123")

agent/stats.go Normal file

@ -0,0 +1,67 @@
package agent
import (
"context"
"io"
"net"
"sync/atomic"
"cdr.dev/slog"
)
// statsConn wraps a net.Conn with statistics.
type statsConn struct {
*Stats
net.Conn `json:"-"`
}
var _ net.Conn = new(statsConn)
func (c *statsConn) Read(b []byte) (n int, err error) {
n, err = c.Conn.Read(b)
atomic.AddInt64(&c.RxBytes, int64(n))
return n, err
}
func (c *statsConn) Write(b []byte) (n int, err error) {
n, err = c.Conn.Write(b)
atomic.AddInt64(&c.TxBytes, int64(n))
return n, err
}
// Stats records the Agent's network connection statistics for use in
// user-facing metrics and debugging.
// Each member value must be read and written atomically (sync/atomic).
type Stats struct {
NumConns int64 `json:"num_comms"`
RxBytes int64 `json:"rx_bytes"`
TxBytes int64 `json:"tx_bytes"`
}
func (s *Stats) Copy() *Stats {
return &Stats{
NumConns: atomic.LoadInt64(&s.NumConns),
RxBytes: atomic.LoadInt64(&s.RxBytes),
TxBytes: atomic.LoadInt64(&s.TxBytes),
}
}
// wrapConn returns a new connection that records statistics.
func (s *Stats) wrapConn(conn net.Conn) net.Conn {
atomic.AddInt64(&s.NumConns, 1)
cs := &statsConn{
Stats: s,
Conn: conn,
}
return cs
}
// StatsReporter periodically accepts and records agent stats.
type StatsReporter func(
ctx context.Context,
log slog.Logger,
stats func() *Stats,
) (io.Closer, error)
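
For orientation, a minimal sketch of a StatsReporter implementation that polls the snapshot function on a ticker and logs it. The logStatsReporter name, the interval parameter, and the local closeFunc helper are illustrative assumptions (the test file and codersdk define their own equivalents); the production reporter added in this change is codersdk's AgentReportStats.

package agent

import (
	"context"
	"io"
	"time"

	"cdr.dev/slog"
)

// closeFunc adapts a plain function to io.Closer, as the tests do.
type closeFunc func() error

func (f closeFunc) Close() error { return f() }

// logStatsReporter is a sketch of a StatsReporter: every tick it takes an
// atomically copied snapshot of the stats and logs it until closed.
func logStatsReporter(interval time.Duration) StatsReporter {
	return func(ctx context.Context, log slog.Logger, stats func() *Stats) (io.Closer, error) {
		ctx, cancel := context.WithCancel(ctx)
		done := make(chan struct{})
		go func() {
			defer close(done)
			ticker := time.NewTicker(interval)
			defer ticker.Stop()
			for {
				select {
				case <-ctx.Done():
					return
				case <-ticker.C:
					s := stats() // atomically copied snapshot
					log.Debug(ctx, "agent stats",
						slog.F("num_conns", s.NumConns),
						slog.F("rx_bytes", s.RxBytes),
						slog.F("tx_bytes", s.TxBytes),
					)
				}
			}
		}()
		return closeFunc(func() error {
			cancel()
			<-done
			return nil
		}), nil
	}
}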


@ -192,6 +192,7 @@ func workspaceAgent() *cobra.Command {
"CODER_AGENT_TOKEN": client.SessionToken,
},
CoordinatorDialer: client.ListenWorkspaceAgentTailnet,
StatsReporter: client.AgentReportStats,
})
<-cmd.Context().Done()
return closer.Close()


@ -120,6 +120,8 @@ func Server(newAPI func(*coderd.Options) *coderd.API) *cobra.Command {
autoImportTemplates []string
spooky bool
verbose bool
metricsCacheRefreshInterval time.Duration
agentStatRefreshInterval time.Duration
)
root := &cobra.Command{
@ -345,21 +347,23 @@ func Server(newAPI func(*coderd.Options) *coderd.API) *cobra.Command {
}
options := &coderd.Options{
AccessURL: accessURLParsed,
ICEServers: iceServers,
Logger: logger.Named("coderd"),
Database: databasefake.New(),
DERPMap: derpMap,
Pubsub: database.NewPubsubInMemory(),
CacheDir: cacheDir,
GoogleTokenValidator: googleTokenValidator,
SecureAuthCookie: secureAuthCookie,
SSHKeygenAlgorithm: sshKeygenAlgorithm,
TailscaleEnable: tailscaleEnable,
TURNServer: turnServer,
TracerProvider: tracerProvider,
Telemetry: telemetry.NewNoop(),
AutoImportTemplates: validatedAutoImportTemplates,
AccessURL: accessURLParsed,
ICEServers: iceServers,
Logger: logger.Named("coderd"),
Database: databasefake.New(),
DERPMap: derpMap,
Pubsub: database.NewPubsubInMemory(),
CacheDir: cacheDir,
GoogleTokenValidator: googleTokenValidator,
SecureAuthCookie: secureAuthCookie,
SSHKeygenAlgorithm: sshKeygenAlgorithm,
TailscaleEnable: tailscaleEnable,
TURNServer: turnServer,
TracerProvider: tracerProvider,
Telemetry: telemetry.NewNoop(),
AutoImportTemplates: validatedAutoImportTemplates,
MetricsCacheRefreshInterval: metricsCacheRefreshInterval,
AgentStatsRefreshInterval: agentStatRefreshInterval,
}
if oauth2GithubClientSecret != "" {
@ -834,8 +838,16 @@ func Server(newAPI func(*coderd.Options) *coderd.API) *cobra.Command {
`Accepted values are "ed25519", "ecdsa", or "rsa4096"`)
cliflag.StringArrayVarP(root.Flags(), &autoImportTemplates, "auto-import-template", "", "CODER_TEMPLATE_AUTOIMPORT", []string{}, "Which templates to auto-import. Available auto-importable templates are: kubernetes")
cliflag.BoolVarP(root.Flags(), &spooky, "spooky", "", "", false, "Specifies spookiness level")
cliflag.BoolVarP(root.Flags(), &verbose, "verbose", "v", "CODER_VERBOSE", false, "Enables verbose logging.")
_ = root.Flags().MarkHidden("spooky")
cliflag.BoolVarP(root.Flags(), &verbose, "verbose", "v", "CODER_VERBOSE", false, "Enables verbose logging.")
// These metrics flags are for manually testing the metric system.
// The defaults should be acceptable for any Coder deployment of any
// reasonable size.
cliflag.DurationVarP(root.Flags(), &metricsCacheRefreshInterval, "metrics-cache-refresh-interval", "", "CODER_METRICS_CACHE_REFRESH_INTERVAL", time.Hour, "How frequently metrics are refreshed")
_ = root.Flags().MarkHidden("metrics-cache-refresh-interval")
cliflag.DurationVarP(root.Flags(), &agentStatRefreshInterval, "agent-stats-refresh-interval", "", "CODER_AGENT_STATS_REFRESH_INTERVAL", time.Minute*10, "How frequently agent stats are recorded")
_ = root.Flags().MarkHidden("agent-stats-report-interval")
return root
}


@ -30,6 +30,7 @@ import (
"github.com/coder/coder/coderd/gitsshkey"
"github.com/coder/coder/coderd/httpapi"
"github.com/coder/coder/coderd/httpmw"
"github.com/coder/coder/coderd/metricscache"
"github.com/coder/coder/coderd/rbac"
"github.com/coder/coder/coderd/telemetry"
"github.com/coder/coder/coderd/tracing"
@ -76,6 +77,9 @@ type Options struct {
TailscaleEnable bool
TailnetCoordinator *tailnet.Coordinator
DERPMap *tailcfg.DERPMap
MetricsCacheRefreshInterval time.Duration
AgentStatsRefreshInterval time.Duration
}
// New constructs a Coder API handler.
@ -121,6 +125,12 @@ func New(options *Options) *API {
panic(xerrors.Errorf("read site bin failed: %w", err))
}
metricsCache := metricscache.New(
options.Database,
options.Logger.Named("metrics_cache"),
options.MetricsCacheRefreshInterval,
)
r := chi.NewRouter()
api := &API{
Options: options,
@ -130,6 +140,7 @@ func New(options *Options) *API {
Authorizer: options.Authorizer,
Logger: options.Logger,
},
metricsCache: metricsCache,
}
if options.TailscaleEnable {
api.workspaceAgentCache = wsconncache.New(api.dialWorkspaceAgentTailnet, 0)
@ -147,6 +158,13 @@ func New(options *Options) *API {
httpmw.Recover(api.Logger),
httpmw.Logger(api.Logger),
httpmw.Prometheus(options.PrometheusRegistry),
// Build-Version is helpful for debugging.
func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Add("Build-Version", buildinfo.Version())
next.ServeHTTP(w, r)
})
},
)
apps := func(r chi.Router) {
@ -259,7 +277,7 @@ func New(options *Options) *API {
apiKeyMiddleware,
httpmw.ExtractTemplateParam(options.Database),
)
r.Get("/daus", api.templateDAUs)
r.Get("/", api.template)
r.Delete("/", api.deleteTemplate)
r.Patch("/", api.patchTemplateMeta)
@ -359,11 +377,14 @@ func New(options *Options) *API {
r.Get("/metadata", api.workspaceAgentMetadata)
r.Post("/version", api.postWorkspaceAgentVersion)
r.Get("/listen", api.workspaceAgentListen)
r.Get("/gitsshkey", api.agentGitSSHKey)
r.Get("/turn", api.workspaceAgentTurn)
r.Get("/iceservers", api.workspaceAgentICEServers)
r.Get("/coordinate", api.workspaceAgentCoordinate)
r.Get("/report-stats", api.workspaceAgentReportStats)
})
r.Route("/{workspaceagent}", func(r chi.Router) {
r.Use(
@ -452,6 +473,8 @@ type API struct {
websocketWaitGroup sync.WaitGroup
workspaceAgentCache *wsconncache.Cache
httpAuth *HTTPAuthorizer
metricsCache *metricscache.Cache
}
// Close waits for all WebSocket connections to drain before returning.
@ -460,6 +483,8 @@ func (api *API) Close() error {
api.websocketWaitGroup.Wait()
api.websocketWaitMutex.Unlock()
api.metricsCache.Close()
return api.workspaceAgentCache.Close()
}


@ -197,6 +197,7 @@ func AGPLRoutes(a *AuthTester) (map[string]string, map[string]RouteCheck) {
"GET:/api/v2/workspaceagents/me/turn": {NoAuthorize: true},
"GET:/api/v2/workspaceagents/me/coordinate": {NoAuthorize: true},
"POST:/api/v2/workspaceagents/me/version": {NoAuthorize: true},
"GET:/api/v2/workspaceagents/me/report-stats": {NoAuthorize: true},
"GET:/api/v2/workspaceagents/{workspaceagent}/iceservers": {NoAuthorize: true},
// These endpoints have more assertions. This is good, add more endpoints to assert if you can!


@ -234,7 +234,9 @@ func newWithAPI(t *testing.T, options *Options) (*codersdk.Client, io.Closer, *c
},
},
},
AutoImportTemplates: options.AutoImportTemplates,
AutoImportTemplates: options.AutoImportTemplates,
MetricsCacheRefreshInterval: time.Millisecond * 100,
AgentStatsRefreshInterval: time.Millisecond * 100,
})
t.Cleanup(func() {
_ = coderAPI.Close()


@ -10,6 +10,7 @@ import (
"github.com/google/uuid"
"github.com/lib/pq"
"golang.org/x/exp/maps"
"golang.org/x/exp/slices"
"github.com/coder/coder/coderd/database"
@ -23,6 +24,7 @@ func New() database.Store {
mutex: &sync.RWMutex{},
data: &data{
apiKeys: make([]database.APIKey, 0),
agentStats: make([]database.AgentStat, 0),
organizationMembers: make([]database.OrganizationMember, 0),
organizations: make([]database.Organization, 0),
users: make([]database.User, 0),
@ -78,6 +80,7 @@ type data struct {
userLinks []database.UserLink
// New tables
agentStats []database.AgentStat
auditLogs []database.AuditLog
files []database.File
gitSSHKey []database.GitSSHKey
@ -134,6 +137,64 @@ func (q *fakeQuerier) AcquireProvisionerJob(_ context.Context, arg database.Acqu
}
return database.ProvisionerJob{}, sql.ErrNoRows
}
func (*fakeQuerier) DeleteOldAgentStats(_ context.Context) error {
// no-op
return nil
}
func (q *fakeQuerier) InsertAgentStat(_ context.Context, p database.InsertAgentStatParams) (database.AgentStat, error) {
q.mutex.Lock()
defer q.mutex.Unlock()
stat := database.AgentStat{
ID: p.ID,
CreatedAt: p.CreatedAt,
WorkspaceID: p.WorkspaceID,
AgentID: p.AgentID,
UserID: p.UserID,
Payload: p.Payload,
TemplateID: p.TemplateID,
}
q.agentStats = append(q.agentStats, stat)
return stat, nil
}
func (q *fakeQuerier) GetTemplateDAUs(_ context.Context, templateID uuid.UUID) ([]database.GetTemplateDAUsRow, error) {
q.mutex.Lock()
defer q.mutex.Unlock()
counts := make(map[time.Time]map[string]struct{})
for _, as := range q.agentStats {
if as.TemplateID != templateID {
continue
}
date := as.CreatedAt.Truncate(time.Hour * 24)
dateEntry := counts[date]
if dateEntry == nil {
dateEntry = make(map[string]struct{})
}
counts[date] = dateEntry
dateEntry[as.UserID.String()] = struct{}{}
}
countKeys := maps.Keys(counts)
sort.Slice(countKeys, func(i, j int) bool {
return countKeys[i].Before(countKeys[j])
})
var rs []database.GetTemplateDAUsRow
for _, key := range countKeys {
rs = append(rs, database.GetTemplateDAUsRow{
Date: key,
Amount: int64(len(counts[key])),
})
}
return rs, nil
}
func (q *fakeQuerier) ParameterValue(_ context.Context, id uuid.UUID) (database.ParameterValue, error) {
q.mutex.Lock()


@ -87,6 +87,16 @@ CREATE TYPE workspace_transition AS ENUM (
'delete'
);
CREATE TABLE agent_stats (
id uuid NOT NULL,
created_at timestamp with time zone NOT NULL,
user_id uuid NOT NULL,
agent_id uuid NOT NULL,
workspace_id uuid NOT NULL,
template_id uuid NOT NULL,
payload jsonb NOT NULL
);
CREATE TABLE api_keys (
id text NOT NULL,
hashed_secret bytea NOT NULL,
@ -372,6 +382,9 @@ CREATE TABLE workspaces (
ALTER TABLE ONLY licenses ALTER COLUMN id SET DEFAULT nextval('public.licenses_id_seq'::regclass);
ALTER TABLE ONLY agent_stats
ADD CONSTRAINT agent_stats_pkey PRIMARY KEY (id);
ALTER TABLE ONLY api_keys
ADD CONSTRAINT api_keys_pkey PRIMARY KEY (id);
@ -468,6 +481,10 @@ ALTER TABLE ONLY workspace_resources
ALTER TABLE ONLY workspaces
ADD CONSTRAINT workspaces_pkey PRIMARY KEY (id);
CREATE INDEX idx_agent_stats_created_at ON agent_stats USING btree (created_at);
CREATE INDEX idx_agent_stats_user_id ON agent_stats USING btree (user_id);
CREATE INDEX idx_api_keys_user ON api_keys USING btree (user_id);
CREATE INDEX idx_audit_log_organization_id ON audit_logs USING btree (organization_id);


@ -0,0 +1 @@
DROP TABLE agent_stats;


@ -0,0 +1,16 @@
CREATE TABLE agent_stats (
id uuid NOT NULL,
PRIMARY KEY (id),
created_at timestamptz NOT NULL,
user_id uuid NOT NULL,
agent_id uuid NOT NULL,
workspace_id uuid NOT NULL,
template_id uuid NOT NULL,
payload jsonb NOT NULL
);
-- We use created_at for DAU analysis and pruning.
CREATE INDEX idx_agent_stats_created_at ON agent_stats USING btree (created_at);
-- We perform user grouping to analyze DAUs.
CREATE INDEX idx_agent_stats_user_id ON agent_stats USING btree (user_id);


@ -324,6 +324,16 @@ type APIKey struct {
IPAddress pqtype.Inet `db:"ip_address" json:"ip_address"`
}
type AgentStat struct {
ID uuid.UUID `db:"id" json:"id"`
CreatedAt time.Time `db:"created_at" json:"created_at"`
UserID uuid.UUID `db:"user_id" json:"user_id"`
AgentID uuid.UUID `db:"agent_id" json:"agent_id"`
WorkspaceID uuid.UUID `db:"workspace_id" json:"workspace_id"`
TemplateID uuid.UUID `db:"template_id" json:"template_id"`
Payload json.RawMessage `db:"payload" json:"payload"`
}
type AuditLog struct {
ID uuid.UUID `db:"id" json:"id"`
Time time.Time `db:"time" json:"time"`


@ -22,6 +22,7 @@ type querier interface {
DeleteAPIKeyByID(ctx context.Context, id string) error
DeleteGitSSHKey(ctx context.Context, userID uuid.UUID) error
DeleteLicense(ctx context.Context, id int32) (int32, error)
DeleteOldAgentStats(ctx context.Context) error
DeleteParameterValueByID(ctx context.Context, id uuid.UUID) error
GetAPIKeyByID(ctx context.Context, id string) (APIKey, error)
GetAPIKeysLastUsedAfter(ctx context.Context, lastUsed time.Time) ([]APIKey, error)
@ -57,6 +58,7 @@ type querier interface {
GetProvisionerLogsByIDBetween(ctx context.Context, arg GetProvisionerLogsByIDBetweenParams) ([]ProvisionerJobLog, error)
GetTemplateByID(ctx context.Context, id uuid.UUID) (Template, error)
GetTemplateByOrganizationAndName(ctx context.Context, arg GetTemplateByOrganizationAndNameParams) (Template, error)
GetTemplateDAUs(ctx context.Context, templateID uuid.UUID) ([]GetTemplateDAUsRow, error)
GetTemplateVersionByID(ctx context.Context, id uuid.UUID) (TemplateVersion, error)
GetTemplateVersionByJobID(ctx context.Context, jobID uuid.UUID) (TemplateVersion, error)
GetTemplateVersionByTemplateIDAndName(ctx context.Context, arg GetTemplateVersionByTemplateIDAndNameParams) (TemplateVersion, error)
@ -99,6 +101,7 @@ type querier interface {
GetWorkspaces(ctx context.Context, arg GetWorkspacesParams) ([]Workspace, error)
GetWorkspacesAutostart(ctx context.Context) ([]Workspace, error)
InsertAPIKey(ctx context.Context, arg InsertAPIKeyParams) (APIKey, error)
InsertAgentStat(ctx context.Context, arg InsertAgentStatParams) (AgentStat, error)
InsertAuditLog(ctx context.Context, arg InsertAuditLogParams) (AuditLog, error)
InsertDeploymentID(ctx context.Context, value string) error
InsertFile(ctx context.Context, arg InsertFileParams) (File, error)


@ -15,6 +15,104 @@ import (
"github.com/tabbed/pqtype"
)
const deleteOldAgentStats = `-- name: DeleteOldAgentStats :exec
DELETE FROM AGENT_STATS WHERE created_at < now() - interval '30 days'
`
func (q *sqlQuerier) DeleteOldAgentStats(ctx context.Context) error {
_, err := q.db.ExecContext(ctx, deleteOldAgentStats)
return err
}
const getTemplateDAUs = `-- name: GetTemplateDAUs :many
select
(created_at at TIME ZONE 'UTC')::date as date,
count(distinct(user_id)) as amount
from
agent_stats
where template_id = $1
group by
date
order by
date asc
`
type GetTemplateDAUsRow struct {
Date time.Time `db:"date" json:"date"`
Amount int64 `db:"amount" json:"amount"`
}
func (q *sqlQuerier) GetTemplateDAUs(ctx context.Context, templateID uuid.UUID) ([]GetTemplateDAUsRow, error) {
rows, err := q.db.QueryContext(ctx, getTemplateDAUs, templateID)
if err != nil {
return nil, err
}
defer rows.Close()
var items []GetTemplateDAUsRow
for rows.Next() {
var i GetTemplateDAUsRow
if err := rows.Scan(&i.Date, &i.Amount); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const insertAgentStat = `-- name: InsertAgentStat :one
INSERT INTO
agent_stats (
id,
created_at,
user_id,
workspace_id,
template_id,
agent_id,
payload
)
VALUES
($1, $2, $3, $4, $5, $6, $7) RETURNING id, created_at, user_id, agent_id, workspace_id, template_id, payload
`
type InsertAgentStatParams struct {
ID uuid.UUID `db:"id" json:"id"`
CreatedAt time.Time `db:"created_at" json:"created_at"`
UserID uuid.UUID `db:"user_id" json:"user_id"`
WorkspaceID uuid.UUID `db:"workspace_id" json:"workspace_id"`
TemplateID uuid.UUID `db:"template_id" json:"template_id"`
AgentID uuid.UUID `db:"agent_id" json:"agent_id"`
Payload json.RawMessage `db:"payload" json:"payload"`
}
func (q *sqlQuerier) InsertAgentStat(ctx context.Context, arg InsertAgentStatParams) (AgentStat, error) {
row := q.db.QueryRowContext(ctx, insertAgentStat,
arg.ID,
arg.CreatedAt,
arg.UserID,
arg.WorkspaceID,
arg.TemplateID,
arg.AgentID,
arg.Payload,
)
var i AgentStat
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UserID,
&i.AgentID,
&i.WorkspaceID,
&i.TemplateID,
&i.Payload,
)
return i, err
}
const deleteAPIKeyByID = `-- name: DeleteAPIKeyByID :exec
DELETE
FROM


@ -0,0 +1,28 @@
-- name: InsertAgentStat :one
INSERT INTO
agent_stats (
id,
created_at,
user_id,
workspace_id,
template_id,
agent_id,
payload
)
VALUES
($1, $2, $3, $4, $5, $6, $7) RETURNING *;
-- name: GetTemplateDAUs :many
select
(created_at at TIME ZONE 'UTC')::date as date,
count(distinct(user_id)) as amount
from
agent_stats
where template_id = $1
group by
date
order by
date asc;
-- name: DeleteOldAgentStats :exec
DELETE FROM AGENT_STATS WHERE created_at < now() - interval '30 days';


@ -0,0 +1,172 @@
package metricscache
import (
"context"
"sync/atomic"
"time"
"golang.org/x/xerrors"
"github.com/google/uuid"
"cdr.dev/slog"
"github.com/coder/coder/coderd/database"
"github.com/coder/coder/codersdk"
"github.com/coder/retry"
)
// Cache holds the template DAU cache.
// The aggregation queries responsible for these values can take up to a minute
// on large deployments. Even in small deployments, aggregation queries can
// take a few hundred milliseconds, which would ruin page load times and
// database performance if in the hot path.
type Cache struct {
database database.Store
log slog.Logger
templateDAUResponses atomic.Pointer[map[string]codersdk.TemplateDAUsResponse]
doneCh chan struct{}
cancel func()
interval time.Duration
}
func New(db database.Store, log slog.Logger, interval time.Duration) *Cache {
if interval <= 0 {
interval = time.Hour
}
ctx, cancel := context.WithCancel(context.Background())
c := &Cache{
database: db,
log: log,
doneCh: make(chan struct{}),
cancel: cancel,
interval: interval,
}
go c.run(ctx)
return c
}
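// fillEmptyDays pads the DAU rows with zero-amount entries for any days
// missing between consecutive rows, e.g. rows for Aug 27 and Aug 30 become
// Aug 27, Aug 28 (0), Aug 29 (0), Aug 30. Rows are expected to be sorted by
// date ascending, which GetTemplateDAUs guarantees.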
func fillEmptyDays(rows []database.GetTemplateDAUsRow) []database.GetTemplateDAUsRow {
var newRows []database.GetTemplateDAUsRow
for i, row := range rows {
if i == 0 {
newRows = append(newRows, row)
continue
}
last := rows[i-1]
const day = time.Hour * 24
diff := row.Date.Sub(last.Date)
for diff > day {
if diff <= day {
break
}
last.Date = last.Date.Add(day)
last.Amount = 0
newRows = append(newRows, last)
diff -= day
}
newRows = append(newRows, row)
continue
}
return newRows
}
func (c *Cache) refresh(ctx context.Context) error {
err := c.database.DeleteOldAgentStats(ctx)
if err != nil {
return xerrors.Errorf("delete old stats: %w", err)
}
templates, err := c.database.GetTemplates(ctx)
if err != nil {
return err
}
templateDAUs := make(map[string]codersdk.TemplateDAUsResponse, len(templates))
for _, template := range templates {
daus, err := c.database.GetTemplateDAUs(ctx, template.ID)
if err != nil {
return err
}
var resp codersdk.TemplateDAUsResponse
for _, ent := range fillEmptyDays(daus) {
resp.Entries = append(resp.Entries, codersdk.DAUEntry{
Date: ent.Date,
Amount: int(ent.Amount),
})
}
templateDAUs[template.ID.String()] = resp
}
c.templateDAUResponses.Store(&templateDAUs)
return nil
}
func (c *Cache) run(ctx context.Context) {
defer close(c.doneCh)
ticker := time.NewTicker(c.interval)
defer ticker.Stop()
for {
for r := retry.New(time.Millisecond*100, time.Minute); r.Wait(ctx); {
start := time.Now()
err := c.refresh(ctx)
if err != nil {
if ctx.Err() != nil {
return
}
c.log.Error(ctx, "refresh", slog.Error(err))
continue
}
c.log.Debug(
ctx,
"metrics refreshed",
slog.F("took", time.Since(start)),
slog.F("interval", c.interval),
)
break
}
select {
case <-ticker.C:
case <-c.doneCh:
return
case <-ctx.Done():
return
}
}
}
func (c *Cache) Close() error {
c.cancel()
<-c.doneCh
return nil
}
// TemplateDAUs returns an empty response if the template doesn't have users
// or is loading for the first time.
func (c *Cache) TemplateDAUs(id uuid.UUID) codersdk.TemplateDAUsResponse {
m := c.templateDAUResponses.Load()
if m == nil {
// Data loading.
return codersdk.TemplateDAUsResponse{}
}
resp, ok := (*m)[id.String()]
if !ok {
// Probably no data.
return codersdk.TemplateDAUsResponse{}
}
return resp
}


@ -0,0 +1,185 @@
package metricscache_test
import (
"context"
"reflect"
"testing"
"time"
"github.com/google/uuid"
"github.com/stretchr/testify/require"
"cdr.dev/slog/sloggers/slogtest"
"github.com/coder/coder/coderd/database"
"github.com/coder/coder/coderd/database/databasefake"
"github.com/coder/coder/coderd/metricscache"
"github.com/coder/coder/codersdk"
"github.com/coder/coder/testutil"
)
func date(year, month, day int) time.Time {
return time.Date(year, time.Month(month), day, 0, 0, 0, 0, time.UTC)
}
func TestCache(t *testing.T) {
t.Parallel()
var (
zebra = uuid.New()
tiger = uuid.New()
)
type args struct {
rows []database.InsertAgentStatParams
}
tests := []struct {
name string
args args
want []codersdk.DAUEntry
}{
{"empty", args{}, nil},
{"one hole", args{
rows: []database.InsertAgentStatParams{
{
CreatedAt: date(2022, 8, 27),
UserID: zebra,
},
{
CreatedAt: date(2022, 8, 30),
UserID: zebra,
},
},
}, []codersdk.DAUEntry{
{
Date: date(2022, 8, 27),
Amount: 1,
},
{
Date: date(2022, 8, 28),
Amount: 0,
},
{
Date: date(2022, 8, 29),
Amount: 0,
},
{
Date: date(2022, 8, 30),
Amount: 1,
},
}},
{"no holes", args{
rows: []database.InsertAgentStatParams{
{
CreatedAt: date(2022, 8, 27),
UserID: zebra,
},
{
CreatedAt: date(2022, 8, 28),
UserID: zebra,
},
{
CreatedAt: date(2022, 8, 29),
UserID: zebra,
},
},
}, []codersdk.DAUEntry{
{
Date: date(2022, 8, 27),
Amount: 1,
},
{
Date: date(2022, 8, 28),
Amount: 1,
},
{
Date: date(2022, 8, 29),
Amount: 1,
},
}},
{"holes", args{
rows: []database.InsertAgentStatParams{
{
CreatedAt: date(2022, 1, 1),
UserID: zebra,
},
{
CreatedAt: date(2022, 1, 1),
UserID: tiger,
},
{
CreatedAt: date(2022, 1, 4),
UserID: zebra,
},
{
CreatedAt: date(2022, 1, 7),
UserID: zebra,
},
{
CreatedAt: date(2022, 1, 7),
UserID: tiger,
},
},
}, []codersdk.DAUEntry{
{
Date: date(2022, 1, 1),
Amount: 2,
},
{
Date: date(2022, 1, 2),
Amount: 0,
},
{
Date: date(2022, 1, 3),
Amount: 0,
},
{
Date: date(2022, 1, 4),
Amount: 1,
},
{
Date: date(2022, 1, 5),
Amount: 0,
},
{
Date: date(2022, 1, 6),
Amount: 0,
},
{
Date: date(2022, 1, 7),
Amount: 2,
},
}},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
var (
db = databasefake.New()
cache = metricscache.New(db, slogtest.Make(t, nil), time.Millisecond*100)
)
defer cache.Close()
templateID := uuid.New()
db.InsertTemplate(context.Background(), database.InsertTemplateParams{
ID: templateID,
})
for _, row := range tt.args.rows {
row.TemplateID = templateID
db.InsertAgentStat(context.Background(), row)
}
var got codersdk.TemplateDAUsResponse
require.Eventuallyf(t, func() bool {
got = cache.TemplateDAUs(templateID)
return reflect.DeepEqual(got.Entries, tt.want)
}, testutil.WaitShort, testutil.IntervalFast,
"GetDAUs() = %v, want %v", got, tt.want,
)
})
}
}


@ -39,6 +39,8 @@ func TestProvisionerJobLogs_Unit(t *testing.T) {
Pubsub: fPubsub,
}
api := New(&opts)
defer api.Close()
server := httptest.NewServer(api.Handler)
defer server.Close()
userID := uuid.New()


@ -517,6 +517,20 @@ func (api *API) patchTemplateMeta(rw http.ResponseWriter, r *http.Request) {
httpapi.Write(rw, http.StatusOK, convertTemplate(updated, count, createdByNameMap[updated.ID.String()]))
}
func (api *API) templateDAUs(rw http.ResponseWriter, r *http.Request) {
template := httpmw.TemplateParam(r)
if !api.Authorize(r, rbac.ActionRead, template) {
httpapi.ResourceNotFound(rw)
return
}
resp := api.metricsCache.TemplateDAUs(template.ID)
if resp.Entries == nil {
resp.Entries = []codersdk.DAUEntry{}
}
httpapi.Write(rw, http.StatusOK, resp)
}
type autoImportTemplateOpts struct {
name string
archive []byte


@ -10,10 +10,16 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"cdr.dev/slog/sloggers/slogtest"
"github.com/coder/coder/agent"
"github.com/coder/coder/coderd/coderdtest"
"github.com/coder/coder/coderd/rbac"
"github.com/coder/coder/coderd/util/ptr"
"github.com/coder/coder/codersdk"
"github.com/coder/coder/peer"
"github.com/coder/coder/provisioner/echo"
"github.com/coder/coder/provisionersdk/proto"
"github.com/coder/coder/testutil"
)
@ -539,3 +545,100 @@ func TestDeleteTemplate(t *testing.T) {
require.Equal(t, http.StatusPreconditionFailed, apiErr.StatusCode())
})
}
func TestTemplateDAUs(t *testing.T) {
t.Parallel()
client := coderdtest.New(t, &coderdtest.Options{
IncludeProvisionerD: true,
})
user := coderdtest.CreateFirstUser(t, client)
authToken := uuid.NewString()
version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{
Parse: echo.ParseComplete,
ProvisionDryRun: echo.ProvisionComplete,
Provision: []*proto.Provision_Response{{
Type: &proto.Provision_Response_Complete{
Complete: &proto.Provision_Complete{
Resources: []*proto.Resource{{
Name: "example",
Type: "aws_instance",
Agents: []*proto.Agent{{
Id: uuid.NewString(),
Auth: &proto.Agent_Token{
Token: authToken,
},
}},
}},
},
},
}},
})
template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
coderdtest.AwaitTemplateVersionJob(t, client, version.ID)
workspace := coderdtest.CreateWorkspace(t, client, user.OrganizationID, template.ID)
coderdtest.AwaitWorkspaceBuildJob(t, client, workspace.LatestBuild.ID)
agentClient := codersdk.New(client.URL)
agentClient.SessionToken = authToken
agentCloser := agent.New(agent.Options{
Logger: slogtest.Make(t, nil),
StatsReporter: agentClient.AgentReportStats,
WebRTCDialer: agentClient.ListenWorkspaceAgent,
FetchMetadata: agentClient.WorkspaceAgentMetadata,
CoordinatorDialer: agentClient.ListenWorkspaceAgentTailnet,
})
defer func() {
_ = agentCloser.Close()
}()
resources := coderdtest.AwaitWorkspaceAgents(t, client, workspace.LatestBuild.ID)
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
defer cancel()
opts := &peer.ConnOptions{
Logger: slogtest.Make(t, nil).Named("client"),
}
daus, err := client.TemplateDAUs(context.Background(), template.ID)
require.NoError(t, err)
require.Equal(t, &codersdk.TemplateDAUsResponse{
Entries: []codersdk.DAUEntry{},
}, daus, "no DAUs when stats are empty")
conn, err := client.DialWorkspaceAgent(ctx, resources[0].Agents[0].ID, opts)
require.NoError(t, err)
defer func() {
_ = conn.Close()
}()
sshConn, err := conn.SSHClient()
require.NoError(t, err)
session, err := sshConn.NewSession()
require.NoError(t, err)
_, err = session.Output("echo hello")
require.NoError(t, err)
want := &codersdk.TemplateDAUsResponse{
Entries: []codersdk.DAUEntry{
{
Date: time.Now().UTC().Truncate(time.Hour * 24),
Amount: 1,
},
},
}
require.Eventuallyf(t, func() bool {
daus, err = client.TemplateDAUs(ctx, template.ID)
require.NoError(t, err)
return assert.ObjectsAreEqual(want, daus)
},
testutil.WaitShort, testutil.IntervalFast,
"got %+v != %+v", daus, want,
)
}


@ -9,6 +9,7 @@ import (
"net"
"net/http"
"net/netip"
"reflect"
"strconv"
"strings"
"time"
@ -18,6 +19,7 @@ import (
"golang.org/x/mod/semver"
"golang.org/x/xerrors"
"nhooyr.io/websocket"
"nhooyr.io/websocket/wsjson"
"tailscale.com/tailcfg"
"cdr.dev/slog"
@ -745,6 +747,130 @@ func convertWorkspaceAgent(derpMap *tailcfg.DERPMap, coordinator *tailnet.Coordi
return workspaceAgent, nil
}
func (api *API) workspaceAgentReportStats(rw http.ResponseWriter, r *http.Request) {
api.websocketWaitMutex.Lock()
api.websocketWaitGroup.Add(1)
api.websocketWaitMutex.Unlock()
defer api.websocketWaitGroup.Done()
workspaceAgent := httpmw.WorkspaceAgent(r)
resource, err := api.Database.GetWorkspaceResourceByID(r.Context(), workspaceAgent.ResourceID)
if err != nil {
httpapi.Write(rw, http.StatusBadRequest, codersdk.Response{
Message: "Failed to get workspace resource.",
Detail: err.Error(),
})
return
}
build, err := api.Database.GetWorkspaceBuildByJobID(r.Context(), resource.JobID)
if err != nil {
httpapi.Write(rw, http.StatusBadRequest, codersdk.Response{
Message: "Failed to get build.",
Detail: err.Error(),
})
return
}
workspace, err := api.Database.GetWorkspaceByID(r.Context(), build.WorkspaceID)
if err != nil {
httpapi.Write(rw, http.StatusBadRequest, codersdk.Response{
Message: "Failed to get workspace.",
Detail: err.Error(),
})
return
}
conn, err := websocket.Accept(rw, r, &websocket.AcceptOptions{
CompressionMode: websocket.CompressionDisabled,
})
if err != nil {
httpapi.Write(rw, http.StatusBadRequest, codersdk.Response{
Message: "Failed to accept websocket.",
Detail: err.Error(),
})
return
}
defer conn.Close(websocket.StatusAbnormalClosure, "")
// Allow overriding the stat interval for debugging and testing purposes.
ctx := r.Context()
timer := time.NewTicker(api.AgentStatsRefreshInterval)
var lastReport codersdk.AgentStatsReportResponse
for {
err := wsjson.Write(ctx, conn, codersdk.AgentStatsReportRequest{})
if err != nil {
httpapi.Write(rw, http.StatusBadRequest, codersdk.Response{
Message: "Failed to write report request.",
Detail: err.Error(),
})
return
}
var rep codersdk.AgentStatsReportResponse
err = wsjson.Read(ctx, conn, &rep)
if err != nil {
httpapi.Write(rw, http.StatusBadRequest, codersdk.Response{
Message: "Failed to read report response.",
Detail: err.Error(),
})
return
}
repJSON, err := json.Marshal(rep)
if err != nil {
httpapi.Write(rw, http.StatusBadRequest, codersdk.Response{
Message: "Failed to marshal stat json.",
Detail: err.Error(),
})
return
}
// Avoid inserting duplicate rows to preserve DB space.
// We will see duplicate reports for idle connections (e.g. a web terminal
// left open) or when there are no connections at all.
var insert = !reflect.DeepEqual(lastReport, rep)
api.Logger.Debug(ctx, "read stats report",
slog.F("interval", api.AgentStatsRefreshInterval),
slog.F("agent", workspaceAgent.ID),
slog.F("resource", resource.ID),
slog.F("workspace", workspace.ID),
slog.F("insert", insert),
slog.F("payload", rep),
)
if insert {
lastReport = rep
_, err = api.Database.InsertAgentStat(ctx, database.InsertAgentStatParams{
ID: uuid.New(),
CreatedAt: time.Now(),
AgentID: workspaceAgent.ID,
WorkspaceID: build.WorkspaceID,
UserID: workspace.OwnerID,
TemplateID: workspace.TemplateID,
Payload: json.RawMessage(repJSON),
})
if err != nil {
httpapi.Write(rw, http.StatusBadRequest, codersdk.Response{
Message: "Failed to insert agent stat.",
Detail: err.Error(),
})
return
}
}
select {
case <-timer.C:
continue
case <-ctx.Done():
conn.Close(websocket.StatusNormalClosure, "")
return
}
}
}
// wsNetConn wraps net.Conn created by websocket.NetConn(). Cancel func
// is called if a read or write error is encountered.


@ -218,3 +218,9 @@ func (e *Error) Error() string {
}
return builder.String()
}
type closeFunc func() error
func (c closeFunc) Close() error {
return c()
}


@ -133,3 +133,43 @@ func (c *Client) TemplateVersionByName(ctx context.Context, template uuid.UUID,
var templateVersion TemplateVersion
return templateVersion, json.NewDecoder(res.Body).Decode(&templateVersion)
}
type DAUEntry struct {
Date time.Time `json:"date"`
Amount int `json:"amount"`
}
type TemplateDAUsResponse struct {
Entries []DAUEntry `json:"entries"`
}
func (c *Client) TemplateDAUs(ctx context.Context, templateID uuid.UUID) (*TemplateDAUsResponse, error) {
res, err := c.Request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/templates/%s/daus", templateID), nil)
if err != nil {
return nil, xerrors.Errorf("execute request: %w", err)
}
defer res.Body.Close()
if res.StatusCode != http.StatusOK {
return nil, readBodyAsError(res)
}
var resp TemplateDAUsResponse
return &resp, json.NewDecoder(res.Body).Decode(&resp)
}
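
As a caller-side usage sketch of the new endpoint (the printTemplateDAUs name, parameters, and output formatting are illustrative, not part of the change; imports of context, fmt, net/url, uuid, and codersdk are assumed):

func printTemplateDAUs(ctx context.Context, serverURL *url.URL, token string, templateID uuid.UUID) error {
	client := codersdk.New(serverURL)
	client.SessionToken = token
	daus, err := client.TemplateDAUs(ctx, templateID)
	if err != nil {
		return err
	}
	for _, entry := range daus.Entries {
		// Each entry is one day with its distinct active-user count.
		fmt.Printf("%s: %d active users\n", entry.Date.Format("2006-01-02"), entry.Amount)
	}
	return nil
}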
// AgentStatsReportRequest is a WebSocket request by coderd
// to the agent for stats.
// @typescript-ignore AgentStatsReportRequest
type AgentStatsReportRequest struct {
}
// AgentStatsReportResponse is returned for each report
// request by the agent.
type AgentStatsReportResponse struct {
NumConns int64 `json:"num_comms"`
// RxBytes is the number of received bytes.
RxBytes int64 `json:"rx_bytes"`
// TxBytes is the number of transmitted bytes.
TxBytes int64 `json:"tx_bytes"`
}
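
For reference, a quick caller-side sketch of what one report serializes to on the wire (the numbers are made up; note that NumConns serializes under the num_comms key; encoding/json and fmt imports are assumed):

rep := codersdk.AgentStatsReportResponse{NumConns: 3, RxBytes: 1024, TxBytes: 4096}
b, _ := json.Marshal(rep)
fmt.Println(string(b)) // {"num_comms":3,"rx_bytes":1024,"tx_bytes":4096}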


@ -19,6 +19,7 @@ import (
"golang.org/x/net/proxy"
"golang.org/x/xerrors"
"nhooyr.io/websocket"
"nhooyr.io/websocket/wsjson"
"tailscale.com/tailcfg"
"cdr.dev/slog"
@ -528,3 +529,87 @@ func (c *Client) turnProxyDialer(ctx context.Context, httpClient *http.Client, p
return websocket.NetConn(ctx, conn, websocket.MessageBinary), nil
})
}
// AgentReportStats begins a stat streaming connection with the Coder server.
// It is resilient to network failures and intermittent coderd issues.
func (c *Client) AgentReportStats(
ctx context.Context,
log slog.Logger,
stats func() *agent.Stats,
) (io.Closer, error) {
serverURL, err := c.URL.Parse("/api/v2/workspaceagents/me/report-stats")
if err != nil {
return nil, xerrors.Errorf("parse url: %w", err)
}
jar, err := cookiejar.New(nil)
if err != nil {
return nil, xerrors.Errorf("create cookie jar: %w", err)
}
jar.SetCookies(serverURL, []*http.Cookie{{
Name: SessionTokenKey,
Value: c.SessionToken,
}})
httpClient := &http.Client{
Jar: jar,
}
doneCh := make(chan struct{})
ctx, cancel := context.WithCancel(ctx)
go func() {
defer close(doneCh)
// If the agent connection succeeds for a while, then fails, then succeeds
// again (and so on), the retry backoff may reach its maximum. This is a
// normal case for long-running agents that experience coderd upgrades, so
// we keep the maximum retry delay short.
for r := retry.New(time.Second, time.Minute); r.Wait(ctx); {
err = func() error {
conn, res, err := websocket.Dial(ctx, serverURL.String(), &websocket.DialOptions{
HTTPClient: httpClient,
// Need to disable compression to avoid a data-race.
CompressionMode: websocket.CompressionDisabled,
})
if err != nil {
if res == nil {
return err
}
return readBodyAsError(res)
}
for {
var req AgentStatsReportRequest
err := wsjson.Read(ctx, conn, &req)
if err != nil {
return err
}
s := stats()
resp := AgentStatsReportResponse{
NumConns: s.NumConns,
RxBytes: s.RxBytes,
TxBytes: s.TxBytes,
}
err = wsjson.Write(ctx, conn, resp)
if err != nil {
return err
}
}
}()
if err != nil && ctx.Err() == nil {
log.Error(ctx, "report stats", slog.Error(err))
}
}
}()
return closeFunc(func() error {
cancel()
<-doneCh
return nil
}), nil
}


@ -70,6 +70,12 @@ RUN mkdir --parents "$GOPATH" && \
# nfpm is used with `make build` to make release packages
go install github.com/goreleaser/nfpm/v2/cmd/nfpm@v2.16.0
FROM alpine:3.16 as proto
WORKDIR /tmp
RUN apk add curl unzip
RUN curl -L -o protoc.zip https://github.com/protocolbuffers/protobuf/releases/download/v21.5/protoc-21.5-linux-x86_64.zip
RUN unzip protoc.zip
# Ubuntu 20.04 LTS (Focal Fossa)
FROM ubuntu:focal
@ -119,7 +125,6 @@ RUN apt-get update --quiet && apt-get install --yes \
openssh-server \
openssl \
pkg-config \
protobuf-compiler \
python3 \
python3-pip \
rsync \
@ -156,6 +161,7 @@ RUN apt-get update --quiet && apt-get install --yes \
skopeo \
fish \
gh \
unzip \
zstd && \
# Delete package cache to avoid consuming space in layer
apt-get clean && \
@ -300,6 +306,7 @@ RUN update-alternatives --install /usr/local/bin/gofmt gofmt /usr/local/go/bin/g
COPY --from=go /tmp/bin /usr/local/bin
COPY --from=rust-utils /tmp/bin /usr/local/bin
COPY --from=proto /tmp/bin /usr/local/bin
USER coder


@ -1,7 +1,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.26.0
// protoc v3.20.0
// protoc v3.21.5
// source: peerbroker/proto/peerbroker.proto
package proto


@ -1,7 +1,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.26.0
// protoc v3.20.0
// protoc v3.21.5
// source: provisionerd/proto/provisionerd.proto
package proto


@ -1,7 +1,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.26.0
// protoc v3.20.0
// protoc v3.21.5
// source: provisionersdk/proto/provisioner.proto
package proto


@ -38,6 +38,7 @@
"@xstate/react": "3.0.1",
"axios": "0.26.1",
"can-ndjson-stream": "1.0.2",
"chart.js": "^3.5.0",
"cron-parser": "4.5.0",
"cronstrue": "2.11.0",
"dayjs": "1.11.4",
@ -46,13 +47,14 @@
"front-matter": "4.0.2",
"history": "5.3.0",
"i18next": "21.9.1",
"just-debounce-it": "3.1.1",
"just-debounce-it": "3.0.1",
"react": "18.2.0",
"react-chartjs-2": "^4.3.1",
"react-dom": "18.2.0",
"react-helmet-async": "1.3.0",
"react-i18next": "11.18.4",
"react-markdown": "8.0.3",
"react-router-dom": "6.3.0",
"react-router-dom": "^6.3.0",
"sourcemapped-stacktrace": "1.1.11",
"swr": "1.3.0",
"tzdata": "1.0.30",
@ -87,6 +89,7 @@
"@typescript-eslint/eslint-plugin": "5.31.0",
"@typescript-eslint/parser": "5.31.0",
"@xstate/cli": "0.3.0",
"canvas": "^2.9.3",
"chromatic": "6.7.1",
"copy-webpack-plugin": "10.2.4",
"css-loader": "6.7.1",
@ -113,6 +116,7 @@
"prettier": "2.7.1",
"prettier-plugin-organize-imports": "3.0.0",
"react-hot-loader": "4.13.0",
"resize-observer": "^1.0.4",
"semver": "^7.3.7",
"sql-formatter": "8.2.0",
"style-loader": "3.3.1",
@ -123,7 +127,7 @@
"webpack": "5.74.0",
"webpack-bundle-analyzer": "4.5.0",
"webpack-cli": "4.10.0",
"webpack-dev-server": "4.10.1"
"webpack-dev-server": "4.9.3"
},
"browserslist": [
"chrome 66",


@ -388,3 +388,10 @@ export const getEntitlements = async (): Promise<TypesGen.Entitlements> => {
const response = await axios.get("/api/v2/entitlements")
return response.data
}
export const getTemplateDAUs = async (
templateId: string,
): Promise<TypesGen.TemplateDAUsResponse> => {
const response = await axios.get(`/api/v2/templates/${templateId}/daus`)
return response.data
}


@ -29,6 +29,13 @@ export interface AgentGitSSHKey {
readonly private_key: string
}
// From codersdk/templates.go
export interface AgentStatsReportResponse {
readonly num_comms: number
readonly rx_bytes: number
readonly tx_bytes: number
}
// From codersdk/roles.go
export interface AssignableRoles extends Role {
readonly assignable: boolean
@ -175,6 +182,12 @@ export interface CreateWorkspaceRequest {
readonly parameter_values?: CreateParameterRequest[]
}
// From codersdk/templates.go
export interface DAUEntry {
readonly date: string
readonly amount: number
}
// From codersdk/workspaceresources.go
export interface DERPRegion {
readonly preferred: boolean
@ -362,6 +375,11 @@ export interface Template {
readonly created_by_name: string
}
// From codersdk/templates.go
export interface TemplateDAUsResponse {
readonly entries: DAUEntry[]
}
// From codersdk/templateversions.go
export interface TemplateVersion {
readonly id: string


@ -0,0 +1,35 @@
import { render } from "testHelpers/renderHelpers"
import { DAUChart, Language } from "./DAUChart"
import { screen } from "@testing-library/react"
import { ResizeObserver } from "resize-observer"
// The Chart performs dynamic resizes which fail in tests without this.
Object.defineProperty(window, "ResizeObserver", {
value: ResizeObserver,
})
describe("DAUChart", () => {
it("renders a helpful paragraph on empty state", async () => {
render(
<DAUChart
templateDAUs={{
entries: [],
}}
/>,
)
await screen.findAllByText(Language.loadingText)
})
it("renders a graph", async () => {
render(
<DAUChart
templateDAUs={{
entries: [{ date: "2020-01-01", amount: 1 }],
}}
/>,
)
await screen.findAllByText(Language.chartTitle)
})
})


@ -0,0 +1,123 @@
import useTheme from "@material-ui/styles/useTheme"
import { Theme } from "@material-ui/core/styles"
import {
BarElement,
CategoryScale,
Chart as ChartJS,
ChartOptions,
defaults,
Legend,
LinearScale,
LineElement,
PointElement,
Title,
Tooltip,
} from "chart.js"
import { Stack } from "components/Stack/Stack"
import { HelpTooltip, HelpTooltipText, HelpTooltipTitle } from "components/Tooltips/HelpTooltip"
import { WorkspaceSection } from "components/WorkspaceSection/WorkspaceSection"
import dayjs from "dayjs"
import { FC } from "react"
import { Line } from "react-chartjs-2"
import * as TypesGen from "../../api/typesGenerated"
ChartJS.register(
CategoryScale,
LinearScale,
PointElement,
BarElement,
LineElement,
Title,
Tooltip,
Legend,
)
export interface DAUChartProps {
templateDAUs: TypesGen.TemplateDAUsResponse
}
export const Language = {
loadingText: "DAU stats are loading. Check back later.",
chartTitle: "Daily Active Users",
}
export const DAUChart: FC<DAUChartProps> = ({ templateDAUs: templateMetricsData }) => {
const theme: Theme = useTheme()
if (templateMetricsData.entries.length === 0) {
return (
// We render a hidden element to prove this path is taken in the test
// and through site inspection.
<div style={{ display: "none" }}>
<p>{Language.loadingText}</p>
</div>
)
}
const labels = templateMetricsData.entries.map((val) => {
return dayjs(val.date).format("YYYY-MM-DD")
})
const data = templateMetricsData.entries.map((val) => {
return val.amount
})
defaults.font.family = theme.typography.fontFamily
defaults.color = theme.palette.text.secondary
const options = {
responsive: true,
plugins: {
legend: {
display: false,
},
},
scales: {
y: {
min: 0,
ticks: {
precision: 0,
},
},
x: {
ticks: {},
},
},
aspectRatio: 10 / 1,
} as ChartOptions
return (
<>
<WorkspaceSection>
<Stack direction="row" spacing={1} alignItems="center">
<h3>{Language.chartTitle}</h3>
<HelpTooltip size="small">
<HelpTooltipTitle>How do we calculate DAUs?</HelpTooltipTitle>
<HelpTooltipText>
We use all workspace connection traffic to calculate DAUs.
</HelpTooltipText>
</HelpTooltip>
</Stack>
<Line
data={{
labels: labels,
datasets: [
{
label: "Daily Active Users",
data: data,
lineTension: 1 / 4,
backgroundColor: theme.palette.secondary.dark,
borderColor: theme.palette.secondary.dark,
},
// There are type bugs in chart.js that force us to use any.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
] as any,
}}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
options={options as any}
height={400}
/>
</WorkspaceSection>
</>
)
}


@ -1,5 +1,6 @@
import { fireEvent, screen } from "@testing-library/react"
import { rest } from "msw"
import { ResizeObserver } from "resize-observer"
import { server } from "testHelpers/server"
import * as CreateDayString from "util/createDayString"
import {
@ -12,6 +13,10 @@ import {
} from "../../testHelpers/renderHelpers"
import { TemplatePage } from "./TemplatePage"
Object.defineProperty(window, "ResizeObserver", {
value: ResizeObserver,
})
describe("TemplatePage", () => {
it("shows the template name, readme and resources", async () => {
// Mocking the dayjs module within the createDayString file


@ -32,16 +32,19 @@ export const TemplatePage: FC<React.PropsWithChildren<unknown>> = () => {
organizationId,
},
})
const {
template,
activeTemplateVersion,
templateResources,
templateVersions,
deleteTemplateError,
templateDAUs,
} = templateState.context
const xServices = useContext(XServiceContext)
const permissions = useSelector(xServices.authXService, selectPermissions)
const isLoading = !template || !activeTemplateVersion || !templateResources || !permissions
const isLoading =
!template || !activeTemplateVersion || !templateResources || !permissions || !templateDAUs
const handleDeleteTemplate = () => {
templateSend("DELETE")
@ -65,6 +68,7 @@ export const TemplatePage: FC<React.PropsWithChildren<unknown>> = () => {
activeTemplateVersion={activeTemplateVersion}
templateResources={templateResources}
templateVersions={templateVersions}
templateDAUs={templateDAUs}
canDeleteTemplate={permissions.deleteTemplates}
handleDeleteTemplate={handleDeleteTemplate}
deleteTemplateError={deleteTemplateError}


@ -12,7 +12,12 @@ import { FC } from "react"
import ReactMarkdown from "react-markdown"
import { Link as RouterLink } from "react-router-dom"
import { firstLetter } from "util/firstLetter"
import { Template, TemplateVersion, WorkspaceResource } from "../../api/typesGenerated"
import {
Template,
TemplateDAUsResponse,
TemplateVersion,
WorkspaceResource,
} from "../../api/typesGenerated"
import { Margins } from "../../components/Margins/Margins"
import {
PageHeader,
@ -24,6 +29,7 @@ import { TemplateResourcesTable } from "../../components/TemplateResourcesTable/
import { TemplateStats } from "../../components/TemplateStats/TemplateStats"
import { VersionsTable } from "../../components/VersionsTable/VersionsTable"
import { WorkspaceSection } from "../../components/WorkspaceSection/WorkspaceSection"
import { DAUChart } from "./DAUChart"
const Language = {
settingsButton: "Settings",
@ -39,6 +45,7 @@ export interface TemplatePageViewProps {
activeTemplateVersion: TemplateVersion
templateResources: WorkspaceResource[]
templateVersions?: TemplateVersion[]
templateDAUs?: TemplateDAUsResponse
handleDeleteTemplate: (templateId: string) => void
deleteTemplateError: Error | unknown
canDeleteTemplate: boolean
@ -49,6 +56,7 @@ export const TemplatePageView: FC<React.PropsWithChildren<TemplatePageViewProps>
activeTemplateVersion,
templateResources,
templateVersions,
templateDAUs,
handleDeleteTemplate,
deleteTemplateError,
canDeleteTemplate,
@ -131,6 +139,7 @@ export const TemplatePageView: FC<React.PropsWithChildren<TemplatePageViewProps>
<Stack spacing={2.5}>
{deleteError}
{templateDAUs && <DAUChart templateDAUs={templateDAUs} />}
<TemplateStats template={template} activeVersion={activeTemplateVersion} />
<WorkspaceSection
title={Language.resourcesTitle}


@ -14,7 +14,6 @@ export const Language = {
activeUsersFilterName: "Active users",
allUsersFilterName: "All users",
}
export interface UsersPageViewProps {
users?: TypesGen.User[]
roles?: TypesGen.AssignableRoles[]
@ -67,12 +66,14 @@ export const UsersPageView: FC<React.PropsWithChildren<UsersPageViewProps>> = ({
<PageHeaderTitle>{Language.pageTitle}</PageHeaderTitle>
</PageHeader>
<SearchBarWithFilter
filter={filter}
onFilter={onFilter}
presetFilters={presetFilters}
error={error}
/>
<div style={{ marginTop: "15px" }}>
<SearchBarWithFilter
filter={filter}
onFilter={onFilter}
presetFilters={presetFilters}
error={error}
/>
</div>
<UsersTable
users={users}


@ -2,6 +2,13 @@ import { FieldError } from "api/errors"
import * as Types from "../api/types"
import * as TypesGen from "../api/typesGenerated"
export const MockTemplateDAUResponse: TypesGen.TemplateDAUsResponse = {
entries: [
{ date: "2022-08-27T00:00:00Z", amount: 1 },
{ date: "2022-08-29T00:00:00Z", amount: 2 },
{ date: "2022-08-30T00:00:00Z", amount: 1 },
],
}
export const MockSessionToken: TypesGen.LoginWithPasswordResponse = {
session_token: "my-session-token",
}


@ -5,6 +5,10 @@ import { permissionsToCheck } from "../xServices/auth/authXService"
import * as M from "./entities"
export const handlers = [
rest.get("/api/v2/templates/:templateId/daus", async (req, res, ctx) => {
return res(ctx.status(200), ctx.json(M.MockTemplateDAUResponse))
}),
// build info
rest.get("/api/v2/buildinfo", async (req, res, ctx) => {
return res(ctx.status(200), ctx.json(M.MockBuildInfo))


@ -4,11 +4,17 @@ import { assign, createMachine } from "xstate"
import {
deleteTemplate,
getTemplateByName,
getTemplateDAUs,
getTemplateVersion,
getTemplateVersionResources,
getTemplateVersions,
} from "../../api/api"
import { Template, TemplateVersion, WorkspaceResource } from "../../api/typesGenerated"
import {
Template,
TemplateDAUsResponse,
TemplateVersion,
WorkspaceResource,
} from "../../api/typesGenerated"
interface TemplateContext {
organizationId: string
@ -17,6 +23,7 @@ interface TemplateContext {
activeTemplateVersion?: TemplateVersion
templateResources?: WorkspaceResource[]
templateVersions?: TemplateVersion[]
templateDAUs: TemplateDAUsResponse
deleteTemplateError?: Error | unknown
}
@ -46,6 +53,9 @@ export const templateMachine =
deleteTemplate: {
data: Template
}
getTemplateDAUs: {
data: TemplateDAUsResponse
}
},
},
id: "(machine)",
@ -122,6 +132,25 @@ export const templateMachine =
},
},
},
templateDAUs: {
initial: "gettingTemplateDAUs",
states: {
gettingTemplateDAUs: {
invoke: {
src: "getTemplateDAUs",
onDone: [
{
actions: "assignTemplateDAUs",
target: "success",
},
],
},
},
success: {
type: "final",
},
},
},
},
onDone: {
target: "loaded",
@ -133,6 +162,9 @@ export const templateMachine =
target: "confirmingDelete",
},
},
onDone: {
target: "loaded",
},
},
confirmingDelete: {
on: {
@ -198,6 +230,12 @@ export const templateMachine =
}
return deleteTemplate(ctx.template.id)
},
getTemplateDAUs: (ctx) => {
if (!ctx.template) {
throw new Error("Template not loaded")
}
return getTemplateDAUs(ctx.template.id)
},
},
actions: {
assignTemplate: assign({
@ -212,6 +250,9 @@ export const templateMachine =
assignTemplateVersions: assign({
templateVersions: (_, event) => event.data,
}),
assignTemplateDAUs: assign({
templateDAUs: (_, event) => event.data,
}),
assignDeleteTemplateError: assign({
deleteTemplateError: (_, event) => event.data,
}),

File diff suppressed because it is too large


@ -214,15 +214,16 @@ type Conn struct {
closed chan struct{}
logger slog.Logger
dialer *tsdial.Dialer
tunDevice *tstun.Wrapper
netMap *netmap.NetworkMap
netStack *netstack.Impl
magicConn *magicsock.Conn
wireguardMonitor *monitor.Mon
wireguardRouter *router.Config
wireguardEngine wgengine.Engine
listeners map[listenKey]*listener
dialer *tsdial.Dialer
tunDevice *tstun.Wrapper
netMap *netmap.NetworkMap
netStack *netstack.Impl
magicConn *magicsock.Conn
wireguardMonitor *monitor.Mon
wireguardRouter *router.Config
wireguardEngine wgengine.Engine
listeners map[listenKey]*listener
forwardTCPCallback func(conn net.Conn, listenerExists bool) net.Conn
lastMutex sync.Mutex
// It's only possible to store these values via status functions,
@ -232,6 +233,17 @@ type Conn struct {
lastDERPLatency map[string]float64
}
// SetForwardTCPCallback registers a callback that is invoked every time an
// inbound TCP connection is initiated. listenerExists is true if a listener is
// registered for the target port; if there isn't one, traffic is forwarded to
// the local listening port.
//
// This allows wrapping a Conn to track reads and writes.
func (c *Conn) SetForwardTCPCallback(callback func(conn net.Conn, listenerExists bool) net.Conn) {
c.mutex.Lock()
defer c.mutex.Unlock()
c.forwardTCPCallback = callback
}
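
As a usage sketch, mirroring how the agent uses this earlier in the diff: a caller can wrap every inbound forwarded connection, e.g. to count bytes. Here conn is an existing *Conn and wrap stands in for any func(net.Conn) net.Conn, such as a byte-counting wrapper; both are assumptions for illustration.

conn.SetForwardTCPCallback(func(c net.Conn, listenerExists bool) net.Conn {
	if listenerExists {
		// A registered listener wraps its conns on Accept already;
		// wrapping again here would double-count traffic.
		return c
	}
	return wrap(c)
})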
// SetNodeCallback is triggered when a network change occurs and peer
// renegotiation may be required. Clients should constantly be emitting
// node changes.
@ -411,6 +423,9 @@ func (c *Conn) DialContextUDP(ctx context.Context, ipp netip.AddrPort) (*gonet.U
func (c *Conn) forwardTCP(conn net.Conn, port uint16) {
c.mutex.Lock()
ln, ok := c.listeners[listenKey{"tcp", "", fmt.Sprint(port)}]
if c.forwardTCPCallback != nil {
conn = c.forwardTCPCallback(conn, ok)
}
c.mutex.Unlock()
if !ok {
c.forwardTCPToLocal(conn, port)


@ -1,4 +0,0 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1