Compare commits

...

13 Commits

Author SHA1 Message Date
Michael Brewer 47c2f16cba
Merge branch 'coder:main' into feat/coder-login-secret 2024-04-27 08:12:13 -07:00
Colin Adler 15157c1c40
chore: add network integration test suite scaffolding (#13072)
* chore: add network integration test suite scaffolding

* dean comments
2024-04-26 17:48:41 +00:00
Cian Johnston 73ba36c9d2
chore(docs): add note regarding Apr 26 scaletest (#13085) 2024-04-26 17:06:36 +01:00
Garrett Delfosse 8ba05a9052
feat: add switch http(s) button to error page (#12942) 2024-04-26 11:52:53 -04:00
Michael Brewer 848ea7e9f1
chore: correct name for github enterprise example (#13083)
Co-authored-by: Muhammad Atif Ali <me@matifali.dev>
2024-04-26 14:43:28 +00:00
Cian Johnston f1ef9fd673
chore(docs): add note regarding vcredist for embedded postgres (#13020) 2024-04-26 10:56:43 +01:00
Mathias Fredriksson d50a31ef62
chore(scripts): auto create autoversion PR from release script (#13074)
Ref #12465
2024-04-26 12:53:22 +03:00
Cian Johnston 365231b1e5
fix(cli): scaletest: ignore errors syncing output (#13076) 2024-04-26 09:18:33 +01:00
Kayla Washburn-Love 74f27719b8
feat: specify a custom "terms of service" link (#13068) 2024-04-25 16:36:51 -06:00
Stephen Kirby 341114a020
chore(docs): remove max_ttl docs (#13077)
* removed MAX_TTL docs, updated template-level scheduling controls

* fmt
2024-04-25 16:13:42 -05:00
Cian Johnston 99dda4a43a
fix(agent): keep track of lastReportIndex between invocations of reportLifecycle() (#13075) 2024-04-25 16:54:51 +01:00
Mathias Fredriksson c24b562199
chore(scripts): fix release tagging sanity checks (#13073) 2024-04-25 12:26:37 +03:00
Mathias Fredriksson 46dced9cfe
chore(scripts): add release autoversion to bump releases in docs (#13063)
This PR adds a command to bump versions in docs/markdown.

This is still standalone and needs to be wired up.

For now, I'm planning on putting this in `scripts/release.sh` (checkout main -> autoversion (this command) -> commit -> submit PR).

It would be pretty neat to make it a GH actions that's triggered on release though, something for the future.

Part of #12465
2024-04-25 12:11:55 +03:00
45 changed files with 1081 additions and 121 deletions

View File

@ -200,7 +200,8 @@ endef
# calling this manually.
$(CODER_ALL_BINARIES): go.mod go.sum \
$(GO_SRC_FILES) \
$(shell find ./examples/templates)
$(shell find ./examples/templates) \
site/static/error.html
$(get-mode-os-arch-ext)
if [[ "$$os" != "windows" ]] && [[ "$$ext" != "" ]]; then

View File

@ -240,10 +240,11 @@ type agent struct {
sshServer *agentssh.Server
sshMaxTimeout time.Duration
lifecycleUpdate chan struct{}
lifecycleReported chan codersdk.WorkspaceAgentLifecycle
lifecycleMu sync.RWMutex // Protects following.
lifecycleStates []agentsdk.PostLifecycleRequest
lifecycleUpdate chan struct{}
lifecycleReported chan codersdk.WorkspaceAgentLifecycle
lifecycleMu sync.RWMutex // Protects following.
lifecycleStates []agentsdk.PostLifecycleRequest
lifecycleLastReportedIndex int // Keeps track of the last lifecycle state we successfully reported.
network *tailnet.Conn
addresses []netip.Prefix
@ -625,7 +626,6 @@ func (a *agent) reportMetadata(ctx context.Context, conn drpc.Conn) error {
// changes are reported in order.
func (a *agent) reportLifecycle(ctx context.Context, conn drpc.Conn) error {
aAPI := proto.NewDRPCAgentClient(conn)
lastReportedIndex := 0 // Start off with the created state without reporting it.
for {
select {
case <-a.lifecycleUpdate:
@ -636,20 +636,20 @@ func (a *agent) reportLifecycle(ctx context.Context, conn drpc.Conn) error {
for {
a.lifecycleMu.RLock()
lastIndex := len(a.lifecycleStates) - 1
report := a.lifecycleStates[lastReportedIndex]
if len(a.lifecycleStates) > lastReportedIndex+1 {
report = a.lifecycleStates[lastReportedIndex+1]
report := a.lifecycleStates[a.lifecycleLastReportedIndex]
if len(a.lifecycleStates) > a.lifecycleLastReportedIndex+1 {
report = a.lifecycleStates[a.lifecycleLastReportedIndex+1]
}
a.lifecycleMu.RUnlock()
if lastIndex == lastReportedIndex {
if lastIndex == a.lifecycleLastReportedIndex {
break
}
l, err := agentsdk.ProtoFromLifecycle(report)
if err != nil {
a.logger.Critical(ctx, "failed to convert lifecycle state", slog.F("report", report))
// Skip this report; there is no point retrying. Maybe we can successfully convert the next one?
lastReportedIndex++
a.lifecycleLastReportedIndex++
continue
}
payload := &proto.UpdateLifecycleRequest{Lifecycle: l}
@ -662,13 +662,13 @@ func (a *agent) reportLifecycle(ctx context.Context, conn drpc.Conn) error {
}
logger.Debug(ctx, "successfully reported lifecycle state")
lastReportedIndex++
a.lifecycleLastReportedIndex++
select {
case a.lifecycleReported <- report.State:
case <-a.lifecycleReported:
a.lifecycleReported <- report.State
}
if lastReportedIndex < lastIndex {
if a.lifecycleLastReportedIndex < lastIndex {
// Keep reporting until we've sent all messages, we can't
// rely on the channel triggering us before the backlog is
// consumed.

View File

@ -14,7 +14,6 @@ import (
"strconv"
"strings"
"sync"
"syscall"
"time"
"github.com/google/uuid"
@ -245,14 +244,8 @@ func (o *scaleTestOutput) write(res harness.Results, stdout io.Writer) error {
// Sync the file to disk if it's a file.
if s, ok := w.(interface{ Sync() error }); ok {
err := s.Sync()
// On Linux, EINVAL is returned when calling fsync on /dev/stdout. We
// can safely ignore this error.
// On macOS, ENOTTY is returned when calling sync on /dev/stdout. We
// can safely ignore this error.
if err != nil && !xerrors.Is(err, syscall.EINVAL) && !xerrors.Is(err, syscall.ENOTTY) {
return xerrors.Errorf("flush output file: %w", err)
}
// Best effort. If we get an error from syncing, just ignore it.
_ = s.Sync()
}
if c != nil {

View File

@ -60,6 +60,10 @@ OPTIONS:
--support-links struct[[]codersdk.LinkConfig], $CODER_SUPPORT_LINKS
Support links to display in the top right drop down menu.
--terms-of-service-url string, $CODER_TERMS_OF_SERVICE_URL
A URL to an external Terms of Service that must be accepted by users
when logging in.
--update-check bool, $CODER_UPDATE_CHECK (default: false)
Periodically check for new releases of Coder and inform the owner. The
check is performed once per day.

View File

@ -414,6 +414,10 @@ inMemoryDatabase: false
# Type of auth to use when connecting to postgres.
# (default: password, type: enum[password\|awsiamrds])
pgAuth: password
# A URL to an external Terms of Service that must be accepted by users when
# logging in.
# (default: <unset>, type: string)
termsOfServiceURL: ""
# The algorithm to use for generating ssh keys. Accepted values are "ed25519",
# "ecdsa", or "rsa4096".
# (default: ed25519, type: string)

6
coderd/apidoc/docs.go generated
View File

@ -8446,6 +8446,9 @@ const docTemplate = `{
},
"password": {
"$ref": "#/definitions/codersdk.AuthMethod"
},
"terms_of_service_url": {
"type": "string"
}
}
},
@ -9408,6 +9411,9 @@ const docTemplate = `{
"telemetry": {
"$ref": "#/definitions/codersdk.TelemetryConfig"
},
"terms_of_service_url": {
"type": "string"
},
"tls": {
"$ref": "#/definitions/codersdk.TLSConfig"
},

View File

@ -7515,6 +7515,9 @@
},
"password": {
"$ref": "#/definitions/codersdk.AuthMethod"
},
"terms_of_service_url": {
"type": "string"
}
}
},
@ -8413,6 +8416,9 @@
"telemetry": {
"$ref": "#/definitions/codersdk.TelemetryConfig"
},
"terms_of_service_url": {
"type": "string"
},
"tls": {
"$ref": "#/definitions/codersdk.TLSConfig"
},

View File

@ -4,11 +4,14 @@ import (
"bufio"
"context"
"crypto/tls"
"errors"
"fmt"
"net"
"net/http"
"net/http/httputil"
"net/netip"
"net/url"
"strings"
"sync"
"sync/atomic"
"time"
@ -23,6 +26,7 @@ import (
"cdr.dev/slog"
"github.com/coder/coder/v2/coderd/tracing"
"github.com/coder/coder/v2/coderd/workspaceapps"
"github.com/coder/coder/v2/coderd/workspaceapps/appurl"
"github.com/coder/coder/v2/codersdk/workspacesdk"
"github.com/coder/coder/v2/site"
"github.com/coder/coder/v2/tailnet"
@ -341,7 +345,7 @@ type ServerTailnet struct {
totalConns *prometheus.CounterVec
}
func (s *ServerTailnet) ReverseProxy(targetURL, dashboardURL *url.URL, agentID uuid.UUID) *httputil.ReverseProxy {
func (s *ServerTailnet) ReverseProxy(targetURL, dashboardURL *url.URL, agentID uuid.UUID, app appurl.ApplicationURL, wildcardHostname string) *httputil.ReverseProxy {
// Rewrite the targetURL's Host to point to the agent's IP. This is
// necessary because due to TCP connection caching, each agent needs to be
// addressed individually. Otherwise, all connections get dialed as
@ -351,13 +355,46 @@ func (s *ServerTailnet) ReverseProxy(targetURL, dashboardURL *url.URL, agentID u
tgt.Host = net.JoinHostPort(tailnet.IPFromUUID(agentID).String(), port)
proxy := httputil.NewSingleHostReverseProxy(&tgt)
proxy.ErrorHandler = func(w http.ResponseWriter, r *http.Request, err error) {
proxy.ErrorHandler = func(w http.ResponseWriter, r *http.Request, theErr error) {
var (
desc = "Failed to proxy request to application: " + theErr.Error()
additionalInfo = ""
additionalButtonLink = ""
additionalButtonText = ""
)
var tlsError tls.RecordHeaderError
if (errors.As(theErr, &tlsError) && tlsError.Msg == "first record does not look like a TLS handshake") ||
errors.Is(theErr, http.ErrSchemeMismatch) {
// If the error is due to an HTTP/HTTPS mismatch, we can provide a
// more helpful error message with redirect buttons.
switchURL := url.URL{
Scheme: dashboardURL.Scheme,
}
_, protocol, isPort := app.PortInfo()
if isPort {
targetProtocol := "https"
if protocol == "https" {
targetProtocol = "http"
}
app = app.ChangePortProtocol(targetProtocol)
switchURL.Host = fmt.Sprintf("%s%s", app.String(), strings.TrimPrefix(wildcardHostname, "*"))
additionalButtonLink = switchURL.String()
additionalButtonText = fmt.Sprintf("Switch to %s", strings.ToUpper(targetProtocol))
additionalInfo += fmt.Sprintf("This error seems to be due to an app protocol mismatch, try switching to %s.", strings.ToUpper(targetProtocol))
}
}
site.RenderStaticErrorPage(w, r, site.ErrorPageData{
Status: http.StatusBadGateway,
Title: "Bad Gateway",
Description: "Failed to proxy request to application: " + err.Error(),
RetryEnabled: true,
DashboardURL: dashboardURL.String(),
Status: http.StatusBadGateway,
Title: "Bad Gateway",
Description: desc,
RetryEnabled: true,
DashboardURL: dashboardURL.String(),
AdditionalInfo: additionalInfo,
AdditionalButtonLink: additionalButtonLink,
AdditionalButtonText: additionalButtonText,
})
}
proxy.Director = s.director(agentID, proxy.Director)

View File

@ -26,6 +26,7 @@ import (
"github.com/coder/coder/v2/agent/agenttest"
"github.com/coder/coder/v2/agent/proto"
"github.com/coder/coder/v2/coderd"
"github.com/coder/coder/v2/coderd/workspaceapps/appurl"
"github.com/coder/coder/v2/codersdk/agentsdk"
"github.com/coder/coder/v2/codersdk/workspacesdk"
"github.com/coder/coder/v2/tailnet"
@ -81,7 +82,7 @@ func TestServerTailnet_ReverseProxy_ProxyEnv(t *testing.T) {
u, err := url.Parse(fmt.Sprintf("http://127.0.0.1:%d", workspacesdk.AgentHTTPAPIServerPort))
require.NoError(t, err)
rp := serverTailnet.ReverseProxy(u, u, a.id)
rp := serverTailnet.ReverseProxy(u, u, a.id, appurl.ApplicationURL{}, "")
rw := httptest.NewRecorder()
req := httptest.NewRequest(
@ -112,7 +113,7 @@ func TestServerTailnet_ReverseProxy(t *testing.T) {
u, err := url.Parse(fmt.Sprintf("http://127.0.0.1:%d", workspacesdk.AgentHTTPAPIServerPort))
require.NoError(t, err)
rp := serverTailnet.ReverseProxy(u, u, a.id)
rp := serverTailnet.ReverseProxy(u, u, a.id, appurl.ApplicationURL{}, "")
rw := httptest.NewRecorder()
req := httptest.NewRequest(
@ -143,7 +144,7 @@ func TestServerTailnet_ReverseProxy(t *testing.T) {
u, err := url.Parse(fmt.Sprintf("http://127.0.0.1:%d", workspacesdk.AgentHTTPAPIServerPort))
require.NoError(t, err)
rp := serverTailnet.ReverseProxy(u, u, a.id)
rp := serverTailnet.ReverseProxy(u, u, a.id, appurl.ApplicationURL{}, "")
rw := httptest.NewRecorder()
req := httptest.NewRequest(
@ -177,7 +178,7 @@ func TestServerTailnet_ReverseProxy(t *testing.T) {
u, err := url.Parse(fmt.Sprintf("http://127.0.0.1:%d", workspacesdk.AgentHTTPAPIServerPort))
require.NoError(t, err)
rp := serverTailnet.ReverseProxy(u, u, a.id)
rp := serverTailnet.ReverseProxy(u, u, a.id, appurl.ApplicationURL{}, "")
req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil)
require.NoError(t, err)
@ -222,7 +223,7 @@ func TestServerTailnet_ReverseProxy(t *testing.T) {
u, err := url.Parse("http://127.0.0.1" + port)
require.NoError(t, err)
rp := serverTailnet.ReverseProxy(u, u, a.id)
rp := serverTailnet.ReverseProxy(u, u, a.id, appurl.ApplicationURL{}, "")
for i := 0; i < 5; i++ {
rw := httptest.NewRecorder()
@ -279,7 +280,7 @@ func TestServerTailnet_ReverseProxy(t *testing.T) {
require.NoError(t, err)
for i, ag := range agents {
rp := serverTailnet.ReverseProxy(u, u, ag.id)
rp := serverTailnet.ReverseProxy(u, u, ag.id, appurl.ApplicationURL{}, "")
rw := httptest.NewRecorder()
req := httptest.NewRequest(
@ -317,7 +318,7 @@ func TestServerTailnet_ReverseProxy(t *testing.T) {
uri, err := url.Parse(s.URL)
require.NoError(t, err)
rp := serverTailnet.ReverseProxy(uri, uri, a.id)
rp := serverTailnet.ReverseProxy(uri, uri, a.id, appurl.ApplicationURL{}, "")
rw := httptest.NewRecorder()
req := httptest.NewRequest(
@ -347,7 +348,7 @@ func TestServerTailnet_ReverseProxy(t *testing.T) {
u, err := url.Parse(fmt.Sprintf("http://127.0.0.1:%d", workspacesdk.AgentHTTPAPIServerPort))
require.NoError(t, err)
rp := serverTailnet.ReverseProxy(u, u, a.id)
rp := serverTailnet.ReverseProxy(u, u, a.id, appurl.ApplicationURL{}, "")
rw := httptest.NewRecorder()
req := httptest.NewRequest(

View File

@ -472,6 +472,7 @@ func (api *API) userAuthMethods(rw http.ResponseWriter, r *http.Request) {
}
httpapi.Write(r.Context(), rw, http.StatusOK, codersdk.AuthMethods{
TermsOfServiceURL: api.DeploymentValues.TermsOfServiceURL.Value(),
Password: codersdk.AuthMethod{
Enabled: !api.DeploymentValues.DisablePasswordAuth.Value(),
},

View File

@ -5,6 +5,7 @@ import (
"net"
"net/url"
"regexp"
"strconv"
"strings"
"golang.org/x/xerrors"
@ -83,6 +84,55 @@ func (a ApplicationURL) Path() string {
return fmt.Sprintf("/@%s/%s.%s/apps/%s", a.Username, a.WorkspaceName, a.AgentName, a.AppSlugOrPort)
}
// PortInfo returns the port, protocol, and whether the AppSlugOrPort is a port or not.
//
// A purely numeric slug (e.g. "8080") is treated as an HTTP port, and a
// numeric slug with a trailing "s" (e.g. "8080s") as an HTTPS port. Any
// other slug is reported as not-a-port.
func (a ApplicationURL) PortInfo() (uint, string, bool) {
	if strings.HasSuffix(a.AppSlugOrPort, "s") {
		// "NNNNs" form: HTTPS, provided the prefix is a valid 16-bit port.
		p, err := strconv.ParseUint(strings.TrimSuffix(a.AppSlugOrPort, "s"), 10, 16)
		if err != nil {
			return 0, "", false
		}
		return uint(p), "https", true
	}
	// Plain "NNNN" form: HTTP.
	p, err := strconv.ParseUint(a.AppSlugOrPort, 10, 16)
	if err != nil {
		return 0, "", false
	}
	return uint(p), "http", true
}
// ChangePortProtocol returns a copy of the ApplicationURL with its
// port-based AppSlugOrPort rewritten for the target protocol ("http" or
// "https"). If the slug is not a port, or is already using the target
// protocol, the copy is returned unchanged.
func (a *ApplicationURL) ChangePortProtocol(target string) ApplicationURL {
	updated := *a
	port, currentProtocol, isPort := a.PortInfo()
	// Guard: nothing to rewrite for non-port slugs or a no-op target.
	if !isPort || target == currentProtocol {
		return updated
	}
	switch target {
	case "https":
		updated.AppSlugOrPort = fmt.Sprintf("%ds", port)
	case "http":
		updated.AppSlugOrPort = strconv.FormatUint(uint64(port), 10)
	}
	return updated
}
// ParseSubdomainAppURL parses an ApplicationURL from the given subdomain. If
// the subdomain is not a valid application URL hostname, returns a non-nil
// error. If the hostname is not a subdomain of the given base hostname, returns

View File

@ -124,6 +124,16 @@ func TestParseSubdomainAppURL(t *testing.T) {
Username: "user",
},
},
{
Name: "Port--Agent--Workspace--User",
Subdomain: "8080s--agent--workspace--user",
Expected: appurl.ApplicationURL{
AppSlugOrPort: "8080s",
AgentName: "agent",
WorkspaceName: "workspace",
Username: "user",
},
},
{
Name: "HyphenatedNames",
Subdomain: "app-slug--agent-name--workspace-name--user-name",

View File

@ -66,7 +66,7 @@ var nonCanonicalHeaders = map[string]string{
type AgentProvider interface {
// ReverseProxy returns an httputil.ReverseProxy for proxying HTTP requests
// to the specified agent.
ReverseProxy(targetURL, dashboardURL *url.URL, agentID uuid.UUID) *httputil.ReverseProxy
ReverseProxy(targetURL, dashboardURL *url.URL, agentID uuid.UUID, app appurl.ApplicationURL, wildcardHost string) *httputil.ReverseProxy
// AgentConn returns a new connection to the specified agent.
AgentConn(ctx context.Context, agentID uuid.UUID) (_ *workspacesdk.AgentConn, release func(), _ error)
@ -314,7 +314,7 @@ func (s *Server) workspaceAppsProxyPath(rw http.ResponseWriter, r *http.Request)
return
}
s.proxyWorkspaceApp(rw, r, *token, chiPath)
s.proxyWorkspaceApp(rw, r, *token, chiPath, appurl.ApplicationURL{})
}
// HandleSubdomain handles subdomain-based application proxy requests (aka.
@ -417,7 +417,7 @@ func (s *Server) HandleSubdomain(middlewares ...func(http.Handler) http.Handler)
if !ok {
return
}
s.proxyWorkspaceApp(rw, r, *token, r.URL.Path)
s.proxyWorkspaceApp(rw, r, *token, r.URL.Path, app)
})).ServeHTTP(rw, r.WithContext(ctx))
})
}
@ -476,7 +476,7 @@ func (s *Server) parseHostname(rw http.ResponseWriter, r *http.Request, next htt
return app, true
}
func (s *Server) proxyWorkspaceApp(rw http.ResponseWriter, r *http.Request, appToken SignedToken, path string) {
func (s *Server) proxyWorkspaceApp(rw http.ResponseWriter, r *http.Request, appToken SignedToken, path string, app appurl.ApplicationURL) {
ctx := r.Context()
// Filter IP headers from untrusted origins.
@ -545,8 +545,12 @@ func (s *Server) proxyWorkspaceApp(rw http.ResponseWriter, r *http.Request, appT
r.URL.Path = path
appURL.RawQuery = ""
_, protocol, isPort := app.PortInfo()
if isPort {
appURL.Scheme = protocol
}
proxy := s.AgentProvider.ReverseProxy(appURL, s.DashboardURL, appToken.AgentID)
proxy := s.AgentProvider.ReverseProxy(appURL, s.DashboardURL, appToken.AgentID, app, s.Hostname)
proxy.ModifyResponse = func(r *http.Response) error {
r.Header.Del(httpmw.AccessControlAllowOriginHeader)

View File

@ -200,6 +200,7 @@ type DeploymentValues struct {
AllowWorkspaceRenames serpent.Bool `json:"allow_workspace_renames,omitempty" typescript:",notnull"`
Healthcheck HealthcheckConfig `json:"healthcheck,omitempty" typescript:",notnull"`
CLIUpgradeMessage serpent.String `json:"cli_upgrade_message,omitempty" typescript:",notnull"`
TermsOfServiceURL serpent.String `json:"terms_of_service_url,omitempty" typescript:",notnull"`
Config serpent.YAMLConfigPath `json:"config,omitempty" typescript:",notnull"`
WriteConfig serpent.Bool `json:"write_config,omitempty" typescript:",notnull"`
@ -1683,6 +1684,14 @@ when required by your organization's security policy.`,
YAML: "secureAuthCookie",
Annotations: serpent.Annotations{}.Mark(annotationExternalProxies, "true"),
},
{
Name: "Terms of Service URL",
Description: "A URL to an external Terms of Service that must be accepted by users when logging in.",
Flag: "terms-of-service-url",
Env: "CODER_TERMS_OF_SERVICE_URL",
YAML: "termsOfServiceURL",
Value: &c.TermsOfServiceURL,
},
{
Name: "Strict-Transport-Security",
Description: "Controls if the 'Strict-Transport-Security' header is set on all static file responses. " +

View File

@ -209,9 +209,10 @@ type CreateOrganizationRequest struct {
// AuthMethods contains authentication method information like whether they are enabled or not or custom text, etc.
type AuthMethods struct {
Password AuthMethod `json:"password"`
Github AuthMethod `json:"github"`
OIDC OIDCAuthMethod `json:"oidc"`
TermsOfServiceURL string `json:"terms_of_service_url,omitempty"`
Password AuthMethod `json:"password"`
Github AuthMethod `json:"github"`
OIDC OIDCAuthMethod `json:"oidc"`
}
type AuthMethod struct {

View File

@ -89,7 +89,7 @@ GitHub Enterprise requires the following environment variables:
```env
CODER_EXTERNAL_AUTH_0_ID="primary-github"
CODER_EXTERNAL_AUTH_0_TYPE=github-enterprise
CODER_EXTERNAL_AUTH_0_TYPE=github
CODER_EXTERNAL_AUTH_0_CLIENT_ID=xxxxxx
CODER_EXTERNAL_AUTH_0_CLIENT_SECRET=xxxxxxx
CODER_EXTERNAL_AUTH_0_VALIDATE_URL="https://github.example.com/api/v3/user"
@ -102,8 +102,8 @@ CODER_EXTERNAL_AUTH_0_TOKEN_URL="https://github.example.com/login/oauth/access_t
Bitbucket Server requires the following environment variables:
```env
CODER_EXTERNAL_AUTH_0_TYPE="bitbucket-server"
CODER_EXTERNAL_AUTH_0_ID=bitbucket
CODER_EXTERNAL_AUTH_0_ID="primary-bitbucket-server"
CODER_EXTERNAL_AUTH_0_TYPE=bitbucket-server
CODER_EXTERNAL_AUTH_0_CLIENT_ID=xxx
CODER_EXTERNAL_AUTH_0_CLIENT_SECRET=xxx
CODER_EXTERNAL_AUTH_0_AUTH_URL=https://bitbucket.domain.com/rest/oauth2/latest/authorize

View File

@ -21,6 +21,7 @@ Learn more about [Coders architecture](../about/architecture.md) and our
| Kubernetes (GKE) | 2 cores | 4 GB | 1 | db-custom-1-3840 | 500 | 20 | 500 simulated | `v0.27.2` | Jul 27, 2023 |
| Kubernetes (GKE) | 2 cores | 8 GB | 2 | db-custom-2-7680 | 1000 | 20 | 1000 simulated | `v2.2.1` | Oct 9, 2023 |
| Kubernetes (GKE) | 4 cores | 16 GB | 2 | db-custom-8-30720 | 2000 | 50 | 2000 simulated | `v2.8.4` | Feb 28, 2024 |
| Kubernetes (GKE) | 2 cores | 4 GB | 2 | db-custom-2-7680 | 1000 | 50 | 1000 simulated | `v2.10.2` | Apr 26, 2024 |
> Note: a simulated connection reads and writes random data at 40KB/s per
> connection.

1
docs/api/general.md generated
View File

@ -377,6 +377,7 @@ curl -X GET http://coder-server:8080/api/v2/deployment/config \
"user": {}
}
},
"terms_of_service_url": "string",
"tls": {
"address": {
"host": "string",

17
docs/api/schemas.md generated
View File

@ -1040,17 +1040,19 @@
},
"password": {
"enabled": true
}
},
"terms_of_service_url": "string"
}
```
### Properties
| Name | Type | Required | Restrictions | Description |
| ---------- | -------------------------------------------------- | -------- | ------------ | ----------- |
| `github` | [codersdk.AuthMethod](#codersdkauthmethod) | false | | |
| `oidc` | [codersdk.OIDCAuthMethod](#codersdkoidcauthmethod) | false | | |
| `password` | [codersdk.AuthMethod](#codersdkauthmethod) | false | | |
| Name | Type | Required | Restrictions | Description |
| ---------------------- | -------------------------------------------------- | -------- | ------------ | ----------- |
| `github` | [codersdk.AuthMethod](#codersdkauthmethod) | false | | |
| `oidc` | [codersdk.OIDCAuthMethod](#codersdkoidcauthmethod) | false | | |
| `password` | [codersdk.AuthMethod](#codersdkauthmethod) | false | | |
| `terms_of_service_url` | string | false | | |
## codersdk.AuthorizationCheck
@ -2102,6 +2104,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o
"user": {}
}
},
"terms_of_service_url": "string",
"tls": {
"address": {
"host": "string",
@ -2474,6 +2477,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o
"user": {}
}
},
"terms_of_service_url": "string",
"tls": {
"address": {
"host": "string",
@ -2562,6 +2566,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o
| `support` | [codersdk.SupportConfig](#codersdksupportconfig) | false | | |
| `swagger` | [codersdk.SwaggerConfig](#codersdkswaggerconfig) | false | | |
| `telemetry` | [codersdk.TelemetryConfig](#codersdktelemetryconfig) | false | | |
| `terms_of_service_url` | string | false | | |
| `tls` | [codersdk.TLSConfig](#codersdktlsconfig) | false | | |
| `trace` | [codersdk.TraceConfig](#codersdktraceconfig) | false | | |
| `update_check` | boolean | false | | |

3
docs/api/users.md generated
View File

@ -157,7 +157,8 @@ curl -X GET http://coder-server:8080/api/v2/users/authmethods \
},
"password": {
"enabled": true
}
},
"terms_of_service_url": "string"
}
```

10
docs/cli/server.md generated
View File

@ -928,6 +928,16 @@ Type of auth to use when connecting to postgres.
Controls if the 'Secure' property is set on browser session cookies.
### --terms-of-service-url
| | |
| ----------- | ---------------------------------------- |
| Type | <code>string</code> |
| Environment | <code>$CODER_TERMS_OF_SERVICE_URL</code> |
| YAML | <code>termsOfServiceURL</code> |
A URL to an external Terms of Service that must be accepted by users when logging in.
### --strict-transport-security
| | |

View File

@ -24,6 +24,11 @@ alternate installation methods (e.g. standalone binaries, system packages).
## Windows
> **Important:** If you plan to use the built-in PostgreSQL database, you will
> need to ensure that the
> [Visual C++ Runtime](https://learn.microsoft.com/en-US/cpp/windows/latest-supported-vc-redist#latest-microsoft-visual-c-redistributable-version)
> is installed.
Use [GitHub releases](https://github.com/coder/coder/releases) to download the
Windows installer (`.msi`) or standalone binary (`.exe`).

View File

@ -128,6 +128,8 @@ locally in order to log in and manage templates.
For the **mainline** Coder release:
<!-- autoversion(mainline): "--version [version]" -->
```shell
helm install coder coder-v2/coder \
--namespace coder \
@ -137,6 +139,8 @@ locally in order to log in and manage templates.
For the **stable** Coder release:
<!-- autoversion(stable): "--version [version]" -->
```shell
helm install coder coder-v2/coder \
--namespace coder \

View File

@ -9,10 +9,15 @@ You can also manage the lifecycle of failed or inactive workspaces.
Template [admins](../admin/users.md) may define these default values:
- **Default autostop**: How long a workspace runs without user activity before
Coder automatically stops it.
- **Max lifetime**: The maximum duration a workspace stays in a started state
before Coder forcibly stops it.
- [**Default autostop**](../workspaces.md#autostart-and-autostop): How long a
workspace runs without user activity before Coder automatically stops it.
- [**Autostop requirement**](../workspaces.md#autostop-requirement-enterprise):
Enforce mandatory workspace restarts to apply template updates regardless of
user activity.
- **Activity bump**: The duration of inactivity that must pass before a workspace
is automatically stopped.
- **Dormancy**: This allows automatic deletion of unused workspaces to reduce
spend on idle resources.
## Allow users scheduling
@ -21,13 +26,13 @@ allow users to define their own autostart and autostop schedules. Admins can
restrict the days of the week a workspace should automatically start to help
manage infrastructure costs.
## Failure cleanup
## Failure cleanup (enterprise)
Failure cleanup defines how long a workspace is permitted to remain in the
failed state prior to being automatically stopped. Failure cleanup is an
enterprise-only feature.
## Dormancy threshold
## Dormancy threshold (enterprise)
Dormancy Threshold defines how long Coder allows a workspace to remain inactive
before being moved into a dormant state. A workspace's inactivity is determined
@ -37,7 +42,7 @@ the user before being accessible. Coder stops workspaces during their transition
to the dormant state if they are detected to be running. Dormancy Threshold is
an enterprise-only feature.
## Dormancy auto-deletion
## Dormancy auto-deletion (enterprise)
Dormancy Auto-Deletion allows a template admin to dictate how long a workspace
is permitted to remain dormant before it is automatically deleted. Dormancy

View File

@ -74,18 +74,6 @@ coder_app.
![Autostop UI](./images/autostop.png)
### Max lifetime (Deprecated, Enterprise)
Max lifetime is a template setting that determines the number of hours a
workspace will run before Coder automatically stops it, regardless of any active
connections. Use this setting to ensure that workspaces do not run in perpetuity
when connections are left open inadvertently.
Max lifetime is deprecated in favor of template autostop requirements. Templates
can choose to use a max lifetime or an autostop requirement during the
deprecation period, but only one can be used at a time. Coder recommends using
autostop requirements instead as they avoid restarts during work hours.
### Autostop requirement (enterprise)
Autostop requirement is a template setting that determines how often workspaces

View File

@ -61,6 +61,10 @@ OPTIONS:
--support-links struct[[]codersdk.LinkConfig], $CODER_SUPPORT_LINKS
Support links to display in the top right drop down menu.
--terms-of-service-url string, $CODER_TERMS_OF_SERVICE_URL
A URL to an external Terms of Service that must be accepted by users
when logging in.
--update-check bool, $CODER_UPDATE_CHECK (default: false)
Periodically check for new releases of Coder and inform the owner. The
check is performed once per day.

View File

@ -53,6 +53,10 @@ script_check=1
mainline=1
channel=mainline
# These values will be used for any PRs created.
pr_review_assignee=${CODER_RELEASE_PR_REVIEW_ASSIGNEE:-@me}
pr_review_reviewer=${CODER_RELEASE_PR_REVIEW_REVIEWER:-bpmct,stirby}
args="$(getopt -o h -l dry-run,help,ref:,mainline,stable,major,minor,patch,force,ignore-script-out-of-date -- "$@")"
eval set -- "$args"
while true; do
@ -139,9 +143,9 @@ fi
log "Fetching ${branch} and tags from ${remote}..."
git fetch --quiet --tags "${remote}" "$branch"
# Resolve to the latest ref on origin/main unless otherwise specified.
ref_name=${ref:-${remote}/${branch}}
ref=$(git rev-parse --short "${ref_name}")
# Resolve to the current commit unless otherwise specified.
ref_name=${ref:-HEAD}
ref=$(git rev-parse "${ref_name}")
# Make sure that we're running the latest release script.
script_diff=$(git diff --name-status "${remote}/${branch}" -- scripts/release.sh)
@ -149,7 +153,7 @@ if [[ ${script_check} = 1 ]] && [[ -n ${script_diff} ]]; then
error "Release script is out-of-date. Please check out the latest version and try again."
fi
# Make sure no other release contains this ref.
# Make sure no other remote release contains this ref.
release_contains_ref="$(git branch --remotes --contains "${ref}" --list "${remote}/release/*" --format='%(refname)')"
if [[ -n ${release_contains_ref} ]]; then
error "Ref ${ref_name} is already part of another release: $(git describe --always "${ref}") on ${release_contains_ref#"refs/remotes/${remote}/"}."
@ -180,7 +184,7 @@ source "$SCRIPT_DIR/release/check_commit_metadata.sh" "$old_version" "$ref"
trap - EXIT
log
tag_version_args=(--old-version "$old_version" --ref "$ref" --"$increment")
tag_version_args=(--old-version "$old_version" --ref "$ref_name" --"$increment")
if ((force == 1)); then
tag_version_args+=(--force)
fi
@ -294,7 +298,7 @@ log "Release tags for ${new_version} created successfully and pushed to ${remote
log
# Write to a tmp file for ease of debugging.
release_json_file=$(mktemp -t coder-release.json)
release_json_file=$(mktemp -t coder-release.json.XXXXXX)
log "Writing release JSON to ${release_json_file}"
jq -n \
--argjson dry_run "${dry_run}" \
@ -310,6 +314,49 @@ maybedryrun "${dry_run}" cat "${release_json_file}" |
log
log "Release workflow started successfully!"
log
log "Would you like for me to create a pull request for you to automatically bump the version numbers in the docs?"
while [[ ! ${create_pr:-} =~ ^[YyNn]$ ]]; do
read -p "Create PR? (y/n) " -n 1 -r create_pr
log
done
if [[ ${create_pr} =~ ^[Yy]$ ]]; then
pr_branch=autoversion/${new_version}
title="docs: bump ${channel} version to ${new_version}"
body="This PR was automatically created by the [release script](https://github.com/coder/coder/blob/main/scripts/release.sh).
Please review the changes and merge if they look good and the release is complete.
You can follow the release progress [here](https://github.com/coder/coder/actions/workflows/release.yaml) and view the published release [here](https://github.com/coder/coder/releases/tag/${new_version}) (once complete)."
log
log "Creating branch \"${pr_branch}\" and updating versions..."
create_pr_stash=0
if ! git diff --quiet --exit-code -- docs; then
maybedryrun "${dry_run}" git stash push --message "scripts/release.sh: autostash (autoversion)" -- docs
create_pr_stash=1
fi
maybedryrun "${dry_run}" git checkout -b "${pr_branch}" "${remote}/${branch}"
execrelative go run ./release autoversion --channel "${channel}" "${new_version}" --dry-run
maybedryrun "${dry_run}" git add docs
maybedryrun "${dry_run}" git commit -m "${title}"
# Return to previous branch.
maybedryrun "${dry_run}" git checkout -
if ((create_pr_stash)); then
maybedryrun "${dry_run}" git stash pop
fi
log "Creating pull request..."
maybedryrun "${dry_run}" gh pr create \
--assignee "${pr_review_assignee}" \
--reviewer "${pr_review_reviewer}" \
--base "${branch}" \
--head "${pr_branch}" \
--title "${title}" \
--body "${body}"
fi
if ((dry_run)); then
# We can't watch the release.yaml workflow if we're in dry-run mode.
exit 0

View File

@ -4,13 +4,18 @@ import (
"context"
"errors"
"fmt"
"io/fs"
"os"
"os/exec"
"path/filepath"
"regexp"
"slices"
"strings"
"time"
"github.com/google/go-cmp/cmp"
"github.com/google/go-github/v61/github"
"github.com/spf13/afero"
"golang.org/x/mod/semver"
"golang.org/x/xerrors"
@ -26,42 +31,89 @@ const (
)
func main() {
logger := slog.Make(sloghuman.Sink(os.Stderr)).Leveled(slog.LevelDebug)
// Pre-flight checks.
toplevel, err := run("git", "rev-parse", "--show-toplevel")
if err != nil {
_, _ = fmt.Fprintf(os.Stderr, "ERROR: %v\n", err)
_, _ = fmt.Fprintf(os.Stderr, "NOTE: This command must be run in the coder/coder repository.\n")
os.Exit(1)
}
var ghToken string
var dryRun bool
if err = checkCoderRepo(toplevel); err != nil {
_, _ = fmt.Fprintf(os.Stderr, "ERROR: %v\n", err)
_, _ = fmt.Fprintf(os.Stderr, "NOTE: This command must be run in the coder/coder repository.\n")
os.Exit(1)
}
r := &releaseCommand{
fs: afero.NewBasePathFs(afero.NewOsFs(), toplevel),
logger: slog.Make(sloghuman.Sink(os.Stderr)).Leveled(slog.LevelInfo),
}
var channel string
cmd := serpent.Command{
Use: "release <subcommand>",
Short: "Prepare, create and publish releases.",
Options: serpent.OptionSet{
{
Flag: "debug",
Description: "Enable debug logging.",
Value: serpent.BoolOf(&r.debug),
},
{
Flag: "gh-token",
Description: "GitHub personal access token.",
Env: "GH_TOKEN",
Value: serpent.StringOf(&ghToken),
Value: serpent.StringOf(&r.ghToken),
},
{
Flag: "dry-run",
FlagShorthand: "n",
Description: "Do not make any changes, only print what would be done.",
Value: serpent.BoolOf(&dryRun),
Value: serpent.BoolOf(&r.dryRun),
},
},
Children: []*serpent.Command{
{
Use: "promote <version>",
Short: "Promote version to stable.",
Use: "promote <version>",
Short: "Promote version to stable.",
Middleware: r.debugMiddleware, // Serpent doesn't support this on parent.
Handler: func(inv *serpent.Invocation) error {
ctx := inv.Context()
if len(inv.Args) == 0 {
return xerrors.New("version argument missing")
}
if !dryRun && ghToken == "" {
if !r.dryRun && r.ghToken == "" {
return xerrors.New("GitHub personal access token is required, use --gh-token or GH_TOKEN")
}
err := promoteVersionToStable(ctx, inv, logger, ghToken, dryRun, inv.Args[0])
err := r.promoteVersionToStable(ctx, inv, inv.Args[0])
if err != nil {
return err
}
return nil
},
},
{
Use: "autoversion <version>",
Short: "Automatically update the provided channel to version in markdown files.",
Options: serpent.OptionSet{
{
Flag: "channel",
Description: "Channel to update.",
Value: serpent.EnumOf(&channel, "mainline", "stable"),
},
},
Middleware: r.debugMiddleware, // Serpent doesn't support this on parent.
Handler: func(inv *serpent.Invocation) error {
ctx := inv.Context()
if len(inv.Args) == 0 {
return xerrors.New("version argument missing")
}
err := r.autoversion(ctx, channel, inv.Args[0])
if err != nil {
return err
}
@ -72,24 +124,55 @@ func main() {
},
}
err := cmd.Invoke().WithOS().Run()
err = cmd.Invoke().WithOS().Run()
if err != nil {
if errors.Is(err, cliui.Canceled) {
os.Exit(1)
}
logger.Error(context.Background(), "release command failed", "err", err)
r.logger.Error(context.Background(), "release command failed", "err", err)
os.Exit(1)
}
}
// checkCoderRepo verifies that the git repository at path has its "origin"
// remote pointing at the coder/coder repository on GitHub.
func checkCoderRepo(path string) error {
	origin, err := run("git", "-C", path, "remote", "get-url", "origin")
	if err != nil {
		return xerrors.Errorf("get remote failed: %w", err)
	}
	onGitHub := strings.Contains(origin, "github.com")
	isCoder := strings.Contains(origin, "coder/coder")
	if onGitHub && isCoder {
		return nil
	}
	return xerrors.Errorf("origin is not set to the coder/coder repository on github.com")
}
// releaseCommand holds the shared flags and dependencies for all release
// subcommands (promote, autoversion).
type releaseCommand struct {
	// fs is rooted at the repository top-level directory.
	fs afero.Fs
	logger slog.Logger
	// debug enables debug logging (--debug).
	debug bool
	// ghToken is the GitHub personal access token (--gh-token / GH_TOKEN).
	ghToken string
	// dryRun prevents any changes from being made (--dry-run, -n).
	dryRun bool
}
// debugMiddleware reconfigures the command logger just before the handler
// runs: --debug raises the level to debug, and --dry-run tags every log
// entry with dry_run=true.
func (r *releaseCommand) debugMiddleware(next serpent.HandlerFunc) serpent.HandlerFunc {
	return func(inv *serpent.Invocation) error {
		logger := r.logger
		if r.debug {
			logger = logger.Leveled(slog.LevelDebug)
		}
		if r.dryRun {
			logger = logger.With(slog.F("dry_run", true))
		}
		r.logger = logger
		return next(inv)
	}
}
//nolint:revive // Allow dryRun control flag.
func promoteVersionToStable(ctx context.Context, inv *serpent.Invocation, logger slog.Logger, ghToken string, dryRun bool, version string) error {
func (r *releaseCommand) promoteVersionToStable(ctx context.Context, inv *serpent.Invocation, version string) error {
client := github.NewClient(nil)
if ghToken != "" {
client = client.WithAuthToken(ghToken)
if r.ghToken != "" {
client = client.WithAuthToken(r.ghToken)
}
logger = logger.With(slog.F("dry_run", dryRun), slog.F("version", version))
logger := r.logger.With(slog.F("version", version))
logger.Info(ctx, "checking current stable release")
@ -161,7 +244,7 @@ func promoteVersionToStable(ctx context.Context, inv *serpent.Invocation, logger
updatedNewStable.Body = github.String(updatedBody)
updatedNewStable.Prerelease = github.Bool(false)
updatedNewStable.Draft = github.Bool(false)
if !dryRun {
if !r.dryRun {
_, _, err = client.Repositories.EditRelease(ctx, owner, repo, newStable.GetID(), newStable)
if err != nil {
return xerrors.Errorf("edit release failed: %w", err)
@ -221,3 +304,129 @@ func removeMainlineBlurb(body string) string {
return strings.Join(newBody, "\n")
}
// autoversion automatically updates the provided channel to version in
// markdown files.
func (r *releaseCommand) autoversion(ctx context.Context, channel, version string) error {
	// For now, scope this to docs, perhaps we include README.md in the future.
	var markdownFiles []string
	collect := func(path string, _ fs.FileInfo, walkErr error) error {
		if walkErr != nil {
			return walkErr
		}
		if strings.EqualFold(filepath.Ext(path), ".md") {
			markdownFiles = append(markdownFiles, path)
		}
		return nil
	}
	if err := afero.Walk(r.fs, "docs", collect); err != nil {
		return xerrors.Errorf("walk failed: %w", err)
	}

	for _, path := range markdownFiles {
		if err := r.autoversionFile(ctx, path, channel, version); err != nil {
			return xerrors.Errorf("autoversion file failed: %w", err)
		}
	}

	return nil
}
// autoversionMarkdownPragmaRe matches the autoversion pragma in markdown files.
//
// Example:
//
//	<!-- autoversion(stable): "--version [version]" -->
//
// The channel is the first capture group and the match string is the second
// capture group. The string "[version]" is replaced with the new version.
var autoversionMarkdownPragmaRe = regexp.MustCompile(`<!-- ?autoversion\(([^)]+)\): ?"([^"]+)" ?-->`)

// autoversionFile rewrites the version number in file for the given channel.
// An autoversion pragma (see autoversionMarkdownPragmaRe) selects the channel
// and provides a template whose "[version]" placeholder is compiled into a
// version-capturing regexp; the next line matching that regexp (checked from
// the pragma line onward) has its version replaced. Only the first such match
// per file is updated. An error is returned if a channel-matching pragma is
// found but no subsequent line matches its template. In dry-run mode the diff
// is logged instead of written.
func (r *releaseCommand) autoversionFile(ctx context.Context, file, channel, version string) error {
	version = strings.TrimPrefix(version, "v")
	logger := r.logger.With(slog.F("file", file), slog.F("channel", channel), slog.F("version", version))

	logger.Debug(ctx, "checking file for autoversion pragma")

	contents, err := afero.ReadFile(r.fs, file)
	if err != nil {
		return xerrors.Errorf("read file failed: %w", err)
	}

	lines := strings.Split(string(contents), "\n")
	// matchRe is non-nil while we are searching for the version line that
	// follows a channel-matching pragma.
	var matchRe *regexp.Regexp
	for i, line := range lines {
		if autoversionMarkdownPragmaRe.MatchString(line) {
			matches := autoversionMarkdownPragmaRe.FindStringSubmatch(line)
			matchChannel := matches[1]
			match := matches[2]

			logger := logger.With(slog.F("line_number", i+1), slog.F("match_channel", matchChannel), slog.F("match", match))
			logger.Debug(ctx, "autoversion pragma detected")

			if matchChannel != channel {
				logger.Debug(ctx, "channel mismatch, skipping")
				continue
			}

			logger.Info(ctx, "autoversion pragma found with channel match")

			// Turn the pragma's template into a regexp capturing the
			// current version number (e.g. "--version 2.10.0").
			match = strings.Replace(match, "[version]", `(?P<version>[0-9]+\.[0-9]+\.[0-9]+)`, 1)
			logger.Debug(ctx, "compiling match regexp", "match", match)
			matchRe, err = regexp.Compile(match)
			if err != nil {
				return xerrors.Errorf("regexp compile failed: %w", err)
			}
		}
		if matchRe != nil {
			// Apply matchRe and find the group named "version", then replace it
			// with the new version.
			if match := matchRe.FindStringSubmatchIndex(line); match != nil {
				vg := matchRe.SubexpIndex("version")
				if vg == -1 {
					logger.Error(ctx, "version group not found in match", "num_subexp", matchRe.NumSubexp(), "subexp_names", matchRe.SubexpNames(), "match", match)
					return xerrors.Errorf("bug: version group not found in match")
				}
				// match holds pairs of (start, end) indexes per subgroup;
				// splice the new version over the captured span.
				start := match[vg*2]
				end := match[vg*2+1]
				logger.Info(ctx, "updating version number", "line_number", i+1, "match_start", start, "match_end", end, "old_version", line[start:end])
				lines[i] = line[:start] + version + line[end:]
				// Done: only the first matching line is updated.
				matchRe = nil
				break
			}
		}
	}
	// A pragma matched the channel but no line matched its template.
	if matchRe != nil {
		return xerrors.Errorf("match not found in file")
	}

	updated := strings.Join(lines, "\n")

	// Only update the file if there are changes.
	diff := cmp.Diff(string(contents), updated)
	if diff == "" {
		return nil
	}

	if !r.dryRun {
		if err := afero.WriteFile(r.fs, file, []byte(updated), 0o644); err != nil {
			return xerrors.Errorf("write file failed: %w", err)
		}
		logger.Info(ctx, "file autoversioned")
	} else {
		logger.Info(ctx, "dry-run: file not updated", "uncommitted_changes", diff)
	}

	return nil
}
// run executes command with args and returns its combined stdout/stderr
// output with surrounding whitespace trimmed. On failure, the output is
// embedded in the error to ease debugging.
func run(command string, args ...string) (string, error) {
	out, err := exec.Command(command, args...).CombinedOutput()
	if err != nil {
		cmdline := fmt.Sprintf("%s %s", command, strings.Join(args, " "))
		return "", xerrors.Errorf("command failed: %q: %w\n%s", cmdline, err, out)
	}
	return strings.TrimSpace(string(out)), nil
}

View File

@ -1,10 +1,14 @@
package main
import (
"context"
"path/filepath"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/spf13/afero"
"github.com/stretchr/testify/require"
)
func Test_removeMainlineBlurb(t *testing.T) {
@ -115,7 +119,7 @@ Enjoy.
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
if diff := cmp.Diff(removeMainlineBlurb(tt.body), tt.want); diff != "" {
t.Errorf("removeMainlineBlurb() mismatch (-want +got):\n%s", diff)
require.Fail(t, "removeMainlineBlurb() mismatch (-want +got):\n%s", diff)
}
})
}
@ -131,6 +135,44 @@ func Test_addStableSince(t *testing.T) {
result := addStableSince(date, body)
if diff := cmp.Diff(expected, result); diff != "" {
t.Errorf("addStableSince() mismatch (-want +got):\n%s", diff)
require.Fail(t, "addStableSince() mismatch (-want +got):\n%s", diff)
}
}
// Test_release_autoversion verifies that the autoversion subcommand updates
// mainline and stable version numbers in the testdata markdown files so they
// match their .golden counterparts.
func Test_release_autoversion(t *testing.T) {
	t.Parallel()

	ctx := context.Background()
	dir := filepath.Join("testdata", "autoversion")

	// Copy-on-write keeps the on-disk testdata pristine; all writes land in
	// the in-memory layer.
	fs := afero.NewCopyOnWriteFs(afero.NewOsFs(), afero.NewMemMapFs())
	r := releaseCommand{
		fs: afero.NewBasePathFs(fs, dir),
	}

	err := r.autoversion(ctx, "mainline", "v2.11.1")
	require.NoError(t, err)

	err = r.autoversion(ctx, "stable", "v2.9.4")
	require.NoError(t, err)

	files, err := filepath.Glob(filepath.Join(dir, "docs", "*.md"))
	require.NoError(t, err)
	// Guard against the test silently passing if the fixtures go missing.
	require.NotEmpty(t, files, "no testdata markdown files found")

	for _, file := range files {
		file := file
		t.Run(file, func(t *testing.T) {
			t.Parallel()

			got, err := afero.ReadFile(fs, file)
			require.NoError(t, err)

			want, err := afero.ReadFile(fs, file+".golden")
			require.NoError(t, err)

			// Diff want against got so the output matches the
			// "(-want +got)" convention, and pass the diff as a format
			// argument so it is actually rendered.
			if diff := cmp.Diff(string(want), string(got)); diff != "" {
				require.Failf(t, "golden file mismatch", "(-want +got):\n%s", diff)
			}
		})
	}
}

View File

@ -72,6 +72,9 @@ done
# Check dependencies.
dependencies git
ref_name=${ref:-HEAD}
ref=$(git rev-parse "${ref_name}")
if [[ -z $increment ]]; then
error "No version increment provided."
fi
@ -79,8 +82,6 @@ fi
if [[ -z $old_version ]]; then
old_version="$(git describe --abbrev=0 "$ref^1" --always)"
fi
ref_name=${ref}
ref=$(git rev-parse --short "$ref")
# shellcheck source=scripts/release/check_commit_metadata.sh
source "$SCRIPT_DIR/check_commit_metadata.sh" "$old_version" "$ref"
@ -110,17 +111,19 @@ release_ff=0
case "$increment" in
patch)
release_branch="${release_branch_prefix}${version_parts[0]}.${version_parts[1]}"
branch_contains_ref=$(git branch --remotes --contains "${ref}" --list "*/${release_branch}" --format='%(refname)')
branch_contains_ref=$(git branch --contains "${ref}" --list "${release_branch}" --format='%(refname)')
if [[ -z $branch_contains_ref ]]; then
# Allow patch if we can fast-forward to ref, no need for dry-run here
# since we're not checking out the branch and deleting it afterwards.
git branch --no-track "${release_branch}-ff" "origin/${release_branch}"
if ! git merge --ff-only --into-name "${release_branch}-ff" "${ref}" >/dev/null 2>&1; then
git branch -D "${release_branch}-ff"
git branch --no-track "${release_branch}-ff" "${release_branch}"
# We're using git fetch here to perform a fast-forward on a
# non-checked-out branch. The "." uses the local repo as remote (faster).
if ! git fetch --quiet . "${ref}":"${release_branch}-ff"; then
git branch --quiet --delete --force "${release_branch}-ff"
error "Provided ref (${ref_name}) is not in the required release branch (${release_branch}) and cannot be fast-forwarded, unable to increment patch version. Please increment minor or major."
fi
git branch --quiet --delete --force "${release_branch}-ff"
release_ff=1
git branch -D "${release_branch}-ff"
fi
version_parts[2]=$((version_parts[2] + 1))
;;
@ -144,6 +147,12 @@ new_version="v${version_parts[0]}.${version_parts[1]}.${version_parts[2]}"
log "Old version: $old_version"
log "New version: $new_version"
log "Release branch: $release_branch"
tag_exists=$(git tag --list "$new_version")
if [[ -n ${tag_exists} ]]; then
error "Tag ${new_version} already exists."
fi
if [[ ${increment} = patch ]]; then
if ((release_ff == 1)); then
log "Fast-forwarding release branch"
@ -154,9 +163,38 @@ if [[ ${increment} = patch ]]; then
maybedryrun "$dry_run" git checkout "${release_branch}"
fi
else
log "Creating new release branch"
maybedryrun "$dry_run" git checkout -b "${release_branch}" "${ref}"
remote_branch_exists=$(git branch --remotes --list "*/${release_branch}" --format='%(refname)')
local_branch_exists=$(git branch --list "${release_branch}" --format='%(refname)')
if [[ -n ${remote_branch_exists} ]] || [[ -n ${local_branch_exists} ]]; then
if [[ ${prev_increment} == patch ]]; then
error "Release branch ${release_branch} already exists, impossible upgrade from \"${prev_increment}\" to \"${increment}\" detected. Please check your ref (${ref_name}) and that no incompatible commits were cherry-picked."
fi
fi
if [[ -n ${remote_branch_exists} ]]; then
error "Release branch ${release_branch} already exists on remote, please check your ref."
fi
if [[ -n ${local_branch_exists} ]]; then
# If it exists, ensure that this release branch points to the provided ref.
release_branch_ref=$(git rev-parse "${release_branch}")
if [[ ${release_branch_ref} != "${ref}" ]]; then
error "Local release branch ${release_branch} already exists, but does not point to the provided ref (${ref_name})."
fi
log "Using existing release branch"
maybedryrun "$dry_run" git checkout "${release_branch}"
else
log "Creating new release branch"
maybedryrun "$dry_run" git checkout -b "${release_branch}" "${ref}"
fi
fi
# Ensure the ref is in the release branch.
branch_contains_ref=$(git branch --contains "${ref}" --list "${release_branch}" --format='%(refname)')
if ((!dry_run)) && [[ -z $branch_contains_ref ]]; then
error "Provided ref (${ref_name}) is not in the required release branch (${release_branch})."
fi
maybedryrun "$dry_run" git tag -a "$new_version" -m "Release $new_version" "$ref"
echo "${release_branch} ${new_version}"

View File

@ -0,0 +1,25 @@
# Some documentation
1. Run the following command to install the chart in your cluster.
For the **mainline** Coder release:
<!-- autoversion(mainline): "--version [version]" -->
```shell
helm install coder coder-v2/coder \
--namespace coder \
--values values.yaml \
--version 2.10.0
```
For the **stable** Coder release:
<!-- autoversion(stable): "--version [version]" -->
```shell
helm install coder coder-v2/coder \
--namespace coder \
--values values.yaml \
--version 2.9.1
```

View File

@ -0,0 +1,25 @@
# Some documentation
1. Run the following command to install the chart in your cluster.
For the **mainline** Coder release:
<!-- autoversion(mainline): "--version [version]" -->
```shell
helm install coder coder-v2/coder \
--namespace coder \
--values values.yaml \
--version 2.11.1
```
For the **stable** Coder release:
<!-- autoversion(stable): "--version [version]" -->
```shell
helm install coder coder-v2/coder \
--namespace coder \
--values values.yaml \
--version 2.9.4
```

View File

@ -0,0 +1,14 @@
# Some documentation
1. Run the following command to install the chart in your cluster.
For the **mainline** Coder release:
<!-- autoversion(mainline): "--version [version] # trailing comment!" -->
```shell
helm install coder coder-v2/coder \
--namespace coder \
--values values.yaml \
--version 2.10.0 # trailing comment!
```

View File

@ -0,0 +1,14 @@
# Some documentation
1. Run the following command to install the chart in your cluster.
For the **mainline** Coder release:
<!-- autoversion(mainline): "--version [version] # trailing comment!" -->
```shell
helm install coder coder-v2/coder \
--namespace coder \
--values values.yaml \
--version 2.11.1 # trailing comment!
```

View File

@ -786,12 +786,15 @@ func extractBin(dest string, r io.Reader) (numExtracted int, err error) {
type ErrorPageData struct {
Status int
// HideStatus will remove the status code from the page.
HideStatus bool
Title string
Description string
RetryEnabled bool
DashboardURL string
Warnings []string
HideStatus bool
Title string
Description string
RetryEnabled bool
DashboardURL string
Warnings []string
AdditionalInfo string
AdditionalButtonLink string
AdditionalButtonText string
RenderDescriptionMarkdown bool
}

View File

@ -124,6 +124,7 @@ export interface AuthMethod {
// From codersdk/users.go
export interface AuthMethods {
readonly terms_of_service_url?: string;
readonly password: AuthMethod;
readonly github: AuthMethod;
readonly oidc: OIDCAuthMethod;
@ -445,6 +446,7 @@ export interface DeploymentValues {
readonly allow_workspace_renames?: boolean;
readonly healthcheck?: HealthcheckConfig;
readonly cli_upgrade_message?: string;
readonly terms_of_service_url?: string;
readonly config?: string;
readonly write_config?: boolean;
readonly address?: string;

View File

@ -3,6 +3,8 @@ import {
MockAuthMethodsAll,
MockAuthMethodsExternal,
MockAuthMethodsPasswordOnly,
MockAuthMethodsPasswordTermsOfService,
MockBuildInfo,
mockApiError,
} from "testHelpers/entities";
import { LoginPageView } from "./LoginPageView";
@ -10,6 +12,9 @@ import { LoginPageView } from "./LoginPageView";
const meta: Meta<typeof LoginPageView> = {
title: "pages/LoginPage",
component: LoginPageView,
args: {
buildInfo: MockBuildInfo,
},
};
export default meta;
@ -33,6 +38,12 @@ export const WithAllAuthMethods: Story = {
},
};
export const WithTermsOfService: Story = {
args: {
authMethods: MockAuthMethodsPasswordTermsOfService,
},
};
export const AuthError: Story = {
args: {
error: mockApiError({
@ -53,6 +64,7 @@ export const ExternalAuthError: Story = {
export const LoadingAuthMethods: Story = {
args: {
isLoading: true,
authMethods: undefined,
},
};

View File

@ -1,5 +1,6 @@
import type { Interpolation, Theme } from "@emotion/react";
import type { FC } from "react";
import Button from "@mui/material/Button";
import { type FC, useState } from "react";
import { useLocation } from "react-router-dom";
import type { AuthMethods, BuildInfoResponse } from "api/typesGenerated";
import { CoderIcon } from "components/Icons/CoderIcon";
@ -7,6 +8,7 @@ import { Loader } from "components/Loader/Loader";
import { getApplicationName, getLogoURL } from "utils/appearance";
import { retrieveRedirect } from "utils/redirect";
import { SignInForm } from "./SignInForm";
import { TermsOfServiceLink } from "./TermsOfServiceLink";
export interface LoginPageViewProps {
authMethods: AuthMethods | undefined;
@ -49,12 +51,21 @@ export const LoginPageView: FC<LoginPageViewProps> = ({
<CoderIcon fill="white" opacity={1} css={styles.icon} />
);
const [tosAccepted, setTosAccepted] = useState(false);
const tosAcceptanceRequired =
authMethods?.terms_of_service_url && !tosAccepted;
return (
<div css={styles.root}>
<div css={styles.container}>
{applicationLogo}
{isLoading ? (
<Loader />
) : tosAcceptanceRequired ? (
<>
<TermsOfServiceLink url={authMethods.terms_of_service_url} />
<Button onClick={() => setTosAccepted(true)}>I agree</Button>
</>
) : (
<SignInForm
authMethods={authMethods}
@ -70,6 +81,12 @@ export const LoginPageView: FC<LoginPageViewProps> = ({
Copyright &copy; {new Date().getFullYear()} Coder Technologies, Inc.
</div>
<div>{buildInfo?.version}</div>
{tosAccepted && (
<TermsOfServiceLink
url={authMethods?.terms_of_service_url}
css={{ fontSize: 12 }}
/>
)}
</footer>
</div>
</div>

View File

@ -110,7 +110,7 @@ export const SignInForm: FC<SignInFormProps> = ({
{passwordEnabled && oAuthEnabled && (
<div css={styles.divider}>
<div css={styles.dividerLine} />
<div css={styles.dividerLabel}>Or</div>
<div css={styles.dividerLabel}>or</div>
<div css={styles.dividerLine} />
</div>
)}

View File

@ -0,0 +1,28 @@
import LaunchIcon from "@mui/icons-material/LaunchOutlined";
import Link from "@mui/material/Link";
import type { FC } from "react";
interface TermsOfServiceLinkProps {
className?: string;
url?: string;
}
export const TermsOfServiceLink: FC<TermsOfServiceLinkProps> = ({
className,
url,
}) => {
return (
<div css={{ paddingTop: 12, fontSize: 16 }} className={className}>
By continuing, you agree to the{" "}
<Link
css={{ fontWeight: 500, textWrap: "nowrap" }}
href={url}
target="_blank"
rel="noreferrer"
>
Terms of Service&nbsp;
<LaunchIcon css={{ fontSize: 12 }} />
</Link>
</div>
);
};

View File

@ -136,10 +136,7 @@ export const SingleSignOnSection: FC<SingleSignOnSectionProps> = ({
}) => {
const theme = useTheme();
const authList = Object.values(
authMethods,
) as (typeof authMethods)[keyof typeof authMethods][];
const noSsoEnabled = !authList.some((method) => method.enabled);
const noSsoEnabled = !authMethods.github.enabled && !authMethods.oidc.enabled;
return (
<>

View File

@ -1373,6 +1373,13 @@ export const MockAuthMethodsPasswordOnly: TypesGen.AuthMethods = {
oidc: { enabled: false, signInText: "", iconUrl: "" },
};
export const MockAuthMethodsPasswordTermsOfService: TypesGen.AuthMethods = {
terms_of_service_url: "https://www.youtube.com/watch?v=C2f37Vb2NAE",
password: { enabled: true },
github: { enabled: false },
oidc: { enabled: false, signInText: "", iconUrl: "" },
};
export const MockAuthMethodsExternal: TypesGen.AuthMethods = {
password: { enabled: false },
github: { enabled: true },

View File

@ -33,7 +33,7 @@ running). */}}
.container {
--side-padding: 24px;
width: 100%;
max-width: calc(320px + var(--side-padding) * 2);
max-width: calc(500px + var(--side-padding) * 2);
padding: 0 var(--side-padding);
text-align: center;
}
@ -170,6 +170,9 @@ running). */}}
{{- if .Error.RenderDescriptionMarkdown }} {{ .ErrorDescriptionHTML }} {{
else }}
<p>{{ .Error.Description }}</p>
{{ end }} {{- if .Error.AdditionalInfo }}
<br />
<p>{{ .Error.AdditionalInfo }}</p>
{{ end }} {{- if .Error.Warnings }}
<div class="warning">
<div class="warning-title">
@ -195,7 +198,11 @@ running). */}}
</div>
{{ end }}
<div class="button-group">
{{- if .Error.RetryEnabled }}
{{- if and .Error.AdditionalButtonText .Error.AdditionalButtonLink }}
<a href="{{ .Error.AdditionalButtonLink }}"
>{{ .Error.AdditionalButtonText }}</a
>
{{ end }} {{- if .Error.RetryEnabled }}
<button onclick="window.location.reload()">Retry</button>
{{ end }}
<a href="{{ .Error.DashboardURL }}">Back to site</a>

View File

@ -0,0 +1,128 @@
package integration
import (
"context"
"encoding/json"
"io"
"net/http"
"net/http/httptest"
"net/netip"
"strings"
"sync/atomic"
"testing"
"time"
"github.com/google/uuid"
"github.com/stretchr/testify/require"
"golang.org/x/xerrors"
"nhooyr.io/websocket"
"tailscale.com/tailcfg"
"cdr.dev/slog"
"github.com/coder/coder/v2/coderd/httpapi"
"github.com/coder/coder/v2/codersdk"
"github.com/coder/coder/v2/tailnet"
"github.com/coder/coder/v2/testutil"
)
// NetworkSetupDefault is a no-op network setup for tests that need no
// special networking scenario.
func NetworkSetupDefault(*testing.T) {}
// DERPMapTailscale fetches the default DERP map from Tailscale's control
// plane. It requires outbound internet access and fails the test on any
// error.
func DERPMapTailscale(ctx context.Context, t *testing.T) *tailcfg.DERPMap {
	ctx, cancel := context.WithTimeout(ctx, testutil.WaitShort)
	defer cancel()

	req, err := http.NewRequestWithContext(ctx, "GET", "https://controlplane.tailscale.com/derpmap/default", nil)
	require.NoError(t, err)

	res, err := http.DefaultClient.Do(req)
	require.NoError(t, err)
	defer res.Body.Close()

	dm := &tailcfg.DERPMap{}
	dec := json.NewDecoder(res.Body)
	err = dec.Decode(dm)
	require.NoError(t, err)

	return dm
}
// CoordinatorInMemory starts an in-memory tailnet coordinator together with
// an HTTP test server that serves the coordination protocol over websockets.
// Peers connect by dialing "<url>/<uuid>", where the path component is the
// peer's id. Both the coordinator and the server are torn down via t.Cleanup.
func CoordinatorInMemory(t *testing.T, logger slog.Logger, dm *tailcfg.DERPMap) (coord tailnet.Coordinator, url string) {
	coord = tailnet.NewCoordinator(logger)
	var coordPtr atomic.Pointer[tailnet.Coordinator]
	coordPtr.Store(&coord)
	t.Cleanup(func() { _ = coord.Close() })

	csvc, err := tailnet.NewClientService(logger, &coordPtr, 10*time.Minute, func() *tailcfg.DERPMap {
		return dm
	})
	require.NoError(t, err)

	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// The peer id is the sole path component, e.g. "/<uuid>".
		idStr := strings.TrimPrefix(r.URL.Path, "/")
		id, err := uuid.Parse(idStr)
		if err != nil {
			httpapi.Write(r.Context(), w, http.StatusBadRequest, codersdk.Response{
				Message: "Bad agent id.",
				Detail:  err.Error(),
			})
			return
		}

		conn, err := websocket.Accept(w, r, nil)
		if err != nil {
			httpapi.Write(r.Context(), w, http.StatusBadRequest, codersdk.Response{
				Message: "Failed to accept websocket.",
				Detail:  err.Error(),
			})
			return
		}

		ctx, wsNetConn := codersdk.WebsocketNetConn(r.Context(), conn, websocket.MessageBinary)
		defer wsNetConn.Close()

		err = csvc.ServeConnV2(ctx, wsNetConn, tailnet.StreamID{
			Name: "client-" + id.String(),
			ID:   id,
			Auth: tailnet.SingleTailnetCoordinateeAuth{},
		})
		// EOF and context cancellation are normal disconnects; anything else
		// is surfaced to the peer in the websocket close reason.
		if err != nil && !xerrors.Is(err, io.EOF) && !xerrors.Is(err, context.Canceled) {
			_ = conn.Close(websocket.StatusInternalError, err.Error())
			return
		}
	}))
	t.Cleanup(srv.Close)

	return coord, srv.URL
}
// TailnetSetupDRPC creates a tailnet connection for the peer id and joins it
// to the coordinator at coordinateURL via a websocket-backed DRPC client.
// agentID is the remote peer to coordinate with (the child passes uuid.Nil).
// Cleanup of the connection and the coordination is registered on t.
func TailnetSetupDRPC(ctx context.Context, t *testing.T, logger slog.Logger,
	id, agentID uuid.UUID,
	coordinateURL string,
	dm *tailcfg.DERPMap,
) *tailnet.Conn {
	// Each peer derives its tailnet address deterministically from its uuid.
	ip := tailnet.IPFromUUID(id)
	conn, err := tailnet.NewConn(&tailnet.Options{
		Addresses: []netip.Prefix{netip.PrefixFrom(ip, 128)},
		DERPMap:   dm,
		Logger:    logger,
	})
	require.NoError(t, err)
	t.Cleanup(func() { _ = conn.Close() })

	//nolint:bodyclose
	ws, _, err := websocket.Dial(ctx, coordinateURL+"/"+id.String(), nil)
	require.NoError(t, err)

	client, err := tailnet.NewDRPCClient(
		websocket.NetConn(ctx, ws, websocket.MessageBinary),
		logger,
	)
	require.NoError(t, err)

	coord, err := client.Coordinate(ctx)
	require.NoError(t, err)

	coordination := tailnet.NewRemoteCoordination(logger, coord, conn, agentID)
	t.Cleanup(func() { _ = coordination.Close() })

	return conn
}

View File

@ -0,0 +1,194 @@
package integration
import (
"context"
"flag"
"fmt"
"os"
"os/exec"
"strconv"
"syscall"
"testing"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"tailscale.com/tailcfg"
"cdr.dev/slog"
"cdr.dev/slog/sloggers/slogtest"
"github.com/coder/coder/v2/tailnet"
"github.com/coder/coder/v2/testutil"
)
// Child-process flags. TestIntegration re-executes the test binary with
// these set (see execChild) so the parent and child halves of a test run in
// separate processes.
var (
	isChild            = flag.Bool("child", false, "Run tests as a child")
	childTestID        = flag.Int("child-test-id", 0, "Which test is being run")
	childCoordinateURL = flag.String("child-coordinate-url", "", "The coordinate url to connect back to")
	childAgentID       = flag.String("child-agent-id", "", "The agent id of the child")
)
// TestMain gates the integration suite: it only runs when the
// CODER_TAILNET_TESTS environment variable is set and the process is root;
// otherwise the binary exits without running any tests.
func TestMain(m *testing.M) {
	if run := os.Getenv("CODER_TAILNET_TESTS"); run == "" {
		_, _ = fmt.Println("skipping tests...")
		return
	}
	if os.Getuid() != 0 {
		_, _ = fmt.Println("networking integration tests must run as root")
		return
	}

	flag.Parse()
	os.Exit(m.Run())
}
// tests is the table of integration tests. Each entry runs in two processes:
// the parent half runs in the main test process and the child half runs in a
// re-executed copy of the test binary (see TestIntegration and execChild).
var tests = []Test{{
	Name:        "Normal",
	DERPMap:     DERPMapTailscale,
	Coordinator: CoordinatorInMemory,
	Parent: Parent{
		NetworkSetup: NetworkSetupDefault,
		TailnetSetup: TailnetSetupDRPC,
		Run: func(ctx context.Context, t *testing.T, opts ParentOpts) {
			// Sole assertion: the child agent is reachable over the tailnet.
			reach := opts.Conn.AwaitReachable(ctx, tailnet.IPFromUUID(opts.AgentID))
			assert.True(t, reach)
		},
	},
	Child: Child{
		NetworkSetup: NetworkSetupDefault,
		TailnetSetup: TailnetSetupDRPC,
		Run: func(ctx context.Context, t *testing.T, opts ChildOpts) {
			// wait until the parent kills us
			<-make(chan struct{})
		},
	},
}}
//nolint:paralleltest
func TestIntegration(t *testing.T) {
	if *isChild {
		// We are a re-executed child process (see execChild): run only the
		// child half of the selected test. The child's Run blocks until the
		// parent signals this process.
		logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug)
		ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong)
		t.Cleanup(cancel)
		agentID, err := uuid.Parse(*childAgentID)
		require.NoError(t, err)

		test := tests[*childTestID]
		test.Child.NetworkSetup(t)
		dm := test.DERPMap(ctx, t)
		conn := test.Child.TailnetSetup(ctx, t, logger, agentID, uuid.Nil, *childCoordinateURL, dm)
		test.Child.Run(ctx, t, ChildOpts{
			Logger:  logger,
			Conn:    conn,
			AgentID: agentID,
		})
		return
	}

	for id, test := range tests {
		t.Run(test.Name, func(t *testing.T) {
			ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong)
			t.Cleanup(cancel)
			logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug)

			parentID, childID := uuid.New(), uuid.New()
			dm := test.DERPMap(ctx, t)
			_, coordURL := test.Coordinator(t, logger, dm)

			// Launch the child half of this test in a separate process.
			child, waitChild := execChild(ctx, id, coordURL, childID)

			test.Parent.NetworkSetup(t)
			conn := test.Parent.TailnetSetup(ctx, t, logger, parentID, childID, coordURL, dm)
			test.Parent.Run(ctx, t, ParentOpts{
				Logger:   logger,
				Conn:     conn,
				ClientID: parentID,
				AgentID:  childID,
			})

			// Parent assertions are done; interrupt the child and wait for
			// it to exit.
			child.Process.Signal(syscall.SIGINT)
			<-waitChild
		})
	}
}
// Test describes a single networking integration test, split into a parent
// half and a child half that run in separate processes (see TestIntegration).
type Test struct {
	// Name is the name of the test.
	Name string
	// DERPMap returns the DERP map to use for both the parent and child. It is
	// called once at the beginning of the test.
	DERPMap func(ctx context.Context, t *testing.T) *tailcfg.DERPMap
	// Coordinator returns a running tailnet coordinator, and the url to reach
	// it on.
	Coordinator func(t *testing.T, logger slog.Logger, dm *tailcfg.DERPMap) (coord tailnet.Coordinator, url string)
	// Parent configures the half of the test that runs in the test process.
	Parent Parent
	// Child configures the half of the test that runs in the child process.
	Child Child
}

// Parent is the struct containing all of the parent specific configurations.
// Functions are invoked in order of struct definition.
type Parent struct {
	// NetworkSetup is run before all test code. It can be used to setup
	// networking scenarios.
	NetworkSetup func(t *testing.T)
	// TailnetSetup creates a tailnet network.
	TailnetSetup func(
		ctx context.Context, t *testing.T, logger slog.Logger,
		id, agentID uuid.UUID, coordURL string, dm *tailcfg.DERPMap,
	) *tailnet.Conn
	// Run performs the parent's assertions against the child.
	Run func(ctx context.Context, t *testing.T, opts ParentOpts)
}

// Child is the struct containing all of the child specific configurations.
// Functions are invoked in order of struct definition.
type Child struct {
	// NetworkSetup is run before all test code. It can be used to setup
	// networking scenarios.
	NetworkSetup func(t *testing.T)
	// TailnetSetup creates a tailnet network.
	TailnetSetup func(
		ctx context.Context, t *testing.T, logger slog.Logger,
		id, agentID uuid.UUID, coordURL string, dm *tailcfg.DERPMap,
	) *tailnet.Conn
	// Run runs the actual test. Parents and children run in separate processes,
	// so it's important to ensure no communication happens over memory between
	// run functions of parents and children.
	Run func(ctx context.Context, t *testing.T, opts ChildOpts)
}

// ParentOpts carries the dependencies handed to a Parent's Run function.
type ParentOpts struct {
	Logger slog.Logger
	// Conn is the parent's tailnet connection.
	Conn *tailnet.Conn
	// ClientID is the parent's own peer id.
	ClientID uuid.UUID
	// AgentID is the child's peer id.
	AgentID uuid.UUID
}

// ChildOpts carries the dependencies handed to a Child's Run function.
type ChildOpts struct {
	Logger slog.Logger
	// Conn is the child's tailnet connection.
	Conn *tailnet.Conn
	// AgentID is the child's own peer id.
	AgentID uuid.UUID
}
// execChild re-executes the current test binary as a child process with the
// --child flags set so that it runs the child half of the selected test.
// The process is started synchronously before returning, which guarantees
// cmd.Process is non-nil and safe to signal (the original ran cmd.Run in a
// goroutine, racing callers that signal the process immediately). The
// returned channel receives the result of waiting for the child to exit.
func execChild(ctx context.Context, testID int, coordURL string, agentID uuid.UUID) (*exec.Cmd, <-chan error) {
	binary := os.Args[0]
	args := os.Args[1:]
	args = append(args,
		"--child=true",
		"--child-test-id="+strconv.Itoa(testID),
		"--child-coordinate-url="+coordURL,
		"--child-agent-id="+agentID.String(),
	)

	cmd := exec.CommandContext(ctx, binary, args...)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr

	// Buffered so the goroutine never leaks if the caller stops receiving.
	ch := make(chan error, 1)
	// Start synchronously so cmd.Process is populated before we return;
	// Start+Wait is equivalent to the original Run.
	if err := cmd.Start(); err != nil {
		ch <- err
		close(ch)
		return cmd, ch
	}
	go func() {
		ch <- cmd.Wait()
		close(ch)
	}()
	return cmd, ch
}