mirror of https://github.com/coder/coder.git

Commit 7d7da2a27d: Merge branch 'main' into node-20
@@ -142,7 +142,7 @@ jobs:
       # Check for any typos
       - name: Check for typos
-        uses: crate-ci/typos@v1.19.0
+        uses: crate-ci/typos@v1.20.9
         with:
           config: .github/workflows/typos.toml
@@ -912,6 +912,7 @@ jobs:
         uses: actions/dependency-review-action@v4
         with:
           allow-licenses: Apache-2.0, BSD-2-Clause, BSD-3-Clause, CC0-1.0, ISC, MIT, MIT-0, MPL-2.0
+          allow-dependencies-licenses: "pkg:golang/github.com/pelletier/go-toml/v2"
           license-check: true
           vulnerability-check: false
       - name: "Report"
@@ -1,11 +1,16 @@
 # GitHub release workflow.
 name: Release
 on:
   push:
     tags:
       - "v*"
   workflow_dispatch:
     inputs:
+      release_channel:
+        type: choice
+        description: Release channel
+        options:
+          - mainline
+          - stable
+      release_notes:
+        description: Release notes for publishing the release. This is required to create a release.
       dry_run:
         description: Perform a dry-run release (devel). Note that ref must be an annotated tag when run without dry-run.
         type: boolean
@@ -28,6 +33,8 @@ env:
   # https://github.blog/changelog/2022-06-10-github-actions-inputs-unified-across-manual-and-reusable-workflows/
   CODER_RELEASE: ${{ !inputs.dry_run }}
   CODER_DRY_RUN: ${{ inputs.dry_run }}
+  CODER_RELEASE_CHANNEL: ${{ inputs.release_channel }}
+  CODER_RELEASE_NOTES: ${{ inputs.release_notes }}

jobs:
  release:
@@ -62,21 +69,45 @@ jobs:
           echo "CODER_FORCE_VERSION=$version" >> $GITHUB_ENV
           echo "$version"

-      - name: Create release notes
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          # We always have to set this since there might be commits on
-          # main that didn't have a PR.
-          CODER_IGNORE_MISSING_COMMIT_METADATA: "1"
+      # Verify that all expectations for a release are met.
+      - name: Verify release input
+        if: ${{ !inputs.dry_run }}
+        run: |
+          set -euo pipefail
+
+          if [[ "${GITHUB_REF}" != "refs/tags/v"* ]]; then
+            echo "Ref must be a semver tag when creating a release, did you use scripts/release.sh?"
+            exit 1
+          fi
+
+          # 2.10.2 -> release/2.10
+          version="$(./scripts/version.sh)"
+          release_branch=release/${version%.*}
+          branch_contains_tag=$(git branch --remotes --contains "${GITHUB_REF}" --list "*/${release_branch}" --format='%(refname)')
+          if [[ -z "${branch_contains_tag}" ]]; then
+            echo "Ref tag must exist in a branch named ${release_branch} when creating a release, did you use scripts/release.sh?"
+            exit 1
+          fi
+
+          if [[ -z "${CODER_RELEASE_NOTES}" ]]; then
+            echo "Release notes are required to create a release, did you use scripts/release.sh?"
+            exit 1
+          fi
+
+          echo "Release inputs verified:"
+          echo
+          echo "- Ref: ${GITHUB_REF}"
+          echo "- Version: ${version}"
+          echo "- Release channel: ${CODER_RELEASE_CHANNEL}"
+          echo "- Release branch: ${release_branch}"
+          echo "- Release notes: true"
+
+      - name: Create release notes file
         run: |
           set -euo pipefail
-          ref=HEAD
-          old_version="$(git describe --abbrev=0 "$ref^1")"
-          version="v$(./scripts/version.sh)"
-
-          # Generate notes.
           release_notes_file="$(mktemp -t release_notes.XXXXXX)"
-          ./scripts/release/generate_release_notes.sh --check-for-changelog --old-version "$old_version" --new-version "$version" --ref "$ref" >> "$release_notes_file"
+          echo "$CODER_RELEASE_NOTES" > "$release_notes_file"
           echo CODER_RELEASE_NOTES_FILE="$release_notes_file" >> $GITHUB_ENV

       - name: Show release notes
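The `release_branch=release/${version%.*}` expansion above derives the release branch from a patch version. A minimal Go sketch of the same mapping (the `releaseBranch` helper is illustrative only, not part of the repo):

```go
package main

import (
	"fmt"
	"strings"
)

// releaseBranch mirrors the shell's release/${version%.*}: it drops the
// final (patch) component, so "2.10.2" becomes "release/2.10".
func releaseBranch(version string) string {
	if i := strings.LastIndex(version, "."); i > 0 {
		version = version[:i]
	}
	return "release/" + version
}

func main() {
	fmt.Println(releaseBranch("2.10.2")) // prints: release/2.10
}
```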
@@ -130,6 +161,10 @@ jobs:
           AC_CERTIFICATE_PASSWORD: ${{ secrets.AC_CERTIFICATE_PASSWORD }}
           AC_APIKEY_P8_BASE64: ${{ secrets.AC_APIKEY_P8_BASE64 }}

+      - name: Test migrations from current ref to main
+        run: |
+          make test-migrations
+
       - name: Build binaries
         run: |
           set -euo pipefail
@@ -261,6 +296,9 @@ jobs:
           set -euo pipefail

           publish_args=()
           if [[ $CODER_RELEASE_CHANNEL == "stable" ]]; then
             publish_args+=(--stable)
           fi
+          if [[ $CODER_DRY_RUN == *t* ]]; then
+            publish_args+=(--dry-run)
+          fi
@@ -15,6 +15,7 @@ Hashi = "Hashi"
 trialer = "trialer"
+encrypter = "encrypter"
 hel = "hel" # as in helsinki
 pn = "pn" # this is used as proto node

 [files]
 extend-exclude = [
@@ -4,6 +4,11 @@ on:
   schedule:
     - cron: "0 9 * * 1"
   workflow_dispatch: # allows to run manually for testing
+  pull_request:
+    branches:
+      - main
+    paths:
+      - "docs/**"

jobs:
  check-docs:
Makefile (8 lines changed)
@@ -783,6 +783,14 @@ test-postgres: test-postgres-docker
		-count=1
.PHONY: test-postgres

+test-migrations: test-postgres-docker
+	echo "--- test migrations"
+	set -euo pipefail
+	COMMIT_FROM=$(shell git rev-parse --short HEAD)
+	COMMIT_TO=$(shell git rev-parse --short main)
+	echo "DROP DATABASE IF EXISTS migrate_test_$${COMMIT_FROM}; CREATE DATABASE migrate_test_$${COMMIT_FROM};" | psql 'postgresql://postgres:postgres@localhost:5432/postgres?sslmode=disable'
+	go run ./scripts/migrate-test/main.go --from="$$COMMIT_FROM" --to="$$COMMIT_TO" --postgres-url="postgresql://postgres:postgres@localhost:5432/migrate_test_$${COMMIT_FROM}?sslmode=disable"
+
# NOTE: we set --memory to the same size as a GitHub runner.
test-postgres-docker:
	docker rm -f test-postgres-docker || true
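The target above shells out to `scripts/migrate-test/main.go`. A hedged sketch of what such a harness could look like, built on the `UpWithFS` entry point added later in this commit (the directory paths and `POSTGRES_URL` environment variable are assumptions, not the script's real interface):

```go
package main

import (
	"database/sql"
	"log"
	"os"

	_ "github.com/lib/pq" // assumed Postgres driver for this sketch

	"github.com/coder/coder/v2/coderd/database/migrations"
)

func main() {
	db, err := sql.Open("postgres", os.Getenv("POSTGRES_URL"))
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// fromFS and toFS stand in for the migration directories checked out
	// at the --from and --to commits.
	fromFS := os.DirFS("/tmp/migrate-test/from/coderd/database/migrations")
	toFS := os.DirFS("/tmp/migrate-test/to/coderd/database/migrations")

	if err := migrations.UpWithFS(db, fromFS); err != nil {
		log.Fatalf("migrate at --from commit: %v", err)
	}
	if err := migrations.UpWithFS(db, toFS); err != nil {
		log.Fatalf("migrate at --to commit: %v", err)
	}
	log.Println("migrations applied cleanly across both commits")
}
```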
@@ -116,3 +116,9 @@ We are always working on new integrations. Feel free to open an issue to request

+- [**Provision Coder with Terraform**](https://github.com/ElliotG/coder-oss-tf): Provision Coder on Google GKE, Azure AKS, AWS EKS, DigitalOcean DOKS, IBMCloud K8s, OVHCloud K8s, and Scaleway K8s Kapsule with Terraform
+- [**Coder Template GitHub Action**](https://github.com/marketplace/actions/update-coder-template): A GitHub Action that updates Coder templates
+
 ## Contributing

+We are always happy to see new contributors to Coder. If you are new to the Coder codebase, we have
+[a guide on how to get started](https://coder.com/docs/v2/latest/CONTRIBUTING). We'd love to see your
+contributions!
@@ -190,9 +190,9 @@ func (api *API) checkAuthorization(rw http.ResponseWriter, r *http.Request) {
	// Only support referencing some resources by ID.
	switch v.Object.ResourceType.String() {
	case rbac.ResourceWorkspaceExecution.Type:
-		wrkSpace, err := api.Database.GetWorkspaceByID(ctx, id)
+		workSpace, err := api.Database.GetWorkspaceByID(ctx, id)
		if err == nil {
-			dbObj = wrkSpace.ExecutionRBAC()
+			dbObj = workSpace.ExecutionRBAC()
		}
		dbErr = err
	case rbac.ResourceWorkspace.Type:
@@ -185,6 +185,142 @@ QYLbNYkedkNuhRmEBesPqj4aDz68ZDI6fJ92sj2q18QvJUJ5Qz728AvtFOat+Ajg
 K0PFqPYEAviUKr162NB1XZJxf6uyIjUlnG4UEdHfUqdhl0R84mMtrYINksTzQ2sH
 YM8fEhqICtTlcRLr/FErUaPUe9648nziSnA0qKH7rUZqP/Ifmbo+WNZSZG1BbgOh
 lk+521W+Ncih3HRbvRBE0LWYT8vWKnfjgZKxwHwJ
 -----END CERTIFICATE-----`,
+	// Microsoft Azure RSA TLS Issuing CA 03
+	`-----BEGIN CERTIFICATE-----
+MIIFrDCCBJSgAwIBAgIQBRllJkSaXj0aOHSPXc/rzDANBgkqhkiG9w0BAQwFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0yMzA2MDgwMDAwMDBaFw0yNjA4MjUyMzU5NTlaMF0xCzAJBgNVBAYTAlVT
+MR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xLjAsBgNVBAMTJU1pY3Jv
+c29mdCBBenVyZSBSU0EgVExTIElzc3VpbmcgQ0EgMDMwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCUaitvevlZirydcTjMIt2fr5ei7LvQx7bdIVobgEZ1
+Qlqf3BH6etKdmZChydkN0XXAb8Ysew8aCixKtrVeDCe5xRRCnKaFcEvqg2cSfbpX
+FevXDvfbTK2ed7YASOJ/pv31stqHd9m0xWZLCmsXZ8x6yIxgEGVHjIAOCyTAgcQy
+8ItIjmxn3Vu2FFVBemtP38Nzur/8id85uY7QPspI8Er8qVBBBHp6PhxTIKxAZpZb
+XtBf2VxIKbvUGEvCxWCrKNfv+j0oEqDpXOqGFpVBK28Q48u/0F+YBUY8FKP4rfgF
+I4lG9mnzMmCL76k+HjyBtU5zikDGqgm4mlPXgSRqEh0CvQS7zyrBRWiJCfK0g67f
+69CVGa7fji8pz99J59s8bYW7jgyro93LCGb4N3QfJLurB//ehDp33XdIhizJtopj
+UoFUGLnomVnMRTUNtMSAy7J4r1yjJDLufgnrPZ0yjYo6nyMiFswCaMmFfclUKtGz
+zbPDpIBuf0hmvJAt0LyWlYUst5geusPxbkM5XOhLn7px+/y+R0wMT3zNZYQxlsLD
+bXGYsRdE9jxcIts+IQwWZGnmHhhC1kvKC/nAYcqBZctMQB5q/qsPH652dc73zOx6
+Bp2gTZqokGCv5PGxiXcrwouOUIlYgizBDYGBDU02S4BRDM3oW9motVUonBnF8JHV
+RwIDAQABo4IBYjCCAV4wEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU/glx
+QFUFEETYpIF1uJ4a6UoGiMgwHwYDVR0jBBgwFoAUTiJUIBiV5uNu5g/6+rkS7QYX
+jzkwDgYDVR0PAQH/BAQDAgGGMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcD
+AjB2BggrBgEFBQcBAQRqMGgwJAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLmRpZ2lj
+ZXJ0LmNvbTBABggrBgEFBQcwAoY0aHR0cDovL2NhY2VydHMuZGlnaWNlcnQuY29t
+L0RpZ2lDZXJ0R2xvYmFsUm9vdEcyLmNydDBCBgNVHR8EOzA5MDegNaAzhjFodHRw
+Oi8vY3JsMy5kaWdpY2VydC5jb20vRGlnaUNlcnRHbG9iYWxSb290RzIuY3JsMB0G
+A1UdIAQWMBQwCAYGZ4EMAQIBMAgGBmeBDAECAjANBgkqhkiG9w0BAQwFAAOCAQEA
+AQkxu6RRPlD3yrYhxg9jIlVZKjAnC9H+D0SSq4j1I8dNImZ4QjexTEv+224CSvy4
+zfp9gmeRfC8rnrr4FN4UFppYIgqR4H7jIUVMG9ECUcQj2Ef11RXqKOg5LK3fkoFz
+/Nb9CYvg4Ws9zv8xmE1Mr2N6WDgLuTBIwul2/7oakjj8MA5EeijIjHgB1/0r5mPm
+eFYVx8xCuX/j7+q4tH4PiHzzBcfqb3k0iR4DlhiZfDmy4FuNWXGM8ZoMM43EnRN/
+meqAcMkABZhY4gqeWZbOgxber297PnGOCcIplOwpPfLu1A1K9frVwDzAG096a8L0
++ItQCmz7TjRH4ptX5Zh9pw==
+-----END CERTIFICATE-----`,
+	// Microsoft Azure RSA TLS Issuing CA 04
+	`-----BEGIN CERTIFICATE-----
+MIIFrDCCBJSgAwIBAgIQCfluwpVVXyR0nq8eXc7UnTANBgkqhkiG9w0BAQwFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0yMzA2MDgwMDAwMDBaFw0yNjA4MjUyMzU5NTlaMF0xCzAJBgNVBAYTAlVT
+MR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xLjAsBgNVBAMTJU1pY3Jv
+c29mdCBBenVyZSBSU0EgVExTIElzc3VpbmcgQ0EgMDQwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQDBeUy13eRZ/QC5bN7/IOGxodny7Xm2BFc88d3cca3y
+HyyVx1Y60+afY6DAo/2Ls1uzAfbDfMzAVWJazPH4tckaItDv//htEbbNJnAGvZPB
+4VqNviwDEmlAWT/MTAmzXfTgWXuUNgRlzZbjoFaPm+t6iJ6HdvDpWQAJbsBUZCga
+t257tM28JnAHUTWdiDBn+2z6EGh2DA6BCx04zHDKVSegLY8+5P80Lqze0d6i3T2J
+J7rfxCmxUXfCGOv9iQIUZfhv4vCb8hsm/JdNUMiomJhSPa0bi3rda/swuJHCH//d
+wz2AGzZRRGdj7Kna4t6ToxK17lAF3Q6Qp368C9cE6JLMj+3UbY3umWCPRA5/Dms4
+/wl3GvDEw7HpyKsvRNPpjDZyiFzZGC2HZmGMsrZMT3hxmyQwmz1O3eGYdO5EIq1S
+W/vT1yShZTSusqmICQo5gWWRZTwCENekSbVX9qRr77o0pjKtuBMZTGQTixwpT/rg
+Ul7Mr4M2nqK55Kovy/kUN1znfPdW/Fj9iCuvPKwKFdyt2RVgxJDvgIF/bNoRkRxh
+wVB6qRgs4EiTrNbRoZAHEFF5wRBf9gWn9HeoI66VtdMZvJRH+0/FDWB4/zwxS16n
+nADJaVPXh6JHJFYs9p0wZmvct3GNdWrOLRAG2yzbfFZS8fJcX1PYxXXo4By16yGW
+hQIDAQABo4IBYjCCAV4wEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQUO3DR
+U+l2JZ1gqMpmD8abrm9UFmowHwYDVR0jBBgwFoAUTiJUIBiV5uNu5g/6+rkS7QYX
+jzkwDgYDVR0PAQH/BAQDAgGGMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcD
+AjB2BggrBgEFBQcBAQRqMGgwJAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLmRpZ2lj
+ZXJ0LmNvbTBABggrBgEFBQcwAoY0aHR0cDovL2NhY2VydHMuZGlnaWNlcnQuY29t
+L0RpZ2lDZXJ0R2xvYmFsUm9vdEcyLmNydDBCBgNVHR8EOzA5MDegNaAzhjFodHRw
+Oi8vY3JsMy5kaWdpY2VydC5jb20vRGlnaUNlcnRHbG9iYWxSb290RzIuY3JsMB0G
+A1UdIAQWMBQwCAYGZ4EMAQIBMAgGBmeBDAECAjANBgkqhkiG9w0BAQwFAAOCAQEA
+o9sJvBNLQSJ1e7VaG3cSZHBz6zjS70A1gVO1pqsmX34BWDPz1TAlOyJiLlA+eUF4
+B2OWHd3F//dJJ/3TaCFunjBhZudv3busl7flz42K/BG/eOdlg0kiUf07PCYY5/FK
+YTIch51j1moFlBqbglwkdNIVae2tOu0OdX2JiA+bprYcGxa7eayLetvPiA77ynTc
+UNMKOqYB41FZHOXe5IXDI5t2RsDM9dMEZv4+cOb9G9qXcgDar1AzPHEt/39335zC
+HofQ0QuItCDCDzahWZci9Nn9hb/SvAtPWHZLkLBG6I0iwGxvMwcTTc9Jnb4Flysr
+mQlwKsS2MphOoI23Qq3cSA==
+-----END CERTIFICATE-----`,
+	// Microsoft Azure RSA TLS Issuing CA 07
+	`-----BEGIN CERTIFICATE-----
+MIIFrDCCBJSgAwIBAgIQCkOpUJsBNS+JlXnscgi6UDANBgkqhkiG9w0BAQwFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0yMzA2MDgwMDAwMDBaFw0yNjA4MjUyMzU5NTlaMF0xCzAJBgNVBAYTAlVT
+MR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xLjAsBgNVBAMTJU1pY3Jv
+c29mdCBBenVyZSBSU0EgVExTIElzc3VpbmcgQ0EgMDcwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQC1ZF7KYus5OO3GWqJoR4xznLDNCjocogqeCIVdi4eE
+BmF3zIYeuXXNoJAUF+mn86NBt3yMM0559JZDkiSDi9MpA2By4yqQlTHzfbOrvs7I
+4LWsOYTEClVFQgzXqa2ps2g855HPQW1hZXVh/yfmbtrCNVa//G7FPDqSdrAQ+M8w
+0364kyZApds/RPcqGORjZNokrNzYcGub27vqE6BGP6XeQO5YDFobi9BvvTOO+ZA9
+HGIU7FbdLhRm6YP+FO8NRpvterfqZrRt3bTn8GT5LsOTzIQgJMt4/RWLF4EKNc97
+CXOSCZFn7mFNx4SzTvy23B46z9dQPfWBfTFaxU5pIa0uVWv+jFjG7l1odu0WZqBd
+j0xnvXggu564CXmLz8F3draOH6XS7Ys9sTVM3Ow20MJyHtuA3hBDv+tgRhrGvNRD
+MbSzTO6axNWvL46HWVEChHYlxVBCTfSQmpbcAdZOQtUfs9E4sCFrqKcRPdg7ryhY
+fGbj3q0SLh55559ITttdyYE+wE4RhODgILQ3MaYZoyiL1E/4jqCOoRaFhF5R++vb
+YpemcpWx7unptfOpPRRnnN4U3pqZDj4yXexcyS52Rd8BthFY/cBg8XIR42BPeVRl
+OckZ+ttduvKVbvmGf+rFCSUoy1tyRwQNXzqeZTLrX+REqgFDOMVe0I49Frc2/Avw
+3wIDAQABo4IBYjCCAV4wEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQUzhUW
+O+oCo6Zr2tkr/eWMUr56UKgwHwYDVR0jBBgwFoAUTiJUIBiV5uNu5g/6+rkS7QYX
+jzkwDgYDVR0PAQH/BAQDAgGGMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcD
+AjB2BggrBgEFBQcBAQRqMGgwJAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLmRpZ2lj
+ZXJ0LmNvbTBABggrBgEFBQcwAoY0aHR0cDovL2NhY2VydHMuZGlnaWNlcnQuY29t
+L0RpZ2lDZXJ0R2xvYmFsUm9vdEcyLmNydDBCBgNVHR8EOzA5MDegNaAzhjFodHRw
+Oi8vY3JsMy5kaWdpY2VydC5jb20vRGlnaUNlcnRHbG9iYWxSb290RzIuY3JsMB0G
+A1UdIAQWMBQwCAYGZ4EMAQIBMAgGBmeBDAECAjANBgkqhkiG9w0BAQwFAAOCAQEA
+bbV8m4/LCSvb0nBF9jb7MVLH/9JjHGbn0QjB4R4bMlGHbDXDWtW9pFqMPrRh2Q76
+Bqm+yrrgX83jPZAcvOd7F7+lzDxZnYoFEWhxW9WnuM8Te5x6HBPCPRbIuzf9pSUT
+/ozvbKFCDxxgC2xKmgp6NwxRuGcy5KQQh4xkq/hJrnnF3RLakrkUBYFPUneip+wS
+BzAfK3jHXnkNCPNvKeLIXfLMsffEzP/j8hFkjWL3oh5yaj1HmlW8RE4Tl/GdUVzQ
+D1x42VSusQuRGtuSxLhzBNBeJtyD//2u7wY2uLYpgK0o3X0iIJmwpt7Ovp6Bs4tI
+E/peia+Qcdk9Qsr+1VgCGA==
+-----END CERTIFICATE-----`,
+	// Microsoft Azure RSA TLS Issuing CA 08
+	`-----BEGIN CERTIFICATE-----
+MIIFrDCCBJSgAwIBAgIQDvt+VH7fD/EGmu5XaW17oDANBgkqhkiG9w0BAQwFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0yMzA2MDgwMDAwMDBaFw0yNjA4MjUyMzU5NTlaMF0xCzAJBgNVBAYTAlVT
+MR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xLjAsBgNVBAMTJU1pY3Jv
+c29mdCBBenVyZSBSU0EgVExTIElzc3VpbmcgQ0EgMDgwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCy7oIFzcDVZVbomWZtSwrAX8LiKXsbCcwuFL7FHkD5
+m67olmOdTueOKhNER5ykFs/meKG1fwzd35/+Q1+KTxcV89IIXmErtSsj8EWu7rdE
+AVYnYMFbstqwkIVNEoz4OIM82hn+N5p57zkHGPogzF6TOPRUOK8yYyCPeqnHvoVp
+E5b0kZL4QT8bdyhSRQbUsUiSaOuF5y3eZ9Vc92baDkhY7CFZE2ThLLv5PQ0WxzLo
+t3t18d2vQP5x29I0n6NFsj37J2d/EH/Z6a/lhAVzKjfYloGcQ1IPyDEIGh9gYJnM
+LFZiUbm/GBmlpKVr8M03OWKCR0thRbfnU6UoskrwGrECAnnojFEUw+j8i6gFLBNW
+XtBOtYvgl8SHCCVKUUUl4YOfR5zF4OkKirJuUbOmB2AOmLjYJIcabDvxMcmryhQi
+nog+/+jgHJnY62opgStkdaImMPzyLB7ZaWVnxpRdtFKO1ZvGkZeRNvbPAUKR2kNe
+knuh3NtFvz2dY3xP7AfhyLE/t8vW72nAzlRKz++L70CgCvj/yeObPwaAPDd2sZ0o
+j2u/N+k6egGq04e+GBW+QYCSoJ5eAY36il0fu7dYSHYDo7RB5aPTLqnybp8wMeAa
+tcagc8U9OM42ghELTaWFARuyoCmgqR7y8fAU9Njhcqrm6+0Xzv/vzMfhL4Ulpf1G
+7wIDAQABo4IBYjCCAV4wEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU9n4v
+vYCjSrJwW+vfmh/Y7cphgAcwHwYDVR0jBBgwFoAUTiJUIBiV5uNu5g/6+rkS7QYX
+jzkwDgYDVR0PAQH/BAQDAgGGMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcD
+AjB2BggrBgEFBQcBAQRqMGgwJAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLmRpZ2lj
+ZXJ0LmNvbTBABggrBgEFBQcwAoY0aHR0cDovL2NhY2VydHMuZGlnaWNlcnQuY29t
+L0RpZ2lDZXJ0R2xvYmFsUm9vdEcyLmNydDBCBgNVHR8EOzA5MDegNaAzhjFodHRw
+Oi8vY3JsMy5kaWdpY2VydC5jb20vRGlnaUNlcnRHbG9iYWxSb290RzIuY3JsMB0G
+A1UdIAQWMBQwCAYGZ4EMAQIBMAgGBmeBDAECAjANBgkqhkiG9w0BAQwFAAOCAQEA
+loABcB94CeH6DWKwa4550BTzLxlTHVNseQJ5SetnPpBuPNLPgOLe9Y7ZMn4ZK6mh
+feK7RiMzan4UF9CD5rF3TcCevo3IxrdV+YfBwvlbGYv+6JmX3mAMlaUb23Y2pONo
+ixFJEOcAMKKR55mSC5W4nQ6jDfp7Qy/504MQpdjJflk90RHsIZGXVPw/JdbBp0w6
+pDb4o5CqydmZqZMrEvbGk1p8kegFkBekp/5WVfd86BdH2xs+GKO3hyiA8iBrBCGJ
+fqrijbRnZm7q5+ydXF3jhJDJWfxW5EBYZBJrUz/a+8K/78BjwI8z2VYJpG4t6r4o
+tOGB5sEyDPDwqx00Rouu8g==
+-----END CERTIFICATE-----`,
 	// Microsoft Azure TLS Issuing CA 01
 	`-----BEGIN CERTIFICATE-----
@@ -41,6 +41,11 @@ func TestValidate(t *testing.T) {
payload: "MIILiQYJKoZIhvcNAQcCoIILejCCC3YCAQExDzANBgkqhkiG9w0BAQsFADCCAUAGCSqGSIb3DQEHAaCCATEEggEteyJsaWNlbnNlVHlwZSI6IiIsIm5vbmNlIjoiMjAyMzAzMDgtMjMwOTMzIiwicGxhbiI6eyJuYW1lIjoiIiwicHJvZHVjdCI6IiIsInB1Ymxpc2hlciI6IiJ9LCJza3UiOiIxOC4wNC1MVFMiLCJzdWJzY3JpcHRpb25JZCI6IjBhZmJmZmZhLTVkZjktNGEzYi05ODdlLWZlNzU3NzYyNDI3MiIsInRpbWVTdGFtcCI6eyJjcmVhdGVkT24iOiIwMy8wOC8yMyAxNzowOTozMyAtMDAwMCIsImV4cGlyZXNPbiI6IjAzLzA4LzIzIDIzOjA5OjMzIC0wMDAwIn0sInZtSWQiOiI5OTA4NzhkNC0wNjhhLTRhYzQtOWVlOS0xMjMxZDIyMThlZjIifaCCCHswggh3MIIGX6ADAgECAhMzAIXQK9n2YdJHP1paAAAAhdArMA0GCSqGSIb3DQEBDAUAMFkxCzAJBgNVBAYTAlVTMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKjAoBgNVBAMTIU1pY3Jvc29mdCBBenVyZSBUTFMgSXNzdWluZyBDQSAwNTAeFw0yMzAyMDMxOTAxMThaFw0yNDAxMjkxOTAxMThaMGgxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJXQTEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMRowGAYDVQQDExFtZXRhZGF0YS5henVyZS51czCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMrbkY7Z8ffglHPokuGfRDOBjFt6n68OuReoq2CbnhyEdosDsfJBsoCr5vV3mVcpil1+y0HeabKr+PdJ6GWCXiymxxgMtNMIuz/kt4OVOJSkV3wJyMNYRjGUAB53jw2cJnhIgLy6QmxOm2cnDb+IBFGn7WAw/XqT8taDd6RPDHR6P+XqpWuMN/MheCOdJRagmr8BUNt95eOhRAGZeUWHKcCssBa9xZNmTzgd26NuBRpeGVrjuPCaQXiGWXvJ7zujWOiMopgw7UWXMiJp6J+Nn75Dx+MbPjlLYYBhFEEBaXj0iKuj/3/lm3nkkMLcYPxEJE0lPuX1yQQLUx3l1bBYyykCAwEAAaOCBCcwggQjMIIBfQYKKwYBBAHWeQIEAgSCAW0EggFpAWcAdgDuzdBk1dsazsVct520zROiModGfLzs3sNRSFlGcR+1mwAAAYYYsLzVAAAEAwBHMEUCIQD+BaiDS1uFyVGdeMc5vBUpJOmBhxgRyTkH3kQG+KD6RwIgWIMxqyGtmM9rH5CrWoruToiz7NNfDmp11LLHZNaKpq4AdgBz2Z6JG0yWeKAgfUed5rLGHNBRXnEZKoxrgBB6wXdytQAAAYYYsL0bAAAEAwBHMEUCIQDNxRWECEZmEk9zRmRPNv3QP0lDsUzaKhYvFPmah/wkKwIgXyCv+fvWga+XB2bcKQqom10nvTDBExIZeoOWBSfKVLgAdQB2/4g/Crb7lVHCYcz1h7o0tKTNuyncaEIKn+ZnTFo6dAAAAYYYsL0bAAAEAwBGMEQCICCTSeyEisZwmi49g941B6exndOFwF4JqtoXbWmFcxRcAiBCDaVJJN0e0ZVSPkx9NVMGWvBjQbIYtSG4LEkCdDsMejAnBgkrBgEEAYI3FQoEGjAYMAoGCCsGAQUFBwMCMAoGCCsGAQUFBwMBMDwGCSsGAQQBgjcVBwQvMC0GJSsGAQQBgjcVCIe91xuB5+tGgoGdLo7QDIfw2h1dgoTlaYLzpz4CAWQCASUwga4GCCsGAQUFBwEBBIGhMIGeMG0GCCsGAQUFBzAChmFodHRwOi8vd3d3Lm1pY3Jvc29mdC5jb20vcGtpb3BzL2NlcnRzL01pY3Jvc29mdCUyMEF6dXJlJTIwVExTJTIwSXNzdWluZyUyMENBJTIwMDUlMjAtJTIweHNpZ24uY3J0MC0GCCsGAQUFBzABhiFodHRwOi8vb25lb2NzcC5taWNyb3NvZnQuY29tL29jc3AwHQYDVR0OBBYEFBcZK26vkjWcbAk7XwJHTP/lxgeXMA4GA1UdDwEB/wQEAwIEsDA9BgNVHREENjA0gh91c2dvdnZpcmdpbmlhLm1ldGFkYXRhLmF6dXJlLnVzghFtZXRhZGF0YS5henVyZS51czAMBgNVHRMBAf8EAjAAMGQGA1UdHwRdMFswWaBXoFWGU2h0dHA6Ly93d3cubWljcm9zb2Z0LmNvbS9wa2lvcHMvY3JsL01pY3Jvc29mdCUyMEF6dXJlJTIwVExTJTIwSXNzdWluZyUyMENBJTIwMDUuY3JsMGYGA1UdIARfMF0wUQYMKwYBBAGCN0yDfQEBMEEwPwYIKwYBBQUHAgEWM2h0dHA6Ly93d3cubWljcm9zb2Z0LmNvbS9wa2lvcHMvRG9jcy9SZXBvc2l0b3J5Lmh0bTAIBgZngQwBAgIwHwYDVR0jBBgwFoAUx7KcfxzjuFrv6WgaqF2UwSZSamgwHQYDVR0lBBYwFAYIKwYBBQUHAwIGCCsGAQUFBwMBMA0GCSqGSIb3DQEBDAUAA4ICAQCUExuLe7D71C5kek65sqKXUodQJXVVpFG0Y4l9ZacBFql8BgHvu2Qvt8zfWsyCHy4A2KcMeHLwi2DdspyTjxSnwkuPcQ4ndhgAqrLkfoTc435NnnsiyzCUNDeGIQ+g+QSRPV86u6LmvFr0ZaOqxp6eJDPYewHhKyGLQuUyBjUNkhS+tGzuvsHaeCUYclmbZFN75IQSvBmL0XOsOD7wXPZB1a68D26wyCIbIC8MuFwxreTrvdRKt/5zIfBnku6S6xRgkzH64gfBLbU5e2VCdaKzElWEKRLJgl3R6raNRqFot+XNfa26H5sMZpZkuHrvkPZcvd5zOfL7fnVZoMLo4A3kFpet7tr1ls0ifqodzlOBMNrUdf+o3kJ1seCjzx2WdFP+2liO80d0oHKiv8djuttlPfQkV8WATmyLoZVoPcNovayrVUjTWFMXqIShhhTbIJ3ZRSZrz6rZLok0Xin3+4d28iMsi7tjxnBW/A/eiPrqs7f2v2rLXuf5/XHuzHIYQpiZpnvA90mE1HBB9fv4sETsw9TuL2nXai/c06HGGM06i4o+lRuyvymrlt/QPR7SCPXl5fZFVAavLtu1UtafrK/qcKQTHnVJeZ20+JdDIJDP2qcxQvdw7XA88aa/Y/olM+yHIjpaPpsRFa2o8UB0ct+x1cTAhLhj3vNwhZHoFlVcFzGCAZswggGXAgEBMHAwWTELMAkGA1UEBhMCVVMxHjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEqMCgGA1UEAxMhTWljcm9zb2Z0IEF6dXJlIFRMUyBJc3N1aW5nIENBIDA1AhMzAIXQK9n2YdJHP
1paAAAAhdArMA0GCSqGSIb3DQEBCwUAMA0GCSqGSIb3DQEBAQUABIIBAFuEf//loqaib860Ys5yZkrRj1QiSDSzkU+Vxx9fYXzWzNT4KgMhkEhRRvoE6TR/tIUzbKFQxIVRrlW2lbGSj8JEeLoEVlp2Pc4gNRJeX2N9qVDPvy9lmYuBm1XjypLPwvYjvfPjsLRKkNdQ5MWzrC3F2q2OOQP4sviy/DCcoDitEmqmqiCuog/DiS5xETivde3pTZGiFwKlgzptj4/KYN/iZTzU25fFSCD5Mq2IxHRj39gFkqpFekdSRihSH0W3oyPfic/E3H0rVtSkiFm2SL6nPjILjhaJcV7az+X7Qu4AXYZ/TrabX+OW5dJ69SoJ01DfnqGD0sll0+P3QSUHEvA=",
 		vmID: "990878d4-068a-4ac4-9ee9-1231d2218ef2",
 		date: mustTime(time.RFC3339, "2023-04-01T00:00:00Z"),
+	}, {
+		name: "rsa",
payload: "MIILnwYJKoZIhvcNAQcCoIILkDCCC4wCAQExDzANBgkqhkiG9w0BAQsFADCCAUUGCSqGSIb3DQEHAaCCATYEggEyeyJsaWNlbnNlVHlwZSI6IiIsIm5vbmNlIjoiMjAyNDA0MjItMjMzMjQ1IiwicGxhbiI6eyJuYW1lIjoiIiwicHJvZHVjdCI6IiIsInB1Ymxpc2hlciI6IiJ9LCJza3UiOiIyMF8wNC1sdHMtZ2VuMiIsInN1YnNjcmlwdGlvbklkIjoiMDVlOGIyODUtNGNlMS00NmEzLWI0YzktZjUxYmE2N2Q2YWNjIiwidGltZVN0YW1wIjp7ImNyZWF0ZWRPbiI6IjA0LzIyLzI0IDE3OjMyOjQ1IC0wMDAwIiwiZXhwaXJlc09uIjoiMDQvMjIvMjQgMjM6MzI6NDUgLTAwMDAifSwidm1JZCI6Ijk2MGE0YjRhLWRhYjItNDRlZi05YjczLTc3NTMwNDNiNGYxNiJ9oIIIiDCCCIQwggZsoAMCAQICEzMAJtj/yBIW1kk+vsIAAAAm2P8wDQYJKoZIhvcNAQEMBQAwXTELMAkGA1UEBhMCVVMxHjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEuMCwGA1UEAxMlTWljcm9zb2Z0IEF6dXJlIFJTQSBUTFMgSXNzdWluZyBDQSAwODAeFw0yNDA0MTgwODM1MzdaFw0yNTA0MTMwODM1MzdaMGkxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJXQTEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMRswGQYDVQQDExJtZXRhZGF0YS5henVyZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQD0T031XgxaebNQjKFQZ4BudeN+wOEHQoFq/x+cKSXM8HJrC2pF8y/ngSsuCLGt72M+30KxdbPHl56kd52uwDw1ZBrQO6Xw+GorRbtM4YQi+gLr8t9x+GUfuOX7E+5juidXax7la5ZhpVVLb3f+8NyxbphvEdFadXcgyQga1pl4v1U8elkbX3PPtEQXzwYotU+RU/ZTwXMYqfvJuaKwc4T2s083kaL3DwAfVxL0f6ey/MXuNQb4+ho15y9/f9gwMyzMDLlYChmY6cGSS4tsyrG5SrybE3jl8LZ1ZLVJ2fAIxbmJzBn1q+Eu4G6TZlnMDEsjznf7gqnP+n/o7N6l0sY1AgMBAAGjggQvMIIEKzCCAX4GCisGAQQB1nkCBAIEggFuBIIBagFoAHYAzxFW7tUufK/zh1vZaS6b6RpxZ0qwF+ysAdJbd87MOwgAAAGO8GIJ/QAABAMARzBFAiEAvJQ2mDRow9TMvLddWpYqNXLiehSFsj2+xUqh8yP/B8YCIBJjVoELj3kdVr3ceAuZFte9FH6sBsgeMsIgfndho6hRAHUAfVkeEuF4KnscYWd8Xv340IdcFKBOlZ65Ay/ZDowuebgAAAGO8GIK2AAABAMARjBEAiAxXD1R9yLASrpMh4ie0wn3AjCoSPniZ8virEVz8tKnkwIgWxGU9DjjQk7gPWYVBsiXP9t1WPJ6mNJ1UkmAw8iDdFoAdwBVgdTCFpA2AUrqC5tXPFPwwOQ4eHAlCBcvo6odBxPTDAAAAY7wYgrtAAAEAwBIMEYCIQCaSjdXbUhrDyPNsRqewp5UdVYABGQAIgNwfKsq/JpbmAIhAPy5qQ6H2enXwuKsorEZTwIkKIoMgLsWs4anx9lXTJMeMCcGCSsGAQQBgjcVCgQaMBgwCgYIKwYBBQUHAwIwCgYIKwYBBQUHAwEwPAYJKwYBBAGCNxUHBC8wLQYlKwYBBAGCNxUIh73XG4Hn60aCgZ0ujtAMh/DaHV2ChOVpgvOnPgIBZAIBJjCBtAYIKwYBBQUHAQEEgacwgaQwcwYIKwYBBQUHMAKGZ2h0dHA6Ly93d3cubWljcm9zb2Z0LmNvbS9wa2lvcHMvY2VydHMvTWljcm9zb2Z0JTIwQXp1cmUlMjBSU0ElMjBUTFMlMjBJc3N1aW5nJTIwQ0ElMjAwOCUyMC0lMjB4c2lnbi5jcnQwLQYIKwYBBQUHMAGGIWh0dHA6Ly9vbmVvY3NwLm1pY3Jvc29mdC5jb20vb2NzcDAdBgNVHQ4EFgQUnqRq3WHOZDoNmLD/arJg9RscxLowDgYDVR0PAQH/BAQDAgWgMDgGA1UdEQQxMC+CGWVhc3R1cy5tZXRhZGF0YS5henVyZS5jb22CEm1ldGFkYXRhLmF6dXJlLmNvbTAMBgNVHRMBAf8EAjAAMGoGA1UdHwRjMGEwX6BdoFuGWWh0dHA6Ly93d3cubWljcm9zb2Z0LmNvbS9wa2lvcHMvY3JsL01pY3Jvc29mdCUyMEF6dXJlJTIwUlNBJTIwVExTJTIwSXNzdWluZyUyMENBJTIwMDguY3JsMGYGA1UdIARfMF0wUQYMKwYBBAGCN0yDfQEBMEEwPwYIKwYBBQUHAgEWM2h0dHA6Ly93d3cubWljcm9zb2Z0LmNvbS9wa2lvcHMvRG9jcy9SZXBvc2l0b3J5Lmh0bTAIBgZngQwBAgIwHwYDVR0jBBgwFoAU9n4vvYCjSrJwW+vfmh/Y7cphgAcwHQYDVR0lBBYwFAYIKwYBBQUHAwIGCCsGAQUFBwMBMA0GCSqGSIb3DQEBDAUAA4ICAQB4FwyqZFVdmB9Hu+YUJOJrGUYRlXbnCmdXlLi5w2QRCf9RKIykGdv28dH1ezhXJUCj3jCVZMav4GaSl0dPUcTetfnc/UrwsmbGRIMubbGjCz75FcNz/kXy7E/jPeyJrxsuO/ijyZNUSy0EQF3NuhTJw/SfAQtXv48NmVFDM2QMMhMRLDfOV4CPcialAFACFQTt6LMdG2hlB972Bffl+BVPkUKDLj89xQRd/cyWYweYfPCsNLYLDml98rY3v4yVKAvv+l7IOuKOzhlOe9U1oPJK7AP7GZzojKrisPQt4HlP4zEmeUzJtL6RqGdHac7/lUMVPOniE/L+5gBDBsN3nOGJ/QE+bBsmfdn4ewuLj6/LCd/JhCZFDeyTvtuX43JWIr9e0UOtENCG3Ub4SuUftf58+NuedCaNMZW2jqrFvQl+sCX+v1kkxxmRphU7B8TZP0SHaBDqeIqHPNWD7eyn/7+VTY54wrwF1v5S6b5zpL1tjZ55c9wpVBT6m77mNuR/2l7/VSh/qL2LgKVVo06q+Qz2c0pIjOI+7FobLRNtb7C8SqkdwuT1b0vnZslA8ZUEtwUm5RHcGu66sg/hb4lGNZbAklxGeAR3uQju0OQN/Lj4kXiii737dci0lIpIKA92hUKybLrYCyZDhp5I6is0gTdm4+rxVEY1K39R3cF3U5thuzGCAZ8wggGbAgEBMHQwXTELMAkGA1UEBhMCVVMxHjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEuMCwGA1UEAxMlTWljcm9zb2Z0IEF6dXJlIFJTQSBUTFMgSXNzd
WluZyBDQSAwOAITMwAm2P/IEhbWST6+wgAAACbY/zANBgkqhkiG9w0BAQsFADANBgkqhkiG9w0BAQEFAASCAQDRukRXI01EvAoF0J+C1aYCmjwAtMlnQr5fBKod8T75FhM+mTJ2GApCyc5H8hn7IDl8ki8DdKfLjipnuEvjknZcVkfrzE72R9Pu+C2ffKfrSsJmsBHPMEKBPtlzhexCYiPamMGdVg8HqX6mhQkjjavk1SY+ewZvyEeuq+RSQIBVL1lw0UOWv+txDKlu9v69skb1DQ2HSet0sejEb48vqGeN4TMSoQFNeBOzHDkEeoqXxtZqsUhMtQzbwrpAFcUREB8DaCOXcv1DOminJB3Q19bpuMQ/2+Fc3HJtTTWRV3+3b7VnQl/sUDzTjcWXvwjrLGKk3MSTcQ+1rJRlBzkOJ+aK",
+		vmID: "960a4b4a-dab2-44ef-9b73-7753043b4f16",
+		date: mustTime(time.RFC3339, "2024-04-22T17:32:44Z"),
 	}} {
 		tc := tc
 		t.Run(tc.name, func(t *testing.T) {
@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+
+# See: https://learn.microsoft.com/en-us/azure/security/fundamentals/azure-ca-details?tabs=certificate-authority-chains
+declare -a CERTIFICATES=(
+	"Microsoft RSA TLS CA 01=https://crt.sh/?d=3124375355"
+	"Microsoft RSA TLS CA 02=https://crt.sh/?d=3124375356"
+	"Microsoft Azure RSA TLS Issuing CA 03=https://www.microsoft.com/pkiops/certs/Microsoft%20Azure%20RSA%20TLS%20Issuing%20CA%2003%20-%20xsign.crt"
+	"Microsoft Azure RSA TLS Issuing CA 04=https://www.microsoft.com/pkiops/certs/Microsoft%20Azure%20RSA%20TLS%20Issuing%20CA%2004%20-%20xsign.crt"
+	"Microsoft Azure RSA TLS Issuing CA 07=https://www.microsoft.com/pkiops/certs/Microsoft%20Azure%20RSA%20TLS%20Issuing%20CA%2007%20-%20xsign.crt"
+	"Microsoft Azure RSA TLS Issuing CA 08=https://www.microsoft.com/pkiops/certs/Microsoft%20Azure%20RSA%20TLS%20Issuing%20CA%2008%20-%20xsign.crt"
+	"Microsoft Azure TLS Issuing CA 01=https://www.microsoft.com/pki/certs/Microsoft%20Azure%20TLS%20Issuing%20CA%2001.cer"
+	"Microsoft Azure TLS Issuing CA 02=https://www.microsoft.com/pki/certs/Microsoft%20Azure%20TLS%20Issuing%20CA%2002.cer"
+	"Microsoft Azure TLS Issuing CA 05=https://www.microsoft.com/pki/certs/Microsoft%20Azure%20TLS%20Issuing%20CA%2005.cer"
+	"Microsoft Azure TLS Issuing CA 06=https://www.microsoft.com/pki/certs/Microsoft%20Azure%20TLS%20Issuing%20CA%2006.cer"
+)
+
+CONTENT="var Certificates = []string{"
+
+for CERT in "${CERTIFICATES[@]}"; do
+	IFS="=" read -r NAME URL <<<"$CERT"
+	echo "Downloading certificate: $NAME"
+	PEM=$(curl -sSL "$URL" | openssl x509 -outform PEM)
+	echo "$PEM"
+
+	CONTENT+="\n// $NAME\n\`$PEM\`,"
+done
+
+CONTENT+="\n}"
+
+sed -i '/var Certificates = /,$d' azureidentity.go
+# shellcheck disable=SC2059
+printf "$CONTENT" >>azureidentity.go
+gofmt -w azureidentity.go
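For comparison, the same fetch-and-encode step in Go (a sketch; it assumes the endpoints serve DER-encoded certificates, which is why the script pipes through `openssl x509 -outform PEM`):

```go
package main

import (
	"encoding/pem"
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	// One of the URLs from the script above.
	url := "https://www.microsoft.com/pki/certs/Microsoft%20Azure%20TLS%20Issuing%20CA%2001.cer"
	resp, err := http.Get(url)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	der, err := io.ReadAll(resp.Body)
	if err != nil {
		log.Fatal(err)
	}
	// Wrap the raw DER bytes in a PEM CERTIFICATE block.
	fmt.Print(string(pem.EncodeToMemory(&pem.Block{Type: "CERTIFICATE", Bytes: der})))
}
```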
@@ -10,6 +10,7 @@ import (
 	"os/exec"
 	"path/filepath"
 	"regexp"
+	"strconv"
 	"strings"
 	"testing"
 	"time"
@@ -184,20 +185,21 @@ func DumpOnFailure(t testing.TB, connectionURL string) {
 	now := time.Now()
 	timeSuffix := fmt.Sprintf("%d%d%d%d%d%d", now.Year(), now.Month(), now.Day(), now.Hour(), now.Minute(), now.Second())
 	outPath := filepath.Join(cwd, snakeCaseName+"."+timeSuffix+".test.sql")
-	dump, err := pgDump(connectionURL)
+	dump, err := PGDump(connectionURL)
 	if err != nil {
 		t.Errorf("dump on failure: failed to run pg_dump")
 		return
 	}
-	if err := os.WriteFile(outPath, filterDump(dump), 0o600); err != nil {
+	if err := os.WriteFile(outPath, normalizeDump(dump), 0o600); err != nil {
 		t.Errorf("dump on failure: failed to write: %s", err.Error())
 		return
 	}
 	t.Logf("Dumped database to %q due to failed test. I hope you find what you're looking for!", outPath)
 }

-// pgDump runs pg_dump against dbURL and returns the output.
-func pgDump(dbURL string) ([]byte, error) {
+// PGDump runs pg_dump against dbURL and returns the output.
+// It is used by DumpOnFailure().
+func PGDump(dbURL string) ([]byte, error) {
 	if _, err := exec.LookPath("pg_dump"); err != nil {
 		return nil, xerrors.Errorf("could not find pg_dump in path: %w", err)
 	}
@@ -230,16 +232,79 @@ func PGDump(dbURL string) ([]byte, error) {
 	return stdout.Bytes(), nil
 }

-// Unfortunately, some insert expressions span multiple lines.
-// The below may be over-permissive but better that than truncating data.
-var insertExpr = regexp.MustCompile(`(?s)\bINSERT[^;]+;`)
+const minimumPostgreSQLVersion = 13

-func filterDump(dump []byte) []byte {
-	var buf bytes.Buffer
-	matches := insertExpr.FindAll(dump, -1)
-	for _, m := range matches {
-		_, _ = buf.Write(m)
-		_, _ = buf.WriteRune('\n')
-	}
-	return buf.Bytes()
+// PGDumpSchemaOnly is for use by gen/dump only.
+// It runs pg_dump against dbURL and sets a consistent timezone and encoding.
+func PGDumpSchemaOnly(dbURL string) ([]byte, error) {
+	hasPGDump := false
+	if _, err := exec.LookPath("pg_dump"); err == nil {
+		out, err := exec.Command("pg_dump", "--version").Output()
+		if err == nil {
+			// Parse output:
+			// pg_dump (PostgreSQL) 14.5 (Ubuntu 14.5-0ubuntu0.22.04.1)
+			parts := strings.Split(string(out), " ")
+			if len(parts) > 2 {
+				version, err := strconv.Atoi(strings.Split(parts[2], ".")[0])
+				if err == nil && version >= minimumPostgreSQLVersion {
+					hasPGDump = true
+				}
+			}
+		}
+	}
+
+	cmdArgs := []string{
+		"pg_dump",
+		"--schema-only",
+		dbURL,
+		"--no-privileges",
+		"--no-owner",
+		"--no-privileges",
+		"--no-publication",
+		"--no-security-labels",
+		"--no-subscriptions",
+		"--no-tablespaces",
+
+		// We never want to manually generate
+		// queries executing against this table.
+		"--exclude-table=schema_migrations",
+	}
+
+	if !hasPGDump {
+		cmdArgs = append([]string{
+			"docker",
+			"run",
+			"--rm",
+			"--network=host",
+			fmt.Sprintf("gcr.io/coder-dev-1/postgres:%d", minimumPostgreSQLVersion),
+		}, cmdArgs...)
+	}
+	cmd := exec.Command(cmdArgs[0], cmdArgs[1:]...) //#nosec
+	cmd.Env = append(os.Environ(), []string{
+		"PGTZ=UTC",
+		"PGCLIENTENCODING=UTF8",
+	}...)
+	var output bytes.Buffer
+	cmd.Stdout = &output
+	cmd.Stderr = os.Stderr
+	err := cmd.Run()
+	if err != nil {
+		return nil, err
+	}
+	return normalizeDump(output.Bytes()), nil
+}
+
+func normalizeDump(schema []byte) []byte {
+	// Remove all comments.
+	schema = regexp.MustCompile(`(?im)^(--.*)$`).ReplaceAll(schema, []byte{})
+	// Public is implicit in the schema.
+	schema = regexp.MustCompile(`(?im)( |::|'|\()public\.`).ReplaceAll(schema, []byte(`$1`))
+	// Remove database settings.
+	schema = regexp.MustCompile(`(?im)^(SET.*;)`).ReplaceAll(schema, []byte(``))
+	// Remove select statements
+	schema = regexp.MustCompile(`(?im)^(SELECT.*;)`).ReplaceAll(schema, []byte(``))
+	// Removes multiple newlines.
+	schema = regexp.MustCompile(`(?im)\n{3,}`).ReplaceAll(schema, []byte("\n\n"))
+
+	return schema
 }
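To see what `normalizeDump` does, here it is applied to a tiny pg_dump fragment (a standalone copy of the function above, for illustration only):

```go
package main

import (
	"fmt"
	"regexp"
)

// Same rewrite rules as dbtestutil.normalizeDump above.
func normalizeDump(schema []byte) []byte {
	schema = regexp.MustCompile(`(?im)^(--.*)$`).ReplaceAll(schema, []byte{})
	schema = regexp.MustCompile(`(?im)( |::|'|\()public\.`).ReplaceAll(schema, []byte(`$1`))
	schema = regexp.MustCompile(`(?im)^(SET.*;)`).ReplaceAll(schema, []byte(``))
	schema = regexp.MustCompile(`(?im)^(SELECT.*;)`).ReplaceAll(schema, []byte(``))
	schema = regexp.MustCompile(`(?im)\n{3,}`).ReplaceAll(schema, []byte("\n\n"))
	return schema
}

func main() {
	in := "-- comment\nSET timezone = 'UTC';\nCREATE TABLE public.users (id int);\n"
	fmt.Printf("%q\n", normalizeDump([]byte(in)))
	// Prints "\n\nCREATE TABLE users (id int);\n": the comment and SET lines
	// are blanked and the implicit public. prefix is dropped.
}
```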
@@ -695,7 +695,7 @@ CREATE TABLE replicas (

 CREATE TABLE site_configs (
     key character varying(256) NOT NULL,
-    value character varying(8192) NOT NULL
+    value text NOT NULL
 );

 CREATE TABLE tailnet_agents (
@@ -1,21 +1,16 @@
 package main

 import (
-	"bytes"
 	"database/sql"
-	"fmt"
 	"os"
-	"os/exec"
 	"path/filepath"
 	"runtime"
-	"strconv"
-	"strings"

 	"github.com/coder/coder/v2/coderd/database/dbtestutil"
 	"github.com/coder/coder/v2/coderd/database/migrations"
 )

-const minimumPostgreSQLVersion = 13
+var preamble = []byte("-- Code generated by 'make coderd/database/generate'. DO NOT EDIT.")

 func main() {
 	connection, closeFn, err := dbtestutil.Open()
@@ -28,95 +23,23 @@ func main() {
 	if err != nil {
 		panic(err)
 	}
 	defer db.Close()

 	err = migrations.Up(db)
 	if err != nil {
 		panic(err)
 	}

-	hasPGDump := false
-	if _, err = exec.LookPath("pg_dump"); err == nil {
-		out, err := exec.Command("pg_dump", "--version").Output()
-		if err == nil {
-			// Parse output:
-			// pg_dump (PostgreSQL) 14.5 (Ubuntu 14.5-0ubuntu0.22.04.1)
-			parts := strings.Split(string(out), " ")
-			if len(parts) > 2 {
-				version, err := strconv.Atoi(strings.Split(parts[2], ".")[0])
-				if err == nil && version >= minimumPostgreSQLVersion {
-					hasPGDump = true
-				}
-			}
-		}
-	}
-
-	cmdArgs := []string{
-		"pg_dump",
-		"--schema-only",
-		connection,
-		"--no-privileges",
-		"--no-owner",
-
-		// We never want to manually generate
-		// queries executing against this table.
-		"--exclude-table=schema_migrations",
-	}
-
-	if !hasPGDump {
-		cmdArgs = append([]string{
-			"docker",
-			"run",
-			"--rm",
-			"--network=host",
-			fmt.Sprintf("gcr.io/coder-dev-1/postgres:%d", minimumPostgreSQLVersion),
-		}, cmdArgs...)
-	}
-	cmd := exec.Command(cmdArgs[0], cmdArgs[1:]...) //#nosec
-	cmd.Env = append(os.Environ(), []string{
-		"PGTZ=UTC",
-		"PGCLIENTENCODING=UTF8",
-	}...)
-	var output bytes.Buffer
-	cmd.Stdout = &output
-	cmd.Stderr = os.Stderr
-	err = cmd.Run()
+	dumpBytes, err := dbtestutil.PGDumpSchemaOnly(connection)
 	if err != nil {
 		panic(err)
 	}

-	for _, sed := range []string{
-		// Remove all comments.
-		"/^--/d",
-		// Public is implicit in the schema.
-		"s/ public\\./ /g",
-		"s/::public\\./::/g",
-		"s/'public\\./'/g",
-		"s/(public\\./(/g",
-		// Remove database settings.
-		"s/SET .* = .*;//g",
-		// Remove select statements. These aren't useful
-		// to a reader of the dump.
-		"s/SELECT.*;//g",
-		// Removes multiple newlines.
-		"/^$/N;/^\\n$/D",
-	} {
-		cmd := exec.Command("sed", "-e", sed)
-		cmd.Stdin = bytes.NewReader(output.Bytes())
-		output = bytes.Buffer{}
-		cmd.Stdout = &output
-		cmd.Stderr = os.Stderr
-		err = cmd.Run()
-		if err != nil {
-			panic(err)
-		}
-	}
-
-	dump := fmt.Sprintf("-- Code generated by 'make coderd/database/generate'. DO NOT EDIT.\n%s", output.Bytes())
 	_, mainPath, _, ok := runtime.Caller(0)
 	if !ok {
 		panic("couldn't get caller path")
 	}
-	err = os.WriteFile(filepath.Join(mainPath, "..", "..", "..", "dump.sql"), []byte(dump), 0o600)
+	err = os.WriteFile(filepath.Join(mainPath, "..", "..", "..", "dump.sql"), append(preamble, dumpBytes...), 0o600)
 	if err != nil {
 		panic(err)
 	}
@@ -0,0 +1 @@
+ALTER TABLE "site_configs" ALTER COLUMN "value" TYPE character varying(8192);
@@ -0,0 +1 @@
+ALTER TABLE "site_configs" ALTER COLUMN "value" TYPE text;
@@ -17,9 +17,12 @@ import (
 //go:embed *.sql
 var migrations embed.FS

-func setup(db *sql.DB) (source.Driver, *migrate.Migrate, error) {
+func setup(db *sql.DB, migs fs.FS) (source.Driver, *migrate.Migrate, error) {
+	if migs == nil {
+		migs = migrations
+	}
 	ctx := context.Background()
-	sourceDriver, err := iofs.New(migrations, ".")
+	sourceDriver, err := iofs.New(migs, ".")
 	if err != nil {
 		return nil, nil, xerrors.Errorf("create iofs: %w", err)
 	}
@@ -47,8 +50,13 @@ func setup(db *sql.DB, migs fs.FS) (source.Driver, *migrate.Migrate, error) {
 }

 // Up runs SQL migrations to ensure the database schema is up-to-date.
-func Up(db *sql.DB) (retErr error) {
-	_, m, err := setup(db)
+func Up(db *sql.DB) error {
+	return UpWithFS(db, migrations)
+}
+
+// UpWithFS runs SQL migrations in the given fs.
+func UpWithFS(db *sql.DB, migs fs.FS) (retErr error) {
+	_, m, err := setup(db, migs)
 	if err != nil {
 		return xerrors.Errorf("migrate setup: %w", err)
 	}
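A hypothetical caller of the new entry point: run an extra, privately embedded set of SQL migrations through `UpWithFS` (the package and variable names here are invented for illustration):

```go
package custommigrations

import (
	"database/sql"
	"embed"

	"github.com/coder/coder/v2/coderd/database/migrations"
)

//go:embed *.sql
var extraMigrations embed.FS

// MigrateUp applies the embedded migrations using the UpWithFS entry point
// introduced above; passing the package's own FS is exactly what Up does.
func MigrateUp(db *sql.DB) error {
	return migrations.UpWithFS(db, extraMigrations)
}
```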
@@ -79,7 +87,7 @@ func Up(db *sql.DB) (retErr error) {

 // Down runs all down SQL migrations.
 func Down(db *sql.DB) error {
-	_, m, err := setup(db)
+	_, m, err := setup(db, migrations)
 	if err != nil {
 		return xerrors.Errorf("migrate setup: %w", err)
 	}
@@ -101,7 +109,7 @@ func Down(db *sql.DB) error {
 // applied, without making any changes to the database. If not, returns a
 // non-nil error.
 func EnsureClean(db *sql.DB) error {
-	sourceDriver, m, err := setup(db)
+	sourceDriver, m, err := setup(db, migrations)
 	if err != nil {
 		return xerrors.Errorf("migrate setup: %w", err)
 	}
@@ -167,7 +175,7 @@ func CheckLatestVersion(sourceDriver source.Driver, currentVersion uint) error {
 // Stepper cannot be closed pre-emptively, it must be run to completion
 // (or until an error is encountered).
 func Stepper(db *sql.DB) (next func() (version uint, more bool, err error), err error) {
-	_, m, err := setup(db)
+	_, m, err := setup(db, migrations)
 	if err != nil {
 		return nil, xerrors.Errorf("migrate setup: %w", err)
 	}
@@ -10132,11 +10132,11 @@ WHERE
 		template_usage_stats
 	)
 	AND created_at < (
-		-- Delete at most in batches of 3 days (with a batch size of 3 days, we
-		-- can clear out the previous 6 months of data in ~60 iterations) whilst
-		-- keeping the DB load relatively low.
+		-- Delete at most in batches of 4 hours (with this batch size, assuming
+		-- 1 iteration / 10 minutes, we can clear out the previous 6 months of
+		-- data in 7.5 days) whilst keeping the DB load low.
 		SELECT
-			COALESCE(MIN(created_at) + '3 days'::interval, NOW())
+			COALESCE(MIN(created_at) + '4 hours'::interval, NOW())
 		FROM
 			workspace_agent_stats
 	)
@@ -87,11 +87,11 @@
 		template_usage_stats
 	)
 	AND created_at < (
-		-- Delete at most in batches of 3 days (with a batch size of 3 days, we
-		-- can clear out the previous 6 months of data in ~60 iterations) whilst
-		-- keeping the DB load relatively low.
+		-- Delete at most in batches of 4 hours (with this batch size, assuming
+		-- 1 iteration / 10 minutes, we can clear out the previous 6 months of
+		-- data in 7.5 days) whilst keeping the DB load low.
 		SELECT
-			COALESCE(MIN(created_at) + '3 days'::interval, NOW())
+			COALESCE(MIN(created_at) + '4 hours'::interval, NOW())
 		FROM
 			workspace_agent_stats
 	);
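The arithmetic in the new comment holds; a quick sanity computation (illustrative only):

```go
package main

import "fmt"

func main() {
	backlogHours := 6 * 30 * 24.0   // roughly six months of stats, in hours
	iterations := backlogHours / 4  // one 4-hour batch per iteration
	minutes := iterations * 10      // one iteration every 10 minutes
	fmt.Printf("%.0f iterations, %.1f days\n", iterations, minutes/60/24)
	// Output: 1080 iterations, 7.5 days
}
```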
@@ -79,10 +79,23 @@ func Workspaces(ctx context.Context, logger slog.Logger, registerer prometheus.R
 		duration = defaultRefreshRate
 	}

-	workspaceLatestBuildTotals := prometheus.NewGaugeVec(prometheus.GaugeOpts{
+	// TODO: deprecated: remove in the future
+	// See: https://github.com/coder/coder/issues/12999
+	// Deprecation reason: gauge metrics should avoid suffix `_total`
+	workspaceLatestBuildTotalsDeprecated := prometheus.NewGaugeVec(prometheus.GaugeOpts{
 		Namespace: "coderd",
 		Subsystem: "api",
 		Name:      "workspace_latest_build_total",
+		Help:      "DEPRECATED: use coderd_api_workspace_latest_build instead",
+	}, []string{"status"})
+	if err := registerer.Register(workspaceLatestBuildTotalsDeprecated); err != nil {
+		return nil, err
+	}
+
+	workspaceLatestBuildTotals := prometheus.NewGaugeVec(prometheus.GaugeOpts{
+		Namespace: "coderd",
+		Subsystem: "api",
+		Name:      "workspace_latest_build",
+		Help:      "The current number of workspace builds by status.",
 	}, []string{"status"})
 	if err := registerer.Register(workspaceLatestBuildTotals); err != nil {
@@ -131,6 +144,8 @@ func Workspaces(ctx context.Context, logger slog.Logger, registerer prometheus.R
 		for _, job := range jobs {
 			status := codersdk.ProvisionerJobStatus(job.JobStatus)
 			workspaceLatestBuildTotals.WithLabelValues(string(status)).Add(1)
+			// TODO: deprecated: remove in the future
+			workspaceLatestBuildTotalsDeprecated.WithLabelValues(string(status)).Add(1)
 		}
 	}

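The pattern in the two hunks above, registering the old `_total` gauge alongside its replacement and writing every sample to both, generalizes; a minimal hedged sketch:

```go
package metricsdemo

import "github.com/prometheus/client_golang/prometheus"

// newBuildGauges registers both the deprecated and the replacement gauge so
// existing dashboards keep working while scrapers migrate to the new name.
func newBuildGauges(reg prometheus.Registerer) (deprecated, current *prometheus.GaugeVec, err error) {
	deprecated = prometheus.NewGaugeVec(prometheus.GaugeOpts{
		Name: "coderd_api_workspace_latest_build_total",
		Help: "DEPRECATED: use coderd_api_workspace_latest_build instead",
	}, []string{"status"})
	current = prometheus.NewGaugeVec(prometheus.GaugeOpts{
		Name: "coderd_api_workspace_latest_build",
		Help: "The current number of workspace builds by status.",
	}, []string{"status"})
	for _, g := range []*prometheus.GaugeVec{deprecated, current} {
		if err := reg.Register(g); err != nil {
			return nil, nil, err
		}
	}
	// Callers then Add/Set on both vectors, as the loop above does.
	return deprecated, current, nil
}
```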
@@ -159,7 +159,7 @@ func TestWorkspaceLatestBuildTotals(t *testing.T) {
 	assert.NoError(t, err)
 	sum := 0
 	for _, m := range metrics {
-		if m.GetName() != "coderd_api_workspace_latest_build_total" {
+		if m.GetName() != "coderd_api_workspace_latest_build" {
 			continue
 		}

@@ -62,9 +62,11 @@ type metrics struct {

 	// if the oauth supports it, rate limit metrics.
 	// rateLimit is the defined limit per interval
-	rateLimit          *prometheus.GaugeVec
-	rateLimitRemaining *prometheus.GaugeVec
-	rateLimitUsed      *prometheus.GaugeVec
+	rateLimit *prometheus.GaugeVec
+	// TODO: remove deprecated metrics in a future release
+	rateLimitDeprecated *prometheus.GaugeVec
+	rateLimitRemaining  *prometheus.GaugeVec
+	rateLimitUsed       *prometheus.GaugeVec
 	// rateLimitReset is unix time of the next interval (when the rate limit resets).
 	rateLimitReset *prometheus.GaugeVec
 	// rateLimitResetIn is the time in seconds until the rate limit resets.
@@ -91,7 +93,7 @@ func NewFactory(registry prometheus.Registerer) *Factory {
 		rateLimit: factory.NewGaugeVec(prometheus.GaugeOpts{
 			Namespace: "coderd",
 			Subsystem: "oauth2",
-			Name:      "external_requests_rate_limit_total",
+			Name:      "external_requests_rate_limit",
 			Help:      "The total number of allowed requests per interval.",
 		}, []string{
 			"name",
@@ -99,6 +101,18 @@ func NewFactory(registry prometheus.Registerer) *Factory {
 			// Some IDPs have different buckets for different rate limits.
 			"resource",
 		}),
+		// TODO: deprecated: remove in the future
+		// See: https://github.com/coder/coder/issues/12999
+		// Deprecation reason: gauge metrics should avoid suffix `_total`
+		rateLimitDeprecated: factory.NewGaugeVec(prometheus.GaugeOpts{
+			Namespace: "coderd",
+			Subsystem: "oauth2",
+			Name:      "external_requests_rate_limit_total",
+			Help:      "DEPRECATED: use coderd_oauth2_external_requests_rate_limit instead",
+		}, []string{
+			"name",
+			"resource",
+		}),
 		rateLimitRemaining: factory.NewGaugeVec(prometheus.GaugeOpts{
 			Namespace: "coderd",
 			Subsystem: "oauth2",
@@ -176,6 +190,8 @@ func (f *Factory) NewGithub(name string, under OAuth2Config) *Config {
 		}
 	}

+	// TODO: remove this metric in v3
+	f.metrics.rateLimitDeprecated.With(labels).Set(float64(limits.Limit))
 	f.metrics.rateLimit.With(labels).Set(float64(limits.Limit))
 	f.metrics.rateLimitRemaining.With(labels).Set(float64(limits.Remaining))
 	f.metrics.rateLimitUsed.With(labels).Set(float64(limits.Used))
@@ -246,7 +246,7 @@ func (s *server) heartbeatLoop() {
 		start := s.timeNow()
 		hbCtx, hbCancel := context.WithTimeout(s.lifecycleCtx, s.heartbeatInterval)
 		if err := s.heartbeat(hbCtx); err != nil && !database.IsQueryCanceledError(err) {
-			s.Logger.Error(hbCtx, "heartbeat failed", slog.Error(err))
+			s.Logger.Warn(hbCtx, "heartbeat failed", slog.Error(err))
 		}
 		hbCancel()
 		elapsed := s.timeNow().Sub(start)
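The loop around this change bounds each heartbeat by the heartbeat interval itself. The general shape of that pattern (a sketch, not coderd's actual loop):

```go
package heartbeat

import (
	"context"
	"log"
	"time"
)

// loop gives each beat at most one interval to finish, so a slow or failed
// heartbeat is logged as a warning and never backs up the ticker.
func loop(ctx context.Context, interval time.Duration, beat func(context.Context) error) {
	ticker := time.NewTicker(interval)
	defer ticker.Stop()
	for {
		select {
		case <-ctx.Done():
			return
		case <-ticker.C:
		}
		hbCtx, cancel := context.WithTimeout(ctx, interval)
		if err := beat(hbCtx); err != nil {
			log.Printf("heartbeat failed: %v", err)
		}
		cancel()
	}
}
```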
@@ -330,7 +330,7 @@ across multiple regions and diverse cloud platforms.
 - Since the _Registry_ is isolated from the internet, platform engineers are
   responsible for maintaining Workspace container images and conducting periodic
   updates of base Docker images.
-- It is recommended to keep [Dev Containers](../templates/devcontainers.md) up
+- It is recommended to keep [Dev Containers](../templates/dev-containers.md) up
   to date with the latest released
   [Envbuilder](https://github.com/coder/envbuilder) runtime.

@@ -360,7 +360,7 @@ project-oriented [features](https://containers.dev/features) without requiring
 platform administrators to push altered Docker images.

 Learn more about
-[Dev containers support](https://coder.com/docs/v2/latest/templates/devcontainers)
+[Dev containers support](https://coder.com/docs/v2/latest/templates/dev-containers)
 in Coder.

 ![Architecture Diagram](../images/architecture-devcontainers.png)
@@ -120,7 +120,8 @@ spec:
 | `coderd_api_request_latencies_seconds` | histogram | Latency distribution of requests in seconds. | `method` `path` |
 | `coderd_api_requests_processed_total` | counter | The total number of processed API requests | `code` `method` `path` |
 | `coderd_api_websocket_durations_seconds` | histogram | Websocket duration distribution of requests in seconds. | `path` |
-| `coderd_api_workspace_latest_build_total` | gauge | The latest workspace builds with a status. | `status` |
+| `coderd_api_workspace_latest_build` | gauge | The latest workspace builds with a status. | `status` |
+| `coderd_api_workspace_latest_build_total` | gauge | DEPRECATED: use coderd_api_workspace_latest_build instead | `status` |
 | `coderd_insights_applications_usage_seconds` | gauge | The application usage per template. | `application_name` `slug` `template_name` |
 | `coderd_insights_parameters` | gauge | The parameter usage per template. | `parameter_name` `parameter_type` `parameter_value` `template_name` |
 | `coderd_insights_templates_active_users` | gauge | The number of active users of the template. | `template_name` |
@@ -128,10 +129,11 @@ spec:
 | `coderd_license_limit_users` | gauge | The user seats limit based on the active Coder license. | |
 | `coderd_license_user_limit_enabled` | gauge | Returns 1 if the current license enforces the user limit. | |
 | `coderd_metrics_collector_agents_execution_seconds` | histogram | Histogram for duration of agents metrics collection in seconds. | |
+| `coderd_oauth2_external_requests_rate_limit` | gauge | The total number of allowed requests per interval. | `name` `resource` |
 | `coderd_oauth2_external_requests_rate_limit_next_reset_unix` | gauge | Unix timestamp of the next interval | `name` `resource` |
 | `coderd_oauth2_external_requests_rate_limit_remaining` | gauge | The remaining number of allowed requests in this interval. | `name` `resource` |
 | `coderd_oauth2_external_requests_rate_limit_reset_in_seconds` | gauge | Seconds until the next interval | `name` `resource` |
-| `coderd_oauth2_external_requests_rate_limit_total` | gauge | The total number of allowed requests per interval. | `name` `resource` |
+| `coderd_oauth2_external_requests_rate_limit_total` | gauge | DEPRECATED: use coderd_oauth2_external_requests_rate_limit instead | `name` `resource` |
 | `coderd_oauth2_external_requests_rate_limit_used` | gauge | The number of requests made in this interval. | `name` `resource` |
 | `coderd_oauth2_external_requests_total` | counter | The total number of api calls made to external oauth2 providers. 'status_code' will be 0 if the request failed with no response. | `name` `source` `status_code` |
 | `coderd_provisionerd_job_timings_seconds` | histogram | The provisioner job time duration in seconds. | `provisioner` `status` |
@@ -2,7 +2,7 @@

 ### Features

-- [Devcontainer templates](https://coder.com/docs/v2/latest/templates/devcontainers)
+- [Devcontainer templates](https://coder.com/docs/v2/latest/templates/dev-containers)
   for Coder (#8256)
 - The dashboard will warn users when a workspace is unhealthy (#8422)
 - Audit logs `resource_target` search query allows you to search by resource
@@ -109,7 +109,7 @@ The following features are hidden or disabled by default as we don't guarantee s
 - Dashboard: Improve clipboard support on HTTP connections and older browsers (#12178) (@Parkreiner)
 - Server: Add postgres triggers to remove deleted users from user_links (#12117) (@Emyrk)
 - Dashboard: Add tests and improve accessibility for useClickable (#12218) (@Parkreiner)
-- Server: Ignore surronding whitespace for cli config (#12250) (@Emyrk)
+- Server: Ignore surrounding whitespace for cli config (#12250) (@Emyrk)
 - Tailnet: Stop waiting for Agent in a goroutine in ssh test (#12268) (@spikecurtis)
 - d4d8424ce fix: fix GetOrganizationsByUserID error when multiple organizations exist (#12257) (@Emyrk)
 - Server: Refresh entitlements after creating first user (#12285) (@mtojek)
@@ -152,7 +152,7 @@ example below:
 export const getAgentListeningPorts = async (
   agentID: string,
 ): Promise<TypesGen.ListeningPortsResponse> => {
-  const response = await axios.get(
+  const response = await axiosInstance.get(
     `/api/v2/workspaceagents/${agentID}/listening-ports`,
   );
   return response.data;
@@ -16,6 +16,11 @@ locally in order to log in and manage templates.
 > The version flags for both stable and mainline are automatically filled in
 > this page.

+> If you need help setting up k8s, we have a
+> [repo with Terraform configuration](https://github.com/ElliotG/coder-oss-tf)
+> to provision Coder on Google GKE, Azure AKS, AWS EKS, DigitalOcean DOKS,
+> IBMCloud K8s, OVHCloud K8s, and Scaleway K8s Kapsule.
+
 ## Install Coder with Helm

 1. Create a namespace for Coder, such as `coder`:
@@ -127,7 +132,7 @@ locally in order to log in and manage templates.
    helm install coder coder-v2/coder \
      --namespace coder \
      --values values.yaml \
-     --version 2.10.0
+     --version 2.10.2
    ```

    For the **stable** Coder release:
@@ -136,7 +141,7 @@ locally in order to log in and manage templates.
    helm install coder coder-v2/coder \
      --namespace coder \
      --values values.yaml \
-     --version 2.9.1
+     --version 2.9.4
    ```

    You can watch Coder start up by running `kubectl get pods -n coder`. Once
@@ -8,9 +8,10 @@ their infrastructure on a staging environment before upgrading a production
 deployment.

 We support two release channels:
-[mainline](https://github.com/coder/coder/2.10.0) for the edge version of Coder
-and [stable](https://github.com/coder/coder/releases/latest) for those with
-lower tolerance for fault. We field our mainline releases publicly for one month
+[mainline](https://github.com/coder/coder/releases/tag/v2.10.1) for the bleeding
+edge version of Coder and
+[stable](https://github.com/coder/coder/releases/latest) for those with lower
+tolerance for fault. We field our mainline releases publicly for one month
 before promoting them to stable.

 ### Mainline releases
@@ -32,7 +32,7 @@ sudo -u coder docker ps

 ## Architecture

-Coder supports devcontainers with [envbuilder](https://github.com/coder/envbuilder), an open source project. Read more about this in [Coder's documentation](https://coder.com/docs/v2/latest/templates/devcontainers).
+Coder supports devcontainers with [envbuilder](https://github.com/coder/envbuilder), an open source project. Read more about this in [Coder's documentation](https://coder.com/docs/v2/latest/templates/dev-containers).

 This template provisions the following resources:

@@ -27,7 +27,7 @@ This template authenticates using a `~/.kube/config`, if present on the server,

 ## Architecture

-Coder supports devcontainers with [envbuilder](https://github.com/coder/envbuilder), an open source project. Read more about this in [Coder's documentation](https://coder.com/docs/v2/latest/templates/devcontainers).
+Coder supports devcontainers with [envbuilder](https://github.com/coder/envbuilder), an open source project. Read more about this in [Coder's documentation](https://coder.com/docs/v2/latest/templates/dev-containers).

 This template provisions the following resources:

go.mod (17 lines changed)
@@ -91,7 +91,7 @@ require (
 	github.com/andybalholm/brotli v1.1.0
 	github.com/armon/circbuf v0.0.0-20190214190532-5111143e8da2
 	github.com/awalterschulze/gographviz v2.0.3+incompatible
-	github.com/aws/smithy-go v1.20.1
+	github.com/aws/smithy-go v1.20.2
 	github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816
 	github.com/bramvdbogaerde/go-scp v1.4.0
 	github.com/briandowns/spinner v1.18.1
@@ -128,7 +128,7 @@ require (
 	github.com/go-ping/ping v1.1.0
 	github.com/go-playground/validator/v10 v10.19.0
 	github.com/gofrs/flock v0.8.1
-	github.com/gohugoio/hugo v0.124.0
+	github.com/gohugoio/hugo v0.125.3
 	github.com/golang-jwt/jwt/v4 v4.5.0
 	github.com/golang-migrate/migrate/v4 v4.17.0
 	github.com/google/go-cmp v0.6.0
@@ -218,6 +218,7 @@ require (
 	github.com/benbjohnson/clock v1.3.5
 	github.com/coder/serpent v0.7.0
 	github.com/gomarkdown/markdown v0.0.0-20231222211730-1d6d20845b47
+	github.com/google/go-github/v61 v61.0.0
 )

 require (
@@ -257,13 +258,13 @@ require (
 	github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect
 	github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
 	github.com/armon/go-radix v1.0.1-0.20221118154546-54df44f2176c // indirect
-	github.com/aws/aws-sdk-go-v2 v1.26.0
+	github.com/aws/aws-sdk-go-v2 v1.26.1
 	github.com/aws/aws-sdk-go-v2/config v1.27.7
 	github.com/aws/aws-sdk-go-v2/credentials v1.17.7 // indirect
 	github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.3 // indirect
 	github.com/aws/aws-sdk-go-v2/feature/rds/auth v1.4.3
-	github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.3 // indirect
-	github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.3 // indirect
+	github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 // indirect
+	github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 // indirect
 	github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 // indirect
 	github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.5 // indirect
 	github.com/aws/aws-sdk-go-v2/service/ssm v1.49.3 // indirect
@@ -374,7 +375,7 @@ require (
 	github.com/opencontainers/image-spec v1.1.0-rc5 // indirect
 	github.com/opencontainers/runc v1.1.12 // indirect
 	github.com/outcaste-io/ristretto v0.2.3 // indirect
-	github.com/pelletier/go-toml/v2 v2.1.1 // indirect
+	github.com/pelletier/go-toml/v2 v2.2.1 // indirect
 	github.com/philhofer/fwd v1.1.2 // indirect
 	github.com/pierrec/lz4/v4 v4.1.18 // indirect
 	github.com/pkg/errors v0.9.1 // indirect
@@ -414,7 +415,7 @@ require (
 	github.com/xeipuuv/gojsonschema v1.2.0 // indirect
 	github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect
 	github.com/yashtewari/glob-intersection v0.2.0 // indirect
-	github.com/yuin/goldmark v1.7.0 // indirect
+	github.com/yuin/goldmark v1.7.1 // indirect
 	github.com/yuin/goldmark-emoji v1.0.2 // indirect
 	github.com/zclconf/go-cty v1.14.1
 	github.com/zeebo/errs v1.3.0 // indirect
@@ -429,7 +430,7 @@ require (
 	golang.zx2c4.com/wireguard/windows v0.5.3 // indirect
 	google.golang.org/appengine v1.6.8 // indirect
 	google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de // indirect
-	google.golang.org/genproto/googleapis/api v0.0.0-20240227224415-6ceb2ff114de // indirect
+	google.golang.org/genproto/googleapis/api v0.0.0-20240311132316-a219d84964c2 // indirect
 	google.golang.org/genproto/googleapis/rpc v0.0.0-20240415180920-8c6c420018be // indirect
 	gopkg.in/yaml.v2 v2.4.0 // indirect
 	howett.net/plist v1.0.0 // indirect
go.sum (40 changed lines)

@@ -92,8 +92,8 @@ github.com/armon/go-radix v1.0.1-0.20221118154546-54df44f2176c h1:651/eoCRnQ7YtS
 github.com/armon/go-radix v1.0.1-0.20221118154546-54df44f2176c/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
 github.com/awalterschulze/gographviz v2.0.3+incompatible h1:9sVEXJBJLwGX7EQVhLm2elIKCm7P2YHFC8v6096G09E=
 github.com/awalterschulze/gographviz v2.0.3+incompatible/go.mod h1:GEV5wmg4YquNw7v1kkyoX9etIk8yVmXj+AkDHuuETHs=
-github.com/aws/aws-sdk-go-v2 v1.26.0 h1:/Ce4OCiM3EkpW7Y+xUnfAFpchU78K7/Ug01sZni9PgA=
-github.com/aws/aws-sdk-go-v2 v1.26.0/go.mod h1:35hUlJVYd+M++iLI3ALmVwMOyRYMmRqUXpTtRGW+K9I=
+github.com/aws/aws-sdk-go-v2 v1.26.1 h1:5554eUqIYVWpU0YmeeYZ0wU64H2VLBs8TlhRB2L+EkA=
+github.com/aws/aws-sdk-go-v2 v1.26.1/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM=
 github.com/aws/aws-sdk-go-v2/config v1.27.7 h1:JSfb5nOQF01iOgxFI5OIKWwDiEXWTyTgg1Mm1mHi0A4=
 github.com/aws/aws-sdk-go-v2/config v1.27.7/go.mod h1:PH0/cNpoMO+B04qET699o5W92Ca79fVtbUnvMIZro4I=
 github.com/aws/aws-sdk-go-v2/credentials v1.17.7 h1:WJd+ubWKoBeRh7A5iNMnxEOs982SyVKOJD+K8HIezu4=
@@ -102,10 +102,10 @@ github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.3 h1:p+y7FvkK2dxS+FEwRIDHDe/
 github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.3/go.mod h1:/fYB+FZbDlwlAiynK9KDXlzZl3ANI9JkD0Uhz5FjNT4=
 github.com/aws/aws-sdk-go-v2/feature/rds/auth v1.4.3 h1:mfxA6HX/mla8BrjVHdVD0G49+0Z+xKel//NCPBk0qbo=
 github.com/aws/aws-sdk-go-v2/feature/rds/auth v1.4.3/go.mod h1:PjvlBlYNNXPrMAGarXrnV+UYv1T9XyTT2Ono41NQjq8=
-github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.3 h1:ifbIbHZyGl1alsAhPIYsHOg5MuApgqOvVeI8wIugXfs=
-github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.3/go.mod h1:oQZXg3c6SNeY6OZrDY+xHcF4VGIEoNotX2B4PrDeoJI=
-github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.3 h1:Qvodo9gHG9F3E8SfYOspPeBt0bjSbsevK8WhRAUHcoY=
-github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.3/go.mod h1:vCKrdLXtybdf/uQd/YfVR2r5pcbNuEYKzMQpcxmeSJw=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 h1:aw39xVGeRWlWx9EzGVnhOR4yOjQDHPQ6o6NmBlscyQg=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5/go.mod h1:FSaRudD0dXiMPK2UjknVwwTYyZMRsHv3TtkabsZih5I=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 h1:PG1F3OD1szkuQPzDw3CIQsRIrtTlUC3lP84taWzHlq0=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5/go.mod h1:jU1li6RFryMz+so64PpKtudI+QzbKoIEivqdf6LNpOc=
 github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU=
 github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY=
 github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1 h1:EyBZibRTVAs6ECHZOw5/wlylS9OcTzwyjeQMudmREjE=
@@ -120,8 +120,8 @@ github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.2 h1:pi0Skl6mNl2w8qWZXcdOyg19
 github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.2/go.mod h1:JYzLoEVeLXk+L4tn1+rrkfhkxl6mLDEVaDSvGq9og90=
 github.com/aws/aws-sdk-go-v2/service/sts v1.28.4 h1:Ppup1nVNAOWbBOrcoOxaxPeEnSFB2RnnQdguhXpmeQk=
 github.com/aws/aws-sdk-go-v2/service/sts v1.28.4/go.mod h1:+K1rNPVyGxkRuv9NNiaZ4YhBFuyw2MMA9SlIJ1Zlpz8=
-github.com/aws/smithy-go v1.20.1 h1:4SZlSlMr36UEqC7XOyRVb27XMeZubNcBNN+9IgEPIQw=
-github.com/aws/smithy-go v1.20.1/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E=
+github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q=
+github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E=
 github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
 github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
 github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
@@ -318,8 +318,8 @@ github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uq
 github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
 github.com/gen2brain/beeep v0.0.0-20220402123239-6a3042f4b71a h1:fwNLHrP5Rbg/mGSXCjtPdpbqv2GucVTA/KMi8wEm6mE=
 github.com/gen2brain/beeep v0.0.0-20220402123239-6a3042f4b71a/go.mod h1:/WeFVhhxMOGypVKS0w8DUJxUBbHypnWkUVnW7p5c9Pw=
-github.com/getkin/kin-openapi v0.123.0 h1:zIik0mRwFNLyvtXK274Q6ut+dPh6nlxBp0x7mNrPhs8=
-github.com/getkin/kin-openapi v0.123.0/go.mod h1:wb1aSZA/iWmorQP9KTAS/phLj/t17B5jT7+fS8ed9NM=
+github.com/getkin/kin-openapi v0.124.0 h1:VSFNMB9C9rTKBnQ/fpyDU8ytMTr4dWI9QovSKj9kz/M=
+github.com/getkin/kin-openapi v0.124.0/go.mod h1:wb1aSZA/iWmorQP9KTAS/phLj/t17B5jT7+fS8ed9NM=
 github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=
 github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
 github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
@@ -411,8 +411,8 @@ github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
 github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
 github.com/gohugoio/go-i18n/v2 v2.1.3-0.20230805085216-e63c13218d0e h1:QArsSubW7eDh8APMXkByjQWvuljwPGAGQpJEFn0F0wY=
 github.com/gohugoio/go-i18n/v2 v2.1.3-0.20230805085216-e63c13218d0e/go.mod h1:3Ltoo9Banwq0gOtcOwxuHG6omk+AwsQPADyw2vQYOJQ=
-github.com/gohugoio/hugo v0.124.0 h1:qt58dsFTFtDHofAcDBc3ej1RVLPDakdTIkUekSj5VwQ=
-github.com/gohugoio/hugo v0.124.0/go.mod h1:CPCoslX98OhB9fPukLYbwNzRasEcVclZ2ZSj6eDSEGo=
+github.com/gohugoio/hugo v0.125.3 h1:94q41c1EpMI+qTEYTSNqC6+O4Tfw/IkaTBYApt3niZ4=
+github.com/gohugoio/hugo v0.125.3/go.mod h1:gNl67fhGCJSKN+lDnivkzaT8z4oRNLYNKxwYIMA2vpY=
 github.com/gohugoio/hugo-goldmark-extensions/passthrough v0.2.0 h1:PCtO5l++psZf48yen2LxQ3JiOXxaRC6v0594NeHvGZg=
 github.com/gohugoio/hugo-goldmark-extensions/passthrough v0.2.0/go.mod h1:g9CCh+Ci2IMbPUrVJuXbBTrA+rIIx5+hDQ4EXYaQDoM=
 github.com/gohugoio/locales v0.14.0 h1:Q0gpsZwfv7ATHMbcTNepFd59H7GoykzWJIxi113XGDc=
@@ -469,6 +469,8 @@ github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
 github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
 github.com/google/go-github/v43 v43.0.1-0.20220414155304-00e42332e405 h1:DdHws/YnnPrSywrjNYu2lEHqYHWp/LnEx56w59esd54=
 github.com/google/go-github/v43 v43.0.1-0.20220414155304-00e42332e405/go.mod h1:4RgUDSnsxP19d65zJWqvqJ/poJxBCvmna50eXmIvoR8=
+github.com/google/go-github/v61 v61.0.0 h1:VwQCBwhyE9JclCI+22/7mLB1PuU9eowCXKY5pNlu1go=
+github.com/google/go-github/v61 v61.0.0/go.mod h1:0WR+KmsWX75G2EbpyGsGmradjo3IiciuI4BmdVCobQY=
 github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
 github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU=
 github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
@@ -750,8 +752,8 @@ github.com/outcaste-io/ristretto v0.2.3 h1:AK4zt/fJ76kjlYObOeNwh4T3asEuaCmp26pOv
 github.com/outcaste-io/ristretto v0.2.3/go.mod h1:W8HywhmtlopSB1jeMg3JtdIhf+DYkLAr0VN/s4+MHac=
 github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2DcNVpwGmV9E1BkGknEliJkfwQj0=
 github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhMYhSNPKjeNKa5WY9YCIEBRbNzFFPJbWO6Y=
-github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI=
-github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
+github.com/pelletier/go-toml/v2 v2.2.1 h1:9TA9+T8+8CUCO2+WYnDLCgrYi9+omqKXyjDtosvtEhg=
+github.com/pelletier/go-toml/v2 v2.2.1/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
 github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s=
 github.com/perimeterx/marshmallow v1.1.5/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0Vchtcl8we9tYaXw=
 github.com/philhofer/fwd v1.1.2 h1:bnDivRJ1EWPjUIRXV5KfORO897HTbpFAQddBdE8t7Gw=
@@ -802,8 +804,6 @@ github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU
 github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
-github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd h1:CmH9+J6ZSsIjUK3dcGsnCnO41eRBOnY12zwkn5qVwgc=
-github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk=
 github.com/sanity-io/litter v1.5.5 h1:iE+sBxPBzoK6uaEP5Lt3fHNgpKcHXc/A2HGETy0uJQo=
 github.com/sanity-io/litter v1.5.5/go.mod h1:9gzJgR2i4ZpjZHsKvUXIRQVk7P+yM3e+jAF7bU2UI5U=
 github.com/satori/go.uuid v1.2.1-0.20181028125025-b2ce2384e17b h1:gQZ0qzfKHQIybLANtM3mBXNUtOfsCFXeTsnBqCsx1KM=
 github.com/satori/go.uuid v1.2.1-0.20181028125025-b2ce2384e17b/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
 github.com/secure-systems-lab/go-securesystemslib v0.7.0 h1:OwvJ5jQf9LnIAS83waAjPbcMsODrTQUpJ02eNLUoxBg=
@@ -939,8 +939,8 @@ github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec
 github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
 github.com/yuin/goldmark v1.3.7/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
-github.com/yuin/goldmark v1.7.0 h1:EfOIvIMZIzHdB/R/zVrikYLPPwJlfMcNczJFMs1m6sA=
-github.com/yuin/goldmark v1.7.0/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E=
+github.com/yuin/goldmark v1.7.1 h1:3bajkSilaCbjdKVsKdZjZCLBNPL9pYzrCakKaf4U49U=
+github.com/yuin/goldmark v1.7.1/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E=
 github.com/yuin/goldmark-emoji v1.0.2 h1:c/RgTShNgHTtc6xdz2KKI74jJr6rWi7FPgnP9GAsO5s=
 github.com/yuin/goldmark-emoji v1.0.2/go.mod h1:RhP/RWpexdp+KHs7ghKnifRoIs/Bq4nDS7tRbCkOwKY=
 github.com/zclconf/go-cty v1.1.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s=
@@ -1170,8 +1170,8 @@ google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98
 google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
 google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de h1:F6qOa9AZTYJXOUEr4jDysRDLrm4PHePlge4v4TGAlxY=
 google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de/go.mod h1:VUhTRKeHn9wwcdrk73nvdC9gF178Tzhmt/qyaFcPLSo=
-google.golang.org/genproto/googleapis/api v0.0.0-20240227224415-6ceb2ff114de h1:jFNzHPIeuzhdRwVhbZdiym9q0ory/xY3sA+v2wPg8I0=
-google.golang.org/genproto/googleapis/api v0.0.0-20240227224415-6ceb2ff114de/go.mod h1:5iCWqnniDlqZHrd3neWVTOwvh/v6s3232omMecelax8=
+google.golang.org/genproto/googleapis/api v0.0.0-20240311132316-a219d84964c2 h1:rIo7ocm2roD9DcFIX67Ym8icoGCKSARAiPljFhh5suQ=
+google.golang.org/genproto/googleapis/api v0.0.0-20240311132316-a219d84964c2/go.mod h1:O1cOfN1Cy6QEYr7VxtjOyP5AdAuR0aJ/MYZaaof623Y=
 google.golang.org/genproto/googleapis/rpc v0.0.0-20240415180920-8c6c420018be h1:LG9vZxsWGOmUKieR8wPAUR3u3MpnYFQZROPIMaXh7/A=
 google.golang.org/genproto/googleapis/rpc v0.0.0-20240415180920-8c6c420018be/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY=
 google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
@@ -10,7 +10,11 @@ coderd_oauth2_external_requests_rate_limit_remaining{name="secondary-github",res
 # TYPE coderd_oauth2_external_requests_rate_limit_reset_in_seconds gauge
 coderd_oauth2_external_requests_rate_limit_reset_in_seconds{name="primary-github",resource="core"} 63.617162731
 coderd_oauth2_external_requests_rate_limit_reset_in_seconds{name="secondary-github",resource="core"} 121.82186601
-# HELP coderd_oauth2_external_requests_rate_limit_total The total number of allowed requests per interval.
+# HELP coderd_oauth2_external_requests_rate_limit The total number of allowed requests per interval.
+# TYPE coderd_oauth2_external_requests_rate_limit gauge
+coderd_oauth2_external_requests_rate_limit{name="primary-github",resource="core-unauthorized"} 5000
+coderd_oauth2_external_requests_rate_limit{name="secondary-github",resource="core-unauthorized"} 5000
+# HELP coderd_oauth2_external_requests_rate_limit_total DEPRECATED: use coderd_oauth2_external_requests_rate_limit instead
 # TYPE coderd_oauth2_external_requests_rate_limit_total gauge
 coderd_oauth2_external_requests_rate_limit_total{name="primary-github",resource="core-unauthorized"} 5000
 coderd_oauth2_external_requests_rate_limit_total{name="secondary-github",resource="core-unauthorized"} 5000
@@ -644,7 +648,10 @@ coderd_api_requests_processed_total{code="401",method="GET",path="/api/v2/users/
 coderd_api_requests_processed_total{code="401",method="GET",path="/api/v2/users/{user}/*"} 2
 coderd_api_requests_processed_total{code="401",method="GET",path="/api/v2/workspaces"} 1
 coderd_api_requests_processed_total{code="401",method="POST",path="/api/v2/files"} 1
-# HELP coderd_api_workspace_latest_build_total The latest workspace builds with a status.
+# HELP coderd_api_workspace_latest_build The latest workspace builds with a status.
+# TYPE coderd_api_workspace_latest_build gauge
+coderd_api_workspace_latest_build{status="succeeded"} 1
+# HELP coderd_api_workspace_latest_build_total DEPRECATED: use coderd_api_workspace_latest_build instead
 # TYPE coderd_api_workspace_latest_build_total gauge
 coderd_api_workspace_latest_build_total{status="succeeded"} 1
 # HELP coderd_insights_applications_usage_seconds The application usage per template.
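Both metric families in this testdata follow a rename-with-deprecation pattern: the `_total` suffix conventionally belongs to counters and these are gauges, so a suffix-free series is introduced while the old name is kept and re-described as deprecated, keeping existing dashboards working. A quick way to compare the two series against a running deployment (the metrics address here is an assumption, not a documented default):

    curl -s http://127.0.0.1:2112/metrics |
        grep -E 'coderd_oauth2_external_requests_rate_limit(_total)?\{'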
@@ -0,0 +1,147 @@
package main

import (
	"archive/zip"
	"bytes"
	"database/sql"
	"flag"
	"fmt"
	"io/fs"
	"os"
	"os/exec"
	"regexp"

	"github.com/google/go-cmp/cmp"
	"golang.org/x/xerrors"

	"github.com/coder/coder/v2/coderd/database/dbtestutil"
	"github.com/coder/coder/v2/coderd/database/migrations"
)

// This script validates the migration path between two versions.
// It performs the following actions:
// Given OLD_VERSION and NEW_VERSION:
// 1. Checks out $OLD_VERSION and inits schema at that version.
// 2. Checks out $NEW_VERSION and runs migrations.
// 3. Compares database schema post-migrate to that in VCS.
// If any diffs are found, exits with an error.
func main() {
	var (
		migrateFromVersion string
		migrateToVersion   string
		postgresURL        string
		skipCleanup        bool
	)

	flag.StringVar(&migrateFromVersion, "from", "", "Migrate from this version")
	flag.StringVar(&migrateToVersion, "to", "", "Migrate to this version")
	flag.StringVar(&postgresURL, "postgres-url", "postgresql://postgres:postgres@localhost:5432/postgres?sslmode=disable", "Postgres URL to migrate")
	flag.BoolVar(&skipCleanup, "skip-cleanup", false, "Do not clean up on exit.")
	flag.Parse()

	if migrateFromVersion == "" || migrateToVersion == "" {
		_, _ = fmt.Fprintln(os.Stderr, "must specify --from=<old version> and --to=<new version>")
		os.Exit(1)
	}

	_, _ = fmt.Fprintf(os.Stderr, "Read schema at version %q\n", migrateToVersion)
	expectedSchemaAfter, err := gitShow("coderd/database/dump.sql", migrateToVersion)
	if err != nil {
		panic(err)
	}

	_, _ = fmt.Fprintf(os.Stderr, "Read migrations for %q\n", migrateFromVersion)
	migrateFromFS, err := makeMigrateFS(migrateFromVersion)
	if err != nil {
		panic(err)
	}
	_, _ = fmt.Fprintf(os.Stderr, "Read migrations for %q\n", migrateToVersion)
	migrateToFS, err := makeMigrateFS(migrateToVersion)
	if err != nil {
		panic(err)
	}

	_, _ = fmt.Fprintf(os.Stderr, "Connect to postgres\n")
	conn, err := sql.Open("postgres", postgresURL)
	if err != nil {
		panic(err)
	}
	defer conn.Close()

	ver, err := checkMigrateVersion(conn)
	if err != nil {
		panic(err)
	}
	if ver < 0 {
		_, _ = fmt.Fprintf(os.Stderr, "No previous migration detected.\n")
	} else {
		_, _ = fmt.Fprintf(os.Stderr, "Detected migration version %d\n", ver)
	}

	_, _ = fmt.Fprintf(os.Stderr, "Init database at version %q\n", migrateFromVersion)
	if err := migrations.UpWithFS(conn, migrateFromFS); err != nil {
		panic(err)
	}

	_, _ = fmt.Fprintf(os.Stderr, "Migrate to version %q\n", migrateToVersion)
	if err := migrations.UpWithFS(conn, migrateToFS); err != nil {
		panic(err)
	}

	_, _ = fmt.Fprintf(os.Stderr, "Dump schema at version %q\n", migrateToVersion)
	dumpBytesAfter, err := dbtestutil.PGDumpSchemaOnly(postgresURL)
	if err != nil {
		panic(err)
	}

	if diff := cmp.Diff(string(dumpBytesAfter), string(stripGenPreamble(expectedSchemaAfter))); diff != "" {
		_, _ = fmt.Fprintf(os.Stderr, "Schema differs from expected after migration: %s\n", diff)
		os.Exit(1)
	}
	_, _ = fmt.Fprintf(os.Stderr, "OK\n")
}

func makeMigrateFS(version string) (fs.FS, error) {
	// Export the migrations from the requested version to a zip archive.
	out, err := exec.Command("git", "archive", "--format=zip", version, "coderd/database/migrations").CombinedOutput()
	if err != nil {
		return nil, xerrors.Errorf("git archive: %s\n", out)
	}
	// Make a zip.Reader on top of it. This implements fs.FS!
	zr, err := zip.NewReader(bytes.NewReader(out), int64(len(out)))
	if err != nil {
		return nil, xerrors.Errorf("create zip reader: %w", err)
	}
	// Sub-FS so it's rooted at the migrations dir.
	subbed, err := fs.Sub(zr, "coderd/database/migrations")
	if err != nil {
		return nil, xerrors.Errorf("sub fs: %w", err)
	}
	return subbed, nil
}

func gitShow(path, version string) ([]byte, error) {
	out, err := exec.Command("git", "show", version+":"+path).CombinedOutput() //nolint:gosec
	if err != nil {
		return nil, xerrors.Errorf("git show: %s\n", out)
	}
	return out, nil
}

func stripGenPreamble(bs []byte) []byte {
	return regexp.MustCompile(`(?im)^(-- Code generated.*DO NOT EDIT.)$`).ReplaceAll(bs, []byte{})
}

func checkMigrateVersion(conn *sql.DB) (int, error) {
	var version int
	rows, err := conn.Query(`SELECT version FROM schema_migrations LIMIT 1;`)
	if err != nil {
		// If the query fails, the schema_migrations table likely does not
		// exist yet, i.e. the database has not been migrated.
		return -1, nil
	}
	for rows.Next() {
		if err := rows.Scan(&version); err != nil {
			return 0, xerrors.Errorf("scan version: %w", err)
		}
	}
	return version, nil
}
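A hypothetical invocation of this new script might look as follows; the package path and version tags are assumptions, and it expects a reachable Postgres matching the default --postgres-url:

    # Validate that a schema initialized at the old tag and migrated up to
    # the new tag matches the dump.sql committed at the new tag.
    go run ./scripts/migrate-test --from=v2.9.0 --to=v2.10.0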
scripts/release.sh

@@ -44,12 +44,16 @@ EOH
 }

 branch=main
+remote=origin
 dry_run=0
 ref=
 increment=
 force=0
+script_check=1
+mainline=1
+channel=mainline

-args="$(getopt -o h -l dry-run,help,ref:,major,minor,patch,force -- "$@")"
+args="$(getopt -o h -l dry-run,help,ref:,mainline,stable,major,minor,patch,force,ignore-script-out-of-date -- "$@")"
 eval set -- "$args"
 while true; do
 	case "$1" in
@@ -61,6 +65,16 @@ while true; do
 		usage
 		exit 0
 		;;
+	--mainline)
+		mainline=1
+		channel=mainline
+		shift
+		;;
+	--stable)
+		mainline=0
+		channel=stable
+		shift
+		;;
 	--ref)
 		ref="$2"
 		shift 2
@@ -76,6 +90,12 @@ while true; do
 		force=1
 		shift
 		;;
+	# Allow the script to be run with an out-of-date script for
+	# development purposes.
+	--ignore-script-out-of-date)
+		script_check=0
+		shift
+		;;
 	--)
 		shift
 		break
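Taken together, the new options above enable channel-aware invocations along these lines (a sketch; exact behavior depends on the rest of the script below):

    ./scripts/release.sh --dry-run --minor    # mainline is the default channel
    ./scripts/release.sh --dry-run --stable --patch    # cut a stable-channel release
    ./scripts/release.sh --dry-run --ignore-script-out-of-date    # dev-only escape hatch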
@@ -87,88 +107,226 @@ while true; do
 done

 # Check dependencies.
-dependencies gh sort
+dependencies gh jq sort

 if [[ -z $increment ]]; then
 	# Default to patch versions.
 	increment="patch"
 fi

+# Check if the working directory is clean.
+if ! git diff --quiet --exit-code; then
+	log "Working directory is not clean, it is highly recommended to stash changes."
+	while [[ ! ${stash:-} =~ ^[YyNn]$ ]]; do
+		read -p "Stash changes? (y/n) " -n 1 -r stash
+		log
+	done
+	if [[ ${stash} =~ ^[Yy]$ ]]; then
+		maybedryrun "${dry_run}" git stash push --message "scripts/release.sh: autostash"
+	fi
+	log
+fi
+
+# Check if the main is up-to-date with the remote.
+log "Checking remote ${remote} for repo..."
+remote_url=$(git remote get-url "${remote}")
+# Allow either SSH or HTTPS URLs.
+if ! [[ ${remote_url} =~ [@/]github.com ]] && ! [[ ${remote_url} =~ [:/]coder/coder(\.git)?$ ]]; then
+	error "This script is only intended to be run with github.com/coder/coder repository set as ${remote}."
+fi
+
 # Make sure the repository is up-to-date before generating release notes.
-log "Fetching $branch and tags from origin..."
-git fetch --quiet --tags origin "$branch"
+log "Fetching ${branch} and tags from ${remote}..."
+git fetch --quiet --tags "${remote}" "$branch"

 # Resolve to the latest ref on origin/main unless otherwise specified.
-ref=$(git rev-parse --short "${ref:-origin/$branch}")
+ref_name=${ref:-${remote}/${branch}}
+ref=$(git rev-parse --short "${ref_name}")

 # Make sure that we're running the latest release script.
-if [[ -n $(git diff --name-status origin/"$branch" -- ./scripts/release.sh) ]]; then
+script_diff=$(git diff --name-status "${remote}/${branch}" -- scripts/release.sh)
+if [[ ${script_check} = 1 ]] && [[ -n ${script_diff} ]]; then
 	error "Release script is out-of-date. Please check out the latest version and try again."
 fi

-# Check the current version tag from GitHub (by number) using the API to
-# ensure no local tags are considered.
-log "Checking GitHub for latest release..."
-versions_out="$(gh api -H "Accept: application/vnd.github+json" /repos/coder/coder/git/refs/tags -q '.[].ref | split("/") | .[2]' | grep '^v' | sort -r -V)"
-mapfile -t versions <<<"$versions_out"
-old_version=${versions[0]}
-log "Latest release: $old_version"
+# Make sure no other release contains this ref.
+release_contains_ref="$(git branch --remotes --contains "${ref}" --list "${remote}/release/*" --format='%(refname)')"
+if [[ -n ${release_contains_ref} ]]; then
+	error "Ref ${ref_name} is already part of another release: $(git describe --always "${ref}") on ${release_contains_ref#"refs/remotes/${remote}/"}."
+fi
+
+log "Checking GitHub for latest release(s)..."
+
+# Check the latest version tag from GitHub (by version) using the API.
+versions_out="$(gh api -H "Accept: application/vnd.github+json" /repos/coder/coder/git/refs/tags -q '.[].ref | split("/") | .[2]' | grep '^v[0-9]' | sort -r -V)"
+mapfile -t versions <<<"${versions_out}"
+latest_mainline_version=${versions[0]}
+
+latest_stable_version="$(curl -fsSLI -o /dev/null -w "%{url_effective}" https://github.com/coder/coder/releases/latest)"
+latest_stable_version="${latest_stable_version#https://github.com/coder/coder/releases/tag/}"
+
+log "Latest mainline release: ${latest_mainline_version}"
+log "Latest stable release: ${latest_stable_version}"
+log
+
+old_version=${latest_mainline_version}
+if ((!mainline)); then
+	old_version=${latest_stable_version}
+fi

 trap 'log "Check commit metadata failed, you can try to set \"export CODER_IGNORE_MISSING_COMMIT_METADATA=1\" and try again, if you know what you are doing."' EXIT
 # shellcheck source=scripts/release/check_commit_metadata.sh
 source "$SCRIPT_DIR/release/check_commit_metadata.sh" "$old_version" "$ref"
 trap - EXIT
 log

 tag_version_args=(--old-version "$old_version" --ref "$ref" --"$increment")
 if ((force == 1)); then
 	tag_version_args+=(--force)
 fi
 log "Executing DRYRUN of release tagging..."
-new_version="$(execrelative ./release/tag_version.sh "${tag_version_args[@]}" --dry-run)"
+tag_version_out="$(execrelative ./release/tag_version.sh "${tag_version_args[@]}" --dry-run)"
 log
-read -p "Continue? (y/n) " -n 1 -r continue_release
-log
+while [[ ! ${continue_release:-} =~ ^[YyNn]$ ]]; do
+	read -p "Continue? (y/n) " -n 1 -r continue_release
+	log
+done
 if ! [[ $continue_release =~ ^[Yy]$ ]]; then
 	exit 0
 fi

-release_notes="$(execrelative ./release/generate_release_notes.sh --check-for-changelog --old-version "$old_version" --new-version "$new_version" --ref "$ref")"
+mapfile -d ' ' -t tag_version <<<"$tag_version_out"
+release_branch=${tag_version[0]}
+new_version=${tag_version[1]}
+new_version="${new_version%$'\n'}" # Remove the trailing newline.
+
+release_notes="$(execrelative ./release/generate_release_notes.sh --old-version "$old_version" --new-version "$new_version" --ref "$ref")"
+
+release_notes_file="build/RELEASE-${new_version}.md"
+if ((dry_run)); then
+	release_notes_file="build/RELEASE-${new_version}-DRYRUN.md"
+fi
+get_editor() {
+	if command -v editor >/dev/null; then
+		readlink -f "$(command -v editor || true)"
+	elif [[ -n ${GIT_EDITOR:-} ]]; then
+		echo "${GIT_EDITOR}"
+	elif [[ -n ${EDITOR:-} ]]; then
+		echo "${EDITOR}"
+	fi
+}
+editor="$(get_editor)"
+write_release_notes() {
+	if [[ -z ${editor} ]]; then
+		log "Release notes written to $release_notes_file, you can now edit this file manually."
+	else
+		log "Release notes written to $release_notes_file, you can now edit this file manually or via your editor."
+	fi
+	echo -e "${release_notes}" >"${release_notes_file}"
+}
+log "Writing release notes to ${release_notes_file}"
+if [[ -f ${release_notes_file} ]]; then
+	log
+	while [[ ! ${overwrite:-} =~ ^[YyNn]$ ]]; do
+		read -p "Release notes already exists, overwrite? (y/n) " -n 1 -r overwrite
+		log
+	done
+	log
+	if [[ ${overwrite} =~ ^[Yy]$ ]]; then
+		write_release_notes
+	else
+		log "Release notes not overwritten, using existing release notes."
+		release_notes="$(<"$release_notes_file")"
+	fi
+else
+	write_release_notes
+fi
+log
+
+if [[ -z ${editor} ]]; then
+	log "No editor found, please set the \$EDITOR environment variable for edit prompt."
+else
+	while [[ ! ${edit:-} =~ ^[YyNn]$ ]]; do
+		read -p "Edit release notes in \"${editor}\"? (y/n) " -n 1 -r edit
+		log
+	done
+	if [[ ${edit} =~ ^[Yy]$ ]]; then
+		"${editor}" "${release_notes_file}"
+		release_notes2="$(<"$release_notes_file")"
+		if [[ "${release_notes}" != "${release_notes2}" ]]; then
+			log "Release notes have been updated!"
+			release_notes="${release_notes2}"
+		else
+			log "No changes detected..."
+		fi
+	fi
+fi
+log

-read -p "Preview release notes? (y/n) " -n 1 -r show_reply
-log
-if [[ $show_reply =~ ^[Yy]$ ]]; then
+while [[ ! ${preview:-} =~ ^[YyNn]$ ]]; do
+	read -p "Preview release notes? (y/n) " -n 1 -r preview
+	log
+done
+if [[ ${preview} =~ ^[Yy]$ ]]; then
 	log
 	echo -e "$release_notes\n"
 fi

-read -p "Create release? (y/n) " -n 1 -r create
-log
-if ! [[ $create =~ ^[Yy]$ ]]; then
+while [[ ! ${create:-} =~ ^[YyNn]$ ]]; do
+	read -p "Create, build and publish release? (y/n) " -n 1 -r create
+	log
+done
+if ! [[ ${create} =~ ^[Yy]$ ]]; then
 	exit 0
 fi

+log
+
 # Run without dry-run to actually create the tag, note we don't update the
 # new_version variable here to ensure we're pushing what we showed before.
 maybedryrun "$dry_run" execrelative ./release/tag_version.sh "${tag_version_args[@]}" >/dev/null
+maybedryrun "$dry_run" git push -u origin "$release_branch"
 maybedryrun "$dry_run" git push --tags -u origin "$new_version"

 log
+log "Release tags for ${new_version} created successfully and pushed to ${remote}!"
+
+log
+# Write to a tmp file for ease of debugging.
+release_json_file=$(mktemp -t coder-release.json)
+log "Writing release JSON to ${release_json_file}"
+jq -n \
+	--argjson dry_run "${dry_run}" \
+	--arg release_channel "${channel}" \
+	--arg release_notes "${release_notes}" \
+	'{dry_run: ($dry_run > 0) | tostring, release_channel: $release_channel, release_notes: $release_notes}' \
+	>"${release_json_file}"
+
+log "Running release workflow..."
+maybedryrun "${dry_run}" cat "${release_json_file}" |
+	maybedryrun "${dry_run}" gh workflow run release.yaml --json --ref "${new_version}"
+
+log
+log "Release workflow started successfully!"
+
+if ((dry_run)); then
+	# We can't watch the release.yaml workflow if we're in dry-run mode.
+	exit 0
+fi
+
 log
-read -p "Watch release? (y/n) " -n 1 -r watch
-log
-if ! [[ $watch =~ ^[Yy]$ ]]; then
+while [[ ! ${watch:-} =~ ^[YyNn]$ ]]; do
+	read -p "Watch release? (y/n) " -n 1 -r watch
+	log
+done
+if ! [[ ${watch} =~ ^[Yy]$ ]]; then
 	exit 0
 fi

 log 'Waiting for job to become "in_progress"...'

-# Wait at most 3 minutes (3*60)/3 = 60 for the job to start.
+# Wait at most 10 minutes (60*10/60) for the job to start.
 for _ in $(seq 1 60); do
 	output="$(
 		# Output:
@@ -181,7 +339,7 @@ for _ in $(seq 1 60):
 	)"
 	mapfile -t run <<<"$output"
 	if [[ ${run[1]} != "in_progress" ]]; then
-		sleep 3
+		sleep 10
 		continue
 	fi
 	gh run watch --exit-status "${run[0]}"
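For reference, the jq invocation above produces a payload shaped like this before it is piped to gh workflow run; the values here are illustrative:

    jq -n \
        --argjson dry_run 0 \
        --arg release_channel stable \
        --arg release_notes '## Changelog' \
        '{dry_run: ($dry_run > 0) | tostring, release_channel: $release_channel, release_notes: $release_notes}'
    # => {"dry_run": "false", "release_channel": "stable", "release_notes": "## Changelog"}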
scripts/release/check_commit_metadata.sh

@@ -19,26 +19,29 @@ source "$(dirname "$(dirname "${BASH_SOURCE[0]}")")/lib.sh"
 from_ref=${1:-}
 to_ref=${2:-}

-if [[ -z $from_ref ]]; then
+if [[ -z ${from_ref} ]]; then
 	error "No from_ref specified"
 fi
-if [[ -z $to_ref ]]; then
+if [[ -z ${to_ref} ]]; then
 	error "No to_ref specified"
 fi

-range="$from_ref..$to_ref"
+range="${from_ref}..${to_ref}"

 # Check dependencies.
 dependencies gh

 COMMIT_METADATA_BREAKING=0
-declare -A COMMIT_METADATA_TITLE COMMIT_METADATA_CATEGORY COMMIT_METADATA_AUTHORS
+declare -a COMMIT_METADATA_COMMITS
+declare -A COMMIT_METADATA_TITLE COMMIT_METADATA_HUMAN_TITLE COMMIT_METADATA_CATEGORY COMMIT_METADATA_AUTHORS

 # This environment variable can be set to 1 to ignore missing commit metadata,
 # useful for dry-runs.
 ignore_missing_metadata=${CODER_IGNORE_MISSING_COMMIT_METADATA:-0}

 main() {
 	log "Checking commit metadata for changes between ${from_ref} and ${to_ref}..."

 	# Match a commit prefix pattern, e.g. feat: or feat(site):.
 	prefix_pattern="^([a-z]+)(\([^)]+\))?:"

@@ -55,14 +58,93 @@
 	security_label=security
 	security_category=security

-	# Get abbreviated and full commit hashes and titles for each commit.
-	git_log_out="$(git log --no-merges --pretty=format:"%h %H %s" "$range")"
-	mapfile -t commits <<<"$git_log_out"
+	# Order is important here, first partial match wins.
+	declare -A humanized_areas=(
+		["agent/agentssh"]="Agent SSH"
+		["coderd/database"]="Database"
+		["enterprise/audit"]="Auditing"
+		["enterprise/cli"]="CLI"
+		["enterprise/coderd"]="Server"
+		["enterprise/dbcrypt"]="Database"
+		["enterprise/derpmesh"]="Networking"
+		["enterprise/provisionerd"]="Provisioner"
+		["enterprise/tailnet"]="Networking"
+		["enterprise/wsproxy"]="Workspace Proxy"
+		[agent]="Agent"
+		[cli]="CLI"
+		[coderd]="Server"
+		[codersdk]="SDK"
+		[docs]="Documentation"
+		[enterprise]="Enterprise"
+		[examples]="Examples"
+		[helm]="Helm"
+		[install.sh]="Installer"
+		[provisionersdk]="SDK"
+		[provisionerd]="Provisioner"
+		[provisioner]="Provisioner"
+		[pty]="CLI"
+		[scaletest]="Scale Testing"
+		[site]="Dashboard"
+		[support]="Support"
+		[tailnet]="Networking"
+	)

-	# If this is a tag, use rev-list to find the commit it points to.
-	from_commit=$(git rev-list -n 1 "$from_ref")
-	# Get the committer date of the commit so that we can list PRs merged.
-	from_commit_date=$(git show --no-patch --date=short --format=%cd "$from_commit")
+	# Get hashes for all cherry-picked commits between the selected ref
+	# and main. These are sorted by commit title so that we can group
+	# two cherry-picks together.
+	declare -A cherry_pick_commits
+	git_cherry_out=$(
+		{
+			git log --no-merges --cherry-mark --pretty=format:"%m %H %s" "${to_ref}...origin/main"
+			git log --no-merges --cherry-mark --pretty=format:"%m %H %s" "${from_ref}...origin/main"
+		} | { grep '^=' || true; } | sort -u | sort -k3
+	)
+	if [[ -n ${git_cherry_out} ]]; then
+		mapfile -t cherry_picks <<<"${git_cherry_out}"
+		# Iterate over the array in groups of two
+		for ((i = 0; i < ${#cherry_picks[@]}; i += 2)); do
+			mapfile -d ' ' -t parts1 <<<"${cherry_picks[i]}"
+			mapfile -d ' ' -t parts2 <<<"${cherry_picks[i + 1]}"
+			commit1=${parts1[1]}
+			title1=${parts1[*]:2}
+			commit2=${parts2[1]}
+			title2=${parts2[*]:2}
+
+			if [[ ${title1} != "${title2}" ]]; then
+				error "Invariant failed, cherry-picked commits have different titles: ${title1} != ${title2}"
+			fi
+
+			cherry_pick_commits[${commit1}]=${commit2}
+			cherry_pick_commits[${commit2}]=${commit1}
+		done
+	fi
+
+	# Get abbreviated and full commit hashes and titles for each commit.
+	git_log_out="$(git log --no-merges --left-right --pretty=format:"%m %h %H %s" "${range}")"
+	if [[ -z ${git_log_out} ]]; then
+		error "No commits found in range ${range}"
+	fi
+	mapfile -t commits <<<"${git_log_out}"
+
+	# Get the lowest committer date of the commits so that we can fetch
+	# the PRs that were merged.
+	lookback_date=$(
+		{
+			# Check all included commits.
+			for commit in "${commits[@]}"; do
+				mapfile -d ' ' -t parts <<<"${commit}"
+				sha_long=${parts[2]}
+				git show --no-patch --date=short --format='%cd' "${sha_long}"
+			done
+			# Include cherry-picks and their original commits (the
+			# original commit may be older than the cherry pick).
+			for cherry_pick in "${cherry_picks[@]}"; do
+				mapfile -d ' ' -t parts <<<"${cherry_pick}"
+				sha_long=${parts[1]}
+				git show --no-patch --date=short --format='%cd' "${sha_long}"
+			done
+		} | sort -t- -n | head -n 1
+	)

 	# Get the labels for all PRs merged since the last release, this is
 	# inexact based on date, so a few PRs part of the previous release may
@@ -78,84 +160,135 @@ main() {
 		--base main \
 		--state merged \
 		--limit 10000 \
-		--search "merged:>=$from_commit_date" \
+		--search "merged:>=${lookback_date}" \
 		--json mergeCommit,labels,author \
 		--jq '.[] | "\( .mergeCommit.oid ) author:\( .author.login ) labels:\(["label:\( .labels[].name )"] | join(" "))"'
 	)"

 	declare -A authors labels
-	if [[ -n $pr_list_out ]]; then
-		mapfile -t pr_metadata_raw <<<"$pr_list_out"
+	if [[ -n ${pr_list_out} ]]; then
+		mapfile -t pr_metadata_raw <<<"${pr_list_out}"

 		for entry in "${pr_metadata_raw[@]}"; do
 			commit_sha_long=${entry%% *}
 			commit_author=${entry#* author:}
 			commit_author=${commit_author%% *}
-			authors[$commit_sha_long]=$commit_author
+			authors[${commit_sha_long}]=${commit_author}
 			all_labels=${entry#* labels:}
-			labels[$commit_sha_long]=$all_labels
+			labels[${commit_sha_long}]=${all_labels}
 		done
 	fi

 	for commit in "${commits[@]}"; do
-		mapfile -d ' ' -t parts <<<"$commit"
-		commit_sha_short=${parts[0]}
-		commit_sha_long=${parts[1]}
-		commit_prefix=${parts[2]}
+		mapfile -d ' ' -t parts <<<"${commit}"
+		left_right=${parts[0]} # From `git log --left-right`, see `man git-log` for details.
+		commit_sha_short=${parts[1]}
+		commit_sha_long=${parts[2]}
+		commit_prefix=${parts[3]}
+		title=${parts[*]:3}
+		title=${title%$'\n'}
+		title_no_prefix=${parts[*]:4}
+		title_no_prefix=${title_no_prefix%$'\n'}
+
+		# For COMMIT_METADATA_COMMITS in case of cherry-pick override.
+		commit_sha_long_orig=${commit_sha_long}
+
+		# Check if this is a potential cherry-pick.
+		if [[ -v cherry_pick_commits[${commit_sha_long}] ]]; then
+			# Is this the cherry-picked or the original commit?
+			if [[ ! -v authors[${commit_sha_long}] ]] || [[ ! -v labels[${commit_sha_long}] ]]; then
+				log "Cherry-picked commit ${commit_sha_long}, checking original commit ${cherry_pick_commits[${commit_sha_long}]}"
+				# Use the original commit's metadata from GitHub.
+				commit_sha_long=${cherry_pick_commits[${commit_sha_long}]}
+			else
+				# Skip the cherry-picked commit, we only need the original.
+				log "Skipping commit ${commit_sha_long} cherry-picked into ${from_ref} as ${cherry_pick_commits[${commit_sha_long}]} (${title})"
+				continue
+			fi
+		fi
+
+		if [[ ${left_right} == "<" ]]; then
+			# Skip commits that are already in main.
+			log "Skipping commit ${commit_sha_short} from other branch (${commit_sha_long} ${title})"
+			continue
+		fi
+
+		COMMIT_METADATA_COMMITS+=("${commit_sha_long_orig}")

 		# Safety-check, guarantee all commits had their metadata fetched.
-		if [[ ! -v authors[$commit_sha_long] ]] || [[ ! -v labels[$commit_sha_long] ]]; then
-			if [[ $ignore_missing_metadata != 1 ]]; then
-				error "Metadata missing for commit $commit_sha_short"
+		if [[ ! -v authors[${commit_sha_long}] ]] || [[ ! -v labels[${commit_sha_long}] ]]; then
+			if [[ ${ignore_missing_metadata} != 1 ]]; then
+				error "Metadata missing for commit ${commit_sha_short} (${commit_sha_long})"
 			else
-				log "WARNING: Metadata missing for commit $commit_sha_short"
+				log "WARNING: Metadata missing for commit ${commit_sha_short} (${commit_sha_long})"
 			fi
 		fi

 		# Store the commit title for later use.
-		title=${parts[*]:2}
-		title=${title%$'\n'}
-		COMMIT_METADATA_TITLE[$commit_sha_short]=$title
-		if [[ -v authors[$commit_sha_long] ]]; then
-			COMMIT_METADATA_AUTHORS[$commit_sha_short]="@${authors[$commit_sha_long]}"
+		COMMIT_METADATA_TITLE[${commit_sha_short}]=${title}
+		if [[ -v authors[${commit_sha_long}] ]]; then
+			COMMIT_METADATA_AUTHORS[${commit_sha_short}]="@${authors[${commit_sha_long}]}"
 		fi

+		# Create humanized titles where possible, examples:
+		#
+		# "feat: add foo" -> "Add foo".
+		# "feat(site): add bar" -> "Dashboard: Add bar".
+		COMMIT_METADATA_HUMAN_TITLE[${commit_sha_short}]=${title}
+		if [[ ${commit_prefix} =~ ${prefix_pattern} ]]; then
+			sub=${BASH_REMATCH[2]}
+			if [[ -z ${sub} ]]; then
+				# No parenthesis found, simply drop the prefix.
+				COMMIT_METADATA_HUMAN_TITLE[${commit_sha_short}]="${title_no_prefix^}"
+			else
+				# Drop the prefix and replace it with a humanized area,
+				# leave as-is for unknown areas.
+				sub=${sub#(}
+				for area in "${!humanized_areas[@]}"; do
+					if [[ ${sub} = "${area}"* ]]; then
+						COMMIT_METADATA_HUMAN_TITLE[${commit_sha_short}]="${humanized_areas[${area}]}: ${title_no_prefix^}"
+						break
+					fi
+				done
+			fi
+		fi
+
 		# First, check the title for breaking changes. This avoids doing a
 		# GH API request if there's a match.
-		if [[ $commit_prefix =~ $breaking_title ]] || [[ ${labels[$commit_sha_long]:-} = *"label:$breaking_label"* ]]; then
-			COMMIT_METADATA_CATEGORY[$commit_sha_short]=$breaking_category
+		if [[ ${commit_prefix} =~ ${breaking_title} ]] || [[ ${labels[${commit_sha_long}]:-} = *"label:${breaking_label}"* ]]; then
+			COMMIT_METADATA_CATEGORY[${commit_sha_short}]=${breaking_category}
 			COMMIT_METADATA_BREAKING=1
 			continue
-		elif [[ ${labels[$commit_sha_long]:-} = *"label:$security_label"* ]]; then
-			COMMIT_METADATA_CATEGORY[$commit_sha_short]=$security_category
+		elif [[ ${labels[${commit_sha_long}]:-} = *"label:${security_label}"* ]]; then
+			COMMIT_METADATA_CATEGORY[${commit_sha_short}]=${security_category}
 			continue
-		elif [[ ${labels[$commit_sha_long]:-} = *"label:$experimental_label"* ]]; then
-			COMMIT_METADATA_CATEGORY[$commit_sha_short]=$experimental_category
+		elif [[ ${labels[${commit_sha_long}]:-} = *"label:${experimental_label}"* ]]; then
+			COMMIT_METADATA_CATEGORY[${commit_sha_short}]=${experimental_category}
 			continue
 		fi

-		if [[ $commit_prefix =~ $prefix_pattern ]]; then
+		if [[ ${commit_prefix} =~ ${prefix_pattern} ]]; then
 			commit_prefix=${BASH_REMATCH[1]}
 		fi
-		case $commit_prefix in
+		case ${commit_prefix} in
 		# From: https://github.com/commitizen/conventional-commit-types
 		feat | fix | docs | style | refactor | perf | test | build | ci | chore | revert)
-			COMMIT_METADATA_CATEGORY[$commit_sha_short]=$commit_prefix
+			COMMIT_METADATA_CATEGORY[${commit_sha_short}]=${commit_prefix}
 			;;
 		*)
-			COMMIT_METADATA_CATEGORY[$commit_sha_short]=other
+			COMMIT_METADATA_CATEGORY[${commit_sha_short}]=other
 			;;
 		esac
 	done
 }

 declare_print_commit_metadata() {
-	declare -p COMMIT_METADATA_BREAKING COMMIT_METADATA_TITLE COMMIT_METADATA_CATEGORY COMMIT_METADATA_AUTHORS
+	declare -p COMMIT_METADATA_COMMITS COMMIT_METADATA_BREAKING COMMIT_METADATA_TITLE COMMIT_METADATA_HUMAN_TITLE COMMIT_METADATA_CATEGORY COMMIT_METADATA_AUTHORS
 }

 export_commit_metadata() {
 	_COMMIT_METADATA_CACHE="${range}:$(declare_print_commit_metadata)"
-	export _COMMIT_METADATA_CACHE COMMIT_METADATA_BREAKING COMMIT_METADATA_TITLE COMMIT_METADATA_CATEGORY COMMIT_METADATA_AUTHORS
+	export _COMMIT_METADATA_CACHE COMMIT_METADATA_COMMITS COMMIT_METADATA_BREAKING COMMIT_METADATA_TITLE COMMIT_METADATA_HUMAN_TITLE COMMIT_METADATA_CATEGORY COMMIT_METADATA_AUTHORS
 }

 # _COMMIT_METADATA_CACHE is used to cache the results of this script in
@@ -163,7 +296,7 @@ export_commit_metadata() {
 if [[ ${_COMMIT_METADATA_CACHE:-} == "${range}:"* ]]; then
 	eval "${_COMMIT_METADATA_CACHE#*:}"
 else
-	if [[ $ignore_missing_metadata == 1 ]]; then
+	if [[ ${ignore_missing_metadata} == 1 ]]; then
 		log "WARNING: Ignoring missing commit metadata, breaking changes may be missed."
 	fi
 	main
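The cherry-pick detection added above hinges on git's --cherry-mark: commits found on both sides of a symmetric range are marked with "=", and the script pairs those up by title to map a cherry-pick back to its original commit. A standalone sketch of the underlying query (branch names illustrative):

    # "=" marks equivalent commits present on both sides of the range.
    git log --no-merges --cherry-mark --pretty=format:"%m %h %s" \
        "release/2.10...origin/main" | { grep '^=' || true; } | sort -k3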
scripts/release/generate_release_notes.sh

@@ -18,16 +18,12 @@ source "$(dirname "$(dirname "${BASH_SOURCE[0]}")")/lib.sh"
 old_version=
 new_version=
 ref=
-check_for_changelog=0
+mainline=1

-args="$(getopt -o '' -l check-for-changelog,old-version:,new-version:,ref: -- "$@")"
-eval set -- "$args"
+args="$(getopt -o '' -l old-version:,new-version:,ref:,mainline,stable -- "$@")"
+eval set -- "${args}"
 while true; do
 	case "$1" in
-	--check-for-changelog)
-		check_for_changelog=1
-		shift
-		;;
 	--old-version)
 		old_version="$2"
 		shift 2
@@ -40,6 +36,14 @@ while true; do
 		ref="$2"
 		shift 2
 		;;
+	--mainline)
+		mainline=1
+		shift
+		;;
+	--stable)
+		mainline=0
+		shift
+		;;
 	--)
 		shift
 		break
@@ -53,34 +57,31 @@ done
 # Check dependencies.
 dependencies gh sort

-if [[ -z $old_version ]]; then
+if [[ -z ${old_version} ]]; then
 	error "No old version specified"
 fi
-if [[ -z $new_version ]]; then
+if [[ -z ${new_version} ]]; then
 	error "No new version specified"
 fi
-if [[ $new_version != v* ]]; then
+if [[ ${new_version} != v* ]]; then
 	error "New version must start with a v"
 fi
-if [[ -z $ref ]]; then
+if [[ -z ${ref} ]]; then
 	error "No ref specified"
 fi

-# Use a manual changelog, if present
-changelog_path="$(git rev-parse --show-toplevel)/docs/changelogs/$new_version.md"
-if [ "$check_for_changelog" -eq 1 ]; then
-	if [ -f "$changelog_path" ]; then
-		cat "$changelog_path"
-		exit 0
-	fi
-fi
-
 # shellcheck source=scripts/release/check_commit_metadata.sh
-source "$SCRIPT_DIR/check_commit_metadata.sh" "$old_version" "$ref"
+source "${SCRIPT_DIR}/check_commit_metadata.sh" "${old_version}" "${ref}"

 # Sort commits by title prefix, then by date, only return sha at the end.
-git_log_out="$(git log --no-merges --pretty=format:"%ct %h %s" "$old_version..$ref" | sort -k3,3 -k1,1n | cut -d' ' -f2)"
-mapfile -t commits <<<"$git_log_out"
+git_show_out="$(
+	{
+		echo "${COMMIT_METADATA_COMMITS[@]}" |
+			tr ' ' '\n' |
+			xargs git show --no-patch --pretty=format:"%ct %h %s"
+	} | sort -k3,3 -k1,1n | cut -d' ' -f2
+)"
+mapfile -t commits <<<"${git_show_out}"

 # From: https://github.com/commitizen/conventional-commit-types
 # NOTE(mafredri): These need to be supported in check_commit_metadata.sh as well.
@@ -121,56 +122,80 @@ declare -A section_titles=(
 # Verify that all items in section_order exist as keys in section_titles and
 # vice-versa.
 for cat in "${section_order[@]}"; do
-	if [[ " ${!section_titles[*]} " != *" $cat "* ]]; then
-		error "BUG: category $cat does not exist in section_titles"
+	if [[ " ${!section_titles[*]} " != *" ${cat} "* ]]; then
+		error "BUG: category ${cat} does not exist in section_titles"
 	fi
 done
 for cat in "${!section_titles[@]}"; do
-	if [[ " ${section_order[*]} " != *" $cat "* ]]; then
-		error "BUG: Category $cat does not exist in section_order"
+	if [[ " ${section_order[*]} " != *" ${cat} "* ]]; then
+		error "BUG: Category ${cat} does not exist in section_order"
 	fi
 done

 for commit in "${commits[@]}"; do
-	line="- $commit ${COMMIT_METADATA_TITLE[$commit]}"
-	if [[ -v COMMIT_METADATA_AUTHORS[$commit] ]]; then
-		line+=" (${COMMIT_METADATA_AUTHORS[$commit]})"
+	title=${COMMIT_METADATA_TITLE[${commit}]}
+	if [[ -v COMMIT_METADATA_HUMAN_TITLE[${commit}] ]]; then
+		title=${COMMIT_METADATA_HUMAN_TITLE[${commit}]}
 	fi
+
+	if [[ ${title} =~ \(#[0-9]*\)$ ]]; then
+		title="${title%)}, ${commit})"
+	else
+		title="${title} (${commit})"
+	fi
+	line="- ${title}"
+	line=${line//) (/, )}
+	if [[ -v COMMIT_METADATA_AUTHORS[${commit}] ]]; then
+		line+=" (${COMMIT_METADATA_AUTHORS[${commit}]})"
+	fi

 	# Default to "other" category.
 	cat=other
 	for c in "${!section_titles[@]}"; do
-		if [[ $c == "${COMMIT_METADATA_CATEGORY[$commit]}" ]]; then
-			cat=$c
+		if [[ ${c} == "${COMMIT_METADATA_CATEGORY[${commit}]}" ]]; then
+			cat=${c}
 			break
 		fi
 	done
-	declare "$cat"_changelog+="$line"$'\n'
+	declare "${cat}"_changelog+="${line}"$'\n'
 done

 changelog="$(
 	for cat in "${section_order[@]}"; do
 		changes="$(eval "echo -e \"\${${cat}_changelog:-}\"")"
 		if ((${#changes} > 0)); then
-			echo -e "\n### ${section_titles["$cat"]}\n"
-			if [[ $cat == experimental ]]; then
+			echo -e "\n### ${section_titles["${cat}"]}\n"
+			if [[ ${cat} == experimental ]]; then
 				echo -e "These changes are feature-flagged and can be enabled with the \`--experiments\` server flag. They may change or be removed in future releases.\n"
 			fi
-			echo -e "$changes"
+			echo -e "${changes}"
 		fi
 	done
 )"

-image_tag="$(execrelative ../image_tag.sh --version "$new_version")"
+image_tag="$(execrelative ../image_tag.sh --version "${new_version}")"

+blurb=
+stable_since=
+if ((mainline)); then
+	blurb="
+> [!NOTE]
+> This is a mainline Coder release. We advise enterprise customers without a staging environment to install our [latest stable release](https://github.com/coder/coder/releases/latest) while we refine this version. Learn more about our [Release Schedule](https://coder.com/docs/v2/latest/install/releases).
+"
+else
+	# Date format: April 23, 2024
+	d=$(date +'%B %d, %Y')
+	stable_since="> ## Stable (since ${d})"$'\n\n'
+fi
+
-echo -e "## Changelog
+echo -e "${stable_since}## Changelog

-$changelog
+${blurb}${changelog}

-Compare: [\`$old_version...$new_version\`](https://github.com/coder/coder/compare/$old_version...$new_version)
+Compare: [\`${old_version}...${new_version}\`](https://github.com/coder/coder/compare/${old_version}...${new_version})

 ## Container image

-- \`docker pull $image_tag\`
+- \`docker pull ${image_tag}\`

 ## Install/upgrade
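The title rewriting above folds the short commit hash into an existing PR reference at the end of a title. A minimal sketch of that expansion logic in isolation (values illustrative):

    title='Dashboard: Add bar (#1234)' commit=abc1234
    # Append the commit inside the trailing "(#NNNN)" if present.
    if [[ ${title} =~ \(#[0-9]*\)$ ]]; then title="${title%)}, ${commit})"; fi
    echo "- ${title}"    # => - Dashboard: Add bar (#1234, abc1234)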
|
@ -0,0 +1,223 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"slices"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/google/go-github/v61/github"
|
||||
"golang.org/x/mod/semver"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"cdr.dev/slog"
|
||||
"cdr.dev/slog/sloggers/sloghuman"
|
||||
"github.com/coder/coder/v2/cli/cliui"
|
||||
"github.com/coder/serpent"
|
||||
)
|
||||
|
||||
const (
|
||||
owner = "coder"
|
||||
repo = "coder"
|
||||
)
|
||||
|
||||
func main() {
|
||||
logger := slog.Make(sloghuman.Sink(os.Stderr)).Leveled(slog.LevelDebug)
|
||||
|
||||
var ghToken string
|
||||
var dryRun bool
|
||||
|
||||
cmd := serpent.Command{
|
||||
Use: "release <subcommand>",
|
||||
Short: "Prepare, create and publish releases.",
|
||||
Options: serpent.OptionSet{
|
||||
{
|
||||
Flag: "gh-token",
|
||||
Description: "GitHub personal access token.",
|
||||
Env: "GH_TOKEN",
|
||||
Value: serpent.StringOf(&ghToken),
|
||||
},
|
||||
{
|
||||
Flag: "dry-run",
|
||||
FlagShorthand: "n",
|
||||
Description: "Do not make any changes, only print what would be done.",
|
||||
Value: serpent.BoolOf(&dryRun),
|
||||
},
|
||||
},
|
||||
Children: []*serpent.Command{
|
||||
{
|
||||
Use: "promote <version>",
|
||||
Short: "Promote version to stable.",
|
||||
Handler: func(inv *serpent.Invocation) error {
|
||||
ctx := inv.Context()
|
||||
if len(inv.Args) == 0 {
|
||||
return xerrors.New("version argument missing")
|
||||
}
|
||||
if !dryRun && ghToken == "" {
|
||||
return xerrors.New("GitHub personal access token is required, use --gh-token or GH_TOKEN")
|
||||
}
|
||||
|
||||
err := promoteVersionToStable(ctx, inv, logger, ghToken, dryRun, inv.Args[0])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
},
|
		},
	}

	err := cmd.Invoke().WithOS().Run()
	if err != nil {
		if errors.Is(err, cliui.Canceled) {
			os.Exit(1)
		}
		logger.Error(context.Background(), "release command failed", "err", err)
		os.Exit(1)
	}
}

//nolint:revive // Allow dryRun control flag.
func promoteVersionToStable(ctx context.Context, inv *serpent.Invocation, logger slog.Logger, ghToken string, dryRun bool, version string) error {
	client := github.NewClient(nil)
	if ghToken != "" {
		client = client.WithAuthToken(ghToken)
	}

	logger = logger.With(slog.F("dry_run", dryRun), slog.F("version", version))

	logger.Info(ctx, "checking current stable release")

	// Check if the version is already the latest stable release.
	currentStable, _, err := client.Repositories.GetLatestRelease(ctx, "coder", "coder")
	if err != nil {
		return xerrors.Errorf("get latest release failed: %w", err)
	}

	logger = logger.With(slog.F("stable_version", currentStable.GetTagName()))
	logger.Info(ctx, "found current stable release")

	if currentStable.GetTagName() == version {
		return xerrors.Errorf("version %q is already the latest stable release", version)
	}

	// Ensure the version is a valid release.
	perPage := 20
	latestReleases, _, err := client.Repositories.ListReleases(ctx, owner, repo, &github.ListOptions{
		Page:    0,
		PerPage: perPage,
	})
	if err != nil {
		return xerrors.Errorf("list releases failed: %w", err)
	}

	var releaseVersions []string
	var newStable *github.RepositoryRelease
	for _, r := range latestReleases {
		releaseVersions = append(releaseVersions, r.GetTagName())
		if r.GetTagName() == version {
			newStable = r
		}
	}
	semver.Sort(releaseVersions)
	slices.Reverse(releaseVersions)

	switch {
	case len(releaseVersions) == 0:
		return xerrors.Errorf("no releases found")
	case newStable == nil:
		return xerrors.Errorf("version %q is not found in the last %d releases", version, perPage)
	}

	logger = logger.With(slog.F("mainline_version", releaseVersions[0]))

	if version != releaseVersions[0] {
		logger.Warn(ctx, "selected version is not the latest mainline release")
	}

	if reply, err := cliui.Prompt(inv, cliui.PromptOptions{
		Text:      "Are you sure you want to promote this version to stable?",
		Default:   "no",
		IsConfirm: true,
	}); err != nil {
		if reply == cliui.ConfirmNo {
			return nil
		}
		return err
	}

	logger.Info(ctx, "promoting selected version to stable")

	// Update the release to latest.
	updatedNewStable := cloneRelease(newStable)

	updatedBody := removeMainlineBlurb(newStable.GetBody())
	updatedBody = addStableSince(time.Now().UTC(), updatedBody)
	updatedNewStable.Body = github.String(updatedBody)
	updatedNewStable.Prerelease = github.Bool(false)
	updatedNewStable.Draft = github.Bool(false)
	if !dryRun {
		// Pass the updated copy so the body and flag changes above are applied.
		_, _, err = client.Repositories.EditRelease(ctx, owner, repo, newStable.GetID(), updatedNewStable)
		if err != nil {
			return xerrors.Errorf("edit release failed: %w", err)
		}
		logger.Info(ctx, "selected version promoted to stable", "url", newStable.GetHTMLURL())
	} else {
		logger.Info(ctx, "dry-run: release not updated", "uncommitted_changes", cmp.Diff(newStable, updatedNewStable))
	}

	return nil
}

func cloneRelease(r *github.RepositoryRelease) *github.RepositoryRelease {
	rr := *r
	return &rr
}

// addStableSince adds a "stable since" note to the release body.
//
// Example:
//
//	> ## Stable (since April 23, 2024)
func addStableSince(date time.Time, body string) string {
	return fmt.Sprintf("> ## Stable (since %s)\n\n", date.Format("January 02, 2006")) + body
}

// removeMainlineBlurb removes the mainline blurb from the release body.
//
// Example:
//
//	> [!NOTE]
//	> This is a mainline Coder release. We advise enterprise customers without a staging environment to install our [latest stable release](https://github.com/coder/coder/releases/latest) while we refine this version. Learn more about our [Release Schedule](https://coder.com/docs/v2/latest/install/releases).
func removeMainlineBlurb(body string) string {
	lines := strings.Split(body, "\n")

	var newBody, clip []string
	var found bool
	for _, line := range lines {
		if strings.HasPrefix(strings.TrimSpace(line), "> [!NOTE]") {
			clip = append(clip, line)
			found = true
			continue
		}
		if found {
			clip = append(clip, line)
			found = strings.HasPrefix(strings.TrimSpace(line), ">")
			continue
		}
		if !found && len(clip) > 0 {
			if !strings.Contains(strings.ToLower(strings.Join(clip, "\n")), "this is a mainline coder release") {
				newBody = append(newBody, clip...) // This is some other note, restore it.
			}
			clip = nil
		}
		newBody = append(newBody, line)
	}

	return strings.Join(newBody, "\n")
}

@ -0,0 +1,136 @@
package main

import (
	"testing"
	"time"

	"github.com/google/go-cmp/cmp"
)

func Test_removeMainlineBlurb(t *testing.T) {
	t.Parallel()

	tests := []struct {
		name string
		body string
		want string
	}{
		{
			name: "NoMainlineBlurb",
			body: `## Changelog

### Chores

- Add support for additional Azure Instance Identity RSA Certificates (#13028) (@kylecarbs)

Compare: [` + "`" + `v2.10.1...v2.10.2` + "`" + `](https://github.com/coder/coder/compare/v2.10.1...v2.10.2)

## Container image

- ` + "`" + `docker pull ghcr.io/coder/coder:v2.10.2` + "`" + `

## Install/upgrade

Refer to our docs to [install](https://coder.com/docs/v2/latest/install) or [upgrade](https://coder.com/docs/v2/latest/admin/upgrade) Coder, or use a release asset below.
`,
			want: `## Changelog

### Chores

- Add support for additional Azure Instance Identity RSA Certificates (#13028) (@kylecarbs)

Compare: [` + "`" + `v2.10.1...v2.10.2` + "`" + `](https://github.com/coder/coder/compare/v2.10.1...v2.10.2)

## Container image

- ` + "`" + `docker pull ghcr.io/coder/coder:v2.10.2` + "`" + `

## Install/upgrade

Refer to our docs to [install](https://coder.com/docs/v2/latest/install) or [upgrade](https://coder.com/docs/v2/latest/admin/upgrade) Coder, or use a release asset below.
`,
		},
		{
			name: "WithMainlineBlurb",
			body: `## Changelog

> [!NOTE]
> This is a mainline Coder release. We advise enterprise customers without a staging environment to install our [latest stable release](https://github.com/coder/coder/releases/latest) while we refine this version. Learn more about our [Release Schedule](https://coder.com/docs/v2/latest/install/releases).

### Chores

- Add support for additional Azure Instance Identity RSA Certificates (#13028) (@kylecarbs)

Compare: [` + "`" + `v2.10.1...v2.10.2` + "`" + `](https://github.com/coder/coder/compare/v2.10.1...v2.10.2)

## Container image

- ` + "`" + `docker pull ghcr.io/coder/coder:v2.10.2` + "`" + `

## Install/upgrade

Refer to our docs to [install](https://coder.com/docs/v2/latest/install) or [upgrade](https://coder.com/docs/v2/latest/admin/upgrade) Coder, or use a release asset below.
`,
			want: `## Changelog

### Chores

- Add support for additional Azure Instance Identity RSA Certificates (#13028) (@kylecarbs)

Compare: [` + "`" + `v2.10.1...v2.10.2` + "`" + `](https://github.com/coder/coder/compare/v2.10.1...v2.10.2)

## Container image

- ` + "`" + `docker pull ghcr.io/coder/coder:v2.10.2` + "`" + `

## Install/upgrade

Refer to our docs to [install](https://coder.com/docs/v2/latest/install) or [upgrade](https://coder.com/docs/v2/latest/admin/upgrade) Coder, or use a release asset below.
`,
		},
		{
			name: "EntireQuotedBlurbIsRemoved",
			body: `## Changelog

> [!NOTE]
> This is a mainline Coder release. We advise enterprise customers without a staging environment to install our [latest stable release](https://github.com/coder/coder/releases/latest) while we refine this version. Learn more about our [Release Schedule](https://coder.com/docs/v2/latest/install/releases).
> This is an extended note.
> This is another extended note.

### Best release yet!

Enjoy.
`,
			want: `## Changelog

### Best release yet!

Enjoy.
`,
		},
	}

	for _, tt := range tests {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()
			if diff := cmp.Diff(tt.want, removeMainlineBlurb(tt.body)); diff != "" {
				t.Errorf("removeMainlineBlurb() mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

func Test_addStableSince(t *testing.T) {
	t.Parallel()

	date := time.Date(2024, time.April, 23, 0, 0, 0, 0, time.UTC)
	body := "## Changelog"

	expected := "> ## Stable (since April 23, 2024)\n\n## Changelog"
	result := addStableSince(date, body)

	if diff := cmp.Diff(expected, result); diff != "" {
		t.Errorf("addStableSince() mismatch (-want +got):\n%s", diff)
	}
}
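
One note on the assertion order used above: go-cmp renders cmp.Diff(x, y) with "-" marking content from x and "+" marking content from y, so a "(-want +got)" message stays truthful only when want is passed first, as in the tests. A minimal illustration:

	// cmp.Diff(want, got): "-" lines come from want, "+" lines from got.
	if diff := cmp.Diff("want", "got"); diff != "" {
		fmt.Printf("mismatch (-want +got):\n%s", diff)
	}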

@ -33,14 +33,19 @@ if [[ "${CI:-}" == "" ]]; then
	error "This script must be run in CI"
fi

stable=0
version=""
release_notes_file=""
dry_run=0

args="$(getopt -o "" -l version:,release-notes-file:,dry-run -- "$@")"
args="$(getopt -o "" -l stable,version:,release-notes-file:,dry-run -- "$@")"
eval set -- "$args"
while true; do
	case "$1" in
	--stable)
		stable=1
		shift
		;;
	--version)
		version="$2"
		shift 2

@ -169,10 +174,24 @@ popd
log
log

latest=false
if [[ "$stable" == 1 ]]; then
	latest=true
fi

target_commitish=main # This is the default.
release_branch_refname=$(git branch --remotes --contains "${new_tag}" --format '%(refname)' '*/release/*')
if [[ -n "${release_branch_refname}" ]]; then
	# refs/remotes/origin/release/2.9 -> release/2.9
	target_commitish="release/${release_branch_refname#*release/}"
fi

# We pipe `true` into `gh` so that it never tries to be interactive.
true |
	maybedryrun "$dry_run" gh release create \
		--latest="$latest" \
		--title "$new_tag" \
		--target "$target_commitish" \
		--notes-file "$release_notes_file" \
		"$new_tag" \
		"$temp_dir"/*

@ -79,13 +79,9 @@ fi
if [[ -z $old_version ]]; then
	old_version="$(git describe --abbrev=0 "$ref^1" --always)"
fi
cur_tag="$(git describe --abbrev=0 "$ref" --always)"
if [[ $old_version != "$cur_tag" ]]; then
	error "A newer tag than \"$old_version\" already exists for \"$ref\" ($cur_tag), aborting."
fi
ref_name=${ref}
ref=$(git rev-parse --short "$ref")

log "Checking commit metadata for changes since $old_version..."
# shellcheck source=scripts/release/check_commit_metadata.sh
source "$SCRIPT_DIR/check_commit_metadata.sh" "$old_version" "$ref"

@ -109,8 +105,23 @@ else
fi

mapfile -d . -t version_parts <<<"${old_version#v}"
release_branch_prefix="release/"
release_ff=0
case "$increment" in
patch)
	release_branch="${release_branch_prefix}${version_parts[0]}.${version_parts[1]}"
	branch_contains_ref=$(git branch --remotes --contains "${ref}" --list "*/${release_branch}" --format='%(refname)')
	if [[ -z $branch_contains_ref ]]; then
		# Allow patch if we can fast-forward to ref, no need for dry-run here
		# since we're not checking out the branch and deleting it afterwards.
		git branch --no-track "${release_branch}-ff" "origin/${release_branch}"
		if ! git merge --ff-only --into-name "${release_branch}-ff" "${ref}" >/dev/null 2>&1; then
			git branch -D "${release_branch}-ff"
			error "Provided ref (${ref_name}) is not in the required release branch (${release_branch}) and cannot be fast-forwarded, unable to increment patch version. Please increment minor or major."
		fi
		release_ff=1
		git branch -D "${release_branch}-ff"
	fi
	version_parts[2]=$((version_parts[2] + 1))
	;;
minor)

@ -118,13 +129,7 @@ minor)
	version_parts[2]=0
	;;
major)
	# Jump from v0.x to v2.x to avoid naming conflicts
	# with Coder v1 (https://coder.com/docs/v1)
	if [ "${version_parts[0]}" -eq 0 ]; then
		version_parts[0]=2
	else
		version_parts[0]=$((version_parts[0] + 1))
	fi
	version_parts[0]=$((version_parts[0] + 1))
	version_parts[1]=0
	version_parts[2]=0
	;;

@ -133,10 +138,25 @@ major)
	;;
esac

release_branch="${release_branch_prefix}${version_parts[0]}.${version_parts[1]}"
new_version="v${version_parts[0]}.${version_parts[1]}.${version_parts[2]}"

log "Old version: $old_version"
log "New version: $new_version"
log "Release branch: $release_branch"
if [[ ${increment} = patch ]]; then
	if ((release_ff == 1)); then
		log "Fast-forwarding release branch"
		maybedryrun "$dry_run" git checkout "${release_branch}"
		maybedryrun "$dry_run" git merge --ff-only "${ref}"
	else
		log "Using existing release branch"
		maybedryrun "$dry_run" git checkout "${release_branch}"
	fi
else
	log "Creating new release branch"
	maybedryrun "$dry_run" git checkout -b "${release_branch}" "${ref}"
fi
maybedryrun "$dry_run" git tag -a "$new_version" -m "Release $new_version" "$ref"

echo "$new_version"
echo "${release_branch} ${new_version}"

@ -0,0 +1,10 @@
#!/usr/bin/env bash

set -euo pipefail
# shellcheck source=scripts/lib.sh
source "$(dirname "${BASH_SOURCE[0]}")/lib.sh"

# This script is a convenience wrapper around the release promote command.
#
# Sed hack to make help text look like this script.
exec go run "${SCRIPT_DIR}/release" promote "$@"
@ -16,33 +16,35 @@ source "$(dirname "${BASH_SOURCE[0]}")/lib.sh"
cdroot

# If in Sapling, just print the commit since we don't have tags.
if [ -d ".sl" ]; then
if [[ -d ".sl" ]]; then
	sl log -l 1 | awk '/changeset/ { printf "0.0.0+sl-%s\n", substr($2, 0, 16) }'
	exit 0
fi

if [[ "${CODER_FORCE_VERSION:-}" != "" ]]; then
	echo "$CODER_FORCE_VERSION"
if [[ -n "${CODER_FORCE_VERSION:-}" ]]; then
	echo "${CODER_FORCE_VERSION}"
	exit 0
fi

# To make contributing easier, if the upstream isn't coder/coder and there are
# no tags we will fall back to 0.1.0 with devel suffix.
if [[ "$(git remote get-url origin)" != *coder/coder* ]] && [[ "$(git tag)" == "" ]]; then
remote_url=$(git remote get-url origin)
tag_list=$(git tag)
if ! [[ ${remote_url} =~ [@/]github.com ]] && ! [[ ${remote_url} =~ [:/]coder/coder(\.git)?$ ]] && [[ -z ${tag_list} ]]; then
	log
	log "INFO(version.sh): It appears you've checked out a fork of Coder."
	log "INFO(version.sh): By default GitHub does not include tags when forking."
	log "INFO(version.sh): We will use the default version 0.1.0 for this build."
	log "INFO(version.sh): We will use the default version 2.0.0 for this build."
	log "INFO(version.sh): To pull tags from upstream, use the following commands:"
	log "INFO(version.sh): - git remote add upstream https://github.com/coder/coder.git"
	log "INFO(version.sh): - git fetch upstream"
	log
	last_tag="v0.1.0"
	last_tag="v2.0.0"
else
	last_tag="$(git describe --tags --abbrev=0)"
fi

version="$last_tag"
version="${last_tag}"

# If the HEAD has extra commits since the last tag then we are in a dev version.
#

@ -51,11 +53,11 @@ version="$last_tag"
if [[ "${CODER_RELEASE:-}" == *t* ]]; then
	# $last_tag will equal `git describe --always` if we currently have the tag
	# checked out.
	if [[ "$last_tag" != "$(git describe --always)" ]]; then
	if [[ "${last_tag}" != "$(git describe --always)" ]]; then
		# make won't exit on $(shell cmd) failures, so we have to kill it :(
		if [[ "$(ps -o comm= "$PPID" || true)" == *make* ]]; then
		if [[ "$(ps -o comm= "${PPID}" || true)" == *make* ]]; then
			log "ERROR: version.sh: the current commit is not tagged with an annotated tag"
			kill "$PPID" || true
			kill "${PPID}" || true
			exit 1
		fi

@ -1,5 +1,4 @@
import { type BrowserContext, expect, type Page, test } from "@playwright/test";
import axios from "axios";
import { type ChildProcess, exec, spawn } from "child_process";
import { randomUUID } from "crypto";
import express from "express";

@ -7,6 +6,7 @@ import capitalize from "lodash/capitalize";
import path from "path";
import * as ssh from "ssh2";
import { Duplex } from "stream";
import { axiosInstance } from "api/api";
import type {
  WorkspaceBuildParameter,
  UpdateTemplateMeta,

@ -398,7 +398,7 @@ export const waitUntilUrlIsNotResponding = async (url: string) => {

  while (retries < maxRetries) {
    try {
      await axios.get(url);
      await axiosInstance.get(url);
    } catch (error) {
      return;
    }
@ -8,9 +8,9 @@ import type {
  Reporter,
  TestError,
} from "@playwright/test/reporter";
import axios from "axios";
import * as fs from "fs/promises";
import type { Writable } from "stream";
import { axiosInstance } from "api/api";
import { coderdPProfPort, enterpriseLicense } from "./constants";

class CoderReporter implements Reporter {

@ -136,9 +136,10 @@ class CoderReporter implements Reporter {
const logLines = (chunk: string): string[] => chunk.trimEnd().split("\n");

const exportDebugPprof = async (outputFile: string) => {
  const response = await axios.get(
  const response = await axiosInstance.get(
    `http://127.0.0.1:${coderdPProfPort}/debug/pprof/goroutine?debug=1`,
  );

  if (response.status !== 200) {
    throw new Error(`Error: Received status code ${response.status}`);
  }

@ -1,4 +1,3 @@
import axios from "axios";
import {
  MockTemplate,
  MockTemplateVersionParameter1,

@ -8,6 +7,7 @@ import {
  MockWorkspaceBuildParameter1,
} from "testHelpers/entities";
import * as api from "./api";
import { axiosInstance } from "./api";
import type * as TypesGen from "./typesGenerated";

describe("api.ts", () => {

@ -17,13 +17,16 @@ describe("api.ts", () => {
    const loginResponse: TypesGen.LoginWithPasswordResponse = {
      session_token: "abc_123_test",
    };
    jest.spyOn(axios, "post").mockResolvedValueOnce({ data: loginResponse });

    jest
      .spyOn(axiosInstance, "post")
      .mockResolvedValueOnce({ data: loginResponse });

    // when
    const result = await api.login("test", "123");

    // then
    expect(axios.post).toHaveBeenCalled();
    expect(axiosInstance.post).toHaveBeenCalled();
    expect(result).toStrictEqual(loginResponse);
  });

@ -38,7 +41,7 @@ describe("api.ts", () => {
    const axiosMockPost = jest.fn().mockImplementationOnce(() => {
      return Promise.reject(expectedError);
    });
    axios.post = axiosMockPost;
    axiosInstance.post = axiosMockPost;

    try {
      await api.login("test", "123");

@ -54,7 +57,7 @@ describe("api.ts", () => {
    const axiosMockPost = jest.fn().mockImplementationOnce(() => {
      return Promise.resolve();
    });
    axios.post = axiosMockPost;
    axiosInstance.post = axiosMockPost;

    // when
    await api.logout();

@ -73,7 +76,8 @@ describe("api.ts", () => {
    const axiosMockPost = jest.fn().mockImplementationOnce(() => {
      return Promise.reject(expectedError);
    });
    axios.post = axiosMockPost;

    axiosInstance.post = axiosMockPost;

    try {
      await api.logout();

@ -92,7 +96,8 @@ describe("api.ts", () => {
    const axiosMockPost = jest.fn().mockImplementationOnce(() => {
      return Promise.resolve({ data: apiKeyResponse });
    });
    axios.post = axiosMockPost;

    axiosInstance.post = axiosMockPost;

    // when
    const result = await api.getApiKey();

@ -112,7 +117,8 @@ describe("api.ts", () => {
    const axiosMockPost = jest.fn().mockImplementationOnce(() => {
      return Promise.reject(expectedError);
    });
    axios.post = axiosMockPost;

    axiosInstance.post = axiosMockPost;

    try {
      await api.getApiKey();
File diff suppressed because it is too large

@ -1,4 +1,4 @@
import axios, { type AxiosError, type AxiosResponse } from "axios";
import { type AxiosError, type AxiosResponse, isAxiosError } from "axios";

const Language = {
  errorsByCode: {

@ -25,7 +25,7 @@ export type ApiError = AxiosError<ApiErrorResponse> & {

export const isApiError = (err: unknown): err is ApiError => {
  return (
    axios.isAxiosError(err) &&
    isAxiosError(err) &&
    err.response !== undefined &&
    isApiErrorResponse(err.response.data)
  );

@ -2,16 +2,18 @@ import type { QueryClient, UseQueryOptions } from "react-query";
import * as API from "api/api";
import type { AppearanceConfig } from "api/typesGenerated";
import { getMetadataAsJSON } from "utils/metadata";
import { cachedQuery } from "./util";

const initialAppearanceData = getMetadataAsJSON<AppearanceConfig>("appearance");
const appearanceConfigKey = ["appearance"] as const;

export const appearance = (): UseQueryOptions<AppearanceConfig> => {
  return {
    queryKey: ["appearance"],
  // We either have our initial data or should immediately fetch and never again!
  return cachedQuery({
    initialData: initialAppearanceData,
    queryKey: ["appearance"],
    queryFn: () => API.getAppearance(),
  };
  });
};

export const updateAppearance = (queryClient: QueryClient) => {

@ -2,14 +2,16 @@ import type { UseQueryOptions } from "react-query";
import * as API from "api/api";
import type { BuildInfoResponse } from "api/typesGenerated";
import { getMetadataAsJSON } from "utils/metadata";
import { cachedQuery } from "./util";

const initialBuildInfoData = getMetadataAsJSON<BuildInfoResponse>("build-info");
const buildInfoKey = ["buildInfo"] as const;

export const buildInfo = (): UseQueryOptions<BuildInfoResponse> => {
  return {
    queryKey: buildInfoKey,
  // The version of the app can't change without reloading the page.
  return cachedQuery({
    initialData: initialBuildInfoData,
    queryKey: buildInfoKey,
    queryFn: () => API.getBuildInfo(),
  };
  });
};

@ -2,16 +2,17 @@ import type { QueryClient, UseQueryOptions } from "react-query";
import * as API from "api/api";
import type { Entitlements } from "api/typesGenerated";
import { getMetadataAsJSON } from "utils/metadata";
import { cachedQuery } from "./util";

const initialEntitlementsData = getMetadataAsJSON<Entitlements>("entitlements");
const ENTITLEMENTS_QUERY_KEY = ["entitlements"] as const;
const entitlementsQueryKey = ["entitlements"] as const;

export const entitlements = (): UseQueryOptions<Entitlements> => {
  return {
    queryKey: ENTITLEMENTS_QUERY_KEY,
    queryFn: () => API.getEntitlements(),
  return cachedQuery({
    initialData: initialEntitlementsData,
  };
    queryKey: entitlementsQueryKey,
    queryFn: () => API.getEntitlements(),
  });
};

export const refreshEntitlements = (queryClient: QueryClient) => {

@ -19,7 +20,7 @@ export const refreshEntitlements = (queryClient: QueryClient) => {
    mutationFn: API.refreshEntitlements,
    onSuccess: async () => {
      await queryClient.invalidateQueries({
        queryKey: ENTITLEMENTS_QUERY_KEY,
        queryKey: entitlementsQueryKey,
      });
    },
  };

@ -2,16 +2,17 @@ import type { UseQueryOptions } from "react-query";
import * as API from "api/api";
import type { Experiments } from "api/typesGenerated";
import { getMetadataAsJSON } from "utils/metadata";
import { cachedQuery } from "./util";

const initialExperimentsData = getMetadataAsJSON<Experiments>("experiments");
const experimentsKey = ["experiments"] as const;

export const experiments = (): UseQueryOptions<Experiments> => {
  return {
    queryKey: experimentsKey,
  return cachedQuery({
    initialData: initialExperimentsData,
    queryKey: experimentsKey,
    queryFn: () => API.getExperiments(),
  } satisfies UseQueryOptions<Experiments>;
  });
};

export const availableExperiments = () => {

@ -19,6 +19,7 @@ import type { UsePaginatedQueryOptions } from "hooks/usePaginatedQuery";
import { prepareQuery } from "utils/filters";
import { getMetadataAsJSON } from "utils/metadata";
import { getAuthorizationKey } from "./authCheck";
import { cachedQuery } from "./util";

export function usersKey(req: UsersRequest) {
  return ["users", req] as const;

@ -112,6 +113,8 @@ export const updateRoles = (queryClient: QueryClient) => {
  };
};

const initialUserData = getMetadataAsJSON<User>("user");

export const authMethods = () => {
  return {
    // Even the endpoint being /users/authmethods we don't want to revalidate it

@ -121,18 +124,16 @@ export const authMethods = () => {
  };
};

const initialUserData = getMetadataAsJSON<User>("user");

const meKey = ["me"];

export const me = (): UseQueryOptions<User> & {
  queryKey: QueryKey;
} => {
  return {
    queryKey: meKey,
  return cachedQuery({
    initialData: initialUserData,
    queryKey: meKey,
    queryFn: API.getAuthenticatedUser,
  };
  });
};

export function apiKey(): UseQueryOptions<GenerateAPIKeyResponse> {

@ -142,11 +143,13 @@ export function apiKey(): UseQueryOptions<GenerateAPIKeyResponse> {
  };
}

export const hasFirstUser = () => {
  return {
export const hasFirstUser = (): UseQueryOptions<boolean> => {
  return cachedQuery({
    // This cannot be false otherwise it will not fetch!
    initialData: Boolean(initialUserData) || undefined,
    queryKey: ["hasFirstUser"],
    queryFn: API.hasFirstUser,
  };
  });
};

export const login = (

@ -0,0 +1,27 @@
import type { UseQueryOptions } from "react-query";

/**
 * cachedQuery allows the caller to only make a request a single time, and use
 * `initialData` if it is provided. This is particularly helpful for passing
 * values injected via metadata. We do this for the initial user fetch,
 * buildinfo, and a few others to reduce page load time.
 */
export const cachedQuery = <
  TQueryOptions extends UseQueryOptions<TData>,
  TData,
>(
  options: TQueryOptions,
): TQueryOptions =>
  // Only do this if there is initial data, otherwise it can conflict with tests.
  ({
    ...(options.initialData
      ? {
          cacheTime: Infinity,
          staleTime: Infinity,
          refetchOnMount: false,
          refetchOnReconnect: false,
          refetchOnWindowFocus: false,
        }
      : {}),
    ...options,
  });
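
To show the intent of cachedQuery in use: when metadata-injected initialData is present, the spread options pin the cache entry as permanently fresh so react-query serves the injected value without refetching; when it is undefined, the query behaves normally. A minimal sketch mirroring the buildInfo query above:

import * as API from "api/api";
import { cachedQuery } from "api/queries/util";
import type { BuildInfoResponse } from "api/typesGenerated";
import { getMetadataAsJSON } from "utils/metadata";

// Injected at page load when the backend embeds build-info metadata;
// undefined otherwise, in which case queryFn runs as usual.
const initialBuildInfoData = getMetadataAsJSON<BuildInfoResponse>("build-info");

const buildInfoOptions = cachedQuery({
  initialData: initialBuildInfoData,
  queryKey: ["buildInfo"] as const,
  queryFn: () => API.getBuildInfo(),
});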

@ -9,6 +9,7 @@ import {
} from "react";
import { useQuery } from "react-query";
import { getWorkspaceProxies, getWorkspaceProxyRegions } from "api/api";
import { cachedQuery } from "api/queries/util";
import type { Region, WorkspaceProxy } from "api/typesGenerated";
import { useAuthenticated } from "contexts/auth/RequireAuth";
import { type ProxyLatencyReport, useProxyLatency } from "./useProxyLatency";

@ -130,12 +131,13 @@ export const ProxyProvider: FC<PropsWithChildren> = ({ children }) => {
    error: proxiesError,
    isLoading: proxiesLoading,
    isFetched: proxiesFetched,
  } = useQuery({
    queryKey,
    queryFn: query,
    staleTime: initialData ? Infinity : undefined,
    initialData,
  });
  } = useQuery(
    cachedQuery({
      initialData,
      queryKey,
      queryFn: query,
    }),
  );

  // Every time we get a new proxiesResponse, update the latency check
  // to each workspace proxy.
@ -9,18 +9,13 @@ import { useMutation, useQuery, useQueryClient } from "react-query";
import { isApiError } from "api/errors";
import { checkAuthorization } from "api/queries/authCheck";
import {
  authMethods,
  hasFirstUser,
  login,
  logout,
  me,
  updateProfile as updateProfileOptions,
} from "api/queries/users";
import type {
  AuthMethods,
  UpdateUserProfileRequest,
  User,
} from "api/typesGenerated";
import type { UpdateUserProfileRequest, User } from "api/typesGenerated";
import { displaySuccess } from "components/GlobalSnackbar/utils";
import { permissionsToCheck, type Permissions } from "./permissions";

@ -34,7 +29,6 @@ export type AuthContextValue = {
  isUpdatingProfile: boolean;
  user: User | undefined;
  permissions: Permissions | undefined;
  authMethods: AuthMethods | undefined;
  organizationId: string | undefined;
  signInError: unknown;
  updateProfileError: unknown;

@ -51,7 +45,6 @@ export const AuthProvider: FC<PropsWithChildren> = ({ children }) => {
  const queryClient = useQueryClient();
  const meOptions = me();
  const userQuery = useQuery(meOptions);
  const authMethodsQuery = useQuery(authMethods());
  const hasFirstUserQuery = useQuery(hasFirstUser());
  const permissionsQuery = useQuery({
    ...checkAuthorization({ checks: permissionsToCheck }),

@ -77,7 +70,6 @@ export const AuthProvider: FC<PropsWithChildren> = ({ children }) => {
    userQuery.error.response.status === 401;
  const isSigningOut = logoutMutation.isLoading;
  const isLoading =
    authMethodsQuery.isLoading ||
    userQuery.isLoading ||
    hasFirstUserQuery.isLoading ||
    (userQuery.isSuccess && permissionsQuery.isLoading);

@ -120,7 +112,6 @@ export const AuthProvider: FC<PropsWithChildren> = ({ children }) => {
    updateProfile,
    user: userQuery.data,
    permissions: permissionsQuery.data as Permissions | undefined,
    authMethods: authMethodsQuery.data,
    signInError: loginMutation.error,
    updateProfileError: updateProfileMutation.error,
    organizationId: userQuery.data?.organization_ids[0],

@ -1,6 +1,6 @@
import axios from "axios";
import { type FC, useEffect } from "react";
import { Outlet, Navigate, useLocation } from "react-router-dom";
import { axiosInstance } from "api/api";
import { isApiError } from "api/errors";
import { Loader } from "components/Loader/Loader";
import { ProxyProvider } from "contexts/ProxyContext";

@ -22,7 +22,7 @@ export const RequireAuth: FC = () => {
      return;
    }

    const interceptorHandle = axios.interceptors.response.use(
    const interceptorHandle = axiosInstance.interceptors.response.use(
      (okResponse) => okResponse,
      (error: unknown) => {
        // 401 Unauthorized

@ -32,13 +32,14 @@ export const RequireAuth: FC = () => {
          signOut();
        }

        // Otherwise, pass the response through so that it can be displayed in the UI
        // Otherwise, pass the response through so that it can be displayed in
        // the UI
        return Promise.reject(error);
      },
    );

    return () => {
      axios.interceptors.response.eject(interceptorHandle);
      axiosInstance.interceptors.response.eject(interceptorHandle);
    };
  }, [isLoading, isSigningOut, isSignedIn, signOut]);

@ -1,6 +1,6 @@
import PerformanceObserver from "@fastly/performance-observer-polyfill";
import axios from "axios";
import { useEffect, useReducer, useState } from "react";
import { axiosInstance } from "api/api";
import type { Region } from "api/typesGenerated";
import { generateRandomString } from "utils/random";

@ -198,7 +198,7 @@ export const useProxyLatency = (
  observer.observe({ entryTypes: ["resource"] });

  const proxyRequests = Object.keys(proxyChecks).map((latencyURL) => {
    return axios.get(latencyURL, {
    return axiosInstance.get(latencyURL, {
      withCredentials: false,
      // Must add a custom header to make the request not a "simple request".
      // We want to force a preflight request.

@ -7,12 +7,10 @@ import {
} from "react";
import { useQuery } from "react-query";
import { appearance } from "api/queries/appearance";
import { buildInfo } from "api/queries/buildInfo";
import { entitlements } from "api/queries/entitlements";
import { experiments } from "api/queries/experiments";
import type {
  AppearanceConfig,
  BuildInfoResponse,
  Entitlements,
  Experiments,
} from "api/typesGenerated";

@ -27,7 +25,6 @@ interface Appearance {
}

export interface DashboardValue {
  buildInfo: BuildInfoResponse;
  entitlements: Entitlements;
  experiments: Experiments;
  appearance: Appearance;

@ -38,16 +35,12 @@ export const DashboardContext = createContext<DashboardValue | undefined>(
);

export const DashboardProvider: FC<PropsWithChildren> = ({ children }) => {
  const buildInfoQuery = useQuery(buildInfo());
  const entitlementsQuery = useQuery(entitlements());
  const experimentsQuery = useQuery(experiments());
  const appearanceQuery = useQuery(appearance());

  const isLoading =
    !buildInfoQuery.data ||
    !entitlementsQuery.data ||
    !appearanceQuery.data ||
    !experimentsQuery.data;
    !entitlementsQuery.data || !appearanceQuery.data || !experimentsQuery.data;

  const [configPreview, setConfigPreview] = useState<AppearanceConfig>();

@ -84,7 +77,6 @@ export const DashboardProvider: FC<PropsWithChildren> = ({ children }) => {
  return (
    <DashboardContext.Provider
      value={{
        buildInfo: buildInfoQuery.data,
        entitlements: entitlementsQuery.data,
        experiments: experimentsQuery.data,
        appearance: {

@ -1,4 +1,6 @@
import type { FC } from "react";
import { useQuery } from "react-query";
import { buildInfo } from "api/queries/buildInfo";
import { useAuthenticated } from "contexts/auth/RequireAuth";
import { useProxy } from "contexts/ProxyContext";
import { useDashboard } from "modules/dashboard/useDashboard";

@ -6,7 +8,8 @@ import { useFeatureVisibility } from "../useFeatureVisibility";
import { NavbarView } from "./NavbarView";

export const Navbar: FC = () => {
  const { appearance, buildInfo } = useDashboard();
  const { appearance } = useDashboard();
  const buildInfoQuery = useQuery(buildInfo());
  const { user: me, permissions, signOut } = useAuthenticated();
  const featureVisibility = useFeatureVisibility();
  const canViewAuditLog =

@ -19,7 +22,7 @@ export const Navbar: FC = () => {
    <NavbarView
      user={me}
      logo_url={appearance.config.logo_url}
      buildInfo={buildInfo}
      buildInfo={buildInfoQuery.data}
      supportLinks={appearance.config.support_links}
      onSignOut={signOut}
      canViewAuditLog={canViewAuditLog}

@ -235,6 +235,13 @@ const ProxyMenu: FC<ProxyMenuProps> = ({ proxyContextValue }) => {
    return proxy.healthy && latency !== undefined && latency.at < refetchDate;
  };

  // This endpoint returns a 404 when not using enterprise.
  // If we don't return null, then it looks like this is
  // loading forever!
  if (proxyContextValue.error) {
    return null;
  }

  if (isLoading) {
    return (
      <Skeleton

@ -1,6 +1,6 @@
import { css, type Interpolation, type Theme, useTheme } from "@emotion/react";
import Badge from "@mui/material/Badge";
import type { FC, ReactNode } from "react";
import type { FC } from "react";
import type * as TypesGen from "api/typesGenerated";
import { DropdownArrow } from "components/DropdownArrow/DropdownArrow";
import {

@ -17,7 +17,6 @@ export interface UserDropdownProps {
  buildInfo?: TypesGen.BuildInfoResponse;
  supportLinks?: readonly TypesGen.LinkConfig[];
  onSignOut: () => void;
  children?: ReactNode;
}

export const UserDropdown: FC<UserDropdownProps> = ({

@ -24,7 +24,7 @@ export const AgentVersion: FC<AgentVersionProps> = ({
  );

  if (status === agentVersionStatus.Updated) {
    return <span>Updated</span>;
    return null;
  }

  return (

@ -1,20 +1,20 @@
import type { Meta, StoryObj } from "@storybook/react";
import { DashboardContext } from "modules/dashboard/DashboardProvider";
import {
  MockAppearanceConfig,
  MockBuildInfo,
  MockCanceledWorkspace,
  MockCancelingWorkspace,
  MockDeletedWorkspace,
  MockDeletingWorkspace,
  MockEntitlementsWithScheduling,
  MockExperiments,
  MockFailedWorkspace,
  MockPendingWorkspace,
  MockStartingWorkspace,
  MockStoppedWorkspace,
  MockStoppingWorkspace,
  MockWorkspace,
  MockBuildInfo,
  MockEntitlementsWithScheduling,
  MockExperiments,
  MockAppearanceConfig,
} from "testHelpers/entities";
import { WorkspaceStatusBadge } from "./WorkspaceStatusBadge";

@ -27,11 +27,18 @@ const MockedAppearance = {
const meta: Meta<typeof WorkspaceStatusBadge> = {
  title: "modules/workspaces/WorkspaceStatusBadge",
  component: WorkspaceStatusBadge,
  parameters: {
    queries: [
      {
        key: ["buildInfo"],
        data: MockBuildInfo,
      },
    ],
  },
  decorators: [
    (Story) => (
      <DashboardContext.Provider
        value={{
          buildInfo: MockBuildInfo,
          entitlements: MockEntitlementsWithScheduling,
          experiments: MockExperiments,
          appearance: MockedAppearance,

@ -1,6 +1,9 @@
import type { FC } from "react";
import { Helmet } from "react-helmet-async";
import { useQuery } from "react-query";
import { Navigate, useLocation, useNavigate } from "react-router-dom";
import { buildInfo } from "api/queries/buildInfo";
import { authMethods } from "api/queries/users";
import { useAuthContext } from "contexts/auth/AuthProvider";
import { getApplicationName } from "utils/appearance";
import { retrieveRedirect } from "utils/redirect";

@ -14,12 +17,13 @@ export const LoginPage: FC = () => {
    isConfiguringTheFirstUser,
    signIn,
    isSigningIn,
    authMethods,
    signInError,
  } = useAuthContext();
  const authMethodsQuery = useQuery(authMethods());
  const redirectTo = retrieveRedirect(location.search);
  const applicationName = getApplicationName();
  const navigate = useNavigate();
  const buildInfoQuery = useQuery(buildInfo());

  if (isSignedIn) {
    // If the redirect is going to a workspace application, and we

@ -60,9 +64,10 @@ export const LoginPage: FC = () => {
        <title>Sign in to {applicationName}</title>
      </Helmet>
      <LoginPageView
        authMethods={authMethods}
        authMethods={authMethodsQuery.data}
        error={signInError}
        isLoading={isLoading}
        isLoading={isLoading || authMethodsQuery.isLoading}
        buildInfo={buildInfoQuery.data}
        isSigningIn={isSigningIn}
        onSignIn={async ({ email, password }) => {
          await signIn(email, password);

@ -1,7 +1,7 @@
import type { Interpolation, Theme } from "@emotion/react";
import type { FC } from "react";
import { useLocation } from "react-router-dom";
import type { AuthMethods } from "api/typesGenerated";
import type { AuthMethods, BuildInfoResponse } from "api/typesGenerated";
import { CoderIcon } from "components/Icons/CoderIcon";
import { Loader } from "components/Loader/Loader";
import { getApplicationName, getLogoURL } from "utils/appearance";

@ -12,6 +12,7 @@ export interface LoginPageViewProps {
  authMethods: AuthMethods | undefined;
  error: unknown;
  isLoading: boolean;
  buildInfo?: BuildInfoResponse;
  isSigningIn: boolean;
  onSignIn: (credentials: { email: string; password: string }) => void;
}

@ -20,6 +21,7 @@ export const LoginPageView: FC<LoginPageViewProps> = ({
  authMethods,
  error,
  isLoading,
  buildInfo,
  isSigningIn,
  onSignIn,
}) => {

@ -64,7 +66,10 @@ export const LoginPageView: FC<LoginPageViewProps> = ({
        />
      )}
      <footer css={styles.footer}>
        Copyright © {new Date().getFullYear()} Coder Technologies, Inc.
        <div>
          Copyright © {new Date().getFullYear()} Coder Technologies, Inc.
        </div>
        <div>{buildInfo?.version}</div>
      </footer>
    </div>
  </div>

@ -47,6 +47,11 @@ const MS_DAY_CONVERSION = 86400000;
const FAILURE_CLEANUP_DEFAULT = 7;
const INACTIVITY_CLEANUP_DEFAULT = 180;
const DORMANT_AUTODELETION_DEFAULT = 30;
/**
 * The default form field space is 4, but since this form is quite heavy,
 * increasing the space can make it feel lighter.
 */
const FORM_FIELDS_SPACING = 6;

export interface TemplateScheduleForm {
  template: Template;

@ -318,10 +323,10 @@ export const TemplateScheduleForm: FC<TemplateScheduleForm> = ({
      aria-label="Template settings form"
    >
      <FormSection
        title="Schedule"
        title="Autostop"
        description="Define when workspaces created from this template are stopped."
      >
        <Stack direction="row" css={styles.ttlFields}>
        <FormFields spacing={FORM_FIELDS_SPACING}>
          <TextField
            {...getFieldHelpers("default_ttl_ms", {
              helperText: (

@ -347,11 +352,91 @@ export const TemplateScheduleForm: FC<TemplateScheduleForm> = ({
            label="Activity bump (hours)"
            type="number"
          />
        </Stack>

          <Stack direction="row" css={styles.ttlFields}>
            <TextField
              {...getFieldHelpers("autostop_requirement_days_of_week", {
                helperText: (
                  <AutostopRequirementDaysHelperText
                    days={form.values.autostop_requirement_days_of_week}
                  />
                ),
              })}
              disabled={isSubmitting}
              fullWidth
              select
              value={form.values.autostop_requirement_days_of_week}
              label="Days with required stop"
            >
              <MenuItem key="off" value="off">
                Off
              </MenuItem>
              <MenuItem key="daily" value="daily">
                Daily
              </MenuItem>
              <MenuItem key="saturday" value="saturday">
                Saturday
              </MenuItem>
              <MenuItem key="sunday" value="sunday">
                Sunday
              </MenuItem>
            </TextField>

            <TextField
              {...getFieldHelpers("autostop_requirement_weeks", {
                helperText: (
                  <AutostopRequirementWeeksHelperText
                    days={form.values.autostop_requirement_days_of_week}
                    weeks={form.values.autostop_requirement_weeks}
                  />
                ),
              })}
              disabled={
                isSubmitting ||
                !["saturday", "sunday"].includes(
                  form.values.autostop_requirement_days_of_week || "",
                )
              }
              fullWidth
              inputProps={{ min: 1, max: 16, step: 1 }}
              label="Weeks between required stops"
              type="number"
            />
          </Stack>

          <Stack direction="row" alignItems="center">
            <Checkbox
              id="allow-user-autostop"
              size="small"
              disabled={isSubmitting || !allowAdvancedScheduling}
              onChange={async () => {
                await form.setFieldValue(
                  "allow_user_autostop",
                  !form.values.allow_user_autostop,
                );
              }}
              name="allow_user_autostop"
              checked={form.values.allow_user_autostop}
            />
            <Stack spacing={0.5}>
              <strong>Enforce these settings across all workspaces</strong>
              <span
                css={{
                  fontSize: 12,
                  color: theme.palette.text.secondary,
                }}
              >
                Workspaces by default allow users to set custom autostop timers.
                Use this to apply the template settings to all workspaces under
                this template.
              </span>
            </Stack>
          </Stack>
        </FormFields>
      </FormSection>

      <FormSection
        title="Allow users scheduling"
        title="Autostart"
        description="Allow users to set custom autostart and autostop scheduling options for workspaces created from this template."
      >
        <Stack direction="column">

@ -390,190 +475,101 @@ export const TemplateScheduleForm: FC<TemplateScheduleForm> = ({
            }}
          />
        )}

        <Stack direction="row" alignItems="center">
          <Checkbox
            id="allow-user-autostop"
            size="small"
            disabled={isSubmitting || !allowAdvancedScheduling}
            onChange={async () => {
              await form.setFieldValue(
                "allow_user_autostop",
                !form.values.allow_user_autostop,
              );
            }}
            name="allow_user_autostop"
            checked={form.values.allow_user_autostop}
          />
          <Stack spacing={0.5}>
            <strong>
              Allow users to customize autostop duration for workspaces.
            </strong>
            <span
              css={{
                fontSize: 12,
                color: theme.palette.text.secondary,
              }}
            >
              Workspaces will always use the default TTL if this is set.
            </span>
          </Stack>
        </Stack>
      </Stack>
    </FormSection>

    <FormSection
      title="Autostop Requirement"
      description="Define when workspaces created from this template are stopped periodically to enforce template updates and ensure idle workspaces are stopped."
    >
      <Stack direction="row" css={styles.ttlFields}>
        <TextField
          {...getFieldHelpers("autostop_requirement_days_of_week", {
            helperText: (
              <AutostopRequirementDaysHelperText
                days={form.values.autostop_requirement_days_of_week}
              />
            ),
          })}
          disabled={isSubmitting}
          fullWidth
          select
          value={form.values.autostop_requirement_days_of_week}
          label="Days with required stop"
        >
          <MenuItem key="off" value="off">
            Off
          </MenuItem>
          <MenuItem key="daily" value="daily">
            Daily
          </MenuItem>
          <MenuItem key="saturday" value="saturday">
            Saturday
          </MenuItem>
          <MenuItem key="sunday" value="sunday">
            Sunday
          </MenuItem>
        </TextField>

        <TextField
          {...getFieldHelpers("autostop_requirement_weeks", {
            helperText: (
              <AutostopRequirementWeeksHelperText
                days={form.values.autostop_requirement_days_of_week}
                weeks={form.values.autostop_requirement_weeks}
              />
            ),
          })}
          disabled={
            isSubmitting ||
            !["saturday", "sunday"].includes(
              form.values.autostop_requirement_days_of_week || "",
            )
          }
          fullWidth
          inputProps={{ min: 1, max: 16, step: 1 }}
          label="Weeks between required stops"
          type="number"
        />
      </Stack>
    </FormSection>

    {allowAdvancedScheduling && (
      <>
        <FormSection
          title="Failure Cleanup"
          description="When enabled, Coder will attempt to stop workspaces that are in a failed state after a specified number of days."
          title="Dormancy"
          description="Coder's Dormancy Threshold determines when workspaces become dormant due to inactivity, requiring manual activation for access."
        >
          <FormFields>
            <FormControlLabel
              control={
                <Switch
                  name="failureCleanupEnabled"
                  checked={form.values.failure_cleanup_enabled}
                  onChange={handleToggleFailureCleanup}
                />
              }
              label="Enable Failure Cleanup"
            />
            <TextField
              {...getFieldHelpers("failure_ttl_ms", {
                helperText: (
                  <FailureTTLHelperText ttl={form.values.failure_ttl_ms} />
                ),
              })}
              disabled={isSubmitting || !form.values.failure_cleanup_enabled}
              fullWidth
              inputProps={{ min: 0, step: "any" }}
              label="Time until cleanup (days)"
              type="number"
            />
          </FormFields>
        </FormSection>
        <FormSection
          title="Dormancy Threshold"
          description="When enabled, Coder will mark workspaces as dormant after a period of time with no connections. Dormant workspaces can be auto-deleted (see below) or manually reviewed by the workspace owner or admins."
        >
          <FormFields>
            <FormControlLabel
              control={
                <Switch
                  name="dormancyThreshold"
                  checked={form.values.inactivity_cleanup_enabled}
                  onChange={handleToggleInactivityCleanup}
                />
              }
              label="Enable Dormancy Threshold"
            />
            <TextField
              {...getFieldHelpers("time_til_dormant_ms", {
                helperText: (
                  <DormancyTTLHelperText
                    ttl={form.values.time_til_dormant_ms}
          <FormFields spacing={FORM_FIELDS_SPACING}>
            <Stack>
              <FormControlLabel
                control={
                  <Switch
                    name="dormancyThreshold"
                    checked={form.values.inactivity_cleanup_enabled}
                    onChange={handleToggleInactivityCleanup}
                  />
                ),
              })}
              disabled={
                isSubmitting || !form.values.inactivity_cleanup_enabled
              }
              fullWidth
              inputProps={{ min: 0, step: "any" }}
              label="Time until dormant (days)"
              type="number"
            />
          </FormFields>
        </FormSection>
        <FormSection
          title="Dormancy Auto-Deletion"
          description="When enabled, Coder will permanently delete dormant workspaces after a period of time. Once a workspace is deleted it cannot be recovered."
        >
          <FormFields>
            <FormControlLabel
              control={
                <Switch
                  name="dormancyAutoDeletion"
                  checked={form.values.dormant_autodeletion_cleanup_enabled}
                  onChange={handleToggleDormantAutoDeletion}
                />
              }
              label="Enable Dormancy Auto-Deletion"
            />
            <TextField
              {...getFieldHelpers("time_til_dormant_autodelete_ms", {
                helperText: (
                  <DormancyAutoDeletionTTLHelperText
                    ttl={form.values.time_til_dormant_autodelete_ms}
                }
                label="Enable Dormancy Threshold"
              />
              <TextField
                {...getFieldHelpers("time_til_dormant_ms", {
                  helperText: (
                    <DormancyTTLHelperText
                      ttl={form.values.time_til_dormant_ms}
                    />
                  ),
                })}
                disabled={
                  isSubmitting || !form.values.inactivity_cleanup_enabled
                }
                fullWidth
                inputProps={{ min: 0, step: "any" }}
                label="Time until dormant (days)"
                type="number"
              />
            </Stack>

            <Stack>
              <FormControlLabel
                control={
                  <Switch
                    name="dormancyAutoDeletion"
                    checked={form.values.dormant_autodeletion_cleanup_enabled}
                    onChange={handleToggleDormantAutoDeletion}
                  />
                ),
              })}
              disabled={
                isSubmitting ||
                !form.values.dormant_autodeletion_cleanup_enabled
              }
              fullWidth
              inputProps={{ min: 0, step: "any" }}
              label="Time until deletion (days)"
              type="number"
            />
                }
                label="Enable Dormancy Auto-Deletion"
              />
              <TextField
                {...getFieldHelpers("time_til_dormant_autodelete_ms", {
                  helperText: (
                    <DormancyAutoDeletionTTLHelperText
                      ttl={form.values.time_til_dormant_autodelete_ms}
                    />
                  ),
                })}
                disabled={
                  isSubmitting ||
                  !form.values.dormant_autodeletion_cleanup_enabled
                }
                fullWidth
                inputProps={{ min: 0, step: "any" }}
                label="Time until deletion (days)"
                type="number"
              />
            </Stack>

            <Stack>
              <FormControlLabel
                control={
                  <Switch
                    name="failureCleanupEnabled"
                    checked={form.values.failure_cleanup_enabled}
                    onChange={handleToggleFailureCleanup}
                  />
                }
                label="Enable Failure Cleanup"
              />
              <TextField
                {...getFieldHelpers("failure_ttl_ms", {
                  helperText: (
                    <FailureTTLHelperText ttl={form.values.failure_ttl_ms} />
                  ),
                })}
                disabled={
                  isSubmitting || !form.values.failure_cleanup_enabled
                }
                fullWidth
                inputProps={{ min: 0, step: "any" }}
                label="Time until cleanup (days)"
                type="number"
              />
            </Stack>
          </FormFields>
        </FormSection>
      </>

@ -27,6 +27,10 @@ const meta: Meta<typeof Workspace> = {
  component: Workspace,
  parameters: {
    queries: [
      {
        key: ["buildInfo"],
        data: Mocks.MockBuildInfo,
      },
      {
        key: ["portForward", Mocks.MockWorkspaceAgent.id],
        data: Mocks.MockListeningPortsResponse,

@ -37,7 +41,6 @@ const meta: Meta<typeof Workspace> = {
    (Story) => (
      <DashboardContext.Provider
        value={{
          buildInfo: Mocks.MockBuildInfo,
          entitlements: Mocks.MockEntitlementsWithScheduling,
          experiments: Mocks.MockExperiments,
          appearance: MockedAppearance,

@ -5,6 +5,7 @@ import { useMutation, useQuery, useQueryClient } from "react-query";
import { useNavigate } from "react-router-dom";
import { MissingBuildParameters, restartWorkspace } from "api/api";
import { getErrorMessage } from "api/errors";
import { buildInfo } from "api/queries/buildInfo";
import { deploymentConfig, deploymentSSHConfig } from "api/queries/deployment";
import { templateVersion, templateVersions } from "api/queries/templates";
import {

@ -27,7 +28,6 @@ import { MemoizedInlineMarkdown } from "components/Markdown/Markdown";
import { Stack } from "components/Stack/Stack";
import { useAuthenticated } from "contexts/auth/RequireAuth";
import { useWorkspaceBuildLogs } from "hooks/useWorkspaceBuildLogs";
import { useDashboard } from "modules/dashboard/useDashboard";
import { useFeatureVisibility } from "modules/dashboard/useFeatureVisibility";
import { pageTitle } from "utils/page";
import { ChangeVersionDialog } from "./ChangeVersionDialog";

@ -50,7 +50,7 @@ export const WorkspaceReadyPage: FC<WorkspaceReadyPageProps> = ({
}) => {
  const navigate = useNavigate();
  const queryClient = useQueryClient();
  const { buildInfo } = useDashboard();
  const buildInfoQuery = useQuery(buildInfo());
  const featureVisibility = useFeatureVisibility();
  if (workspace === undefined) {
    throw Error("Workspace is undefined");

@ -248,7 +248,7 @@ export const WorkspaceReadyPage: FC<WorkspaceReadyPageProps> = ({
      canChangeVersions={canChangeVersions}
      hideSSHButton={featureVisibility["browser_only"]}
      hideVSCodeDesktopButton={featureVisibility["browser_only"]}
      buildInfo={buildInfo}
      buildInfo={buildInfoQuery.data}
      sshPrefix={sshPrefixQuery.data?.hostname_prefix}
      template={template}
      buildLogs={

@ -139,11 +139,18 @@ const meta: Meta<typeof WorkspacesPageView> = {
    count: 13,
    page: 1,
  },
  parameters: {
    queries: [
      {
        key: ["buildInfo"],
        data: MockBuildInfo,
      },
    ],
  },
  decorators: [
    (Story) => (
      <DashboardContext.Provider
        value={{
          buildInfo: MockBuildInfo,
          entitlements: MockEntitlementsWithScheduling,
          experiments: MockExperiments,
          appearance: MockedAppearance,

@ -3,11 +3,7 @@ import type { FC } from "react";
import { withDefaultFeatures } from "api/api";
import type { Entitlements } from "api/typesGenerated";
import { DashboardContext } from "modules/dashboard/DashboardProvider";
import {
  MockAppearanceConfig,
  MockBuildInfo,
  MockEntitlements,
} from "./entities";
import { MockAppearanceConfig, MockEntitlements } from "./entities";

export const withDashboardProvider = (
  Story: FC,

@ -30,7 +26,6 @@ export const withDashboardProvider = (
  return (
    <DashboardContext.Provider
      value={{
        buildInfo: MockBuildInfo,
        entitlements,
        experiments,
        appearance: {

@ -3,8 +3,8 @@ export const getApplicationName = (): string => {
    .querySelector(`meta[name=application-name]`)
    ?.getAttribute("content");
  // Fallback to "Coder" if the application name is not available for some reason.
  // We need to check if the content does not look like {{ .ApplicationName}}
  // as it means that Coder is running in development mode (port :8080).
  // We need to check if the content does not look like `{{ .ApplicationName }}`
  // as it means that Coder is running in development mode.
  return c && !c.startsWith("{{ .") ? c : "Coder";
};

@ -1,10 +1,10 @@
export const getMetadataAsJSON = <T extends NonNullable<unknown>>(
  property: string,
): T | undefined => {
  const appearance = document.querySelector(`meta[property=${property}]`);
  const metadata = document.querySelector(`meta[property=${property}]`);

  if (appearance) {
    const rawContent = appearance.getAttribute("content");
  if (metadata) {
    const rawContent = metadata.getAttribute("content");

    if (rawContent) {
      try {