mirror of https://github.com/coder/coder.git
8356 lines
229 KiB
Go
8356 lines
229 KiB
Go
// Code generated by sqlc. DO NOT EDIT.
|
|
// versions:
|
|
// sqlc v1.17.2
|
|
|
|
package database
|
|
|
|
import (
|
|
"context"
|
|
"database/sql"
|
|
"encoding/json"
|
|
"time"
|
|
|
|
"github.com/coder/coder/coderd/database/dbtype"
|
|
"github.com/google/uuid"
|
|
"github.com/lib/pq"
|
|
"github.com/tabbed/pqtype"
|
|
)
|
|
|
|
const deleteAPIKeyByID = `-- name: DeleteAPIKeyByID :exec
DELETE FROM
	api_keys
WHERE
	id = $1
`

// DeleteAPIKeyByID deletes the API key with the given primary key.
// Deleting a non-existent ID is not an error (ExecContext succeeds with 0 rows).
func (q *sqlQuerier) DeleteAPIKeyByID(ctx context.Context, id string) error {
	_, err := q.db.ExecContext(ctx, deleteAPIKeyByID, id)
	return err
}
|
|
|
|
const deleteAPIKeysByUserID = `-- name: DeleteAPIKeysByUserID :exec
DELETE FROM
	api_keys
WHERE
	user_id = $1
`

// DeleteAPIKeysByUserID deletes all API keys belonging to the given user.
func (q *sqlQuerier) DeleteAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error {
	_, err := q.db.ExecContext(ctx, deleteAPIKeysByUserID, userID)
	return err
}
|
|
|
|
const deleteApplicationConnectAPIKeysByUserID = `-- name: DeleteApplicationConnectAPIKeysByUserID :exec
DELETE FROM
	api_keys
WHERE
	user_id = $1 AND
	scope = 'application_connect'::api_key_scope
`

// DeleteApplicationConnectAPIKeysByUserID deletes only the user's API keys
// that have the 'application_connect' scope; keys with other scopes are kept.
func (q *sqlQuerier) DeleteApplicationConnectAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error {
	_, err := q.db.ExecContext(ctx, deleteApplicationConnectAPIKeysByUserID, userID)
	return err
}
|
|
|
|
const getAPIKeyByID = `-- name: GetAPIKeyByID :one
SELECT
	id, hashed_secret, user_id, last_used, expires_at, created_at, updated_at, login_type, lifetime_seconds, ip_address, scope, token_name
FROM
	api_keys
WHERE
	id = $1
LIMIT
	1
`

// GetAPIKeyByID fetches a single API key by its primary key.
// Returns sql.ErrNoRows (via row.Scan) when no key matches.
func (q *sqlQuerier) GetAPIKeyByID(ctx context.Context, id string) (APIKey, error) {
	row := q.db.QueryRowContext(ctx, getAPIKeyByID, id)
	var i APIKey
	err := row.Scan(
		&i.ID,
		&i.HashedSecret,
		&i.UserID,
		&i.LastUsed,
		&i.ExpiresAt,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.LoginType,
		&i.LifetimeSeconds,
		&i.IPAddress,
		&i.Scope,
		&i.TokenName,
	)
	return i, err
}
|
|
|
|
const getAPIKeyByName = `-- name: GetAPIKeyByName :one
SELECT
	id, hashed_secret, user_id, last_used, expires_at, created_at, updated_at, login_type, lifetime_seconds, ip_address, scope, token_name
FROM
	api_keys
WHERE
	user_id = $1 AND
	token_name = $2 AND
	-- Filter out keys without a name: empty names are not unique per user,
	-- so a lookup by empty name would be ambiguous.
	token_name != ''
LIMIT
	1
`

// GetAPIKeyByNameParams are the arguments for GetAPIKeyByName.
type GetAPIKeyByNameParams struct {
	UserID    uuid.UUID `db:"user_id" json:"user_id"`
	TokenName string    `db:"token_name" json:"token_name"`
}

// GetAPIKeyByName fetches the user's API key with the given (non-empty) token
// name. There is no unique constraint on empty token names, so the query
// explicitly excludes them.
func (q *sqlQuerier) GetAPIKeyByName(ctx context.Context, arg GetAPIKeyByNameParams) (APIKey, error) {
	row := q.db.QueryRowContext(ctx, getAPIKeyByName, arg.UserID, arg.TokenName)
	var i APIKey
	err := row.Scan(
		&i.ID,
		&i.HashedSecret,
		&i.UserID,
		&i.LastUsed,
		&i.ExpiresAt,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.LoginType,
		&i.LifetimeSeconds,
		&i.IPAddress,
		&i.Scope,
		&i.TokenName,
	)
	return i, err
}
|
|
|
|
const getAPIKeysByLoginType = `-- name: GetAPIKeysByLoginType :many
SELECT id, hashed_secret, user_id, last_used, expires_at, created_at, updated_at, login_type, lifetime_seconds, ip_address, scope, token_name FROM api_keys WHERE login_type = $1
`

// GetAPIKeysByLoginType returns all API keys created with the given login
// type (e.g. password, token, OIDC).
func (q *sqlQuerier) GetAPIKeysByLoginType(ctx context.Context, loginType LoginType) ([]APIKey, error) {
	rows, err := q.db.QueryContext(ctx, getAPIKeysByLoginType, loginType)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []APIKey
	for rows.Next() {
		var i APIKey
		if err := rows.Scan(
			&i.ID,
			&i.HashedSecret,
			&i.UserID,
			&i.LastUsed,
			&i.ExpiresAt,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.LoginType,
			&i.LifetimeSeconds,
			&i.IPAddress,
			&i.Scope,
			&i.TokenName,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	// Explicit Close surfaces any error that the deferred Close would swallow.
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getAPIKeysByUserID = `-- name: GetAPIKeysByUserID :many
SELECT id, hashed_secret, user_id, last_used, expires_at, created_at, updated_at, login_type, lifetime_seconds, ip_address, scope, token_name FROM api_keys WHERE login_type = $1 AND user_id = $2
`

// GetAPIKeysByUserIDParams are the arguments for GetAPIKeysByUserID.
type GetAPIKeysByUserIDParams struct {
	LoginType LoginType `db:"login_type" json:"login_type"`
	UserID    uuid.UUID `db:"user_id" json:"user_id"`
}

// GetAPIKeysByUserID returns the user's API keys restricted to a single
// login type.
func (q *sqlQuerier) GetAPIKeysByUserID(ctx context.Context, arg GetAPIKeysByUserIDParams) ([]APIKey, error) {
	rows, err := q.db.QueryContext(ctx, getAPIKeysByUserID, arg.LoginType, arg.UserID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []APIKey
	for rows.Next() {
		var i APIKey
		if err := rows.Scan(
			&i.ID,
			&i.HashedSecret,
			&i.UserID,
			&i.LastUsed,
			&i.ExpiresAt,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.LoginType,
			&i.LifetimeSeconds,
			&i.IPAddress,
			&i.Scope,
			&i.TokenName,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getAPIKeysLastUsedAfter = `-- name: GetAPIKeysLastUsedAfter :many
SELECT id, hashed_secret, user_id, last_used, expires_at, created_at, updated_at, login_type, lifetime_seconds, ip_address, scope, token_name FROM api_keys WHERE last_used > $1
`

// GetAPIKeysLastUsedAfter returns all API keys whose last_used timestamp is
// strictly after the given time (the boundary itself is excluded).
func (q *sqlQuerier) GetAPIKeysLastUsedAfter(ctx context.Context, lastUsed time.Time) ([]APIKey, error) {
	rows, err := q.db.QueryContext(ctx, getAPIKeysLastUsedAfter, lastUsed)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []APIKey
	for rows.Next() {
		var i APIKey
		if err := rows.Scan(
			&i.ID,
			&i.HashedSecret,
			&i.UserID,
			&i.LastUsed,
			&i.ExpiresAt,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.LoginType,
			&i.LifetimeSeconds,
			&i.IPAddress,
			&i.Scope,
			&i.TokenName,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertAPIKey = `-- name: InsertAPIKey :one
INSERT INTO
	api_keys (
		id,
		lifetime_seconds,
		hashed_secret,
		ip_address,
		user_id,
		last_used,
		expires_at,
		created_at,
		updated_at,
		login_type,
		scope,
		token_name
	)
VALUES
	($1,
	 -- If the lifetime is set to 0, default to 24hrs
	 CASE $2::bigint
	 WHEN 0 THEN 86400
	 ELSE $2::bigint
	 END
	 , $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) RETURNING id, hashed_secret, user_id, last_used, expires_at, created_at, updated_at, login_type, lifetime_seconds, ip_address, scope, token_name
`

// InsertAPIKeyParams are the arguments for InsertAPIKey.
type InsertAPIKeyParams struct {
	ID string `db:"id" json:"id"`
	// LifetimeSeconds of 0 is replaced server-side with 86400 (24h) by the
	// CASE expression in the query.
	LifetimeSeconds int64       `db:"lifetime_seconds" json:"lifetime_seconds"`
	HashedSecret    []byte      `db:"hashed_secret" json:"hashed_secret"`
	IPAddress       pqtype.Inet `db:"ip_address" json:"ip_address"`
	UserID          uuid.UUID   `db:"user_id" json:"user_id"`
	LastUsed        time.Time   `db:"last_used" json:"last_used"`
	ExpiresAt       time.Time   `db:"expires_at" json:"expires_at"`
	CreatedAt       time.Time   `db:"created_at" json:"created_at"`
	UpdatedAt       time.Time   `db:"updated_at" json:"updated_at"`
	LoginType       LoginType   `db:"login_type" json:"login_type"`
	Scope           APIKeyScope `db:"scope" json:"scope"`
	TokenName       string      `db:"token_name" json:"token_name"`
}

// InsertAPIKey inserts a new API key and returns the stored row (including
// the lifetime actually persisted after the 0 -> 24h defaulting).
func (q *sqlQuerier) InsertAPIKey(ctx context.Context, arg InsertAPIKeyParams) (APIKey, error) {
	row := q.db.QueryRowContext(ctx, insertAPIKey,
		arg.ID,
		arg.LifetimeSeconds,
		arg.HashedSecret,
		arg.IPAddress,
		arg.UserID,
		arg.LastUsed,
		arg.ExpiresAt,
		arg.CreatedAt,
		arg.UpdatedAt,
		arg.LoginType,
		arg.Scope,
		arg.TokenName,
	)
	var i APIKey
	err := row.Scan(
		&i.ID,
		&i.HashedSecret,
		&i.UserID,
		&i.LastUsed,
		&i.ExpiresAt,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.LoginType,
		&i.LifetimeSeconds,
		&i.IPAddress,
		&i.Scope,
		&i.TokenName,
	)
	return i, err
}
|
|
|
|
const updateAPIKeyByID = `-- name: UpdateAPIKeyByID :exec
UPDATE
	api_keys
SET
	last_used = $2,
	expires_at = $3,
	ip_address = $4
WHERE
	id = $1
`

// UpdateAPIKeyByIDParams are the arguments for UpdateAPIKeyByID.
type UpdateAPIKeyByIDParams struct {
	ID        string      `db:"id" json:"id"`
	LastUsed  time.Time   `db:"last_used" json:"last_used"`
	ExpiresAt time.Time   `db:"expires_at" json:"expires_at"`
	IPAddress pqtype.Inet `db:"ip_address" json:"ip_address"`
}

// UpdateAPIKeyByID refreshes the mutable fields of an API key (last_used,
// expires_at, ip_address). Updating a non-existent ID is not an error.
func (q *sqlQuerier) UpdateAPIKeyByID(ctx context.Context, arg UpdateAPIKeyByIDParams) error {
	_, err := q.db.ExecContext(ctx, updateAPIKeyByID,
		arg.ID,
		arg.LastUsed,
		arg.ExpiresAt,
		arg.IPAddress,
	)
	return err
}
|
|
|
|
const getAuditLogsOffset = `-- name: GetAuditLogsOffset :many
SELECT
	audit_logs.id, audit_logs.time, audit_logs.user_id, audit_logs.organization_id, audit_logs.ip, audit_logs.user_agent, audit_logs.resource_type, audit_logs.resource_id, audit_logs.resource_target, audit_logs.action, audit_logs.diff, audit_logs.status_code, audit_logs.additional_fields, audit_logs.request_id, audit_logs.resource_icon,
    users.username AS user_username,
    users.email AS user_email,
    users.created_at AS user_created_at,
    users.status AS user_status,
    users.rbac_roles AS user_roles,
    users.avatar_url AS user_avatar_url,
    COUNT(audit_logs.*) OVER () AS count
FROM
	audit_logs
	LEFT JOIN users ON audit_logs.user_id = users.id
	LEFT JOIN
	    -- First join on workspaces to get the initial workspace create
		-- to workspace build 1 id. This is because the first create is
		-- is a different audit log than subsequent starts.
		workspaces ON
		    audit_logs.resource_type = 'workspace' AND
			audit_logs.resource_id = workspaces.id
	LEFT JOIN
	    workspace_builds ON
	        -- Get the reason from the build if the resource type
	        -- is a workspace_build
	        (
				audit_logs.resource_type = 'workspace_build'
	            AND audit_logs.resource_id = workspace_builds.id
			)
	        OR
	        -- Get the reason from the build #1 if this is the first
	        -- workspace create.
	        (
				audit_logs.resource_type = 'workspace' AND
				audit_logs.action = 'create' AND
				workspaces.id = workspace_builds.workspace_id AND
				workspace_builds.build_number = 1
			)
WHERE
	-- Filter resource_type
	CASE
		WHEN $3 :: text != '' THEN
			resource_type = $3 :: resource_type
		ELSE true
	END
	-- Filter resource_id
	AND CASE
		WHEN $4 :: uuid != '00000000-0000-0000-0000-000000000000'::uuid THEN
			resource_id = $4
		ELSE true
	END
	-- Filter by resource_target
	AND CASE
		WHEN $5 :: text != '' THEN
			resource_target = $5
		ELSE true
	END
	-- Filter action
	AND CASE
		WHEN $6 :: text != '' THEN
			action = $6 :: audit_action
		ELSE true
	END
	-- Filter by username
	AND CASE
		WHEN $7 :: text != '' THEN
			users.username = $7
		ELSE true
	END
	-- Filter by user_email
	AND CASE
		WHEN $8 :: text != '' THEN
			users.email = $8
		ELSE true
	END
	-- Filter by date_from
	AND CASE
		WHEN $9 :: timestamp with time zone != '0001-01-01 00:00:00Z' THEN
			"time" >= $9
		ELSE true
	END
	-- Filter by date_to
	AND CASE
		WHEN $10 :: timestamp with time zone != '0001-01-01 00:00:00Z' THEN
			"time" <= $10
		ELSE true
	END
	-- Filter by build_reason
	AND CASE
		WHEN $11::text != '' THEN
			workspace_builds.reason::text = $11
		ELSE true
	END
ORDER BY
    "time" DESC
LIMIT
    $1
OFFSET
    $2
`

// GetAuditLogsOffsetParams are the arguments for GetAuditLogsOffset. Each
// filter field uses its zero value ('' / zero UUID / zero time) to mean
// "no filter" — see the CASE expressions in the query.
type GetAuditLogsOffsetParams struct {
	Limit          int32     `db:"limit" json:"limit"`
	Offset         int32     `db:"offset" json:"offset"`
	ResourceType   string    `db:"resource_type" json:"resource_type"`
	ResourceID     uuid.UUID `db:"resource_id" json:"resource_id"`
	ResourceTarget string    `db:"resource_target" json:"resource_target"`
	Action         string    `db:"action" json:"action"`
	Username       string    `db:"username" json:"username"`
	Email          string    `db:"email" json:"email"`
	DateFrom       time.Time `db:"date_from" json:"date_from"`
	DateTo         time.Time `db:"date_to" json:"date_to"`
	BuildReason    string    `db:"build_reason" json:"build_reason"`
}

// GetAuditLogsOffsetRow is one audit log joined with the (possibly absent)
// acting user; the user_* fields are nullable because the join is LEFT.
// Count is the total matching row count, identical on every row (window fn).
type GetAuditLogsOffsetRow struct {
	ID               uuid.UUID       `db:"id" json:"id"`
	Time             time.Time       `db:"time" json:"time"`
	UserID           uuid.UUID       `db:"user_id" json:"user_id"`
	OrganizationID   uuid.UUID       `db:"organization_id" json:"organization_id"`
	Ip               pqtype.Inet     `db:"ip" json:"ip"`
	UserAgent        sql.NullString  `db:"user_agent" json:"user_agent"`
	ResourceType     ResourceType    `db:"resource_type" json:"resource_type"`
	ResourceID       uuid.UUID       `db:"resource_id" json:"resource_id"`
	ResourceTarget   string          `db:"resource_target" json:"resource_target"`
	Action           AuditAction     `db:"action" json:"action"`
	Diff             json.RawMessage `db:"diff" json:"diff"`
	StatusCode       int32           `db:"status_code" json:"status_code"`
	AdditionalFields json.RawMessage `db:"additional_fields" json:"additional_fields"`
	RequestID        uuid.UUID       `db:"request_id" json:"request_id"`
	ResourceIcon     string          `db:"resource_icon" json:"resource_icon"`
	UserUsername     sql.NullString  `db:"user_username" json:"user_username"`
	UserEmail        sql.NullString  `db:"user_email" json:"user_email"`
	UserCreatedAt    sql.NullTime    `db:"user_created_at" json:"user_created_at"`
	UserStatus       NullUserStatus  `db:"user_status" json:"user_status"`
	UserRoles        []string        `db:"user_roles" json:"user_roles"`
	UserAvatarUrl    sql.NullString  `db:"user_avatar_url" json:"user_avatar_url"`
	Count            int64           `db:"count" json:"count"`
}

// GetAuditLogsOffset retrieves a page of filtered audit logs ordered by
// time descending, using LIMIT/OFFSET pagination.
func (q *sqlQuerier) GetAuditLogsOffset(ctx context.Context, arg GetAuditLogsOffsetParams) ([]GetAuditLogsOffsetRow, error) {
	rows, err := q.db.QueryContext(ctx, getAuditLogsOffset,
		arg.Limit,
		arg.Offset,
		arg.ResourceType,
		arg.ResourceID,
		arg.ResourceTarget,
		arg.Action,
		arg.Username,
		arg.Email,
		arg.DateFrom,
		arg.DateTo,
		arg.BuildReason,
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetAuditLogsOffsetRow
	for rows.Next() {
		var i GetAuditLogsOffsetRow
		if err := rows.Scan(
			&i.ID,
			&i.Time,
			&i.UserID,
			&i.OrganizationID,
			&i.Ip,
			&i.UserAgent,
			&i.ResourceType,
			&i.ResourceID,
			&i.ResourceTarget,
			&i.Action,
			&i.Diff,
			&i.StatusCode,
			&i.AdditionalFields,
			&i.RequestID,
			&i.ResourceIcon,
			&i.UserUsername,
			&i.UserEmail,
			&i.UserCreatedAt,
			&i.UserStatus,
			// rbac_roles is a Postgres text[]; pq.Array adapts it to []string.
			pq.Array(&i.UserRoles),
			&i.UserAvatarUrl,
			&i.Count,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertAuditLog = `-- name: InsertAuditLog :one
INSERT INTO
	audit_logs (
        id,
        "time",
        user_id,
        organization_id,
        ip,
        user_agent,
        resource_type,
        resource_id,
        resource_target,
        action,
        diff,
        status_code,
        additional_fields,
        request_id,
        resource_icon
    )
VALUES
	($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15) RETURNING id, time, user_id, organization_id, ip, user_agent, resource_type, resource_id, resource_target, action, diff, status_code, additional_fields, request_id, resource_icon
`

// InsertAuditLogParams are the arguments for InsertAuditLog.
type InsertAuditLogParams struct {
	ID               uuid.UUID       `db:"id" json:"id"`
	Time             time.Time       `db:"time" json:"time"`
	UserID           uuid.UUID       `db:"user_id" json:"user_id"`
	OrganizationID   uuid.UUID       `db:"organization_id" json:"organization_id"`
	Ip               pqtype.Inet     `db:"ip" json:"ip"`
	UserAgent        sql.NullString  `db:"user_agent" json:"user_agent"`
	ResourceType     ResourceType    `db:"resource_type" json:"resource_type"`
	ResourceID       uuid.UUID       `db:"resource_id" json:"resource_id"`
	ResourceTarget   string          `db:"resource_target" json:"resource_target"`
	Action           AuditAction     `db:"action" json:"action"`
	Diff             json.RawMessage `db:"diff" json:"diff"`
	StatusCode       int32           `db:"status_code" json:"status_code"`
	AdditionalFields json.RawMessage `db:"additional_fields" json:"additional_fields"`
	RequestID        uuid.UUID       `db:"request_id" json:"request_id"`
	ResourceIcon     string          `db:"resource_icon" json:"resource_icon"`
}

// InsertAuditLog inserts a single audit log entry and returns the stored row.
func (q *sqlQuerier) InsertAuditLog(ctx context.Context, arg InsertAuditLogParams) (AuditLog, error) {
	row := q.db.QueryRowContext(ctx, insertAuditLog,
		arg.ID,
		arg.Time,
		arg.UserID,
		arg.OrganizationID,
		arg.Ip,
		arg.UserAgent,
		arg.ResourceType,
		arg.ResourceID,
		arg.ResourceTarget,
		arg.Action,
		arg.Diff,
		arg.StatusCode,
		arg.AdditionalFields,
		arg.RequestID,
		arg.ResourceIcon,
	)
	var i AuditLog
	err := row.Scan(
		&i.ID,
		&i.Time,
		&i.UserID,
		&i.OrganizationID,
		&i.Ip,
		&i.UserAgent,
		&i.ResourceType,
		&i.ResourceID,
		&i.ResourceTarget,
		&i.Action,
		&i.Diff,
		&i.StatusCode,
		&i.AdditionalFields,
		&i.RequestID,
		&i.ResourceIcon,
	)
	return i, err
}
|
|
|
|
const getFileByHashAndCreator = `-- name: GetFileByHashAndCreator :one
SELECT
	hash, created_at, created_by, mimetype, data, id
FROM
	files
WHERE
	hash = $1
AND
	created_by = $2
LIMIT
	1
`

// GetFileByHashAndCreatorParams are the arguments for GetFileByHashAndCreator.
type GetFileByHashAndCreatorParams struct {
	Hash      string    `db:"hash" json:"hash"`
	CreatedBy uuid.UUID `db:"created_by" json:"created_by"`
}

// GetFileByHashAndCreator fetches a file by its content hash scoped to the
// user who uploaded it (the same content may be uploaded by multiple users).
func (q *sqlQuerier) GetFileByHashAndCreator(ctx context.Context, arg GetFileByHashAndCreatorParams) (File, error) {
	row := q.db.QueryRowContext(ctx, getFileByHashAndCreator, arg.Hash, arg.CreatedBy)
	var i File
	err := row.Scan(
		&i.Hash,
		&i.CreatedAt,
		&i.CreatedBy,
		&i.Mimetype,
		&i.Data,
		&i.ID,
	)
	return i, err
}
|
|
|
|
const getFileByID = `-- name: GetFileByID :one
SELECT
	hash, created_at, created_by, mimetype, data, id
FROM
	files
WHERE
	id = $1
LIMIT
	1
`

// GetFileByID fetches a single file by its primary key.
func (q *sqlQuerier) GetFileByID(ctx context.Context, id uuid.UUID) (File, error) {
	row := q.db.QueryRowContext(ctx, getFileByID, id)
	var i File
	err := row.Scan(
		&i.Hash,
		&i.CreatedAt,
		&i.CreatedBy,
		&i.Mimetype,
		&i.Data,
		&i.ID,
	)
	return i, err
}
|
|
|
|
const getFileTemplates = `-- name: GetFileTemplates :many
SELECT
	files.id AS file_id,
	files.created_by AS file_created_by,
	templates.id AS template_id,
	templates.organization_id AS template_organization_id,
	templates.created_by AS template_created_by,
	templates.user_acl,
	templates.group_acl
FROM
	templates
INNER JOIN
	template_versions
	ON templates.id = template_versions.template_id
INNER JOIN
	provisioner_jobs
	ON job_id = provisioner_jobs.id
INNER JOIN
	files
	ON files.id = provisioner_jobs.file_id
WHERE
	-- Only fetch template version associated files.
	storage_method = 'file'
	AND provisioner_jobs.type = 'template_version_import'
	AND file_id = $1
`

// GetFileTemplatesRow pairs a file with one template (and its ACLs) that
// references it via a template-version import job.
type GetFileTemplatesRow struct {
	FileID                 uuid.UUID   `db:"file_id" json:"file_id"`
	FileCreatedBy          uuid.UUID   `db:"file_created_by" json:"file_created_by"`
	TemplateID             uuid.UUID   `db:"template_id" json:"template_id"`
	TemplateOrganizationID uuid.UUID   `db:"template_organization_id" json:"template_organization_id"`
	TemplateCreatedBy      uuid.UUID   `db:"template_created_by" json:"template_created_by"`
	UserACL                TemplateACL `db:"user_acl" json:"user_acl"`
	GroupACL               TemplateACL `db:"group_acl" json:"group_acl"`
}

// Get all templates that use a file.
func (q *sqlQuerier) GetFileTemplates(ctx context.Context, fileID uuid.UUID) ([]GetFileTemplatesRow, error) {
	rows, err := q.db.QueryContext(ctx, getFileTemplates, fileID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetFileTemplatesRow
	for rows.Next() {
		var i GetFileTemplatesRow
		if err := rows.Scan(
			&i.FileID,
			&i.FileCreatedBy,
			&i.TemplateID,
			&i.TemplateOrganizationID,
			&i.TemplateCreatedBy,
			&i.UserACL,
			&i.GroupACL,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertFile = `-- name: InsertFile :one
INSERT INTO
	files (id, hash, created_at, created_by, mimetype, "data")
VALUES
	($1, $2, $3, $4, $5, $6) RETURNING hash, created_at, created_by, mimetype, data, id
`

// InsertFileParams are the arguments for InsertFile.
type InsertFileParams struct {
	ID        uuid.UUID `db:"id" json:"id"`
	Hash      string    `db:"hash" json:"hash"`
	CreatedAt time.Time `db:"created_at" json:"created_at"`
	CreatedBy uuid.UUID `db:"created_by" json:"created_by"`
	Mimetype  string    `db:"mimetype" json:"mimetype"`
	Data      []byte    `db:"data" json:"data"`
}

// InsertFile stores a new file blob and returns the stored row.
func (q *sqlQuerier) InsertFile(ctx context.Context, arg InsertFileParams) (File, error) {
	row := q.db.QueryRowContext(ctx, insertFile,
		arg.ID,
		arg.Hash,
		arg.CreatedAt,
		arg.CreatedBy,
		arg.Mimetype,
		arg.Data,
	)
	var i File
	err := row.Scan(
		&i.Hash,
		&i.CreatedAt,
		&i.CreatedBy,
		&i.Mimetype,
		&i.Data,
		&i.ID,
	)
	return i, err
}
|
|
|
|
const getGitAuthLink = `-- name: GetGitAuthLink :one
SELECT provider_id, user_id, created_at, updated_at, oauth_access_token, oauth_refresh_token, oauth_expiry FROM git_auth_links WHERE provider_id = $1 AND user_id = $2
`

// GetGitAuthLinkParams are the arguments for GetGitAuthLink.
type GetGitAuthLinkParams struct {
	ProviderID string    `db:"provider_id" json:"provider_id"`
	UserID     uuid.UUID `db:"user_id" json:"user_id"`
}

// GetGitAuthLink fetches a user's OAuth link for a git provider.
func (q *sqlQuerier) GetGitAuthLink(ctx context.Context, arg GetGitAuthLinkParams) (GitAuthLink, error) {
	row := q.db.QueryRowContext(ctx, getGitAuthLink, arg.ProviderID, arg.UserID)
	var i GitAuthLink
	err := row.Scan(
		&i.ProviderID,
		&i.UserID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.OAuthAccessToken,
		&i.OAuthRefreshToken,
		&i.OAuthExpiry,
	)
	return i, err
}
|
|
|
|
const insertGitAuthLink = `-- name: InsertGitAuthLink :one
INSERT INTO git_auth_links (
    provider_id,
    user_id,
    created_at,
    updated_at,
    oauth_access_token,
    oauth_refresh_token,
    oauth_expiry
) VALUES (
    $1,
    $2,
    $3,
    $4,
    $5,
    $6,
    $7
) RETURNING provider_id, user_id, created_at, updated_at, oauth_access_token, oauth_refresh_token, oauth_expiry
`

// InsertGitAuthLinkParams are the arguments for InsertGitAuthLink.
type InsertGitAuthLinkParams struct {
	ProviderID        string    `db:"provider_id" json:"provider_id"`
	UserID            uuid.UUID `db:"user_id" json:"user_id"`
	CreatedAt         time.Time `db:"created_at" json:"created_at"`
	UpdatedAt         time.Time `db:"updated_at" json:"updated_at"`
	OAuthAccessToken  string    `db:"oauth_access_token" json:"oauth_access_token"`
	OAuthRefreshToken string    `db:"oauth_refresh_token" json:"oauth_refresh_token"`
	OAuthExpiry       time.Time `db:"oauth_expiry" json:"oauth_expiry"`
}

// InsertGitAuthLink stores a new OAuth link between a user and a git
// provider, returning the stored row.
func (q *sqlQuerier) InsertGitAuthLink(ctx context.Context, arg InsertGitAuthLinkParams) (GitAuthLink, error) {
	row := q.db.QueryRowContext(ctx, insertGitAuthLink,
		arg.ProviderID,
		arg.UserID,
		arg.CreatedAt,
		arg.UpdatedAt,
		arg.OAuthAccessToken,
		arg.OAuthRefreshToken,
		arg.OAuthExpiry,
	)
	var i GitAuthLink
	err := row.Scan(
		&i.ProviderID,
		&i.UserID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.OAuthAccessToken,
		&i.OAuthRefreshToken,
		&i.OAuthExpiry,
	)
	return i, err
}
|
|
|
|
const updateGitAuthLink = `-- name: UpdateGitAuthLink :one
UPDATE git_auth_links SET
    updated_at = $3,
    oauth_access_token = $4,
    oauth_refresh_token = $5,
    oauth_expiry = $6
WHERE provider_id = $1 AND user_id = $2 RETURNING provider_id, user_id, created_at, updated_at, oauth_access_token, oauth_refresh_token, oauth_expiry
`

// UpdateGitAuthLinkParams are the arguments for UpdateGitAuthLink.
type UpdateGitAuthLinkParams struct {
	ProviderID        string    `db:"provider_id" json:"provider_id"`
	UserID            uuid.UUID `db:"user_id" json:"user_id"`
	UpdatedAt         time.Time `db:"updated_at" json:"updated_at"`
	OAuthAccessToken  string    `db:"oauth_access_token" json:"oauth_access_token"`
	OAuthRefreshToken string    `db:"oauth_refresh_token" json:"oauth_refresh_token"`
	OAuthExpiry       time.Time `db:"oauth_expiry" json:"oauth_expiry"`
}

// UpdateGitAuthLink refreshes the OAuth tokens of an existing link and
// returns the updated row (sql.ErrNoRows if the link does not exist).
func (q *sqlQuerier) UpdateGitAuthLink(ctx context.Context, arg UpdateGitAuthLinkParams) (GitAuthLink, error) {
	row := q.db.QueryRowContext(ctx, updateGitAuthLink,
		arg.ProviderID,
		arg.UserID,
		arg.UpdatedAt,
		arg.OAuthAccessToken,
		arg.OAuthRefreshToken,
		arg.OAuthExpiry,
	)
	var i GitAuthLink
	err := row.Scan(
		&i.ProviderID,
		&i.UserID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.OAuthAccessToken,
		&i.OAuthRefreshToken,
		&i.OAuthExpiry,
	)
	return i, err
}
|
|
|
|
const deleteGitSSHKey = `-- name: DeleteGitSSHKey :exec
DELETE FROM
	gitsshkeys
WHERE
	user_id = $1
`

// DeleteGitSSHKey removes the user's git SSH key pair.
func (q *sqlQuerier) DeleteGitSSHKey(ctx context.Context, userID uuid.UUID) error {
	_, err := q.db.ExecContext(ctx, deleteGitSSHKey, userID)
	return err
}
|
|
|
|
const getGitSSHKey = `-- name: GetGitSSHKey :one
SELECT
	user_id, created_at, updated_at, private_key, public_key
FROM
	gitsshkeys
WHERE
	user_id = $1
`

// GetGitSSHKey fetches the user's git SSH key pair (one per user).
func (q *sqlQuerier) GetGitSSHKey(ctx context.Context, userID uuid.UUID) (GitSSHKey, error) {
	row := q.db.QueryRowContext(ctx, getGitSSHKey, userID)
	var i GitSSHKey
	err := row.Scan(
		&i.UserID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.PrivateKey,
		&i.PublicKey,
	)
	return i, err
}
|
|
|
|
const insertGitSSHKey = `-- name: InsertGitSSHKey :one
INSERT INTO
	gitsshkeys (
		user_id,
		created_at,
		updated_at,
		private_key,
		public_key
	)
VALUES
	($1, $2, $3, $4, $5) RETURNING user_id, created_at, updated_at, private_key, public_key
`

// InsertGitSSHKeyParams are the arguments for InsertGitSSHKey.
type InsertGitSSHKeyParams struct {
	UserID     uuid.UUID `db:"user_id" json:"user_id"`
	CreatedAt  time.Time `db:"created_at" json:"created_at"`
	UpdatedAt  time.Time `db:"updated_at" json:"updated_at"`
	PrivateKey string    `db:"private_key" json:"private_key"`
	PublicKey  string    `db:"public_key" json:"public_key"`
}

// InsertGitSSHKey stores a new git SSH key pair for a user and returns the
// stored row.
func (q *sqlQuerier) InsertGitSSHKey(ctx context.Context, arg InsertGitSSHKeyParams) (GitSSHKey, error) {
	row := q.db.QueryRowContext(ctx, insertGitSSHKey,
		arg.UserID,
		arg.CreatedAt,
		arg.UpdatedAt,
		arg.PrivateKey,
		arg.PublicKey,
	)
	var i GitSSHKey
	err := row.Scan(
		&i.UserID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.PrivateKey,
		&i.PublicKey,
	)
	return i, err
}
|
|
|
|
const updateGitSSHKey = `-- name: UpdateGitSSHKey :one
UPDATE
	gitsshkeys
SET
	updated_at = $2,
	private_key = $3,
	public_key = $4
WHERE
	user_id = $1
RETURNING
	user_id, created_at, updated_at, private_key, public_key
`

// UpdateGitSSHKeyParams are the arguments for UpdateGitSSHKey.
type UpdateGitSSHKeyParams struct {
	UserID     uuid.UUID `db:"user_id" json:"user_id"`
	UpdatedAt  time.Time `db:"updated_at" json:"updated_at"`
	PrivateKey string    `db:"private_key" json:"private_key"`
	PublicKey  string    `db:"public_key" json:"public_key"`
}

// UpdateGitSSHKey replaces the user's git SSH key pair and returns the
// updated row (sql.ErrNoRows if the user has no key).
func (q *sqlQuerier) UpdateGitSSHKey(ctx context.Context, arg UpdateGitSSHKeyParams) (GitSSHKey, error) {
	row := q.db.QueryRowContext(ctx, updateGitSSHKey,
		arg.UserID,
		arg.UpdatedAt,
		arg.PrivateKey,
		arg.PublicKey,
	)
	var i GitSSHKey
	err := row.Scan(
		&i.UserID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.PrivateKey,
		&i.PublicKey,
	)
	return i, err
}
|
|
|
|
const deleteGroupMemberFromGroup = `-- name: DeleteGroupMemberFromGroup :exec
DELETE FROM
	group_members
WHERE
	user_id = $1 AND
	group_id = $2
`

// DeleteGroupMemberFromGroupParams are the arguments for
// DeleteGroupMemberFromGroup.
type DeleteGroupMemberFromGroupParams struct {
	UserID  uuid.UUID `db:"user_id" json:"user_id"`
	GroupID uuid.UUID `db:"group_id" json:"group_id"`
}

// DeleteGroupMemberFromGroup removes a single user's membership in a group.
func (q *sqlQuerier) DeleteGroupMemberFromGroup(ctx context.Context, arg DeleteGroupMemberFromGroupParams) error {
	_, err := q.db.ExecContext(ctx, deleteGroupMemberFromGroup, arg.UserID, arg.GroupID)
	return err
}
|
|
|
|
const deleteGroupMembersByOrgAndUser = `-- name: DeleteGroupMembersByOrgAndUser :exec
DELETE FROM
	group_members
WHERE
	group_members.user_id = $1
	AND group_id = ANY(SELECT id FROM groups WHERE organization_id = $2)
`

// DeleteGroupMembersByOrgAndUserParams are the arguments for
// DeleteGroupMembersByOrgAndUser.
type DeleteGroupMembersByOrgAndUserParams struct {
	UserID         uuid.UUID `db:"user_id" json:"user_id"`
	OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"`
}

// DeleteGroupMembersByOrgAndUser removes the user from every group that
// belongs to the given organization.
func (q *sqlQuerier) DeleteGroupMembersByOrgAndUser(ctx context.Context, arg DeleteGroupMembersByOrgAndUserParams) error {
	_, err := q.db.ExecContext(ctx, deleteGroupMembersByOrgAndUser, arg.UserID, arg.OrganizationID)
	return err
}
|
|
|
|
const getGroupMembers = `-- name: GetGroupMembers :many
SELECT
	users.id, users.email, users.username, users.hashed_password, users.created_at, users.updated_at, users.status, users.rbac_roles, users.login_type, users.avatar_url, users.deleted, users.last_seen_at
FROM
	users
JOIN
	group_members
ON
	users.id = group_members.user_id
WHERE
	group_members.group_id = $1
AND
	users.status = 'active'
AND
	users.deleted = 'false'
`

// GetGroupMembers returns the active, non-deleted users belonging to the
// given group. Suspended or deleted users are filtered out by the query.
func (q *sqlQuerier) GetGroupMembers(ctx context.Context, groupID uuid.UUID) ([]User, error) {
	rows, err := q.db.QueryContext(ctx, getGroupMembers, groupID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []User
	for rows.Next() {
		var i User
		if err := rows.Scan(
			&i.ID,
			&i.Email,
			&i.Username,
			&i.HashedPassword,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.Status,
			&i.RBACRoles,
			&i.LoginType,
			&i.AvatarURL,
			&i.Deleted,
			&i.LastSeenAt,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertGroupMember = `-- name: InsertGroupMember :exec
INSERT INTO
	group_members (user_id, group_id)
VALUES
	($1, $2)
`

// InsertGroupMemberParams are the arguments for InsertGroupMember.
type InsertGroupMemberParams struct {
	UserID  uuid.UUID `db:"user_id" json:"user_id"`
	GroupID uuid.UUID `db:"group_id" json:"group_id"`
}

// InsertGroupMember adds a user to a group.
func (q *sqlQuerier) InsertGroupMember(ctx context.Context, arg InsertGroupMemberParams) error {
	_, err := q.db.ExecContext(ctx, insertGroupMember, arg.UserID, arg.GroupID)
	return err
}
|
|
|
|
const insertUserGroupsByName = `-- name: InsertUserGroupsByName :exec
WITH groups AS (
	SELECT
		id
	FROM
		groups
	WHERE
		groups.organization_id = $2 AND
		groups.name = ANY($3 :: text [])
)
INSERT INTO
	group_members (user_id, group_id)
SELECT
	$1,
	groups.id
FROM
	groups
`

// InsertUserGroupsByNameParams are the arguments for InsertUserGroupsByName.
type InsertUserGroupsByNameParams struct {
	UserID         uuid.UUID `db:"user_id" json:"user_id"`
	OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"`
	GroupNames     []string  `db:"group_names" json:"group_names"`
}

// InsertUserGroupsByName adds a user to all provided groups, if they exist.
// Names that do not match a group in the organization are silently skipped
// (the CTE only selects existing groups).
func (q *sqlQuerier) InsertUserGroupsByName(ctx context.Context, arg InsertUserGroupsByNameParams) error {
	// pq.Array adapts the Go []string to the Postgres text[] parameter.
	_, err := q.db.ExecContext(ctx, insertUserGroupsByName, arg.UserID, arg.OrganizationID, pq.Array(arg.GroupNames))
	return err
}
|
|
|
|
const deleteGroupByID = `-- name: DeleteGroupByID :exec
DELETE FROM
groups
WHERE
id = $1
`

// DeleteGroupByID deletes the group with the given ID.
func (q *sqlQuerier) DeleteGroupByID(ctx context.Context, id uuid.UUID) error {
	_, err := q.db.ExecContext(ctx, deleteGroupByID, id)
	return err
}
|
|
|
|
const getGroupByID = `-- name: GetGroupByID :one
SELECT
id, name, organization_id, avatar_url, quota_allowance
FROM
groups
WHERE
id = $1
LIMIT
1
`

// GetGroupByID fetches a single group by its ID.
// Returns sql.ErrNoRows (via row.Scan) if no such group exists.
func (q *sqlQuerier) GetGroupByID(ctx context.Context, id uuid.UUID) (Group, error) {
	row := q.db.QueryRowContext(ctx, getGroupByID, id)
	var i Group
	err := row.Scan(
		&i.ID,
		&i.Name,
		&i.OrganizationID,
		&i.AvatarURL,
		&i.QuotaAllowance,
	)
	return i, err
}
|
|
|
|
const getGroupByOrgAndName = `-- name: GetGroupByOrgAndName :one
SELECT
id, name, organization_id, avatar_url, quota_allowance
FROM
groups
WHERE
organization_id = $1
AND
name = $2
LIMIT
1
`

// GetGroupByOrgAndNameParams holds the arguments for GetGroupByOrgAndName.
type GetGroupByOrgAndNameParams struct {
	OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"`
	Name           string    `db:"name" json:"name"`
}

// GetGroupByOrgAndName fetches a single group by organization ID and
// (case-sensitive, per the SQL above) name.
func (q *sqlQuerier) GetGroupByOrgAndName(ctx context.Context, arg GetGroupByOrgAndNameParams) (Group, error) {
	row := q.db.QueryRowContext(ctx, getGroupByOrgAndName, arg.OrganizationID, arg.Name)
	var i Group
	err := row.Scan(
		&i.ID,
		&i.Name,
		&i.OrganizationID,
		&i.AvatarURL,
		&i.QuotaAllowance,
	)
	return i, err
}
|
|
|
|
const getGroupsByOrganizationID = `-- name: GetGroupsByOrganizationID :many
SELECT
id, name, organization_id, avatar_url, quota_allowance
FROM
groups
WHERE
organization_id = $1
AND
id != $1
`

// GetGroupsByOrganizationID returns all groups in the organization except the
// group whose id equals the organization id — presumably the implicit
// "Everyone" group, which reuses the organization ID as its own ID (see
// InsertAllUsersGroup).
func (q *sqlQuerier) GetGroupsByOrganizationID(ctx context.Context, organizationID uuid.UUID) ([]Group, error) {
	rows, err := q.db.QueryContext(ctx, getGroupsByOrganizationID, organizationID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []Group
	for rows.Next() {
		var i Group
		if err := rows.Scan(
			&i.ID,
			&i.Name,
			&i.OrganizationID,
			&i.AvatarURL,
			&i.QuotaAllowance,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertAllUsersGroup = `-- name: InsertAllUsersGroup :one
INSERT INTO groups (
id,
name,
organization_id
)
VALUES
($1, 'Everyone', $1) RETURNING id, name, organization_id, avatar_url, quota_allowance
`

// InsertAllUsersGroup creates the implicit "Everyone" group for an
// organization and returns the inserted row.
//
// We use the organization_id as the id
// for simplicity since all users is
// every member of the org.
func (q *sqlQuerier) InsertAllUsersGroup(ctx context.Context, organizationID uuid.UUID) (Group, error) {
	row := q.db.QueryRowContext(ctx, insertAllUsersGroup, organizationID)
	var i Group
	err := row.Scan(
		&i.ID,
		&i.Name,
		&i.OrganizationID,
		&i.AvatarURL,
		&i.QuotaAllowance,
	)
	return i, err
}
|
|
|
|
const insertGroup = `-- name: InsertGroup :one
INSERT INTO groups (
id,
name,
organization_id,
avatar_url,
quota_allowance
)
VALUES
($1, $2, $3, $4, $5) RETURNING id, name, organization_id, avatar_url, quota_allowance
`

// InsertGroupParams holds the arguments for InsertGroup.
type InsertGroupParams struct {
	ID             uuid.UUID `db:"id" json:"id"`
	Name           string    `db:"name" json:"name"`
	OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"`
	AvatarURL      string    `db:"avatar_url" json:"avatar_url"`
	QuotaAllowance int32     `db:"quota_allowance" json:"quota_allowance"`
}

// InsertGroup creates a new group and returns the inserted row.
func (q *sqlQuerier) InsertGroup(ctx context.Context, arg InsertGroupParams) (Group, error) {
	row := q.db.QueryRowContext(ctx, insertGroup,
		arg.ID,
		arg.Name,
		arg.OrganizationID,
		arg.AvatarURL,
		arg.QuotaAllowance,
	)
	var i Group
	err := row.Scan(
		&i.ID,
		&i.Name,
		&i.OrganizationID,
		&i.AvatarURL,
		&i.QuotaAllowance,
	)
	return i, err
}
|
|
|
|
const updateGroupByID = `-- name: UpdateGroupByID :one
UPDATE
groups
SET
name = $1,
avatar_url = $2,
quota_allowance = $3
WHERE
id = $4
RETURNING id, name, organization_id, avatar_url, quota_allowance
`

// UpdateGroupByIDParams holds the arguments for UpdateGroupByID.
type UpdateGroupByIDParams struct {
	Name           string    `db:"name" json:"name"`
	AvatarURL      string    `db:"avatar_url" json:"avatar_url"`
	QuotaAllowance int32     `db:"quota_allowance" json:"quota_allowance"`
	ID             uuid.UUID `db:"id" json:"id"`
}

// UpdateGroupByID updates a group's name, avatar URL, and quota allowance,
// returning the updated row. Returns sql.ErrNoRows if the group is missing.
func (q *sqlQuerier) UpdateGroupByID(ctx context.Context, arg UpdateGroupByIDParams) (Group, error) {
	row := q.db.QueryRowContext(ctx, updateGroupByID,
		arg.Name,
		arg.AvatarURL,
		arg.QuotaAllowance,
		arg.ID,
	)
	var i Group
	err := row.Scan(
		&i.ID,
		&i.Name,
		&i.OrganizationID,
		&i.AvatarURL,
		&i.QuotaAllowance,
	)
	return i, err
}
|
|
|
|
const deleteLicense = `-- name: DeleteLicense :one
DELETE
FROM licenses
WHERE id = $1
RETURNING id
`

// DeleteLicense deletes a license and returns the deleted row's id.
// Returns sql.ErrNoRows if no license with the given id exists.
func (q *sqlQuerier) DeleteLicense(ctx context.Context, id int32) (int32, error) {
	row := q.db.QueryRowContext(ctx, deleteLicense, id)
	// The RETURNING id is scanned back into the same variable.
	err := row.Scan(&id)
	return id, err
}
|
|
|
|
const getLicenseByID = `-- name: GetLicenseByID :one
SELECT
id, uploaded_at, jwt, exp, uuid
FROM
licenses
WHERE
id = $1
LIMIT
1
`

// GetLicenseByID fetches a single license by its numeric id.
func (q *sqlQuerier) GetLicenseByID(ctx context.Context, id int32) (License, error) {
	row := q.db.QueryRowContext(ctx, getLicenseByID, id)
	var i License
	err := row.Scan(
		&i.ID,
		&i.UploadedAt,
		&i.JWT,
		&i.Exp,
		&i.UUID,
	)
	return i, err
}
|
|
|
|
const getLicenses = `-- name: GetLicenses :many
SELECT id, uploaded_at, jwt, exp, uuid
FROM licenses
ORDER BY (id)
`

// GetLicenses returns all licenses ordered by id ascending.
func (q *sqlQuerier) GetLicenses(ctx context.Context) ([]License, error) {
	rows, err := q.db.QueryContext(ctx, getLicenses)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []License
	for rows.Next() {
		var i License
		if err := rows.Scan(
			&i.ID,
			&i.UploadedAt,
			&i.JWT,
			&i.Exp,
			&i.UUID,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getUnexpiredLicenses = `-- name: GetUnexpiredLicenses :many
SELECT id, uploaded_at, jwt, exp, uuid
FROM licenses
WHERE exp > NOW()
ORDER BY (id)
`

// GetUnexpiredLicenses returns all licenses whose expiry is in the future
// (compared against the database server's NOW()), ordered by id ascending.
func (q *sqlQuerier) GetUnexpiredLicenses(ctx context.Context) ([]License, error) {
	rows, err := q.db.QueryContext(ctx, getUnexpiredLicenses)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []License
	for rows.Next() {
		var i License
		if err := rows.Scan(
			&i.ID,
			&i.UploadedAt,
			&i.JWT,
			&i.Exp,
			&i.UUID,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertLicense = `-- name: InsertLicense :one
INSERT INTO
licenses (
uploaded_at,
jwt,
exp,
uuid
)
VALUES
($1, $2, $3, $4) RETURNING id, uploaded_at, jwt, exp, uuid
`

// InsertLicenseParams holds the arguments for InsertLicense.
type InsertLicenseParams struct {
	UploadedAt time.Time `db:"uploaded_at" json:"uploaded_at"`
	JWT        string    `db:"jwt" json:"jwt"`
	Exp        time.Time `db:"exp" json:"exp"`
	UUID       uuid.UUID `db:"uuid" json:"uuid"`
}

// InsertLicense stores a new license and returns the inserted row,
// including the database-generated id.
func (q *sqlQuerier) InsertLicense(ctx context.Context, arg InsertLicenseParams) (License, error) {
	row := q.db.QueryRowContext(ctx, insertLicense,
		arg.UploadedAt,
		arg.JWT,
		arg.Exp,
		arg.UUID,
	)
	var i License
	err := row.Scan(
		&i.ID,
		&i.UploadedAt,
		&i.JWT,
		&i.Exp,
		&i.UUID,
	)
	return i, err
}
|
|
|
|
const acquireLock = `-- name: AcquireLock :exec
SELECT pg_advisory_xact_lock($1)
`

// AcquireLock takes a PostgreSQL transaction-scoped advisory lock.
// Blocks until the lock is acquired.
//
// This must be called from within a transaction. The lock will be automatically
// released when the transaction ends.
//
// Use database.LockID() to generate a unique lock ID from a string.
func (q *sqlQuerier) AcquireLock(ctx context.Context, pgAdvisoryXactLock int64) error {
	_, err := q.db.ExecContext(ctx, acquireLock, pgAdvisoryXactLock)
	return err
}
|
|
|
|
const tryAcquireLock = `-- name: TryAcquireLock :one
SELECT pg_try_advisory_xact_lock($1)
`

// TryAcquireLock attempts to take a PostgreSQL transaction-scoped advisory
// lock. Non blocking lock. Returns true if the lock was acquired, false otherwise.
//
// This must be called from within a transaction. The lock will be automatically
// released when the transaction ends.
//
// Use database.LockID() to generate a unique lock ID from a string.
func (q *sqlQuerier) TryAcquireLock(ctx context.Context, pgTryAdvisoryXactLock int64) (bool, error) {
	row := q.db.QueryRowContext(ctx, tryAcquireLock, pgTryAdvisoryXactLock)
	var pg_try_advisory_xact_lock bool
	err := row.Scan(&pg_try_advisory_xact_lock)
	return pg_try_advisory_xact_lock, err
}
|
|
|
|
const getOrganizationIDsByMemberIDs = `-- name: GetOrganizationIDsByMemberIDs :many
SELECT
user_id, array_agg(organization_id) :: uuid [ ] AS "organization_IDs"
FROM
organization_members
WHERE
user_id = ANY($1 :: uuid [ ])
GROUP BY
user_id
`

// GetOrganizationIDsByMemberIDsRow is one result row: a user and the
// aggregated set of organization IDs that user belongs to.
type GetOrganizationIDsByMemberIDsRow struct {
	UserID          uuid.UUID   `db:"user_id" json:"user_id"`
	OrganizationIDs []uuid.UUID `db:"organization_IDs" json:"organization_IDs"`
}

// GetOrganizationIDsByMemberIDs returns, for each of the given user IDs that
// has at least one membership, the list of organization IDs they belong to.
// Users with no memberships produce no row.
func (q *sqlQuerier) GetOrganizationIDsByMemberIDs(ctx context.Context, ids []uuid.UUID) ([]GetOrganizationIDsByMemberIDsRow, error) {
	rows, err := q.db.QueryContext(ctx, getOrganizationIDsByMemberIDs, pq.Array(ids))
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetOrganizationIDsByMemberIDsRow
	for rows.Next() {
		var i GetOrganizationIDsByMemberIDsRow
		// pq.Array is needed to scan the aggregated uuid[] column.
		if err := rows.Scan(&i.UserID, pq.Array(&i.OrganizationIDs)); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getOrganizationMemberByUserID = `-- name: GetOrganizationMemberByUserID :one
SELECT
user_id, organization_id, created_at, updated_at, roles
FROM
organization_members
WHERE
organization_id = $1
AND user_id = $2
LIMIT
1
`

// GetOrganizationMemberByUserIDParams holds the arguments for
// GetOrganizationMemberByUserID.
type GetOrganizationMemberByUserIDParams struct {
	OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"`
	UserID         uuid.UUID `db:"user_id" json:"user_id"`
}

// GetOrganizationMemberByUserID fetches a single organization membership
// by (organization, user). Returns sql.ErrNoRows if the user is not a member.
func (q *sqlQuerier) GetOrganizationMemberByUserID(ctx context.Context, arg GetOrganizationMemberByUserIDParams) (OrganizationMember, error) {
	row := q.db.QueryRowContext(ctx, getOrganizationMemberByUserID, arg.OrganizationID, arg.UserID)
	var i OrganizationMember
	err := row.Scan(
		&i.UserID,
		&i.OrganizationID,
		&i.CreatedAt,
		&i.UpdatedAt,
		pq.Array(&i.Roles),
	)
	return i, err
}
|
|
|
|
const getOrganizationMembershipsByUserID = `-- name: GetOrganizationMembershipsByUserID :many
SELECT
user_id, organization_id, created_at, updated_at, roles
FROM
organization_members
WHERE
user_id = $1
`

// GetOrganizationMembershipsByUserID returns every organization membership
// row for the given user.
func (q *sqlQuerier) GetOrganizationMembershipsByUserID(ctx context.Context, userID uuid.UUID) ([]OrganizationMember, error) {
	rows, err := q.db.QueryContext(ctx, getOrganizationMembershipsByUserID, userID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []OrganizationMember
	for rows.Next() {
		var i OrganizationMember
		if err := rows.Scan(
			&i.UserID,
			&i.OrganizationID,
			&i.CreatedAt,
			&i.UpdatedAt,
			pq.Array(&i.Roles),
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertOrganizationMember = `-- name: InsertOrganizationMember :one
INSERT INTO
organization_members (
organization_id,
user_id,
created_at,
updated_at,
roles
)
VALUES
($1, $2, $3, $4, $5) RETURNING user_id, organization_id, created_at, updated_at, roles
`

// InsertOrganizationMemberParams holds the arguments for
// InsertOrganizationMember.
type InsertOrganizationMemberParams struct {
	OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"`
	UserID         uuid.UUID `db:"user_id" json:"user_id"`
	CreatedAt      time.Time `db:"created_at" json:"created_at"`
	UpdatedAt      time.Time `db:"updated_at" json:"updated_at"`
	Roles          []string  `db:"roles" json:"roles"`
}

// InsertOrganizationMember adds a user to an organization with the given
// roles and returns the inserted row.
func (q *sqlQuerier) InsertOrganizationMember(ctx context.Context, arg InsertOrganizationMemberParams) (OrganizationMember, error) {
	row := q.db.QueryRowContext(ctx, insertOrganizationMember,
		arg.OrganizationID,
		arg.UserID,
		arg.CreatedAt,
		arg.UpdatedAt,
		pq.Array(arg.Roles),
	)
	var i OrganizationMember
	err := row.Scan(
		&i.UserID,
		&i.OrganizationID,
		&i.CreatedAt,
		&i.UpdatedAt,
		pq.Array(&i.Roles),
	)
	return i, err
}
|
|
|
|
const updateMemberRoles = `-- name: UpdateMemberRoles :one
UPDATE
organization_members
SET
-- Remove all duplicates from the roles.
roles = ARRAY(SELECT DISTINCT UNNEST($1 :: text[]))
WHERE
user_id = $2
AND organization_id = $3
RETURNING user_id, organization_id, created_at, updated_at, roles
`

// UpdateMemberRolesParams holds the arguments for UpdateMemberRoles.
type UpdateMemberRolesParams struct {
	GrantedRoles []string  `db:"granted_roles" json:"granted_roles"`
	UserID       uuid.UUID `db:"user_id" json:"user_id"`
	OrgID        uuid.UUID `db:"org_id" json:"org_id"`
}

// UpdateMemberRoles replaces a member's role set with GrantedRoles
// (deduplicated by the SQL above) and returns the updated row.
func (q *sqlQuerier) UpdateMemberRoles(ctx context.Context, arg UpdateMemberRolesParams) (OrganizationMember, error) {
	row := q.db.QueryRowContext(ctx, updateMemberRoles, pq.Array(arg.GrantedRoles), arg.UserID, arg.OrgID)
	var i OrganizationMember
	err := row.Scan(
		&i.UserID,
		&i.OrganizationID,
		&i.CreatedAt,
		&i.UpdatedAt,
		pq.Array(&i.Roles),
	)
	return i, err
}
|
|
|
|
const getOrganizationByID = `-- name: GetOrganizationByID :one
SELECT
id, name, description, created_at, updated_at
FROM
organizations
WHERE
id = $1
`

// GetOrganizationByID fetches a single organization by its ID.
func (q *sqlQuerier) GetOrganizationByID(ctx context.Context, id uuid.UUID) (Organization, error) {
	row := q.db.QueryRowContext(ctx, getOrganizationByID, id)
	var i Organization
	err := row.Scan(
		&i.ID,
		&i.Name,
		&i.Description,
		&i.CreatedAt,
		&i.UpdatedAt,
	)
	return i, err
}
|
|
|
|
const getOrganizationByName = `-- name: GetOrganizationByName :one
SELECT
id, name, description, created_at, updated_at
FROM
organizations
WHERE
LOWER("name") = LOWER($1)
LIMIT
1
`

// GetOrganizationByName fetches a single organization by name. The lookup
// is case-insensitive (both sides are lowercased in the SQL above).
func (q *sqlQuerier) GetOrganizationByName(ctx context.Context, name string) (Organization, error) {
	row := q.db.QueryRowContext(ctx, getOrganizationByName, name)
	var i Organization
	err := row.Scan(
		&i.ID,
		&i.Name,
		&i.Description,
		&i.CreatedAt,
		&i.UpdatedAt,
	)
	return i, err
}
|
|
|
|
const getOrganizations = `-- name: GetOrganizations :many
SELECT
id, name, description, created_at, updated_at
FROM
organizations
`

// GetOrganizations returns all organizations (no ordering is specified).
func (q *sqlQuerier) GetOrganizations(ctx context.Context) ([]Organization, error) {
	rows, err := q.db.QueryContext(ctx, getOrganizations)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []Organization
	for rows.Next() {
		var i Organization
		if err := rows.Scan(
			&i.ID,
			&i.Name,
			&i.Description,
			&i.CreatedAt,
			&i.UpdatedAt,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getOrganizationsByUserID = `-- name: GetOrganizationsByUserID :many
SELECT
id, name, description, created_at, updated_at
FROM
organizations
WHERE
id = (
SELECT
organization_id
FROM
organization_members
WHERE
user_id = $1
)
`

// GetOrganizationsByUserID returns the organizations the user is a member of.
//
// NOTE(review): the SQL uses `id = (subquery)` with a scalar subquery. In
// Postgres this errors ("more than one row returned by a subquery") if the
// user belongs to multiple organizations — presumably `= ANY(...)` was
// intended. Confirm against the source .sql query file before relying on
// multi-org behavior.
func (q *sqlQuerier) GetOrganizationsByUserID(ctx context.Context, userID uuid.UUID) ([]Organization, error) {
	rows, err := q.db.QueryContext(ctx, getOrganizationsByUserID, userID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []Organization
	for rows.Next() {
		var i Organization
		if err := rows.Scan(
			&i.ID,
			&i.Name,
			&i.Description,
			&i.CreatedAt,
			&i.UpdatedAt,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertOrganization = `-- name: InsertOrganization :one
INSERT INTO
organizations (id, "name", description, created_at, updated_at)
VALUES
($1, $2, $3, $4, $5) RETURNING id, name, description, created_at, updated_at
`

// InsertOrganizationParams holds the arguments for InsertOrganization.
type InsertOrganizationParams struct {
	ID          uuid.UUID `db:"id" json:"id"`
	Name        string    `db:"name" json:"name"`
	Description string    `db:"description" json:"description"`
	CreatedAt   time.Time `db:"created_at" json:"created_at"`
	UpdatedAt   time.Time `db:"updated_at" json:"updated_at"`
}

// InsertOrganization creates a new organization and returns the inserted row.
func (q *sqlQuerier) InsertOrganization(ctx context.Context, arg InsertOrganizationParams) (Organization, error) {
	row := q.db.QueryRowContext(ctx, insertOrganization,
		arg.ID,
		arg.Name,
		arg.Description,
		arg.CreatedAt,
		arg.UpdatedAt,
	)
	var i Organization
	err := row.Scan(
		&i.ID,
		&i.Name,
		&i.Description,
		&i.CreatedAt,
		&i.UpdatedAt,
	)
	return i, err
}
|
|
|
|
const getParameterSchemasByJobID = `-- name: GetParameterSchemasByJobID :many
SELECT
id, created_at, job_id, name, description, default_source_scheme, default_source_value, allow_override_source, default_destination_scheme, allow_override_destination, default_refresh, redisplay_value, validation_error, validation_condition, validation_type_system, validation_value_type, index
FROM
parameter_schemas
WHERE
job_id = $1
ORDER BY
index
`

// GetParameterSchemasByJobID returns the parameter schemas of a provisioner
// job, ordered by their index column.
func (q *sqlQuerier) GetParameterSchemasByJobID(ctx context.Context, jobID uuid.UUID) ([]ParameterSchema, error) {
	rows, err := q.db.QueryContext(ctx, getParameterSchemasByJobID, jobID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []ParameterSchema
	for rows.Next() {
		var i ParameterSchema
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.JobID,
			&i.Name,
			&i.Description,
			&i.DefaultSourceScheme,
			&i.DefaultSourceValue,
			&i.AllowOverrideSource,
			&i.DefaultDestinationScheme,
			&i.AllowOverrideDestination,
			&i.DefaultRefresh,
			&i.RedisplayValue,
			&i.ValidationError,
			&i.ValidationCondition,
			&i.ValidationTypeSystem,
			&i.ValidationValueType,
			&i.Index,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getProvisionerDaemons = `-- name: GetProvisionerDaemons :many
SELECT
id, created_at, updated_at, name, provisioners, replica_id, tags
FROM
provisioner_daemons
`

// GetProvisionerDaemons returns all registered provisioner daemons.
func (q *sqlQuerier) GetProvisionerDaemons(ctx context.Context) ([]ProvisionerDaemon, error) {
	rows, err := q.db.QueryContext(ctx, getProvisionerDaemons)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []ProvisionerDaemon
	for rows.Next() {
		var i ProvisionerDaemon
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.Name,
			// provisioners is a Postgres array column.
			pq.Array(&i.Provisioners),
			&i.ReplicaID,
			&i.Tags,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertProvisionerDaemon = `-- name: InsertProvisionerDaemon :one
INSERT INTO
provisioner_daemons (
id,
created_at,
"name",
provisioners,
tags
)
VALUES
($1, $2, $3, $4, $5) RETURNING id, created_at, updated_at, name, provisioners, replica_id, tags
`

// InsertProvisionerDaemonParams holds the arguments for
// InsertProvisionerDaemon.
type InsertProvisionerDaemonParams struct {
	ID           uuid.UUID         `db:"id" json:"id"`
	CreatedAt    time.Time         `db:"created_at" json:"created_at"`
	Name         string            `db:"name" json:"name"`
	Provisioners []ProvisionerType `db:"provisioners" json:"provisioners"`
	Tags         dbtype.StringMap  `db:"tags" json:"tags"`
}

// InsertProvisionerDaemon registers a new provisioner daemon and returns the
// inserted row (updated_at and replica_id come back from the database).
func (q *sqlQuerier) InsertProvisionerDaemon(ctx context.Context, arg InsertProvisionerDaemonParams) (ProvisionerDaemon, error) {
	row := q.db.QueryRowContext(ctx, insertProvisionerDaemon,
		arg.ID,
		arg.CreatedAt,
		arg.Name,
		pq.Array(arg.Provisioners),
		arg.Tags,
	)
	var i ProvisionerDaemon
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Name,
		pq.Array(&i.Provisioners),
		&i.ReplicaID,
		&i.Tags,
	)
	return i, err
}
|
|
|
|
const getProvisionerLogsAfterID = `-- name: GetProvisionerLogsAfterID :many
SELECT
job_id, created_at, source, level, stage, output, id
FROM
provisioner_job_logs
WHERE
job_id = $1
AND (
id > $2
) ORDER BY id ASC
`

// GetProvisionerLogsAfterIDParams holds the arguments for
// GetProvisionerLogsAfterID.
//
// NOTE: despite its name, CreatedAfter is compared against the log's id
// column (see the SQL above: `id > $2`), not created_at.
type GetProvisionerLogsAfterIDParams struct {
	JobID        uuid.UUID `db:"job_id" json:"job_id"`
	CreatedAfter int64     `db:"created_after" json:"created_after"`
}

// GetProvisionerLogsAfterID returns a job's logs with id strictly greater
// than CreatedAfter, ordered by id ascending.
func (q *sqlQuerier) GetProvisionerLogsAfterID(ctx context.Context, arg GetProvisionerLogsAfterIDParams) ([]ProvisionerJobLog, error) {
	rows, err := q.db.QueryContext(ctx, getProvisionerLogsAfterID, arg.JobID, arg.CreatedAfter)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []ProvisionerJobLog
	for rows.Next() {
		var i ProvisionerJobLog
		if err := rows.Scan(
			&i.JobID,
			&i.CreatedAt,
			&i.Source,
			&i.Level,
			&i.Stage,
			&i.Output,
			&i.ID,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertProvisionerJobLogs = `-- name: InsertProvisionerJobLogs :many
INSERT INTO
provisioner_job_logs
SELECT
$1 :: uuid AS job_id,
unnest($2 :: timestamptz [ ]) AS created_at,
unnest($3 :: log_source [ ]) AS source,
unnest($4 :: log_level [ ]) AS LEVEL,
unnest($5 :: VARCHAR(128) [ ]) AS stage,
unnest($6 :: VARCHAR(1024) [ ]) AS output RETURNING job_id, created_at, source, level, stage, output, id
`

// InsertProvisionerJobLogsParams holds the arguments for
// InsertProvisionerJobLogs. The slice fields are unnested in parallel by the
// SQL above, so they are expected to have equal lengths — one entry per log
// line.
type InsertProvisionerJobLogsParams struct {
	JobID     uuid.UUID   `db:"job_id" json:"job_id"`
	CreatedAt []time.Time `db:"created_at" json:"created_at"`
	Source    []LogSource `db:"source" json:"source"`
	Level     []LogLevel  `db:"level" json:"level"`
	Stage     []string    `db:"stage" json:"stage"`
	Output    []string    `db:"output" json:"output"`
}

// InsertProvisionerJobLogs bulk-inserts log lines for a single job and
// returns the inserted rows (including database-assigned ids).
func (q *sqlQuerier) InsertProvisionerJobLogs(ctx context.Context, arg InsertProvisionerJobLogsParams) ([]ProvisionerJobLog, error) {
	rows, err := q.db.QueryContext(ctx, insertProvisionerJobLogs,
		arg.JobID,
		pq.Array(arg.CreatedAt),
		pq.Array(arg.Source),
		pq.Array(arg.Level),
		pq.Array(arg.Stage),
		pq.Array(arg.Output),
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []ProvisionerJobLog
	for rows.Next() {
		var i ProvisionerJobLog
		if err := rows.Scan(
			&i.JobID,
			&i.CreatedAt,
			&i.Source,
			&i.Level,
			&i.Stage,
			&i.Output,
			&i.ID,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const acquireProvisionerJob = `-- name: AcquireProvisionerJob :one
UPDATE
provisioner_jobs
SET
started_at = $1,
updated_at = $1,
worker_id = $2
WHERE
id = (
SELECT
id
FROM
provisioner_jobs AS nested
WHERE
nested.started_at IS NULL
-- Ensure the caller has the correct provisioner.
AND nested.provisioner = ANY($3 :: provisioner_type [ ])
-- Ensure the caller satisfies all job tags.
AND nested.tags <@ $4 :: jsonb
ORDER BY
nested.created_at
FOR UPDATE
SKIP LOCKED
LIMIT
1
) RETURNING id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata
`

// AcquireProvisionerJobParams holds the arguments for AcquireProvisionerJob.
// StartedAt is used for both started_at and updated_at in the SQL above.
type AcquireProvisionerJobParams struct {
	StartedAt sql.NullTime    `db:"started_at" json:"started_at"`
	WorkerID  uuid.NullUUID   `db:"worker_id" json:"worker_id"`
	Types     []ProvisionerType `db:"types" json:"types"`
	Tags      json.RawMessage `db:"tags" json:"tags"`
}

// Acquires the lock for a single job that isn't started, completed,
// canceled, and that matches an array of provisioner types.
//
// SKIP LOCKED is used to jump over locked rows. This prevents
// multiple provisioners from acquiring the same jobs. See:
// https://www.postgresql.org/docs/9.5/sql-select.html#SQL-FOR-UPDATE-SHARE
func (q *sqlQuerier) AcquireProvisionerJob(ctx context.Context, arg AcquireProvisionerJobParams) (ProvisionerJob, error) {
	row := q.db.QueryRowContext(ctx, acquireProvisionerJob,
		arg.StartedAt,
		arg.WorkerID,
		pq.Array(arg.Types),
		arg.Tags,
	)
	var i ProvisionerJob
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.StartedAt,
		&i.CanceledAt,
		&i.CompletedAt,
		&i.Error,
		&i.OrganizationID,
		&i.InitiatorID,
		&i.Provisioner,
		&i.StorageMethod,
		&i.Type,
		&i.Input,
		&i.WorkerID,
		&i.FileID,
		&i.Tags,
		&i.ErrorCode,
		&i.TraceMetadata,
	)
	return i, err
}
|
|
|
|
const getProvisionerJobByID = `-- name: GetProvisionerJobByID :one
SELECT
id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata
FROM
provisioner_jobs
WHERE
id = $1
`

// GetProvisionerJobByID fetches a single provisioner job by its ID.
func (q *sqlQuerier) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (ProvisionerJob, error) {
	row := q.db.QueryRowContext(ctx, getProvisionerJobByID, id)
	var i ProvisionerJob
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.StartedAt,
		&i.CanceledAt,
		&i.CompletedAt,
		&i.Error,
		&i.OrganizationID,
		&i.InitiatorID,
		&i.Provisioner,
		&i.StorageMethod,
		&i.Type,
		&i.Input,
		&i.WorkerID,
		&i.FileID,
		&i.Tags,
		&i.ErrorCode,
		&i.TraceMetadata,
	)
	return i, err
}
|
|
|
|
const getProvisionerJobsByIDs = `-- name: GetProvisionerJobsByIDs :many
SELECT
id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata
FROM
provisioner_jobs
WHERE
id = ANY($1 :: uuid [ ])
`

// GetProvisionerJobsByIDs returns the provisioner jobs matching any of the
// given IDs. Missing IDs simply produce no row.
func (q *sqlQuerier) GetProvisionerJobsByIDs(ctx context.Context, ids []uuid.UUID) ([]ProvisionerJob, error) {
	rows, err := q.db.QueryContext(ctx, getProvisionerJobsByIDs, pq.Array(ids))
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []ProvisionerJob
	for rows.Next() {
		var i ProvisionerJob
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.StartedAt,
			&i.CanceledAt,
			&i.CompletedAt,
			&i.Error,
			&i.OrganizationID,
			&i.InitiatorID,
			&i.Provisioner,
			&i.StorageMethod,
			&i.Type,
			&i.Input,
			&i.WorkerID,
			&i.FileID,
			&i.Tags,
			&i.ErrorCode,
			&i.TraceMetadata,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getProvisionerJobsCreatedAfter = `-- name: GetProvisionerJobsCreatedAfter :many
SELECT id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata FROM provisioner_jobs WHERE created_at > $1
`

// GetProvisionerJobsCreatedAfter returns all provisioner jobs created
// strictly after the given time.
func (q *sqlQuerier) GetProvisionerJobsCreatedAfter(ctx context.Context, createdAt time.Time) ([]ProvisionerJob, error) {
	rows, err := q.db.QueryContext(ctx, getProvisionerJobsCreatedAfter, createdAt)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []ProvisionerJob
	for rows.Next() {
		var i ProvisionerJob
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.StartedAt,
			&i.CanceledAt,
			&i.CompletedAt,
			&i.Error,
			&i.OrganizationID,
			&i.InitiatorID,
			&i.Provisioner,
			&i.StorageMethod,
			&i.Type,
			&i.Input,
			&i.WorkerID,
			&i.FileID,
			&i.Tags,
			&i.ErrorCode,
			&i.TraceMetadata,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertProvisionerJob = `-- name: InsertProvisionerJob :one
INSERT INTO
provisioner_jobs (
id,
created_at,
updated_at,
organization_id,
initiator_id,
provisioner,
storage_method,
file_id,
"type",
"input",
tags,
trace_metadata
)
VALUES
($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) RETURNING id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata
`

// InsertProvisionerJobParams holds the arguments for InsertProvisionerJob.
type InsertProvisionerJobParams struct {
	ID             uuid.UUID                `db:"id" json:"id"`
	CreatedAt      time.Time                `db:"created_at" json:"created_at"`
	UpdatedAt      time.Time                `db:"updated_at" json:"updated_at"`
	OrganizationID uuid.UUID                `db:"organization_id" json:"organization_id"`
	InitiatorID    uuid.UUID                `db:"initiator_id" json:"initiator_id"`
	Provisioner    ProvisionerType          `db:"provisioner" json:"provisioner"`
	StorageMethod  ProvisionerStorageMethod `db:"storage_method" json:"storage_method"`
	FileID         uuid.UUID                `db:"file_id" json:"file_id"`
	Type           ProvisionerJobType       `db:"type" json:"type"`
	Input          json.RawMessage          `db:"input" json:"input"`
	Tags           dbtype.StringMap         `db:"tags" json:"tags"`
	TraceMetadata  pqtype.NullRawMessage    `db:"trace_metadata" json:"trace_metadata"`
}

// InsertProvisionerJob creates a new provisioner job and returns the full
// inserted row (started_at, worker_id, etc. are left NULL until a daemon
// acquires the job).
func (q *sqlQuerier) InsertProvisionerJob(ctx context.Context, arg InsertProvisionerJobParams) (ProvisionerJob, error) {
	row := q.db.QueryRowContext(ctx, insertProvisionerJob,
		arg.ID,
		arg.CreatedAt,
		arg.UpdatedAt,
		arg.OrganizationID,
		arg.InitiatorID,
		arg.Provisioner,
		arg.StorageMethod,
		arg.FileID,
		arg.Type,
		arg.Input,
		arg.Tags,
		arg.TraceMetadata,
	)
	var i ProvisionerJob
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.StartedAt,
		&i.CanceledAt,
		&i.CompletedAt,
		&i.Error,
		&i.OrganizationID,
		&i.InitiatorID,
		&i.Provisioner,
		&i.StorageMethod,
		&i.Type,
		&i.Input,
		&i.WorkerID,
		&i.FileID,
		&i.Tags,
		&i.ErrorCode,
		&i.TraceMetadata,
	)
	return i, err
}
|
|
|
|
const updateProvisionerJobByID = `-- name: UpdateProvisionerJobByID :exec
UPDATE
	provisioner_jobs
SET
	updated_at = $2
WHERE
	id = $1
`

// UpdateProvisionerJobByIDParams carries the arguments for
// UpdateProvisionerJobByID.
type UpdateProvisionerJobByIDParams struct {
	ID        uuid.UUID `db:"id" json:"id"`
	UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
}

// UpdateProvisionerJobByID bumps only the updated_at timestamp of a job
// (used as a heartbeat/keepalive). It does not report whether a row matched.
func (q *sqlQuerier) UpdateProvisionerJobByID(ctx context.Context, arg UpdateProvisionerJobByIDParams) error {
	_, err := q.db.ExecContext(ctx, updateProvisionerJobByID, arg.ID, arg.UpdatedAt)
	return err
}

const updateProvisionerJobWithCancelByID = `-- name: UpdateProvisionerJobWithCancelByID :exec
UPDATE
	provisioner_jobs
SET
	canceled_at = $2,
	completed_at = $3
WHERE
	id = $1
`

// UpdateProvisionerJobWithCancelByIDParams carries the arguments for
// UpdateProvisionerJobWithCancelByID. Both timestamps are nullable so a
// cancellation can be recorded without marking the job complete.
type UpdateProvisionerJobWithCancelByIDParams struct {
	ID          uuid.UUID    `db:"id" json:"id"`
	CanceledAt  sql.NullTime `db:"canceled_at" json:"canceled_at"`
	CompletedAt sql.NullTime `db:"completed_at" json:"completed_at"`
}

// UpdateProvisionerJobWithCancelByID records cancellation (and optionally
// completion) timestamps on a provisioner job.
func (q *sqlQuerier) UpdateProvisionerJobWithCancelByID(ctx context.Context, arg UpdateProvisionerJobWithCancelByIDParams) error {
	_, err := q.db.ExecContext(ctx, updateProvisionerJobWithCancelByID, arg.ID, arg.CanceledAt, arg.CompletedAt)
	return err
}

const updateProvisionerJobWithCompleteByID = `-- name: UpdateProvisionerJobWithCompleteByID :exec
UPDATE
	provisioner_jobs
SET
	updated_at = $2,
	completed_at = $3,
	error = $4,
	error_code = $5
WHERE
	id = $1
`

// UpdateProvisionerJobWithCompleteByIDParams carries the arguments for
// UpdateProvisionerJobWithCompleteByID. A NULL/empty Error indicates the
// job finished successfully.
type UpdateProvisionerJobWithCompleteByIDParams struct {
	ID          uuid.UUID      `db:"id" json:"id"`
	UpdatedAt   time.Time      `db:"updated_at" json:"updated_at"`
	CompletedAt sql.NullTime   `db:"completed_at" json:"completed_at"`
	Error       sql.NullString `db:"error" json:"error"`
	ErrorCode   sql.NullString `db:"error_code" json:"error_code"`
}

// UpdateProvisionerJobWithCompleteByID marks a provisioner job as finished,
// recording its completion time and any terminal error/error code.
func (q *sqlQuerier) UpdateProvisionerJobWithCompleteByID(ctx context.Context, arg UpdateProvisionerJobWithCompleteByIDParams) error {
	_, err := q.db.ExecContext(ctx, updateProvisionerJobWithCompleteByID,
		arg.ID,
		arg.UpdatedAt,
		arg.CompletedAt,
		arg.Error,
		arg.ErrorCode,
	)
	return err
}
|
|
|
|
const getWorkspaceProxies = `-- name: GetWorkspaceProxies :many
SELECT
	id, name, display_name, icon, url, wildcard_hostname, created_at, updated_at, deleted, token_hashed_secret
FROM
	workspace_proxies
WHERE
	deleted = false
`

// GetWorkspaceProxies returns all workspace proxies that have not been
// soft-deleted (deleted = false).
func (q *sqlQuerier) GetWorkspaceProxies(ctx context.Context) ([]WorkspaceProxy, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaceProxies)
	if err != nil {
		return nil, err
	}
	// Deferred Close covers error paths; the explicit Close below surfaces
	// any close error (sqlc's :many pattern).
	defer rows.Close()
	var items []WorkspaceProxy
	for rows.Next() {
		var i WorkspaceProxy
		if err := rows.Scan(
			&i.ID,
			&i.Name,
			&i.DisplayName,
			&i.Icon,
			&i.Url,
			&i.WildcardHostname,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.Deleted,
			&i.TokenHashedSecret,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getWorkspaceProxyByHostname = `-- name: GetWorkspaceProxyByHostname :one
SELECT
	id, name, display_name, icon, url, wildcard_hostname, created_at, updated_at, deleted, token_hashed_secret
FROM
	workspace_proxies
WHERE
	-- Validate that the @hostname has been sanitized and is not empty. This
	-- doesn't prevent SQL injection (already prevented by using prepared
	-- queries), but it does prevent carefully crafted hostnames from matching
	-- when they shouldn't.
	--
	-- Periods don't need to be escaped because they're not special characters
	-- in SQL matches unlike regular expressions.
	$1 :: text SIMILAR TO '[a-zA-Z0-9._-]+' AND
	deleted = false AND

	-- Validate that the hostname matches either the wildcard hostname or the
	-- access URL (ignoring scheme, port and path).
	(
		(
			$2 :: bool = true AND
			url SIMILAR TO '[^:]*://' || $1 :: text || '([:/]?%)*'
		) OR
		(
			$3 :: bool = true AND
			$1 :: text LIKE replace(wildcard_hostname, '*', '%')
		)
	)
LIMIT
	1
`

// GetWorkspaceProxyByHostnameParams carries the arguments for
// GetWorkspaceProxyByHostname. The two booleans toggle which matching
// strategies ($2/$3 in the query) are attempted.
type GetWorkspaceProxyByHostnameParams struct {
	Hostname              string `db:"hostname" json:"hostname"`
	AllowAccessUrl        bool   `db:"allow_access_url" json:"allow_access_url"`
	AllowWildcardHostname bool   `db:"allow_wildcard_hostname" json:"allow_wildcard_hostname"`
}

// Finds a workspace proxy that has an access URL or app hostname that matches
// the provided hostname. This is to check if a hostname matches any workspace
// proxy.
//
// The hostname must be sanitized to only contain [a-zA-Z0-9.-] before calling
// this query. The scheme, port and path should be stripped.
func (q *sqlQuerier) GetWorkspaceProxyByHostname(ctx context.Context, arg GetWorkspaceProxyByHostnameParams) (WorkspaceProxy, error) {
	row := q.db.QueryRowContext(ctx, getWorkspaceProxyByHostname, arg.Hostname, arg.AllowAccessUrl, arg.AllowWildcardHostname)
	var i WorkspaceProxy
	err := row.Scan(
		&i.ID,
		&i.Name,
		&i.DisplayName,
		&i.Icon,
		&i.Url,
		&i.WildcardHostname,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Deleted,
		&i.TokenHashedSecret,
	)
	return i, err
}
|
|
|
|
const getWorkspaceProxyByID = `-- name: GetWorkspaceProxyByID :one
SELECT
	id, name, display_name, icon, url, wildcard_hostname, created_at, updated_at, deleted, token_hashed_secret
FROM
	workspace_proxies
WHERE
	id = $1
LIMIT
	1
`

// GetWorkspaceProxyByID fetches a single workspace proxy by primary key.
// Note: unlike GetWorkspaceProxyByName, this does NOT filter on deleted,
// so soft-deleted proxies are returned too.
func (q *sqlQuerier) GetWorkspaceProxyByID(ctx context.Context, id uuid.UUID) (WorkspaceProxy, error) {
	row := q.db.QueryRowContext(ctx, getWorkspaceProxyByID, id)
	var i WorkspaceProxy
	err := row.Scan(
		&i.ID,
		&i.Name,
		&i.DisplayName,
		&i.Icon,
		&i.Url,
		&i.WildcardHostname,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Deleted,
		&i.TokenHashedSecret,
	)
	return i, err
}

const getWorkspaceProxyByName = `-- name: GetWorkspaceProxyByName :one
SELECT
	id, name, display_name, icon, url, wildcard_hostname, created_at, updated_at, deleted, token_hashed_secret
FROM
	workspace_proxies
WHERE
	name = $1
	AND deleted = false
LIMIT
	1
`

// GetWorkspaceProxyByName fetches a single non-deleted workspace proxy by
// its exact (case-sensitive) name.
func (q *sqlQuerier) GetWorkspaceProxyByName(ctx context.Context, name string) (WorkspaceProxy, error) {
	row := q.db.QueryRowContext(ctx, getWorkspaceProxyByName, name)
	var i WorkspaceProxy
	err := row.Scan(
		&i.ID,
		&i.Name,
		&i.DisplayName,
		&i.Icon,
		&i.Url,
		&i.WildcardHostname,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Deleted,
		&i.TokenHashedSecret,
	)
	return i, err
}
|
|
|
|
const insertWorkspaceProxy = `-- name: InsertWorkspaceProxy :one
INSERT INTO
	workspace_proxies (
		id,
		url,
		wildcard_hostname,
		name,
		display_name,
		icon,
		token_hashed_secret,
		created_at,
		updated_at,
		deleted
	)
VALUES
	($1, '', '', $2, $3, $4, $5, $6, $7, false) RETURNING id, name, display_name, icon, url, wildcard_hostname, created_at, updated_at, deleted, token_hashed_secret
`

// InsertWorkspaceProxyParams carries the arguments for InsertWorkspaceProxy.
// url and wildcard_hostname are intentionally inserted empty — they are
// filled in later by RegisterWorkspaceProxy when the proxy comes online.
type InsertWorkspaceProxyParams struct {
	ID                uuid.UUID `db:"id" json:"id"`
	Name              string    `db:"name" json:"name"`
	DisplayName       string    `db:"display_name" json:"display_name"`
	Icon              string    `db:"icon" json:"icon"`
	TokenHashedSecret []byte    `db:"token_hashed_secret" json:"token_hashed_secret"`
	CreatedAt         time.Time `db:"created_at" json:"created_at"`
	UpdatedAt         time.Time `db:"updated_at" json:"updated_at"`
}

// InsertWorkspaceProxy creates a workspace proxy record (not yet registered,
// so with empty URL/wildcard hostname) and returns the inserted row.
func (q *sqlQuerier) InsertWorkspaceProxy(ctx context.Context, arg InsertWorkspaceProxyParams) (WorkspaceProxy, error) {
	row := q.db.QueryRowContext(ctx, insertWorkspaceProxy,
		arg.ID,
		arg.Name,
		arg.DisplayName,
		arg.Icon,
		arg.TokenHashedSecret,
		arg.CreatedAt,
		arg.UpdatedAt,
	)
	var i WorkspaceProxy
	err := row.Scan(
		&i.ID,
		&i.Name,
		&i.DisplayName,
		&i.Icon,
		&i.Url,
		&i.WildcardHostname,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Deleted,
		&i.TokenHashedSecret,
	)
	return i, err
}
|
|
|
|
const registerWorkspaceProxy = `-- name: RegisterWorkspaceProxy :one
UPDATE
	workspace_proxies
SET
	url = $1,
	wildcard_hostname = $2,
	updated_at = Now()
WHERE
	id = $3
RETURNING id, name, display_name, icon, url, wildcard_hostname, created_at, updated_at, deleted, token_hashed_secret
`

// RegisterWorkspaceProxyParams carries the arguments for
// RegisterWorkspaceProxy.
type RegisterWorkspaceProxyParams struct {
	Url              string    `db:"url" json:"url"`
	WildcardHostname string    `db:"wildcard_hostname" json:"wildcard_hostname"`
	ID               uuid.UUID `db:"id" json:"id"`
}

// RegisterWorkspaceProxy records the proxy's reachable URL and wildcard
// hostname, stamping updated_at with the database clock (Now()), and
// returns the updated row.
func (q *sqlQuerier) RegisterWorkspaceProxy(ctx context.Context, arg RegisterWorkspaceProxyParams) (WorkspaceProxy, error) {
	row := q.db.QueryRowContext(ctx, registerWorkspaceProxy, arg.Url, arg.WildcardHostname, arg.ID)
	var i WorkspaceProxy
	err := row.Scan(
		&i.ID,
		&i.Name,
		&i.DisplayName,
		&i.Icon,
		&i.Url,
		&i.WildcardHostname,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Deleted,
		&i.TokenHashedSecret,
	)
	return i, err
}
|
|
|
|
const updateWorkspaceProxy = `-- name: UpdateWorkspaceProxy :one
UPDATE
	workspace_proxies
SET
	-- These values should always be provided.
	name = $1,
	display_name = $2,
	icon = $3,
	-- Only update the token if a new one is provided.
	-- So this is an optional field.
	token_hashed_secret = CASE
		WHEN length($4 :: bytea) > 0 THEN $4 :: bytea
		ELSE workspace_proxies.token_hashed_secret
	END,
	-- Always update this timestamp.
	updated_at = Now()
WHERE
	id = $5
RETURNING id, name, display_name, icon, url, wildcard_hostname, created_at, updated_at, deleted, token_hashed_secret
`

// UpdateWorkspaceProxyParams carries the arguments for UpdateWorkspaceProxy.
// An empty TokenHashedSecret means "keep the existing secret" (see the CASE
// expression in the query).
type UpdateWorkspaceProxyParams struct {
	Name              string    `db:"name" json:"name"`
	DisplayName       string    `db:"display_name" json:"display_name"`
	Icon              string    `db:"icon" json:"icon"`
	TokenHashedSecret []byte    `db:"token_hashed_secret" json:"token_hashed_secret"`
	ID                uuid.UUID `db:"id" json:"id"`
}

// This allows editing the properties of a workspace proxy.
func (q *sqlQuerier) UpdateWorkspaceProxy(ctx context.Context, arg UpdateWorkspaceProxyParams) (WorkspaceProxy, error) {
	row := q.db.QueryRowContext(ctx, updateWorkspaceProxy,
		arg.Name,
		arg.DisplayName,
		arg.Icon,
		arg.TokenHashedSecret,
		arg.ID,
	)
	var i WorkspaceProxy
	err := row.Scan(
		&i.ID,
		&i.Name,
		&i.DisplayName,
		&i.Icon,
		&i.Url,
		&i.WildcardHostname,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Deleted,
		&i.TokenHashedSecret,
	)
	return i, err
}
|
|
|
|
const updateWorkspaceProxyDeleted = `-- name: UpdateWorkspaceProxyDeleted :exec
UPDATE
	workspace_proxies
SET
	updated_at = Now(),
	deleted = $1
WHERE
	id = $2
`

// UpdateWorkspaceProxyDeletedParams carries the arguments for
// UpdateWorkspaceProxyDeleted.
type UpdateWorkspaceProxyDeletedParams struct {
	Deleted bool      `db:"deleted" json:"deleted"`
	ID      uuid.UUID `db:"id" json:"id"`
}

// UpdateWorkspaceProxyDeleted sets the soft-delete flag on a workspace
// proxy (the flag can be set either way, so un-deletion is possible).
func (q *sqlQuerier) UpdateWorkspaceProxyDeleted(ctx context.Context, arg UpdateWorkspaceProxyDeletedParams) error {
	_, err := q.db.ExecContext(ctx, updateWorkspaceProxyDeleted, arg.Deleted, arg.ID)
	return err
}
|
|
|
|
const getQuotaAllowanceForUser = `-- name: GetQuotaAllowanceForUser :one
SELECT
	coalesce(SUM(quota_allowance), 0)::BIGINT
FROM
	group_members gm
JOIN groups g ON
	g.id = gm.group_id
WHERE
	user_id = $1
`

// GetQuotaAllowanceForUser sums the quota_allowance of every group the user
// belongs to. Returns 0 (not an error) when the user has no group
// memberships, thanks to the coalesce.
func (q *sqlQuerier) GetQuotaAllowanceForUser(ctx context.Context, userID uuid.UUID) (int64, error) {
	row := q.db.QueryRowContext(ctx, getQuotaAllowanceForUser, userID)
	var column_1 int64
	err := row.Scan(&column_1)
	return column_1, err
}

const getQuotaConsumedForUser = `-- name: GetQuotaConsumedForUser :one
WITH latest_builds AS (
SELECT
	DISTINCT ON
	(workspace_id) id,
	workspace_id,
	daily_cost
FROM
	workspace_builds wb
ORDER BY
	workspace_id,
	created_at DESC
)
SELECT
	coalesce(SUM(daily_cost), 0)::BIGINT
FROM
	workspaces
JOIN latest_builds ON
	latest_builds.workspace_id = workspaces.id
WHERE NOT deleted AND workspaces.owner_id = $1
`

// GetQuotaConsumedForUser sums the daily_cost of the most recent build of
// each non-deleted workspace owned by the user (the DISTINCT ON CTE keeps
// only the latest build per workspace). Returns 0 when the user owns no
// workspaces.
func (q *sqlQuerier) GetQuotaConsumedForUser(ctx context.Context, ownerID uuid.UUID) (int64, error) {
	row := q.db.QueryRowContext(ctx, getQuotaConsumedForUser, ownerID)
	var column_1 int64
	err := row.Scan(&column_1)
	return column_1, err
}
|
|
|
|
const deleteReplicasUpdatedBefore = `-- name: DeleteReplicasUpdatedBefore :exec
DELETE FROM replicas WHERE updated_at < $1
`

// DeleteReplicasUpdatedBefore prunes replica rows whose heartbeat
// (updated_at) is strictly older than the given cutoff.
func (q *sqlQuerier) DeleteReplicasUpdatedBefore(ctx context.Context, updatedAt time.Time) error {
	_, err := q.db.ExecContext(ctx, deleteReplicasUpdatedBefore, updatedAt)
	return err
}

const getReplicasUpdatedAfter = `-- name: GetReplicasUpdatedAfter :many
SELECT id, created_at, started_at, stopped_at, updated_at, hostname, region_id, relay_address, database_latency, version, error FROM replicas WHERE updated_at > $1 AND stopped_at IS NULL
`

// GetReplicasUpdatedAfter returns replicas that are still running
// (stopped_at IS NULL) and have heartbeated strictly after the given time.
func (q *sqlQuerier) GetReplicasUpdatedAfter(ctx context.Context, updatedAt time.Time) ([]Replica, error) {
	rows, err := q.db.QueryContext(ctx, getReplicasUpdatedAfter, updatedAt)
	if err != nil {
		return nil, err
	}
	// Deferred Close covers error paths; the explicit Close below surfaces
	// any close error (sqlc's :many pattern).
	defer rows.Close()
	var items []Replica
	for rows.Next() {
		var i Replica
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.StartedAt,
			&i.StoppedAt,
			&i.UpdatedAt,
			&i.Hostname,
			&i.RegionID,
			&i.RelayAddress,
			&i.DatabaseLatency,
			&i.Version,
			&i.Error,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertReplica = `-- name: InsertReplica :one
INSERT INTO replicas (
	id,
	created_at,
	started_at,
	updated_at,
	hostname,
	region_id,
	relay_address,
	version,
	database_latency
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING id, created_at, started_at, stopped_at, updated_at, hostname, region_id, relay_address, database_latency, version, error
`

// InsertReplicaParams carries the column values for InsertReplica.
// stopped_at and error are not set on insert.
type InsertReplicaParams struct {
	ID              uuid.UUID `db:"id" json:"id"`
	CreatedAt       time.Time `db:"created_at" json:"created_at"`
	StartedAt       time.Time `db:"started_at" json:"started_at"`
	UpdatedAt       time.Time `db:"updated_at" json:"updated_at"`
	Hostname        string    `db:"hostname" json:"hostname"`
	RegionID        int32     `db:"region_id" json:"region_id"`
	RelayAddress    string    `db:"relay_address" json:"relay_address"`
	Version         string    `db:"version" json:"version"`
	DatabaseLatency int32     `db:"database_latency" json:"database_latency"`
}

// InsertReplica registers a new running replica and returns the inserted
// row.
func (q *sqlQuerier) InsertReplica(ctx context.Context, arg InsertReplicaParams) (Replica, error) {
	row := q.db.QueryRowContext(ctx, insertReplica,
		arg.ID,
		arg.CreatedAt,
		arg.StartedAt,
		arg.UpdatedAt,
		arg.Hostname,
		arg.RegionID,
		arg.RelayAddress,
		arg.Version,
		arg.DatabaseLatency,
	)
	var i Replica
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.StartedAt,
		&i.StoppedAt,
		&i.UpdatedAt,
		&i.Hostname,
		&i.RegionID,
		&i.RelayAddress,
		&i.DatabaseLatency,
		&i.Version,
		&i.Error,
	)
	return i, err
}
|
|
|
|
const updateReplica = `-- name: UpdateReplica :one
UPDATE replicas SET
	updated_at = $2,
	started_at = $3,
	stopped_at = $4,
	relay_address = $5,
	region_id = $6,
	hostname = $7,
	version = $8,
	error = $9,
	database_latency = $10
WHERE id = $1 RETURNING id, created_at, started_at, stopped_at, updated_at, hostname, region_id, relay_address, database_latency, version, error
`

// UpdateReplicaParams carries the arguments for UpdateReplica. Every
// mutable column is overwritten; StoppedAt is nullable so a running replica
// can clear it.
type UpdateReplicaParams struct {
	ID              uuid.UUID    `db:"id" json:"id"`
	UpdatedAt       time.Time    `db:"updated_at" json:"updated_at"`
	StartedAt       time.Time    `db:"started_at" json:"started_at"`
	StoppedAt       sql.NullTime `db:"stopped_at" json:"stopped_at"`
	RelayAddress    string       `db:"relay_address" json:"relay_address"`
	RegionID        int32        `db:"region_id" json:"region_id"`
	Hostname        string       `db:"hostname" json:"hostname"`
	Version         string       `db:"version" json:"version"`
	Error           string       `db:"error" json:"error"`
	DatabaseLatency int32        `db:"database_latency" json:"database_latency"`
}

// UpdateReplica overwrites a replica's state (heartbeat, addresses,
// lifecycle timestamps, error) and returns the updated row.
func (q *sqlQuerier) UpdateReplica(ctx context.Context, arg UpdateReplicaParams) (Replica, error) {
	row := q.db.QueryRowContext(ctx, updateReplica,
		arg.ID,
		arg.UpdatedAt,
		arg.StartedAt,
		arg.StoppedAt,
		arg.RelayAddress,
		arg.RegionID,
		arg.Hostname,
		arg.Version,
		arg.Error,
		arg.DatabaseLatency,
	)
	var i Replica
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.StartedAt,
		&i.StoppedAt,
		&i.UpdatedAt,
		&i.Hostname,
		&i.RegionID,
		&i.RelayAddress,
		&i.DatabaseLatency,
		&i.Version,
		&i.Error,
	)
	return i, err
}
|
|
|
|
const getAppSecurityKey = `-- name: GetAppSecurityKey :one
SELECT value FROM site_configs WHERE key = 'app_signing_key'
`

// GetAppSecurityKey reads the 'app_signing_key' entry from the
// site_configs key/value table. Returns sql.ErrNoRows if it was never set.
func (q *sqlQuerier) GetAppSecurityKey(ctx context.Context) (string, error) {
	row := q.db.QueryRowContext(ctx, getAppSecurityKey)
	var value string
	err := row.Scan(&value)
	return value, err
}

const getDERPMeshKey = `-- name: GetDERPMeshKey :one
SELECT value FROM site_configs WHERE key = 'derp_mesh_key'
`

// GetDERPMeshKey reads the 'derp_mesh_key' site_configs entry.
func (q *sqlQuerier) GetDERPMeshKey(ctx context.Context) (string, error) {
	row := q.db.QueryRowContext(ctx, getDERPMeshKey)
	var value string
	err := row.Scan(&value)
	return value, err
}

const getDeploymentID = `-- name: GetDeploymentID :one
SELECT value FROM site_configs WHERE key = 'deployment_id'
`

// GetDeploymentID reads the 'deployment_id' site_configs entry.
func (q *sqlQuerier) GetDeploymentID(ctx context.Context) (string, error) {
	row := q.db.QueryRowContext(ctx, getDeploymentID)
	var value string
	err := row.Scan(&value)
	return value, err
}

const getLastUpdateCheck = `-- name: GetLastUpdateCheck :one
SELECT value FROM site_configs WHERE key = 'last_update_check'
`

// GetLastUpdateCheck reads the 'last_update_check' site_configs entry.
func (q *sqlQuerier) GetLastUpdateCheck(ctx context.Context) (string, error) {
	row := q.db.QueryRowContext(ctx, getLastUpdateCheck)
	var value string
	err := row.Scan(&value)
	return value, err
}

const getLogoURL = `-- name: GetLogoURL :one
SELECT value FROM site_configs WHERE key = 'logo_url'
`

// GetLogoURL reads the 'logo_url' site_configs entry.
func (q *sqlQuerier) GetLogoURL(ctx context.Context) (string, error) {
	row := q.db.QueryRowContext(ctx, getLogoURL)
	var value string
	err := row.Scan(&value)
	return value, err
}

const getServiceBanner = `-- name: GetServiceBanner :one
SELECT value FROM site_configs WHERE key = 'service_banner'
`

// GetServiceBanner reads the 'service_banner' site_configs entry.
func (q *sqlQuerier) GetServiceBanner(ctx context.Context) (string, error) {
	row := q.db.QueryRowContext(ctx, getServiceBanner)
	var value string
	err := row.Scan(&value)
	return value, err
}
|
|
|
|
const insertDERPMeshKey = `-- name: InsertDERPMeshKey :exec
INSERT INTO site_configs (key, value) VALUES ('derp_mesh_key', $1)
`

// InsertDERPMeshKey stores the 'derp_mesh_key' site_configs entry.
// Plain INSERT (no ON CONFLICT): inserting when the key already exists
// will fail, i.e. the key is intended to be written once.
func (q *sqlQuerier) InsertDERPMeshKey(ctx context.Context, value string) error {
	_, err := q.db.ExecContext(ctx, insertDERPMeshKey, value)
	return err
}

const insertDeploymentID = `-- name: InsertDeploymentID :exec
INSERT INTO site_configs (key, value) VALUES ('deployment_id', $1)
`

// InsertDeploymentID stores the 'deployment_id' site_configs entry.
// Like InsertDERPMeshKey, this is a write-once plain INSERT.
func (q *sqlQuerier) InsertDeploymentID(ctx context.Context, value string) error {
	_, err := q.db.ExecContext(ctx, insertDeploymentID, value)
	return err
}

const upsertAppSecurityKey = `-- name: UpsertAppSecurityKey :exec
INSERT INTO site_configs (key, value) VALUES ('app_signing_key', $1)
ON CONFLICT (key) DO UPDATE set value = $1 WHERE site_configs.key = 'app_signing_key'
`

// UpsertAppSecurityKey inserts or overwrites the 'app_signing_key'
// site_configs entry.
func (q *sqlQuerier) UpsertAppSecurityKey(ctx context.Context, value string) error {
	_, err := q.db.ExecContext(ctx, upsertAppSecurityKey, value)
	return err
}

const upsertLastUpdateCheck = `-- name: UpsertLastUpdateCheck :exec
INSERT INTO site_configs (key, value) VALUES ('last_update_check', $1)
ON CONFLICT (key) DO UPDATE SET value = $1 WHERE site_configs.key = 'last_update_check'
`

// UpsertLastUpdateCheck inserts or overwrites the 'last_update_check'
// site_configs entry.
func (q *sqlQuerier) UpsertLastUpdateCheck(ctx context.Context, value string) error {
	_, err := q.db.ExecContext(ctx, upsertLastUpdateCheck, value)
	return err
}

const upsertLogoURL = `-- name: UpsertLogoURL :exec
INSERT INTO site_configs (key, value) VALUES ('logo_url', $1)
ON CONFLICT (key) DO UPDATE SET value = $1 WHERE site_configs.key = 'logo_url'
`

// UpsertLogoURL inserts or overwrites the 'logo_url' site_configs entry.
func (q *sqlQuerier) UpsertLogoURL(ctx context.Context, value string) error {
	_, err := q.db.ExecContext(ctx, upsertLogoURL, value)
	return err
}

const upsertServiceBanner = `-- name: UpsertServiceBanner :exec
INSERT INTO site_configs (key, value) VALUES ('service_banner', $1)
ON CONFLICT (key) DO UPDATE SET value = $1 WHERE site_configs.key = 'service_banner'
`

// UpsertServiceBanner inserts or overwrites the 'service_banner'
// site_configs entry.
func (q *sqlQuerier) UpsertServiceBanner(ctx context.Context, value string) error {
	_, err := q.db.ExecContext(ctx, upsertServiceBanner, value)
	return err
}
|
|
|
|
const getTemplateAverageBuildTime = `-- name: GetTemplateAverageBuildTime :one
WITH build_times AS (
SELECT
	EXTRACT(EPOCH FROM (pj.completed_at - pj.started_at))::FLOAT AS exec_time_sec,
	workspace_builds.transition
FROM
	workspace_builds
JOIN template_versions ON
	workspace_builds.template_version_id = template_versions.id
JOIN provisioner_jobs pj ON
	workspace_builds.job_id = pj.id
WHERE
	template_versions.template_id = $1 AND
		(pj.completed_at IS NOT NULL) AND (pj.started_at IS NOT NULL) AND
		(pj.started_at > $2) AND
		(pj.canceled_at IS NULL) AND
		((pj.error IS NULL) OR (pj.error = ''))
ORDER BY
	workspace_builds.created_at DESC
)
SELECT
	-- Postgres offers no clear way to DRY this short of a function or other
	-- complexities.
	coalesce((PERCENTILE_DISC(0.5) WITHIN GROUP(ORDER BY exec_time_sec) FILTER (WHERE transition = 'start')), -1)::FLOAT AS start_50,
	coalesce((PERCENTILE_DISC(0.5) WITHIN GROUP(ORDER BY exec_time_sec) FILTER (WHERE transition = 'stop')), -1)::FLOAT AS stop_50,
	coalesce((PERCENTILE_DISC(0.5) WITHIN GROUP(ORDER BY exec_time_sec) FILTER (WHERE transition = 'delete')), -1)::FLOAT AS delete_50,
	coalesce((PERCENTILE_DISC(0.95) WITHIN GROUP(ORDER BY exec_time_sec) FILTER (WHERE transition = 'start')), -1)::FLOAT AS start_95,
	coalesce((PERCENTILE_DISC(0.95) WITHIN GROUP(ORDER BY exec_time_sec) FILTER (WHERE transition = 'stop')), -1)::FLOAT AS stop_95,
	coalesce((PERCENTILE_DISC(0.95) WITHIN GROUP(ORDER BY exec_time_sec) FILTER (WHERE transition = 'delete')), -1)::FLOAT AS delete_95
FROM build_times
`

// GetTemplateAverageBuildTimeParams carries the arguments for
// GetTemplateAverageBuildTime: the template to aggregate over and the
// earliest started_at to include.
type GetTemplateAverageBuildTimeParams struct {
	TemplateID uuid.NullUUID `db:"template_id" json:"template_id"`
	StartTime  sql.NullTime  `db:"start_time" json:"start_time"`
}

// GetTemplateAverageBuildTimeRow holds the 50th/95th percentile build
// durations (seconds) per transition. -1 means "no matching builds" (see
// the coalesce in the query).
type GetTemplateAverageBuildTimeRow struct {
	Start50  float64 `db:"start_50" json:"start_50"`
	Stop50   float64 `db:"stop_50" json:"stop_50"`
	Delete50 float64 `db:"delete_50" json:"delete_50"`
	Start95  float64 `db:"start_95" json:"start_95"`
	Stop95   float64 `db:"stop_95" json:"stop_95"`
	Delete95 float64 `db:"delete_95" json:"delete_95"`
}

// GetTemplateAverageBuildTime computes median and p95 execution times of
// successful, non-canceled builds of a template, split by transition
// (start/stop/delete).
func (q *sqlQuerier) GetTemplateAverageBuildTime(ctx context.Context, arg GetTemplateAverageBuildTimeParams) (GetTemplateAverageBuildTimeRow, error) {
	row := q.db.QueryRowContext(ctx, getTemplateAverageBuildTime, arg.TemplateID, arg.StartTime)
	var i GetTemplateAverageBuildTimeRow
	err := row.Scan(
		&i.Start50,
		&i.Stop50,
		&i.Delete50,
		&i.Start95,
		&i.Stop95,
		&i.Delete95,
	)
	return i, err
}
|
|
|
|
const getTemplateByID = `-- name: GetTemplateByID :one
SELECT
	id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, max_ttl, allow_user_autostart, allow_user_autostop, failure_ttl, inactivity_ttl
FROM
	templates
WHERE
	id = $1
LIMIT
	1
`

// GetTemplateByID fetches a single template by primary key. Soft-deleted
// templates are included (no deleted filter).
func (q *sqlQuerier) GetTemplateByID(ctx context.Context, id uuid.UUID) (Template, error) {
	row := q.db.QueryRowContext(ctx, getTemplateByID, id)
	var i Template
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.OrganizationID,
		&i.Deleted,
		&i.Name,
		&i.Provisioner,
		&i.ActiveVersionID,
		&i.Description,
		&i.DefaultTTL,
		&i.CreatedBy,
		&i.Icon,
		&i.UserACL,
		&i.GroupACL,
		&i.DisplayName,
		&i.AllowUserCancelWorkspaceJobs,
		&i.MaxTTL,
		&i.AllowUserAutostart,
		&i.AllowUserAutostop,
		&i.FailureTTL,
		&i.InactivityTTL,
	)
	return i, err
}
|
|
|
|
const getTemplateByOrganizationAndName = `-- name: GetTemplateByOrganizationAndName :one
SELECT
	id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, max_ttl, allow_user_autostart, allow_user_autostop, failure_ttl, inactivity_ttl
FROM
	templates
WHERE
	organization_id = $1
	AND deleted = $2
	AND LOWER("name") = LOWER($3)
LIMIT
	1
`

// GetTemplateByOrganizationAndNameParams carries the arguments for
// GetTemplateByOrganizationAndName. Deleted must match the row's deleted
// flag exactly (pass false for live templates). Name matching is
// case-insensitive (LOWER on both sides).
type GetTemplateByOrganizationAndNameParams struct {
	OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"`
	Deleted        bool      `db:"deleted" json:"deleted"`
	Name           string    `db:"name" json:"name"`
}

// GetTemplateByOrganizationAndName fetches one template by organization
// and case-insensitive name.
func (q *sqlQuerier) GetTemplateByOrganizationAndName(ctx context.Context, arg GetTemplateByOrganizationAndNameParams) (Template, error) {
	row := q.db.QueryRowContext(ctx, getTemplateByOrganizationAndName, arg.OrganizationID, arg.Deleted, arg.Name)
	var i Template
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.OrganizationID,
		&i.Deleted,
		&i.Name,
		&i.Provisioner,
		&i.ActiveVersionID,
		&i.Description,
		&i.DefaultTTL,
		&i.CreatedBy,
		&i.Icon,
		&i.UserACL,
		&i.GroupACL,
		&i.DisplayName,
		&i.AllowUserCancelWorkspaceJobs,
		&i.MaxTTL,
		&i.AllowUserAutostart,
		&i.AllowUserAutostop,
		&i.FailureTTL,
		&i.InactivityTTL,
	)
	return i, err
}
|
|
|
|
const getTemplates = `-- name: GetTemplates :many
SELECT id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, max_ttl, allow_user_autostart, allow_user_autostop, failure_ttl, inactivity_ttl FROM templates
ORDER BY (name, id) ASC
`

// GetTemplates returns every template row (including soft-deleted ones),
// ordered by (name, id) for a stable listing.
func (q *sqlQuerier) GetTemplates(ctx context.Context) ([]Template, error) {
	rows, err := q.db.QueryContext(ctx, getTemplates)
	if err != nil {
		return nil, err
	}
	// Deferred Close covers error paths; the explicit Close below surfaces
	// any close error (sqlc's :many pattern).
	defer rows.Close()
	var items []Template
	for rows.Next() {
		var i Template
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.OrganizationID,
			&i.Deleted,
			&i.Name,
			&i.Provisioner,
			&i.ActiveVersionID,
			&i.Description,
			&i.DefaultTTL,
			&i.CreatedBy,
			&i.Icon,
			&i.UserACL,
			&i.GroupACL,
			&i.DisplayName,
			&i.AllowUserCancelWorkspaceJobs,
			&i.MaxTTL,
			&i.AllowUserAutostart,
			&i.AllowUserAutostop,
			&i.FailureTTL,
			&i.InactivityTTL,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getTemplatesWithFilter = `-- name: GetTemplatesWithFilter :many
SELECT
	id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, max_ttl, allow_user_autostart, allow_user_autostop, failure_ttl, inactivity_ttl
FROM
	templates
WHERE
	-- Optionally include deleted templates
	templates.deleted = $1
	-- Filter by organization_id
	AND CASE
		WHEN $2 :: uuid != '00000000-0000-0000-0000-000000000000'::uuid THEN
			organization_id = $2
		ELSE true
	END
	-- Filter by exact name
	AND CASE
		WHEN $3 :: text != '' THEN
			LOWER("name") = LOWER($3)
		ELSE true
	END
	-- Filter by ids
	AND CASE
		WHEN array_length($4 :: uuid[], 1) > 0 THEN
			id = ANY($4)
		ELSE true
	END
	-- Authorize Filter clause will be injected below in GetAuthorizedTemplates
	-- @authorize_filter
ORDER BY (name, id) ASC
`

// GetTemplatesWithFilterParams carries the optional filters for
// GetTemplatesWithFilter. Zero values (nil UUID, empty string, empty
// slice) disable the corresponding filter — see the CASE clauses above.
type GetTemplatesWithFilterParams struct {
	Deleted        bool        `db:"deleted" json:"deleted"`
	OrganizationID uuid.UUID   `db:"organization_id" json:"organization_id"`
	ExactName      string      `db:"exact_name" json:"exact_name"`
	IDs            []uuid.UUID `db:"ids" json:"ids"`
}

// GetTemplatesWithFilter lists templates matching all enabled filters,
// ordered by (name, id). The IDs slice is passed through pq.Array to bind
// as a Postgres uuid[].
func (q *sqlQuerier) GetTemplatesWithFilter(ctx context.Context, arg GetTemplatesWithFilterParams) ([]Template, error) {
	rows, err := q.db.QueryContext(ctx, getTemplatesWithFilter,
		arg.Deleted,
		arg.OrganizationID,
		arg.ExactName,
		pq.Array(arg.IDs),
	)
	if err != nil {
		return nil, err
	}
	// Deferred Close covers error paths; the explicit Close below surfaces
	// any close error (sqlc's :many pattern).
	defer rows.Close()
	var items []Template
	for rows.Next() {
		var i Template
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.OrganizationID,
			&i.Deleted,
			&i.Name,
			&i.Provisioner,
			&i.ActiveVersionID,
			&i.Description,
			&i.DefaultTTL,
			&i.CreatedBy,
			&i.Icon,
			&i.UserACL,
			&i.GroupACL,
			&i.DisplayName,
			&i.AllowUserCancelWorkspaceJobs,
			&i.MaxTTL,
			&i.AllowUserAutostart,
			&i.AllowUserAutostop,
			&i.FailureTTL,
			&i.InactivityTTL,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertTemplate = `-- name: InsertTemplate :one
INSERT INTO
	templates (
		id,
		created_at,
		updated_at,
		organization_id,
		"name",
		provisioner,
		active_version_id,
		description,
		created_by,
		icon,
		user_acl,
		group_acl,
		display_name,
		allow_user_cancel_workspace_jobs
	)
VALUES
	($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14) RETURNING id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, max_ttl, allow_user_autostart, allow_user_autostop, failure_ttl, inactivity_ttl
`

// InsertTemplateParams mirrors the $1..$14 placeholders of insertTemplate,
// in order. Columns not listed here (deleted, TTLs, etc.) take their
// database defaults on insert.
type InsertTemplateParams struct {
	ID                           uuid.UUID       `db:"id" json:"id"`
	CreatedAt                    time.Time       `db:"created_at" json:"created_at"`
	UpdatedAt                    time.Time       `db:"updated_at" json:"updated_at"`
	OrganizationID               uuid.UUID       `db:"organization_id" json:"organization_id"`
	Name                         string          `db:"name" json:"name"`
	Provisioner                  ProvisionerType `db:"provisioner" json:"provisioner"`
	ActiveVersionID              uuid.UUID       `db:"active_version_id" json:"active_version_id"`
	Description                  string          `db:"description" json:"description"`
	CreatedBy                    uuid.UUID       `db:"created_by" json:"created_by"`
	Icon                         string          `db:"icon" json:"icon"`
	UserACL                      TemplateACL     `db:"user_acl" json:"user_acl"`
	GroupACL                     TemplateACL     `db:"group_acl" json:"group_acl"`
	DisplayName                  string          `db:"display_name" json:"display_name"`
	AllowUserCancelWorkspaceJobs bool            `db:"allow_user_cancel_workspace_jobs" json:"allow_user_cancel_workspace_jobs"`
}

// InsertTemplate inserts a new template row and returns the full stored row
// (including columns populated by database defaults). The Scan order must
// match the RETURNING column list of insertTemplate.
func (q *sqlQuerier) InsertTemplate(ctx context.Context, arg InsertTemplateParams) (Template, error) {
	row := q.db.QueryRowContext(ctx, insertTemplate,
		arg.ID,
		arg.CreatedAt,
		arg.UpdatedAt,
		arg.OrganizationID,
		arg.Name,
		arg.Provisioner,
		arg.ActiveVersionID,
		arg.Description,
		arg.CreatedBy,
		arg.Icon,
		arg.UserACL,
		arg.GroupACL,
		arg.DisplayName,
		arg.AllowUserCancelWorkspaceJobs,
	)
	var i Template
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.OrganizationID,
		&i.Deleted,
		&i.Name,
		&i.Provisioner,
		&i.ActiveVersionID,
		&i.Description,
		&i.DefaultTTL,
		&i.CreatedBy,
		&i.Icon,
		&i.UserACL,
		&i.GroupACL,
		&i.DisplayName,
		&i.AllowUserCancelWorkspaceJobs,
		&i.MaxTTL,
		&i.AllowUserAutostart,
		&i.AllowUserAutostop,
		&i.FailureTTL,
		&i.InactivityTTL,
	)
	return i, err
}
|
|
|
|
const updateTemplateACLByID = `-- name: UpdateTemplateACLByID :one
UPDATE
	templates
SET
	group_acl = $1,
	user_acl = $2
WHERE
	id = $3
RETURNING
	id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, max_ttl, allow_user_autostart, allow_user_autostop, failure_ttl, inactivity_ttl
`

// UpdateTemplateACLByIDParams mirrors the placeholders of
// updateTemplateACLByID: the new ACLs ($1, $2) and the target template ID ($3).
type UpdateTemplateACLByIDParams struct {
	GroupACL TemplateACL `db:"group_acl" json:"group_acl"`
	UserACL  TemplateACL `db:"user_acl" json:"user_acl"`
	ID       uuid.UUID   `db:"id" json:"id"`
}

// UpdateTemplateACLByID replaces a template's group and user ACLs and
// returns the updated row. Note this query does not touch updated_at.
func (q *sqlQuerier) UpdateTemplateACLByID(ctx context.Context, arg UpdateTemplateACLByIDParams) (Template, error) {
	row := q.db.QueryRowContext(ctx, updateTemplateACLByID, arg.GroupACL, arg.UserACL, arg.ID)
	var i Template
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.OrganizationID,
		&i.Deleted,
		&i.Name,
		&i.Provisioner,
		&i.ActiveVersionID,
		&i.Description,
		&i.DefaultTTL,
		&i.CreatedBy,
		&i.Icon,
		&i.UserACL,
		&i.GroupACL,
		&i.DisplayName,
		&i.AllowUserCancelWorkspaceJobs,
		&i.MaxTTL,
		&i.AllowUserAutostart,
		&i.AllowUserAutostop,
		&i.FailureTTL,
		&i.InactivityTTL,
	)
	return i, err
}
|
|
|
|
const updateTemplateActiveVersionByID = `-- name: UpdateTemplateActiveVersionByID :exec
UPDATE
	templates
SET
	active_version_id = $2,
	updated_at = $3
WHERE
	id = $1
`

// UpdateTemplateActiveVersionByIDParams mirrors updateTemplateActiveVersionByID:
// $1 is the target template ID; $2/$3 are the values to set.
type UpdateTemplateActiveVersionByIDParams struct {
	ID              uuid.UUID `db:"id" json:"id"`
	ActiveVersionID uuid.UUID `db:"active_version_id" json:"active_version_id"`
	UpdatedAt       time.Time `db:"updated_at" json:"updated_at"`
}

// UpdateTemplateActiveVersionByID points a template at a new active version
// and stamps updated_at. It returns only an error (no affected-row count),
// so updating a non-existent ID is not reported as a failure.
func (q *sqlQuerier) UpdateTemplateActiveVersionByID(ctx context.Context, arg UpdateTemplateActiveVersionByIDParams) error {
	_, err := q.db.ExecContext(ctx, updateTemplateActiveVersionByID, arg.ID, arg.ActiveVersionID, arg.UpdatedAt)
	return err
}
|
|
|
|
const updateTemplateDeletedByID = `-- name: UpdateTemplateDeletedByID :exec
UPDATE
	templates
SET
	deleted = $2,
	updated_at = $3
WHERE
	id = $1
`

// UpdateTemplateDeletedByIDParams mirrors updateTemplateDeletedByID:
// $1 is the target template ID; $2/$3 are the values to set.
type UpdateTemplateDeletedByIDParams struct {
	ID        uuid.UUID `db:"id" json:"id"`
	Deleted   bool      `db:"deleted" json:"deleted"`
	UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
}

// UpdateTemplateDeletedByID sets a template's soft-delete flag (templates
// are soft-deleted, not removed) and stamps updated_at.
func (q *sqlQuerier) UpdateTemplateDeletedByID(ctx context.Context, arg UpdateTemplateDeletedByIDParams) error {
	_, err := q.db.ExecContext(ctx, updateTemplateDeletedByID, arg.ID, arg.Deleted, arg.UpdatedAt)
	return err
}
|
|
|
|
const updateTemplateMetaByID = `-- name: UpdateTemplateMetaByID :one
UPDATE
	templates
SET
	updated_at = $2,
	description = $3,
	name = $4,
	icon = $5,
	display_name = $6,
	allow_user_cancel_workspace_jobs = $7
WHERE
	id = $1
RETURNING
	id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, max_ttl, allow_user_autostart, allow_user_autostop, failure_ttl, inactivity_ttl
`

// UpdateTemplateMetaByIDParams mirrors updateTemplateMetaByID: $1 is the
// target template ID, the remaining fields the metadata values to set.
type UpdateTemplateMetaByIDParams struct {
	ID                           uuid.UUID `db:"id" json:"id"`
	UpdatedAt                    time.Time `db:"updated_at" json:"updated_at"`
	Description                  string    `db:"description" json:"description"`
	Name                         string    `db:"name" json:"name"`
	Icon                         string    `db:"icon" json:"icon"`
	DisplayName                  string    `db:"display_name" json:"display_name"`
	AllowUserCancelWorkspaceJobs bool      `db:"allow_user_cancel_workspace_jobs" json:"allow_user_cancel_workspace_jobs"`
}

// UpdateTemplateMetaByID updates a template's user-editable metadata
// (name, description, icon, display name, cancel-jobs flag) and returns the
// updated row. Scheduling/TTL fields are handled separately by
// UpdateTemplateScheduleByID.
func (q *sqlQuerier) UpdateTemplateMetaByID(ctx context.Context, arg UpdateTemplateMetaByIDParams) (Template, error) {
	row := q.db.QueryRowContext(ctx, updateTemplateMetaByID,
		arg.ID,
		arg.UpdatedAt,
		arg.Description,
		arg.Name,
		arg.Icon,
		arg.DisplayName,
		arg.AllowUserCancelWorkspaceJobs,
	)
	var i Template
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.OrganizationID,
		&i.Deleted,
		&i.Name,
		&i.Provisioner,
		&i.ActiveVersionID,
		&i.Description,
		&i.DefaultTTL,
		&i.CreatedBy,
		&i.Icon,
		&i.UserACL,
		&i.GroupACL,
		&i.DisplayName,
		&i.AllowUserCancelWorkspaceJobs,
		&i.MaxTTL,
		&i.AllowUserAutostart,
		&i.AllowUserAutostop,
		&i.FailureTTL,
		&i.InactivityTTL,
	)
	return i, err
}
|
|
|
|
const updateTemplateScheduleByID = `-- name: UpdateTemplateScheduleByID :one
UPDATE
	templates
SET
	updated_at = $2,
	allow_user_autostart = $3,
	allow_user_autostop = $4,
	default_ttl = $5,
	max_ttl = $6,
	failure_ttl = $7,
	inactivity_ttl = $8
WHERE
	id = $1
RETURNING
	id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, max_ttl, allow_user_autostart, allow_user_autostop, failure_ttl, inactivity_ttl
`

// UpdateTemplateScheduleByIDParams mirrors updateTemplateScheduleByID:
// $1 is the target template ID; TTL values are int64
// (nanosecond time.Duration values per the callers' convention —
// TODO confirm against the schema).
type UpdateTemplateScheduleByIDParams struct {
	ID                 uuid.UUID `db:"id" json:"id"`
	UpdatedAt          time.Time `db:"updated_at" json:"updated_at"`
	AllowUserAutostart bool      `db:"allow_user_autostart" json:"allow_user_autostart"`
	AllowUserAutostop  bool      `db:"allow_user_autostop" json:"allow_user_autostop"`
	DefaultTTL         int64     `db:"default_ttl" json:"default_ttl"`
	MaxTTL             int64     `db:"max_ttl" json:"max_ttl"`
	FailureTTL         int64     `db:"failure_ttl" json:"failure_ttl"`
	InactivityTTL      int64     `db:"inactivity_ttl" json:"inactivity_ttl"`
}

// UpdateTemplateScheduleByID updates a template's scheduling fields
// (autostart/autostop permissions and the various TTLs) and returns the
// updated row.
func (q *sqlQuerier) UpdateTemplateScheduleByID(ctx context.Context, arg UpdateTemplateScheduleByIDParams) (Template, error) {
	row := q.db.QueryRowContext(ctx, updateTemplateScheduleByID,
		arg.ID,
		arg.UpdatedAt,
		arg.AllowUserAutostart,
		arg.AllowUserAutostop,
		arg.DefaultTTL,
		arg.MaxTTL,
		arg.FailureTTL,
		arg.InactivityTTL,
	)
	var i Template
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.OrganizationID,
		&i.Deleted,
		&i.Name,
		&i.Provisioner,
		&i.ActiveVersionID,
		&i.Description,
		&i.DefaultTTL,
		&i.CreatedBy,
		&i.Icon,
		&i.UserACL,
		&i.GroupACL,
		&i.DisplayName,
		&i.AllowUserCancelWorkspaceJobs,
		&i.MaxTTL,
		&i.AllowUserAutostart,
		&i.AllowUserAutostop,
		&i.FailureTTL,
		&i.InactivityTTL,
	)
	return i, err
}
|
|
|
|
const getTemplateVersionParameters = `-- name: GetTemplateVersionParameters :many
SELECT template_version_id, name, description, type, mutable, default_value, icon, options, validation_regex, validation_min, validation_max, validation_error, validation_monotonic, required, legacy_variable_name, display_name FROM template_version_parameters WHERE template_version_id = $1
`

// GetTemplateVersionParameters returns all rich parameters declared by the
// given template version, in unspecified order (no ORDER BY in the query).
// The Scan order must match the SELECT column list above.
func (q *sqlQuerier) GetTemplateVersionParameters(ctx context.Context, templateVersionID uuid.UUID) ([]TemplateVersionParameter, error) {
	rows, err := q.db.QueryContext(ctx, getTemplateVersionParameters, templateVersionID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []TemplateVersionParameter
	for rows.Next() {
		var i TemplateVersionParameter
		if err := rows.Scan(
			&i.TemplateVersionID,
			&i.Name,
			&i.Description,
			&i.Type,
			&i.Mutable,
			&i.DefaultValue,
			&i.Icon,
			&i.Options,
			&i.ValidationRegex,
			&i.ValidationMin,
			&i.ValidationMax,
			&i.ValidationError,
			&i.ValidationMonotonic,
			&i.Required,
			&i.LegacyVariableName,
			&i.DisplayName,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	// Explicit Close before Err so a close failure is surfaced as an error.
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertTemplateVersionParameter = `-- name: InsertTemplateVersionParameter :one
INSERT INTO
	template_version_parameters (
		template_version_id,
		name,
		description,
		type,
		mutable,
		default_value,
		icon,
		options,
		validation_regex,
		validation_min,
		validation_max,
		validation_error,
		validation_monotonic,
		required,
		legacy_variable_name,
		display_name
	)
VALUES
	(
		$1,
		$2,
		$3,
		$4,
		$5,
		$6,
		$7,
		$8,
		$9,
		$10,
		$11,
		$12,
		$13,
		$14,
		$15,
		$16
	) RETURNING template_version_id, name, description, type, mutable, default_value, icon, options, validation_regex, validation_min, validation_max, validation_error, validation_monotonic, required, legacy_variable_name, display_name
`

// InsertTemplateVersionParameterParams mirrors the $1..$16 placeholders of
// insertTemplateVersionParameter, in order. Options is raw JSON;
// ValidationMin/Max are nullable (sql.NullInt32) since not all parameter
// types carry numeric bounds.
type InsertTemplateVersionParameterParams struct {
	TemplateVersionID   uuid.UUID       `db:"template_version_id" json:"template_version_id"`
	Name                string          `db:"name" json:"name"`
	Description         string          `db:"description" json:"description"`
	Type                string          `db:"type" json:"type"`
	Mutable             bool            `db:"mutable" json:"mutable"`
	DefaultValue        string          `db:"default_value" json:"default_value"`
	Icon                string          `db:"icon" json:"icon"`
	Options             json.RawMessage `db:"options" json:"options"`
	ValidationRegex     string          `db:"validation_regex" json:"validation_regex"`
	ValidationMin       sql.NullInt32   `db:"validation_min" json:"validation_min"`
	ValidationMax       sql.NullInt32   `db:"validation_max" json:"validation_max"`
	ValidationError     string          `db:"validation_error" json:"validation_error"`
	ValidationMonotonic string          `db:"validation_monotonic" json:"validation_monotonic"`
	Required            bool            `db:"required" json:"required"`
	LegacyVariableName  string          `db:"legacy_variable_name" json:"legacy_variable_name"`
	DisplayName         string          `db:"display_name" json:"display_name"`
}

// InsertTemplateVersionParameter inserts one rich parameter definition for a
// template version and returns the stored row.
func (q *sqlQuerier) InsertTemplateVersionParameter(ctx context.Context, arg InsertTemplateVersionParameterParams) (TemplateVersionParameter, error) {
	row := q.db.QueryRowContext(ctx, insertTemplateVersionParameter,
		arg.TemplateVersionID,
		arg.Name,
		arg.Description,
		arg.Type,
		arg.Mutable,
		arg.DefaultValue,
		arg.Icon,
		arg.Options,
		arg.ValidationRegex,
		arg.ValidationMin,
		arg.ValidationMax,
		arg.ValidationError,
		arg.ValidationMonotonic,
		arg.Required,
		arg.LegacyVariableName,
		arg.DisplayName,
	)
	var i TemplateVersionParameter
	err := row.Scan(
		&i.TemplateVersionID,
		&i.Name,
		&i.Description,
		&i.Type,
		&i.Mutable,
		&i.DefaultValue,
		&i.Icon,
		&i.Options,
		&i.ValidationRegex,
		&i.ValidationMin,
		&i.ValidationMax,
		&i.ValidationError,
		&i.ValidationMonotonic,
		&i.Required,
		&i.LegacyVariableName,
		&i.DisplayName,
	)
	return i, err
}
|
|
|
|
const getPreviousTemplateVersion = `-- name: GetPreviousTemplateVersion :one
SELECT
	id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, git_auth_providers
FROM
	template_versions
WHERE
	created_at < (
		SELECT created_at
		FROM template_versions AS tv
		WHERE tv.organization_id = $1 AND tv.name = $2 AND tv.template_id = $3
	)
	AND organization_id = $1
	AND template_id = $3
ORDER BY created_at DESC
LIMIT 1
`

// GetPreviousTemplateVersionParams identifies a reference version by
// (organization, name, template); see getPreviousTemplateVersion.
type GetPreviousTemplateVersionParams struct {
	OrganizationID uuid.UUID     `db:"organization_id" json:"organization_id"`
	Name           string        `db:"name" json:"name"`
	TemplateID     uuid.NullUUID `db:"template_id" json:"template_id"`
}

// GetPreviousTemplateVersion returns the template version created most
// recently BEFORE the version named arg.Name, within the same organization
// and template. Returns sql.ErrNoRows when the named version is the oldest
// (or does not exist).
func (q *sqlQuerier) GetPreviousTemplateVersion(ctx context.Context, arg GetPreviousTemplateVersionParams) (TemplateVersion, error) {
	row := q.db.QueryRowContext(ctx, getPreviousTemplateVersion, arg.OrganizationID, arg.Name, arg.TemplateID)
	var i TemplateVersion
	err := row.Scan(
		&i.ID,
		&i.TemplateID,
		&i.OrganizationID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Name,
		&i.Readme,
		&i.JobID,
		&i.CreatedBy,
		// git_auth_providers is a SQL array; pq.Array adapts it to []string.
		pq.Array(&i.GitAuthProviders),
	)
	return i, err
}
|
|
|
|
const getTemplateVersionByID = `-- name: GetTemplateVersionByID :one
SELECT
	id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, git_auth_providers
FROM
	template_versions
WHERE
	id = $1
`

// GetTemplateVersionByID fetches a single template version by its primary
// key. Returns sql.ErrNoRows if no such version exists.
func (q *sqlQuerier) GetTemplateVersionByID(ctx context.Context, id uuid.UUID) (TemplateVersion, error) {
	row := q.db.QueryRowContext(ctx, getTemplateVersionByID, id)
	var i TemplateVersion
	err := row.Scan(
		&i.ID,
		&i.TemplateID,
		&i.OrganizationID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Name,
		&i.Readme,
		&i.JobID,
		&i.CreatedBy,
		// git_auth_providers is a SQL array; pq.Array adapts it to []string.
		pq.Array(&i.GitAuthProviders),
	)
	return i, err
}
|
|
|
|
const getTemplateVersionByJobID = `-- name: GetTemplateVersionByJobID :one
SELECT
	id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, git_auth_providers
FROM
	template_versions
WHERE
	job_id = $1
`

// GetTemplateVersionByJobID fetches the template version associated with the
// given provisioner job. Returns sql.ErrNoRows if none matches.
func (q *sqlQuerier) GetTemplateVersionByJobID(ctx context.Context, jobID uuid.UUID) (TemplateVersion, error) {
	row := q.db.QueryRowContext(ctx, getTemplateVersionByJobID, jobID)
	var i TemplateVersion
	err := row.Scan(
		&i.ID,
		&i.TemplateID,
		&i.OrganizationID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Name,
		&i.Readme,
		&i.JobID,
		&i.CreatedBy,
		pq.Array(&i.GitAuthProviders),
	)
	return i, err
}
|
|
|
|
const getTemplateVersionByTemplateIDAndName = `-- name: GetTemplateVersionByTemplateIDAndName :one
SELECT
	id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, git_auth_providers
FROM
	template_versions
WHERE
	template_id = $1
	AND "name" = $2
`

// GetTemplateVersionByTemplateIDAndNameParams identifies a version by its
// parent template and its (case-sensitive) name.
type GetTemplateVersionByTemplateIDAndNameParams struct {
	TemplateID uuid.NullUUID `db:"template_id" json:"template_id"`
	Name       string        `db:"name" json:"name"`
}

// GetTemplateVersionByTemplateIDAndName fetches a template version by
// template ID and version name. Returns sql.ErrNoRows if none matches.
func (q *sqlQuerier) GetTemplateVersionByTemplateIDAndName(ctx context.Context, arg GetTemplateVersionByTemplateIDAndNameParams) (TemplateVersion, error) {
	row := q.db.QueryRowContext(ctx, getTemplateVersionByTemplateIDAndName, arg.TemplateID, arg.Name)
	var i TemplateVersion
	err := row.Scan(
		&i.ID,
		&i.TemplateID,
		&i.OrganizationID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Name,
		&i.Readme,
		&i.JobID,
		&i.CreatedBy,
		pq.Array(&i.GitAuthProviders),
	)
	return i, err
}
|
|
|
|
const getTemplateVersionsByIDs = `-- name: GetTemplateVersionsByIDs :many
SELECT
	id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, git_auth_providers
FROM
	template_versions
WHERE
	id = ANY($1 :: uuid [ ])
`

// GetTemplateVersionsByIDs returns every template version whose ID appears
// in ids, in unspecified order. Unknown IDs are silently skipped; an empty
// ids slice yields an empty result.
func (q *sqlQuerier) GetTemplateVersionsByIDs(ctx context.Context, ids []uuid.UUID) ([]TemplateVersion, error) {
	rows, err := q.db.QueryContext(ctx, getTemplateVersionsByIDs, pq.Array(ids))
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []TemplateVersion
	for rows.Next() {
		var i TemplateVersion
		if err := rows.Scan(
			&i.ID,
			&i.TemplateID,
			&i.OrganizationID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.Name,
			&i.Readme,
			&i.JobID,
			&i.CreatedBy,
			pq.Array(&i.GitAuthProviders),
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	// Explicit Close before Err so a close failure is surfaced as an error.
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getTemplateVersionsByTemplateID = `-- name: GetTemplateVersionsByTemplateID :many
SELECT
	id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, git_auth_providers
FROM
	template_versions
WHERE
	template_id = $1 :: uuid
	AND CASE
		-- This allows using the last element on a page as effectively a cursor.
		-- This is an important option for scripts that need to paginate without
		-- duplicating or missing data.
		WHEN $2 :: uuid != '00000000-0000-0000-0000-000000000000'::uuid THEN (
			-- The pagination cursor is the last ID of the previous page.
			-- The query is ordered by the created_at field, so select all
			-- rows after the cursor.
			(created_at, id) > (
				SELECT
					created_at, id
				FROM
					template_versions
				WHERE
					id = $2
			)
		)
		ELSE true
	END
ORDER BY
	-- Deterministic and consistent ordering of all rows, even if they share
	-- a timestamp. This is to ensure consistent pagination.
	(created_at, id) ASC OFFSET $3
LIMIT
	-- A null limit means "no limit", so 0 means return all
	NULLIF($4 :: int, 0)
`

// GetTemplateVersionsByTemplateIDParams holds the paging options for
// GetTemplateVersionsByTemplateID. AfterID (uuid.Nil = disabled) is a
// keyset-pagination cursor; OffsetOpt/LimitOpt are classic offset paging,
// with LimitOpt == 0 meaning "no limit".
type GetTemplateVersionsByTemplateIDParams struct {
	TemplateID uuid.UUID `db:"template_id" json:"template_id"`
	AfterID    uuid.UUID `db:"after_id" json:"after_id"`
	OffsetOpt  int32     `db:"offset_opt" json:"offset_opt"`
	LimitOpt   int32     `db:"limit_opt" json:"limit_opt"`
}

// GetTemplateVersionsByTemplateID lists a template's versions ordered by
// (created_at, id) ascending, with optional cursor- or offset-based
// pagination (see the params struct).
func (q *sqlQuerier) GetTemplateVersionsByTemplateID(ctx context.Context, arg GetTemplateVersionsByTemplateIDParams) ([]TemplateVersion, error) {
	rows, err := q.db.QueryContext(ctx, getTemplateVersionsByTemplateID,
		arg.TemplateID,
		arg.AfterID,
		arg.OffsetOpt,
		arg.LimitOpt,
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []TemplateVersion
	for rows.Next() {
		var i TemplateVersion
		if err := rows.Scan(
			&i.ID,
			&i.TemplateID,
			&i.OrganizationID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.Name,
			&i.Readme,
			&i.JobID,
			&i.CreatedBy,
			pq.Array(&i.GitAuthProviders),
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	// Explicit Close before Err so a close failure is surfaced as an error.
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getTemplateVersionsCreatedAfter = `-- name: GetTemplateVersionsCreatedAfter :many
SELECT id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, git_auth_providers FROM template_versions WHERE created_at > $1
`

// GetTemplateVersionsCreatedAfter returns every template version created
// strictly after the given time, in unspecified order.
func (q *sqlQuerier) GetTemplateVersionsCreatedAfter(ctx context.Context, createdAt time.Time) ([]TemplateVersion, error) {
	rows, err := q.db.QueryContext(ctx, getTemplateVersionsCreatedAfter, createdAt)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []TemplateVersion
	for rows.Next() {
		var i TemplateVersion
		if err := rows.Scan(
			&i.ID,
			&i.TemplateID,
			&i.OrganizationID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.Name,
			&i.Readme,
			&i.JobID,
			&i.CreatedBy,
			pq.Array(&i.GitAuthProviders),
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	// Explicit Close before Err so a close failure is surfaced as an error.
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertTemplateVersion = `-- name: InsertTemplateVersion :one
INSERT INTO
	template_versions (
		id,
		template_id,
		organization_id,
		created_at,
		updated_at,
		"name",
		readme,
		job_id,
		created_by
	)
VALUES
	($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, git_auth_providers
`

// InsertTemplateVersionParams mirrors the $1..$9 placeholders of
// insertTemplateVersion, in order. TemplateID is nullable: a version may be
// created before it is attached to a template.
type InsertTemplateVersionParams struct {
	ID             uuid.UUID     `db:"id" json:"id"`
	TemplateID     uuid.NullUUID `db:"template_id" json:"template_id"`
	OrganizationID uuid.UUID     `db:"organization_id" json:"organization_id"`
	CreatedAt      time.Time     `db:"created_at" json:"created_at"`
	UpdatedAt      time.Time     `db:"updated_at" json:"updated_at"`
	Name           string        `db:"name" json:"name"`
	Readme         string        `db:"readme" json:"readme"`
	JobID          uuid.UUID     `db:"job_id" json:"job_id"`
	CreatedBy      uuid.UUID     `db:"created_by" json:"created_by"`
}

// InsertTemplateVersion inserts a new template version and returns the
// stored row (git_auth_providers comes back from the RETURNING clause even
// though it is not part of the insert).
func (q *sqlQuerier) InsertTemplateVersion(ctx context.Context, arg InsertTemplateVersionParams) (TemplateVersion, error) {
	row := q.db.QueryRowContext(ctx, insertTemplateVersion,
		arg.ID,
		arg.TemplateID,
		arg.OrganizationID,
		arg.CreatedAt,
		arg.UpdatedAt,
		arg.Name,
		arg.Readme,
		arg.JobID,
		arg.CreatedBy,
	)
	var i TemplateVersion
	err := row.Scan(
		&i.ID,
		&i.TemplateID,
		&i.OrganizationID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Name,
		&i.Readme,
		&i.JobID,
		&i.CreatedBy,
		pq.Array(&i.GitAuthProviders),
	)
	return i, err
}
|
|
|
|
const updateTemplateVersionByID = `-- name: UpdateTemplateVersionByID :one
UPDATE
	template_versions
SET
	template_id = $2,
	updated_at = $3,
	name = $4
WHERE
	id = $1 RETURNING id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, git_auth_providers
`

// UpdateTemplateVersionByIDParams mirrors updateTemplateVersionByID:
// $1 is the target version ID; the rest are the values to set.
type UpdateTemplateVersionByIDParams struct {
	ID         uuid.UUID     `db:"id" json:"id"`
	TemplateID uuid.NullUUID `db:"template_id" json:"template_id"`
	UpdatedAt  time.Time     `db:"updated_at" json:"updated_at"`
	Name       string        `db:"name" json:"name"`
}

// UpdateTemplateVersionByID updates a version's parent template, name and
// updated_at, returning the updated row.
func (q *sqlQuerier) UpdateTemplateVersionByID(ctx context.Context, arg UpdateTemplateVersionByIDParams) (TemplateVersion, error) {
	row := q.db.QueryRowContext(ctx, updateTemplateVersionByID,
		arg.ID,
		arg.TemplateID,
		arg.UpdatedAt,
		arg.Name,
	)
	var i TemplateVersion
	err := row.Scan(
		&i.ID,
		&i.TemplateID,
		&i.OrganizationID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Name,
		&i.Readme,
		&i.JobID,
		&i.CreatedBy,
		pq.Array(&i.GitAuthProviders),
	)
	return i, err
}
|
|
|
|
const updateTemplateVersionDescriptionByJobID = `-- name: UpdateTemplateVersionDescriptionByJobID :exec
UPDATE
	template_versions
SET
	readme = $2,
	updated_at = $3
WHERE
	job_id = $1
`

// UpdateTemplateVersionDescriptionByJobIDParams mirrors the query above:
// the version is addressed by its provisioner job ID ($1), not its own ID.
type UpdateTemplateVersionDescriptionByJobIDParams struct {
	JobID     uuid.UUID `db:"job_id" json:"job_id"`
	Readme    string    `db:"readme" json:"readme"`
	UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
}

// UpdateTemplateVersionDescriptionByJobID sets the README of the template
// version belonging to the given provisioner job. Despite the method name,
// the column written is readme, not description.
func (q *sqlQuerier) UpdateTemplateVersionDescriptionByJobID(ctx context.Context, arg UpdateTemplateVersionDescriptionByJobIDParams) error {
	_, err := q.db.ExecContext(ctx, updateTemplateVersionDescriptionByJobID, arg.JobID, arg.Readme, arg.UpdatedAt)
	return err
}
|
|
|
|
const updateTemplateVersionGitAuthProvidersByJobID = `-- name: UpdateTemplateVersionGitAuthProvidersByJobID :exec
UPDATE
	template_versions
SET
	git_auth_providers = $2,
	updated_at = $3
WHERE
	job_id = $1
`

// UpdateTemplateVersionGitAuthProvidersByJobIDParams mirrors the query
// above; the version is addressed by its provisioner job ID.
type UpdateTemplateVersionGitAuthProvidersByJobIDParams struct {
	JobID            uuid.UUID `db:"job_id" json:"job_id"`
	GitAuthProviders []string  `db:"git_auth_providers" json:"git_auth_providers"`
	UpdatedAt        time.Time `db:"updated_at" json:"updated_at"`
}

// UpdateTemplateVersionGitAuthProvidersByJobID replaces the git auth
// provider list of the template version belonging to the given provisioner
// job. pq.Array adapts the []string to a SQL array parameter.
func (q *sqlQuerier) UpdateTemplateVersionGitAuthProvidersByJobID(ctx context.Context, arg UpdateTemplateVersionGitAuthProvidersByJobIDParams) error {
	_, err := q.db.ExecContext(ctx, updateTemplateVersionGitAuthProvidersByJobID, arg.JobID, pq.Array(arg.GitAuthProviders), arg.UpdatedAt)
	return err
}
|
|
|
|
const getTemplateVersionVariables = `-- name: GetTemplateVersionVariables :many
SELECT template_version_id, name, description, type, value, default_value, required, sensitive FROM template_version_variables WHERE template_version_id = $1
`

// GetTemplateVersionVariables returns all Terraform-style variables stored
// for the given template version, in unspecified order.
func (q *sqlQuerier) GetTemplateVersionVariables(ctx context.Context, templateVersionID uuid.UUID) ([]TemplateVersionVariable, error) {
	rows, err := q.db.QueryContext(ctx, getTemplateVersionVariables, templateVersionID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []TemplateVersionVariable
	for rows.Next() {
		var i TemplateVersionVariable
		if err := rows.Scan(
			&i.TemplateVersionID,
			&i.Name,
			&i.Description,
			&i.Type,
			&i.Value,
			&i.DefaultValue,
			&i.Required,
			&i.Sensitive,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	// Explicit Close before Err so a close failure is surfaced as an error.
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertTemplateVersionVariable = `-- name: InsertTemplateVersionVariable :one
INSERT INTO
	template_version_variables (
		template_version_id,
		name,
		description,
		type,
		value,
		default_value,
		required,
		sensitive
	)
VALUES
	(
		$1,
		$2,
		$3,
		$4,
		$5,
		$6,
		$7,
		$8
	) RETURNING template_version_id, name, description, type, value, default_value, required, sensitive
`

// InsertTemplateVersionVariableParams mirrors the $1..$8 placeholders of
// insertTemplateVersionVariable, in order.
type InsertTemplateVersionVariableParams struct {
	TemplateVersionID uuid.UUID `db:"template_version_id" json:"template_version_id"`
	Name              string    `db:"name" json:"name"`
	Description       string    `db:"description" json:"description"`
	Type              string    `db:"type" json:"type"`
	Value             string    `db:"value" json:"value"`
	DefaultValue      string    `db:"default_value" json:"default_value"`
	Required          bool      `db:"required" json:"required"`
	Sensitive         bool      `db:"sensitive" json:"sensitive"`
}

// InsertTemplateVersionVariable inserts one variable definition for a
// template version and returns the stored row.
func (q *sqlQuerier) InsertTemplateVersionVariable(ctx context.Context, arg InsertTemplateVersionVariableParams) (TemplateVersionVariable, error) {
	row := q.db.QueryRowContext(ctx, insertTemplateVersionVariable,
		arg.TemplateVersionID,
		arg.Name,
		arg.Description,
		arg.Type,
		arg.Value,
		arg.DefaultValue,
		arg.Required,
		arg.Sensitive,
	)
	var i TemplateVersionVariable
	err := row.Scan(
		&i.TemplateVersionID,
		&i.Name,
		&i.Description,
		&i.Type,
		&i.Value,
		&i.DefaultValue,
		&i.Required,
		&i.Sensitive,
	)
	return i, err
}
|
|
|
|
const getUserLinkByLinkedID = `-- name: GetUserLinkByLinkedID :one
SELECT
	user_id, login_type, linked_id, oauth_access_token, oauth_refresh_token, oauth_expiry
FROM
	user_links
WHERE
	linked_id = $1
`

// GetUserLinkByLinkedID fetches a user's external-auth link by the external
// provider's identifier. Returns sql.ErrNoRows if none matches.
func (q *sqlQuerier) GetUserLinkByLinkedID(ctx context.Context, linkedID string) (UserLink, error) {
	row := q.db.QueryRowContext(ctx, getUserLinkByLinkedID, linkedID)
	var i UserLink
	err := row.Scan(
		&i.UserID,
		&i.LoginType,
		&i.LinkedID,
		&i.OAuthAccessToken,
		&i.OAuthRefreshToken,
		&i.OAuthExpiry,
	)
	return i, err
}
|
|
|
|
const getUserLinkByUserIDLoginType = `-- name: GetUserLinkByUserIDLoginType :one
SELECT
	user_id, login_type, linked_id, oauth_access_token, oauth_refresh_token, oauth_expiry
FROM
	user_links
WHERE
	user_id = $1 AND login_type = $2
`

// GetUserLinkByUserIDLoginTypeParams identifies a link by its owning user
// and login type (the pair addressed by getUserLinkByUserIDLoginType).
type GetUserLinkByUserIDLoginTypeParams struct {
	UserID    uuid.UUID `db:"user_id" json:"user_id"`
	LoginType LoginType `db:"login_type" json:"login_type"`
}

// GetUserLinkByUserIDLoginType fetches a user's external-auth link for a
// specific login type. Returns sql.ErrNoRows if none matches.
func (q *sqlQuerier) GetUserLinkByUserIDLoginType(ctx context.Context, arg GetUserLinkByUserIDLoginTypeParams) (UserLink, error) {
	row := q.db.QueryRowContext(ctx, getUserLinkByUserIDLoginType, arg.UserID, arg.LoginType)
	var i UserLink
	err := row.Scan(
		&i.UserID,
		&i.LoginType,
		&i.LinkedID,
		&i.OAuthAccessToken,
		&i.OAuthRefreshToken,
		&i.OAuthExpiry,
	)
	return i, err
}
|
|
|
|
const insertUserLink = `-- name: InsertUserLink :one
INSERT INTO
	user_links (
		user_id,
		login_type,
		linked_id,
		oauth_access_token,
		oauth_refresh_token,
		oauth_expiry
	)
VALUES
	( $1, $2, $3, $4, $5, $6 ) RETURNING user_id, login_type, linked_id, oauth_access_token, oauth_refresh_token, oauth_expiry
`

// InsertUserLinkParams mirrors the $1..$6 placeholders of insertUserLink,
// in order.
type InsertUserLinkParams struct {
	UserID            uuid.UUID `db:"user_id" json:"user_id"`
	LoginType         LoginType `db:"login_type" json:"login_type"`
	LinkedID          string    `db:"linked_id" json:"linked_id"`
	OAuthAccessToken  string    `db:"oauth_access_token" json:"oauth_access_token"`
	OAuthRefreshToken string    `db:"oauth_refresh_token" json:"oauth_refresh_token"`
	OAuthExpiry       time.Time `db:"oauth_expiry" json:"oauth_expiry"`
}

// InsertUserLink creates an external-auth link (OAuth tokens included) for a
// user and returns the stored row.
func (q *sqlQuerier) InsertUserLink(ctx context.Context, arg InsertUserLinkParams) (UserLink, error) {
	row := q.db.QueryRowContext(ctx, insertUserLink,
		arg.UserID,
		arg.LoginType,
		arg.LinkedID,
		arg.OAuthAccessToken,
		arg.OAuthRefreshToken,
		arg.OAuthExpiry,
	)
	var i UserLink
	err := row.Scan(
		&i.UserID,
		&i.LoginType,
		&i.LinkedID,
		&i.OAuthAccessToken,
		&i.OAuthRefreshToken,
		&i.OAuthExpiry,
	)
	return i, err
}
|
|
|
|
const updateUserLink = `-- name: UpdateUserLink :one
UPDATE
	user_links
SET
	oauth_access_token = $1,
	oauth_refresh_token = $2,
	oauth_expiry = $3
WHERE
	user_id = $4 AND login_type = $5 RETURNING user_id, login_type, linked_id, oauth_access_token, oauth_refresh_token, oauth_expiry
`

// UpdateUserLinkParams mirrors updateUserLink: new token values ($1..$3)
// plus the (user_id, login_type) pair that addresses the row ($4, $5).
type UpdateUserLinkParams struct {
	OAuthAccessToken  string    `db:"oauth_access_token" json:"oauth_access_token"`
	OAuthRefreshToken string    `db:"oauth_refresh_token" json:"oauth_refresh_token"`
	OAuthExpiry       time.Time `db:"oauth_expiry" json:"oauth_expiry"`
	UserID            uuid.UUID `db:"user_id" json:"user_id"`
	LoginType         LoginType `db:"login_type" json:"login_type"`
}

// UpdateUserLink refreshes the OAuth tokens stored on a user's
// external-auth link and returns the updated row. Returns sql.ErrNoRows if
// the (user, login type) pair has no link.
func (q *sqlQuerier) UpdateUserLink(ctx context.Context, arg UpdateUserLinkParams) (UserLink, error) {
	row := q.db.QueryRowContext(ctx, updateUserLink,
		arg.OAuthAccessToken,
		arg.OAuthRefreshToken,
		arg.OAuthExpiry,
		arg.UserID,
		arg.LoginType,
	)
	var i UserLink
	err := row.Scan(
		&i.UserID,
		&i.LoginType,
		&i.LinkedID,
		&i.OAuthAccessToken,
		&i.OAuthRefreshToken,
		&i.OAuthExpiry,
	)
	return i, err
}
|
|
|
|
// updateUserLinkedID rewrites the external identity (linked_id) of an
// existing (user_id, login_type) link.
const updateUserLinkedID = `-- name: UpdateUserLinkedID :one
UPDATE
	user_links
SET
	linked_id = $1
WHERE
	user_id = $2 AND login_type = $3 RETURNING user_id, login_type, linked_id, oauth_access_token, oauth_refresh_token, oauth_expiry
`

// UpdateUserLinkedIDParams carries the new LinkedID plus the composite
// key (UserID, LoginType) identifying the link to update.
type UpdateUserLinkedIDParams struct {
	LinkedID  string    `db:"linked_id" json:"linked_id"`
	UserID    uuid.UUID `db:"user_id" json:"user_id"`
	LoginType LoginType `db:"login_type" json:"login_type"`
}

// UpdateUserLinkedID updates a link's linked_id and returns the updated row.
func (q *sqlQuerier) UpdateUserLinkedID(ctx context.Context, arg UpdateUserLinkedIDParams) (UserLink, error) {
	row := q.db.QueryRowContext(ctx, updateUserLinkedID, arg.LinkedID, arg.UserID, arg.LoginType)
	var i UserLink
	// Scan order must match the RETURNING column list exactly.
	err := row.Scan(
		&i.UserID,
		&i.LoginType,
		&i.LinkedID,
		&i.OAuthAccessToken,
		&i.OAuthRefreshToken,
		&i.OAuthExpiry,
	)
	return i, err
}
|
|
|
|
// getActiveUserCount counts non-deleted users whose status is 'active'.
const getActiveUserCount = `-- name: GetActiveUserCount :one
SELECT
	COUNT(*)
FROM
	users
WHERE
	status = 'active'::user_status AND deleted = false
`

// GetActiveUserCount returns the number of active, non-deleted users.
func (q *sqlQuerier) GetActiveUserCount(ctx context.Context) (int64, error) {
	row := q.db.QueryRowContext(ctx, getActiveUserCount)
	var count int64
	err := row.Scan(&count)
	return count, err
}
|
|
|
|
// getAuthorizationUserRoles fetches a user's id/username/status together
// with the full set of roles (site + per-organization, including the
// implicit 'member' and 'organization-member:<org>' roles) and the group
// IDs the user belongs to.
const getAuthorizationUserRoles = `-- name: GetAuthorizationUserRoles :one
SELECT
	-- username is returned just to help for logging purposes
	-- status is used to enforce 'suspended' users, as all roles are ignored
	-- when suspended.
	id, username, status,
	-- All user roles, including their org roles.
	array_cat(
		-- All users are members
		array_append(users.rbac_roles, 'member'),
		(
			SELECT
				array_agg(org_roles)
			FROM
				organization_members,
				-- All org_members get the org-member role for their orgs
				unnest(
					array_append(roles, 'organization-member:' || organization_members.organization_id::text)
				) AS org_roles
			WHERE
				user_id = users.id
		)
	) :: text[] AS roles,
	-- All groups the user is in.
	(
		SELECT
			array_agg(
				group_members.group_id :: text
			)
		FROM
			group_members
		WHERE
			user_id = users.id
	) :: text[] AS groups
FROM
	users
WHERE
	id = $1
`

// GetAuthorizationUserRolesRow is the projection returned by
// GetAuthorizationUserRoles; Roles and Groups are Postgres text arrays.
type GetAuthorizationUserRolesRow struct {
	ID       uuid.UUID  `db:"id" json:"id"`
	Username string     `db:"username" json:"username"`
	Status   UserStatus `db:"status" json:"status"`
	Roles    []string   `db:"roles" json:"roles"`
	Groups   []string   `db:"groups" json:"groups"`
}

// This function returns roles for authorization purposes. Implied member roles
// are included.
func (q *sqlQuerier) GetAuthorizationUserRoles(ctx context.Context, userID uuid.UUID) (GetAuthorizationUserRolesRow, error) {
	row := q.db.QueryRowContext(ctx, getAuthorizationUserRoles, userID)
	var i GetAuthorizationUserRolesRow
	err := row.Scan(
		&i.ID,
		&i.Username,
		&i.Status,
		// pq.Array decodes the Postgres text[] columns into []string.
		pq.Array(&i.Roles),
		pq.Array(&i.Groups),
	)
	return i, err
}
|
|
|
|
// getFilteredUserCount counts non-deleted users matching the optional
// search/status/rbac-role filters. Each filter is skipped (ELSE true)
// when its parameter is empty, so all filters are optional.
const getFilteredUserCount = `-- name: GetFilteredUserCount :one
SELECT
	COUNT(*)
FROM
	users
WHERE
	users.deleted = false
	-- Start filters
	-- Filter by name, email or username
	AND CASE
		WHEN $1 :: text != '' THEN (
			email ILIKE concat('%', $1, '%')
			OR username ILIKE concat('%', $1, '%')
		)
		ELSE true
	END
	-- Filter by status
	AND CASE
		-- @status needs to be a text because it can be empty, If it was
		-- user_status enum, it would not.
		WHEN cardinality($2 :: user_status[]) > 0 THEN
			status = ANY($2 :: user_status[])
		ELSE true
	END
	-- Filter by rbac_roles
	AND CASE
		-- @rbac_role allows filtering by rbac roles. If 'member' is included, show everyone, as everyone is a member.
		WHEN cardinality($3 :: text[]) > 0 AND 'member' != ANY($3 :: text[])
		THEN rbac_roles && $3 :: text[]
		ELSE true
	END
	-- Authorize Filter clause will be injected below in GetAuthorizedUserCount
	-- @authorize_filter
`

// GetFilteredUserCountParams carries the optional filters; empty values
// disable the corresponding filter.
type GetFilteredUserCountParams struct {
	Search   string       `db:"search" json:"search"`
	Status   []UserStatus `db:"status" json:"status"`
	RbacRole []string     `db:"rbac_role" json:"rbac_role"`
}

// This will never count deleted users.
func (q *sqlQuerier) GetFilteredUserCount(ctx context.Context, arg GetFilteredUserCountParams) (int64, error) {
	row := q.db.QueryRowContext(ctx, getFilteredUserCount, arg.Search, pq.Array(arg.Status), pq.Array(arg.RbacRole))
	var count int64
	err := row.Scan(&count)
	return count, err
}
|
|
|
|
// getUserByEmailOrUsername looks up a single non-deleted user by a
// case-insensitive match on either username or email.
const getUserByEmailOrUsername = `-- name: GetUserByEmailOrUsername :one
SELECT
	id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at
FROM
	users
WHERE
	(LOWER(username) = LOWER($1) OR LOWER(email) = LOWER($2)) AND
	deleted = false
LIMIT
	1
`

// GetUserByEmailOrUsernameParams carries the two lookup keys; either may
// match (the query ORs them).
type GetUserByEmailOrUsernameParams struct {
	Username string `db:"username" json:"username"`
	Email    string `db:"email" json:"email"`
}

// GetUserByEmailOrUsername returns the first non-deleted user whose
// username or email matches, case-insensitively.
func (q *sqlQuerier) GetUserByEmailOrUsername(ctx context.Context, arg GetUserByEmailOrUsernameParams) (User, error) {
	row := q.db.QueryRowContext(ctx, getUserByEmailOrUsername, arg.Username, arg.Email)
	var i User
	// Scan order must match the SELECT column list exactly.
	err := row.Scan(
		&i.ID,
		&i.Email,
		&i.Username,
		&i.HashedPassword,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Status,
		&i.RBACRoles,
		&i.LoginType,
		&i.AvatarURL,
		&i.Deleted,
		&i.LastSeenAt,
	)
	return i, err
}
|
|
|
|
// getUserByID fetches a user by primary key. Note: this does NOT filter
// out deleted users.
const getUserByID = `-- name: GetUserByID :one
SELECT
	id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at
FROM
	users
WHERE
	id = $1
LIMIT
	1
`

// GetUserByID returns the user with the given ID (deleted or not).
func (q *sqlQuerier) GetUserByID(ctx context.Context, id uuid.UUID) (User, error) {
	row := q.db.QueryRowContext(ctx, getUserByID, id)
	var i User
	// Scan order must match the SELECT column list exactly.
	err := row.Scan(
		&i.ID,
		&i.Email,
		&i.Username,
		&i.HashedPassword,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Status,
		&i.RBACRoles,
		&i.LoginType,
		&i.AvatarURL,
		&i.Deleted,
		&i.LastSeenAt,
	)
	return i, err
}
|
|
|
|
// getUserCount counts all non-deleted users.
const getUserCount = `-- name: GetUserCount :one
SELECT
	COUNT(*)
FROM
	users
WHERE
	deleted = false
`

// GetUserCount returns the total number of non-deleted users.
func (q *sqlQuerier) GetUserCount(ctx context.Context) (int64, error) {
	row := q.db.QueryRowContext(ctx, getUserCount)
	var count int64
	err := row.Scan(&count)
	return count, err
}
|
|
|
|
// getUsers pages through non-deleted users with optional cursor
// (after_id), search, status, and rbac-role filters. COUNT(*) OVER()
// attaches the total filtered count to every row so one query serves
// both the page and the pagination total. Ordering by (created_at, id)
// keeps pagination deterministic even for identical timestamps.
const getUsers = `-- name: GetUsers :many
SELECT
	id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, COUNT(*) OVER() AS count
FROM
	users
WHERE
	users.deleted = false
	AND CASE
		-- This allows using the last element on a page as effectively a cursor.
		-- This is an important option for scripts that need to paginate without
		-- duplicating or missing data.
		WHEN $1 :: uuid != '00000000-0000-0000-0000-000000000000'::uuid THEN (
			-- The pagination cursor is the last ID of the previous page.
			-- The query is ordered by the created_at field, so select all
			-- rows after the cursor.
			(created_at, id) > (
				SELECT
					created_at, id
				FROM
					users
				WHERE
					id = $1
			)
		)
		ELSE true
	END
	-- Start filters
	-- Filter by name, email or username
	AND CASE
		WHEN $2 :: text != '' THEN (
			email ILIKE concat('%', $2, '%')
			OR username ILIKE concat('%', $2, '%')
		)
		ELSE true
	END
	-- Filter by status
	AND CASE
		-- @status needs to be a text because it can be empty, If it was
		-- user_status enum, it would not.
		WHEN cardinality($3 :: user_status[]) > 0 THEN
			status = ANY($3 :: user_status[])
		ELSE true
	END
	-- Filter by rbac_roles
	AND CASE
		-- @rbac_role allows filtering by rbac roles. If 'member' is included, show everyone, as
		-- everyone is a member.
		WHEN cardinality($4 :: text[]) > 0 AND 'member' != ANY($4 :: text[]) THEN
			rbac_roles && $4 :: text[]
		ELSE true
	END
	-- End of filters
ORDER BY
	-- Deterministic and consistent ordering of all users, even if they share
	-- a timestamp. This is to ensure consistent pagination.
	(created_at, id) ASC OFFSET $5
LIMIT
	-- A null limit means "no limit", so 0 means return all
	NULLIF($6 :: int, 0)
`

// GetUsersParams carries the pagination cursor, optional filters, and
// offset/limit. AfterID equal to the zero UUID disables the cursor;
// LimitOpt of 0 means no limit.
type GetUsersParams struct {
	AfterID   uuid.UUID    `db:"after_id" json:"after_id"`
	Search    string       `db:"search" json:"search"`
	Status    []UserStatus `db:"status" json:"status"`
	RbacRole  []string     `db:"rbac_role" json:"rbac_role"`
	OffsetOpt int32        `db:"offset_opt" json:"offset_opt"`
	LimitOpt  int32        `db:"limit_opt" json:"limit_opt"`
}

// GetUsersRow is a User plus Count, the total number of rows matching
// the filters (from the COUNT(*) OVER() window).
type GetUsersRow struct {
	ID             uuid.UUID      `db:"id" json:"id"`
	Email          string         `db:"email" json:"email"`
	Username       string         `db:"username" json:"username"`
	HashedPassword []byte         `db:"hashed_password" json:"hashed_password"`
	CreatedAt      time.Time      `db:"created_at" json:"created_at"`
	UpdatedAt      time.Time      `db:"updated_at" json:"updated_at"`
	Status         UserStatus     `db:"status" json:"status"`
	RBACRoles      pq.StringArray `db:"rbac_roles" json:"rbac_roles"`
	LoginType      LoginType      `db:"login_type" json:"login_type"`
	AvatarURL      sql.NullString `db:"avatar_url" json:"avatar_url"`
	Deleted        bool           `db:"deleted" json:"deleted"`
	LastSeenAt     time.Time      `db:"last_seen_at" json:"last_seen_at"`
	Count          int64          `db:"count" json:"count"`
}

// This will never return deleted users.
func (q *sqlQuerier) GetUsers(ctx context.Context, arg GetUsersParams) ([]GetUsersRow, error) {
	rows, err := q.db.QueryContext(ctx, getUsers,
		arg.AfterID,
		arg.Search,
		pq.Array(arg.Status),
		pq.Array(arg.RbacRole),
		arg.OffsetOpt,
		arg.LimitOpt,
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetUsersRow
	for rows.Next() {
		var i GetUsersRow
		if err := rows.Scan(
			&i.ID,
			&i.Email,
			&i.Username,
			&i.HashedPassword,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.Status,
			&i.RBACRoles,
			&i.LoginType,
			&i.AvatarURL,
			&i.Deleted,
			&i.LastSeenAt,
			&i.Count,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
// getUsersByIDs fetches users whose id is in the given uuid array.
// Deliberately does not filter on deleted (see comment on the method).
const getUsersByIDs = `-- name: GetUsersByIDs :many
SELECT id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at FROM users WHERE id = ANY($1 :: uuid [ ])
`

// This shouldn't check for deleted, because it's frequently used
// to look up references to actions. eg. a user could build a workspace
// for another user, then be deleted... we still want them to appear!
func (q *sqlQuerier) GetUsersByIDs(ctx context.Context, ids []uuid.UUID) ([]User, error) {
	rows, err := q.db.QueryContext(ctx, getUsersByIDs, pq.Array(ids))
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []User
	for rows.Next() {
		var i User
		if err := rows.Scan(
			&i.ID,
			&i.Email,
			&i.Username,
			&i.HashedPassword,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.Status,
			&i.RBACRoles,
			&i.LoginType,
			&i.AvatarURL,
			&i.Deleted,
			&i.LastSeenAt,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
// insertUser creates a users row; status/avatar_url/deleted/last_seen_at
// take their database defaults and are returned via RETURNING.
const insertUser = `-- name: InsertUser :one
INSERT INTO
	users (
		id,
		email,
		username,
		hashed_password,
		created_at,
		updated_at,
		rbac_roles,
		login_type
	)
VALUES
	($1, $2, $3, $4, $5, $6, $7, $8) RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at
`

// InsertUserParams carries the column values for InsertUser, in the same
// order as the SQL placeholders $1..$8.
type InsertUserParams struct {
	ID             uuid.UUID      `db:"id" json:"id"`
	Email          string         `db:"email" json:"email"`
	Username       string         `db:"username" json:"username"`
	HashedPassword []byte         `db:"hashed_password" json:"hashed_password"`
	CreatedAt      time.Time      `db:"created_at" json:"created_at"`
	UpdatedAt      time.Time      `db:"updated_at" json:"updated_at"`
	RBACRoles      pq.StringArray `db:"rbac_roles" json:"rbac_roles"`
	LoginType      LoginType      `db:"login_type" json:"login_type"`
}

// InsertUser inserts a new user and returns the full inserted row.
func (q *sqlQuerier) InsertUser(ctx context.Context, arg InsertUserParams) (User, error) {
	row := q.db.QueryRowContext(ctx, insertUser,
		arg.ID,
		arg.Email,
		arg.Username,
		arg.HashedPassword,
		arg.CreatedAt,
		arg.UpdatedAt,
		arg.RBACRoles,
		arg.LoginType,
	)
	var i User
	// Scan order must match the RETURNING column list exactly.
	err := row.Scan(
		&i.ID,
		&i.Email,
		&i.Username,
		&i.HashedPassword,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Status,
		&i.RBACRoles,
		&i.LoginType,
		&i.AvatarURL,
		&i.Deleted,
		&i.LastSeenAt,
	)
	return i, err
}
|
|
|
|
// updateUserDeletedByID sets the soft-delete flag on a user.
const updateUserDeletedByID = `-- name: UpdateUserDeletedByID :exec
UPDATE
	users
SET
	deleted = $2
WHERE
	id = $1
`

// UpdateUserDeletedByIDParams identifies the user and the new deleted flag.
type UpdateUserDeletedByIDParams struct {
	ID      uuid.UUID `db:"id" json:"id"`
	Deleted bool      `db:"deleted" json:"deleted"`
}

// UpdateUserDeletedByID soft-deletes (or restores) a user by ID.
func (q *sqlQuerier) UpdateUserDeletedByID(ctx context.Context, arg UpdateUserDeletedByIDParams) error {
	_, err := q.db.ExecContext(ctx, updateUserDeletedByID, arg.ID, arg.Deleted)
	return err
}
|
|
|
|
// updateUserHashedPassword replaces a user's password hash.
const updateUserHashedPassword = `-- name: UpdateUserHashedPassword :exec
UPDATE
	users
SET
	hashed_password = $2
WHERE
	id = $1
`

// UpdateUserHashedPasswordParams identifies the user and the new hash.
type UpdateUserHashedPasswordParams struct {
	ID             uuid.UUID `db:"id" json:"id"`
	HashedPassword []byte    `db:"hashed_password" json:"hashed_password"`
}

// UpdateUserHashedPassword stores a new password hash for the user.
func (q *sqlQuerier) UpdateUserHashedPassword(ctx context.Context, arg UpdateUserHashedPasswordParams) error {
	_, err := q.db.ExecContext(ctx, updateUserHashedPassword, arg.ID, arg.HashedPassword)
	return err
}
|
|
|
|
// updateUserLastSeenAt touches a user's last_seen_at/updated_at stamps.
const updateUserLastSeenAt = `-- name: UpdateUserLastSeenAt :one
UPDATE
	users
SET
	last_seen_at = $2,
	updated_at = $3
WHERE
	id = $1 RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at
`

// UpdateUserLastSeenAtParams identifies the user and the timestamps to set.
type UpdateUserLastSeenAtParams struct {
	ID         uuid.UUID `db:"id" json:"id"`
	LastSeenAt time.Time `db:"last_seen_at" json:"last_seen_at"`
	UpdatedAt  time.Time `db:"updated_at" json:"updated_at"`
}

// UpdateUserLastSeenAt updates the activity timestamps and returns the
// updated user row.
func (q *sqlQuerier) UpdateUserLastSeenAt(ctx context.Context, arg UpdateUserLastSeenAtParams) (User, error) {
	row := q.db.QueryRowContext(ctx, updateUserLastSeenAt, arg.ID, arg.LastSeenAt, arg.UpdatedAt)
	var i User
	// Scan order must match the RETURNING column list exactly.
	err := row.Scan(
		&i.ID,
		&i.Email,
		&i.Username,
		&i.HashedPassword,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Status,
		&i.RBACRoles,
		&i.LoginType,
		&i.AvatarURL,
		&i.Deleted,
		&i.LastSeenAt,
	)
	return i, err
}
|
|
|
|
// updateUserProfile updates the user-editable profile columns.
const updateUserProfile = `-- name: UpdateUserProfile :one
UPDATE
	users
SET
	email = $2,
	username = $3,
	avatar_url = $4,
	updated_at = $5
WHERE
	id = $1 RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at
`

// UpdateUserProfileParams identifies the user and carries the new
// profile values; AvatarURL is nullable.
type UpdateUserProfileParams struct {
	ID        uuid.UUID      `db:"id" json:"id"`
	Email     string         `db:"email" json:"email"`
	Username  string         `db:"username" json:"username"`
	AvatarURL sql.NullString `db:"avatar_url" json:"avatar_url"`
	UpdatedAt time.Time      `db:"updated_at" json:"updated_at"`
}

// UpdateUserProfile updates a user's profile and returns the updated row.
func (q *sqlQuerier) UpdateUserProfile(ctx context.Context, arg UpdateUserProfileParams) (User, error) {
	row := q.db.QueryRowContext(ctx, updateUserProfile,
		arg.ID,
		arg.Email,
		arg.Username,
		arg.AvatarURL,
		arg.UpdatedAt,
	)
	var i User
	// Scan order must match the RETURNING column list exactly.
	err := row.Scan(
		&i.ID,
		&i.Email,
		&i.Username,
		&i.HashedPassword,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Status,
		&i.RBACRoles,
		&i.LoginType,
		&i.AvatarURL,
		&i.Deleted,
		&i.LastSeenAt,
	)
	return i, err
}
|
|
|
|
// updateUserRoles replaces a user's site-wide rbac_roles, deduplicating
// the granted roles in SQL.
const updateUserRoles = `-- name: UpdateUserRoles :one
UPDATE
	users
SET
	-- Remove all duplicates from the roles.
	rbac_roles = ARRAY(SELECT DISTINCT UNNEST($1 :: text[]))
WHERE
	id = $2
RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at
`

// UpdateUserRolesParams carries the replacement role set and the user ID.
type UpdateUserRolesParams struct {
	GrantedRoles []string  `db:"granted_roles" json:"granted_roles"`
	ID           uuid.UUID `db:"id" json:"id"`
}

// UpdateUserRoles replaces the user's roles and returns the updated row.
func (q *sqlQuerier) UpdateUserRoles(ctx context.Context, arg UpdateUserRolesParams) (User, error) {
	row := q.db.QueryRowContext(ctx, updateUserRoles, pq.Array(arg.GrantedRoles), arg.ID)
	var i User
	// Scan order must match the RETURNING column list exactly.
	err := row.Scan(
		&i.ID,
		&i.Email,
		&i.Username,
		&i.HashedPassword,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Status,
		&i.RBACRoles,
		&i.LoginType,
		&i.AvatarURL,
		&i.Deleted,
		&i.LastSeenAt,
	)
	return i, err
}
|
|
|
|
// updateUserStatus changes a user's status (e.g. active/suspended).
const updateUserStatus = `-- name: UpdateUserStatus :one
UPDATE
	users
SET
	status = $2,
	updated_at = $3
WHERE
	id = $1 RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at
`

// UpdateUserStatusParams identifies the user and carries the new status.
type UpdateUserStatusParams struct {
	ID        uuid.UUID  `db:"id" json:"id"`
	Status    UserStatus `db:"status" json:"status"`
	UpdatedAt time.Time  `db:"updated_at" json:"updated_at"`
}

// UpdateUserStatus updates a user's status and returns the updated row.
func (q *sqlQuerier) UpdateUserStatus(ctx context.Context, arg UpdateUserStatusParams) (User, error) {
	row := q.db.QueryRowContext(ctx, updateUserStatus, arg.ID, arg.Status, arg.UpdatedAt)
	var i User
	// Scan order must match the RETURNING column list exactly.
	err := row.Scan(
		&i.ID,
		&i.Email,
		&i.Username,
		&i.HashedPassword,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Status,
		&i.RBACRoles,
		&i.LoginType,
		&i.AvatarURL,
		&i.Deleted,
		&i.LastSeenAt,
	)
	return i, err
}
|
|
|
|
// deleteOldWorkspaceAgentStartupLogs removes startup logs belonging to
// agents that last connected more than 7 days ago.
const deleteOldWorkspaceAgentStartupLogs = `-- name: DeleteOldWorkspaceAgentStartupLogs :exec
DELETE FROM workspace_agent_startup_logs WHERE agent_id IN
	(SELECT id FROM workspace_agents WHERE last_connected_at IS NOT NULL
		AND last_connected_at < NOW() - INTERVAL '7 day')
`

// If an agent hasn't connected in the last 7 days, we purge it's logs.
// Logs can take up a lot of space, so it's important we clean up frequently.
func (q *sqlQuerier) DeleteOldWorkspaceAgentStartupLogs(ctx context.Context) error {
	_, err := q.db.ExecContext(ctx, deleteOldWorkspaceAgentStartupLogs)
	return err
}
|
|
|
|
// getWorkspaceAgentByAuthToken fetches the most recently created agent
// with the given auth token (ORDER BY created_at DESC + :one takes the
// newest match).
const getWorkspaceAgentByAuthToken = `-- name: GetWorkspaceAgentByAuthToken :one
SELECT
	id, created_at, updated_at, name, first_connected_at, last_connected_at, disconnected_at, resource_id, auth_token, auth_instance_id, architecture, environment_variables, operating_system, startup_script, instance_metadata, resource_metadata, directory, version, last_connected_replica_id, connection_timeout_seconds, troubleshooting_url, motd_file, lifecycle_state, login_before_ready, startup_script_timeout_seconds, expanded_directory, shutdown_script, shutdown_script_timeout_seconds, startup_logs_length, startup_logs_overflowed, subsystem
FROM
	workspace_agents
WHERE
	auth_token = $1
ORDER BY
	created_at DESC
`

// GetWorkspaceAgentByAuthToken returns the newest agent authenticated by
// the given token.
func (q *sqlQuerier) GetWorkspaceAgentByAuthToken(ctx context.Context, authToken uuid.UUID) (WorkspaceAgent, error) {
	row := q.db.QueryRowContext(ctx, getWorkspaceAgentByAuthToken, authToken)
	var i WorkspaceAgent
	// Scan order must match the SELECT column list exactly.
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Name,
		&i.FirstConnectedAt,
		&i.LastConnectedAt,
		&i.DisconnectedAt,
		&i.ResourceID,
		&i.AuthToken,
		&i.AuthInstanceID,
		&i.Architecture,
		&i.EnvironmentVariables,
		&i.OperatingSystem,
		&i.StartupScript,
		&i.InstanceMetadata,
		&i.ResourceMetadata,
		&i.Directory,
		&i.Version,
		&i.LastConnectedReplicaID,
		&i.ConnectionTimeoutSeconds,
		&i.TroubleshootingURL,
		&i.MOTDFile,
		&i.LifecycleState,
		&i.LoginBeforeReady,
		&i.StartupScriptTimeoutSeconds,
		&i.ExpandedDirectory,
		&i.ShutdownScript,
		&i.ShutdownScriptTimeoutSeconds,
		&i.StartupLogsLength,
		&i.StartupLogsOverflowed,
		&i.Subsystem,
	)
	return i, err
}
|
|
|
|
// getWorkspaceAgentByID fetches a single workspace agent by primary key.
const getWorkspaceAgentByID = `-- name: GetWorkspaceAgentByID :one
SELECT
	id, created_at, updated_at, name, first_connected_at, last_connected_at, disconnected_at, resource_id, auth_token, auth_instance_id, architecture, environment_variables, operating_system, startup_script, instance_metadata, resource_metadata, directory, version, last_connected_replica_id, connection_timeout_seconds, troubleshooting_url, motd_file, lifecycle_state, login_before_ready, startup_script_timeout_seconds, expanded_directory, shutdown_script, shutdown_script_timeout_seconds, startup_logs_length, startup_logs_overflowed, subsystem
FROM
	workspace_agents
WHERE
	id = $1
`

// GetWorkspaceAgentByID returns the workspace agent with the given ID.
func (q *sqlQuerier) GetWorkspaceAgentByID(ctx context.Context, id uuid.UUID) (WorkspaceAgent, error) {
	row := q.db.QueryRowContext(ctx, getWorkspaceAgentByID, id)
	var i WorkspaceAgent
	// Scan order must match the SELECT column list exactly.
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Name,
		&i.FirstConnectedAt,
		&i.LastConnectedAt,
		&i.DisconnectedAt,
		&i.ResourceID,
		&i.AuthToken,
		&i.AuthInstanceID,
		&i.Architecture,
		&i.EnvironmentVariables,
		&i.OperatingSystem,
		&i.StartupScript,
		&i.InstanceMetadata,
		&i.ResourceMetadata,
		&i.Directory,
		&i.Version,
		&i.LastConnectedReplicaID,
		&i.ConnectionTimeoutSeconds,
		&i.TroubleshootingURL,
		&i.MOTDFile,
		&i.LifecycleState,
		&i.LoginBeforeReady,
		&i.StartupScriptTimeoutSeconds,
		&i.ExpandedDirectory,
		&i.ShutdownScript,
		&i.ShutdownScriptTimeoutSeconds,
		&i.StartupLogsLength,
		&i.StartupLogsOverflowed,
		&i.Subsystem,
	)
	return i, err
}
|
|
|
|
// getWorkspaceAgentByInstanceID fetches the most recently created agent
// registered with the given cloud instance identity (ORDER BY created_at
// DESC + :one takes the newest match).
const getWorkspaceAgentByInstanceID = `-- name: GetWorkspaceAgentByInstanceID :one
SELECT
	id, created_at, updated_at, name, first_connected_at, last_connected_at, disconnected_at, resource_id, auth_token, auth_instance_id, architecture, environment_variables, operating_system, startup_script, instance_metadata, resource_metadata, directory, version, last_connected_replica_id, connection_timeout_seconds, troubleshooting_url, motd_file, lifecycle_state, login_before_ready, startup_script_timeout_seconds, expanded_directory, shutdown_script, shutdown_script_timeout_seconds, startup_logs_length, startup_logs_overflowed, subsystem
FROM
	workspace_agents
WHERE
	auth_instance_id = $1 :: TEXT
ORDER BY
	created_at DESC
`

// GetWorkspaceAgentByInstanceID returns the newest agent for the given
// auth instance ID.
func (q *sqlQuerier) GetWorkspaceAgentByInstanceID(ctx context.Context, authInstanceID string) (WorkspaceAgent, error) {
	row := q.db.QueryRowContext(ctx, getWorkspaceAgentByInstanceID, authInstanceID)
	var i WorkspaceAgent
	// Scan order must match the SELECT column list exactly.
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Name,
		&i.FirstConnectedAt,
		&i.LastConnectedAt,
		&i.DisconnectedAt,
		&i.ResourceID,
		&i.AuthToken,
		&i.AuthInstanceID,
		&i.Architecture,
		&i.EnvironmentVariables,
		&i.OperatingSystem,
		&i.StartupScript,
		&i.InstanceMetadata,
		&i.ResourceMetadata,
		&i.Directory,
		&i.Version,
		&i.LastConnectedReplicaID,
		&i.ConnectionTimeoutSeconds,
		&i.TroubleshootingURL,
		&i.MOTDFile,
		&i.LifecycleState,
		&i.LoginBeforeReady,
		&i.StartupScriptTimeoutSeconds,
		&i.ExpandedDirectory,
		&i.ShutdownScript,
		&i.ShutdownScriptTimeoutSeconds,
		&i.StartupLogsLength,
		&i.StartupLogsOverflowed,
		&i.Subsystem,
	)
	return i, err
}
|
|
|
|
// getWorkspaceAgentMetadata fetches all metadata entries for one agent.
const getWorkspaceAgentMetadata = `-- name: GetWorkspaceAgentMetadata :many
SELECT
	workspace_agent_id, display_name, key, script, value, error, timeout, interval, collected_at
FROM
	workspace_agent_metadata
WHERE
	workspace_agent_id = $1
`

// GetWorkspaceAgentMetadata returns every metadata row belonging to the
// given workspace agent.
func (q *sqlQuerier) GetWorkspaceAgentMetadata(ctx context.Context, workspaceAgentID uuid.UUID) ([]WorkspaceAgentMetadatum, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaceAgentMetadata, workspaceAgentID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceAgentMetadatum
	for rows.Next() {
		var i WorkspaceAgentMetadatum
		if err := rows.Scan(
			&i.WorkspaceAgentID,
			&i.DisplayName,
			&i.Key,
			&i.Script,
			&i.Value,
			&i.Error,
			&i.Timeout,
			&i.Interval,
			&i.CollectedAt,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
// getWorkspaceAgentStartupLogsAfter fetches an agent's startup logs with
// id greater than a cursor, ordered ascending — used to tail/page logs.
const getWorkspaceAgentStartupLogsAfter = `-- name: GetWorkspaceAgentStartupLogsAfter :many
SELECT
	agent_id, created_at, output, id, level
FROM
	workspace_agent_startup_logs
WHERE
	agent_id = $1
	AND (
		id > $2
	) ORDER BY id ASC
`

// GetWorkspaceAgentStartupLogsAfterParams carries the agent ID and the
// log-ID cursor ($2); despite its name, CreatedAfter compares against
// the log row's id column, not created_at.
type GetWorkspaceAgentStartupLogsAfterParams struct {
	AgentID      uuid.UUID `db:"agent_id" json:"agent_id"`
	CreatedAfter int64     `db:"created_after" json:"created_after"`
}

// GetWorkspaceAgentStartupLogsAfter returns the agent's startup log rows
// whose id is strictly greater than the cursor, in ascending id order.
func (q *sqlQuerier) GetWorkspaceAgentStartupLogsAfter(ctx context.Context, arg GetWorkspaceAgentStartupLogsAfterParams) ([]WorkspaceAgentStartupLog, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaceAgentStartupLogsAfter, arg.AgentID, arg.CreatedAfter)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceAgentStartupLog
	for rows.Next() {
		var i WorkspaceAgentStartupLog
		if err := rows.Scan(
			&i.AgentID,
			&i.CreatedAt,
			&i.Output,
			&i.ID,
			&i.Level,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
// getWorkspaceAgentsByResourceIDs fetches all agents attached to any of
// the given workspace resources.
const getWorkspaceAgentsByResourceIDs = `-- name: GetWorkspaceAgentsByResourceIDs :many
SELECT
	id, created_at, updated_at, name, first_connected_at, last_connected_at, disconnected_at, resource_id, auth_token, auth_instance_id, architecture, environment_variables, operating_system, startup_script, instance_metadata, resource_metadata, directory, version, last_connected_replica_id, connection_timeout_seconds, troubleshooting_url, motd_file, lifecycle_state, login_before_ready, startup_script_timeout_seconds, expanded_directory, shutdown_script, shutdown_script_timeout_seconds, startup_logs_length, startup_logs_overflowed, subsystem
FROM
	workspace_agents
WHERE
	resource_id = ANY($1 :: uuid [ ])
`

// GetWorkspaceAgentsByResourceIDs returns all agents whose resource_id is
// in the given set.
func (q *sqlQuerier) GetWorkspaceAgentsByResourceIDs(ctx context.Context, ids []uuid.UUID) ([]WorkspaceAgent, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaceAgentsByResourceIDs, pq.Array(ids))
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceAgent
	for rows.Next() {
		var i WorkspaceAgent
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.Name,
			&i.FirstConnectedAt,
			&i.LastConnectedAt,
			&i.DisconnectedAt,
			&i.ResourceID,
			&i.AuthToken,
			&i.AuthInstanceID,
			&i.Architecture,
			&i.EnvironmentVariables,
			&i.OperatingSystem,
			&i.StartupScript,
			&i.InstanceMetadata,
			&i.ResourceMetadata,
			&i.Directory,
			&i.Version,
			&i.LastConnectedReplicaID,
			&i.ConnectionTimeoutSeconds,
			&i.TroubleshootingURL,
			&i.MOTDFile,
			&i.LifecycleState,
			&i.LoginBeforeReady,
			&i.StartupScriptTimeoutSeconds,
			&i.ExpandedDirectory,
			&i.ShutdownScript,
			&i.ShutdownScriptTimeoutSeconds,
			&i.StartupLogsLength,
			&i.StartupLogsOverflowed,
			&i.Subsystem,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
// getWorkspaceAgentsCreatedAfter fetches all agents created strictly
// after the given timestamp.
const getWorkspaceAgentsCreatedAfter = `-- name: GetWorkspaceAgentsCreatedAfter :many
SELECT id, created_at, updated_at, name, first_connected_at, last_connected_at, disconnected_at, resource_id, auth_token, auth_instance_id, architecture, environment_variables, operating_system, startup_script, instance_metadata, resource_metadata, directory, version, last_connected_replica_id, connection_timeout_seconds, troubleshooting_url, motd_file, lifecycle_state, login_before_ready, startup_script_timeout_seconds, expanded_directory, shutdown_script, shutdown_script_timeout_seconds, startup_logs_length, startup_logs_overflowed, subsystem FROM workspace_agents WHERE created_at > $1
`

// GetWorkspaceAgentsCreatedAfter returns agents with created_at later
// than the given time.
func (q *sqlQuerier) GetWorkspaceAgentsCreatedAfter(ctx context.Context, createdAt time.Time) ([]WorkspaceAgent, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaceAgentsCreatedAfter, createdAt)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceAgent
	for rows.Next() {
		var i WorkspaceAgent
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.Name,
			&i.FirstConnectedAt,
			&i.LastConnectedAt,
			&i.DisconnectedAt,
			&i.ResourceID,
			&i.AuthToken,
			&i.AuthInstanceID,
			&i.Architecture,
			&i.EnvironmentVariables,
			&i.OperatingSystem,
			&i.StartupScript,
			&i.InstanceMetadata,
			&i.ResourceMetadata,
			&i.Directory,
			&i.Version,
			&i.LastConnectedReplicaID,
			&i.ConnectionTimeoutSeconds,
			&i.TroubleshootingURL,
			&i.MOTDFile,
			&i.LifecycleState,
			&i.LoginBeforeReady,
			&i.StartupScriptTimeoutSeconds,
			&i.ExpandedDirectory,
			&i.ShutdownScript,
			&i.ShutdownScriptTimeoutSeconds,
			&i.StartupLogsLength,
			&i.StartupLogsOverflowed,
			&i.Subsystem,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getWorkspaceAgentsInLatestBuildByWorkspaceID = `-- name: GetWorkspaceAgentsInLatestBuildByWorkspaceID :many
SELECT
	workspace_agents.id, workspace_agents.created_at, workspace_agents.updated_at, workspace_agents.name, workspace_agents.first_connected_at, workspace_agents.last_connected_at, workspace_agents.disconnected_at, workspace_agents.resource_id, workspace_agents.auth_token, workspace_agents.auth_instance_id, workspace_agents.architecture, workspace_agents.environment_variables, workspace_agents.operating_system, workspace_agents.startup_script, workspace_agents.instance_metadata, workspace_agents.resource_metadata, workspace_agents.directory, workspace_agents.version, workspace_agents.last_connected_replica_id, workspace_agents.connection_timeout_seconds, workspace_agents.troubleshooting_url, workspace_agents.motd_file, workspace_agents.lifecycle_state, workspace_agents.login_before_ready, workspace_agents.startup_script_timeout_seconds, workspace_agents.expanded_directory, workspace_agents.shutdown_script, workspace_agents.shutdown_script_timeout_seconds, workspace_agents.startup_logs_length, workspace_agents.startup_logs_overflowed, workspace_agents.subsystem
FROM
	workspace_agents
JOIN
	workspace_resources ON workspace_agents.resource_id = workspace_resources.id
JOIN
	workspace_builds ON workspace_resources.job_id = workspace_builds.job_id
WHERE
	workspace_builds.workspace_id = $1 :: uuid AND
	workspace_builds.build_number = (
		SELECT
			MAX(build_number)
		FROM
			workspace_builds AS wb
		WHERE
			wb.workspace_id = $1 :: uuid
	)
`

// GetWorkspaceAgentsInLatestBuildByWorkspaceID returns the agents attached to
// the resources of the workspace's highest-numbered (latest) build.
func (q *sqlQuerier) GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) ([]WorkspaceAgent, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaceAgentsInLatestBuildByWorkspaceID, workspaceID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceAgent
	for rows.Next() {
		var i WorkspaceAgent
		// Scan order must match the column list in the query above.
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.Name,
			&i.FirstConnectedAt,
			&i.LastConnectedAt,
			&i.DisconnectedAt,
			&i.ResourceID,
			&i.AuthToken,
			&i.AuthInstanceID,
			&i.Architecture,
			&i.EnvironmentVariables,
			&i.OperatingSystem,
			&i.StartupScript,
			&i.InstanceMetadata,
			&i.ResourceMetadata,
			&i.Directory,
			&i.Version,
			&i.LastConnectedReplicaID,
			&i.ConnectionTimeoutSeconds,
			&i.TroubleshootingURL,
			&i.MOTDFile,
			&i.LifecycleState,
			&i.LoginBeforeReady,
			&i.StartupScriptTimeoutSeconds,
			&i.ExpandedDirectory,
			&i.ShutdownScript,
			&i.ShutdownScriptTimeoutSeconds,
			&i.StartupLogsLength,
			&i.StartupLogsOverflowed,
			&i.Subsystem,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertWorkspaceAgent = `-- name: InsertWorkspaceAgent :one
INSERT INTO
	workspace_agents (
		id,
		created_at,
		updated_at,
		name,
		resource_id,
		auth_token,
		auth_instance_id,
		architecture,
		environment_variables,
		operating_system,
		startup_script,
		directory,
		instance_metadata,
		resource_metadata,
		connection_timeout_seconds,
		troubleshooting_url,
		motd_file,
		login_before_ready,
		startup_script_timeout_seconds,
		shutdown_script,
		shutdown_script_timeout_seconds
	)
VALUES
	($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19, $20, $21) RETURNING id, created_at, updated_at, name, first_connected_at, last_connected_at, disconnected_at, resource_id, auth_token, auth_instance_id, architecture, environment_variables, operating_system, startup_script, instance_metadata, resource_metadata, directory, version, last_connected_replica_id, connection_timeout_seconds, troubleshooting_url, motd_file, lifecycle_state, login_before_ready, startup_script_timeout_seconds, expanded_directory, shutdown_script, shutdown_script_timeout_seconds, startup_logs_length, startup_logs_overflowed, subsystem
`

// InsertWorkspaceAgentParams carries the 21 insertable columns of a
// workspace agent, in the positional order the query binds them ($1..$21).
type InsertWorkspaceAgentParams struct {
	ID                           uuid.UUID             `db:"id" json:"id"`
	CreatedAt                    time.Time             `db:"created_at" json:"created_at"`
	UpdatedAt                    time.Time             `db:"updated_at" json:"updated_at"`
	Name                         string                `db:"name" json:"name"`
	ResourceID                   uuid.UUID             `db:"resource_id" json:"resource_id"`
	AuthToken                    uuid.UUID             `db:"auth_token" json:"auth_token"`
	AuthInstanceID               sql.NullString        `db:"auth_instance_id" json:"auth_instance_id"`
	Architecture                 string                `db:"architecture" json:"architecture"`
	EnvironmentVariables         pqtype.NullRawMessage `db:"environment_variables" json:"environment_variables"`
	OperatingSystem              string                `db:"operating_system" json:"operating_system"`
	StartupScript                sql.NullString        `db:"startup_script" json:"startup_script"`
	Directory                    string                `db:"directory" json:"directory"`
	InstanceMetadata             pqtype.NullRawMessage `db:"instance_metadata" json:"instance_metadata"`
	ResourceMetadata             pqtype.NullRawMessage `db:"resource_metadata" json:"resource_metadata"`
	ConnectionTimeoutSeconds     int32                 `db:"connection_timeout_seconds" json:"connection_timeout_seconds"`
	TroubleshootingURL           string                `db:"troubleshooting_url" json:"troubleshooting_url"`
	MOTDFile                     string                `db:"motd_file" json:"motd_file"`
	LoginBeforeReady             bool                  `db:"login_before_ready" json:"login_before_ready"`
	StartupScriptTimeoutSeconds  int32                 `db:"startup_script_timeout_seconds" json:"startup_script_timeout_seconds"`
	ShutdownScript               sql.NullString        `db:"shutdown_script" json:"shutdown_script"`
	ShutdownScriptTimeoutSeconds int32                 `db:"shutdown_script_timeout_seconds" json:"shutdown_script_timeout_seconds"`
}

// InsertWorkspaceAgent inserts a workspace agent row and returns the complete
// inserted row (including DB-populated columns) via RETURNING.
func (q *sqlQuerier) InsertWorkspaceAgent(ctx context.Context, arg InsertWorkspaceAgentParams) (WorkspaceAgent, error) {
	row := q.db.QueryRowContext(ctx, insertWorkspaceAgent,
		arg.ID,
		arg.CreatedAt,
		arg.UpdatedAt,
		arg.Name,
		arg.ResourceID,
		arg.AuthToken,
		arg.AuthInstanceID,
		arg.Architecture,
		arg.EnvironmentVariables,
		arg.OperatingSystem,
		arg.StartupScript,
		arg.Directory,
		arg.InstanceMetadata,
		arg.ResourceMetadata,
		arg.ConnectionTimeoutSeconds,
		arg.TroubleshootingURL,
		arg.MOTDFile,
		arg.LoginBeforeReady,
		arg.StartupScriptTimeoutSeconds,
		arg.ShutdownScript,
		arg.ShutdownScriptTimeoutSeconds,
	)
	var i WorkspaceAgent
	// Scan order must match the RETURNING column list in the query above.
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.Name,
		&i.FirstConnectedAt,
		&i.LastConnectedAt,
		&i.DisconnectedAt,
		&i.ResourceID,
		&i.AuthToken,
		&i.AuthInstanceID,
		&i.Architecture,
		&i.EnvironmentVariables,
		&i.OperatingSystem,
		&i.StartupScript,
		&i.InstanceMetadata,
		&i.ResourceMetadata,
		&i.Directory,
		&i.Version,
		&i.LastConnectedReplicaID,
		&i.ConnectionTimeoutSeconds,
		&i.TroubleshootingURL,
		&i.MOTDFile,
		&i.LifecycleState,
		&i.LoginBeforeReady,
		&i.StartupScriptTimeoutSeconds,
		&i.ExpandedDirectory,
		&i.ShutdownScript,
		&i.ShutdownScriptTimeoutSeconds,
		&i.StartupLogsLength,
		&i.StartupLogsOverflowed,
		&i.Subsystem,
	)
	return i, err
}
|
|
|
|
const insertWorkspaceAgentMetadata = `-- name: InsertWorkspaceAgentMetadata :exec
INSERT INTO
	workspace_agent_metadata (
		workspace_agent_id,
		display_name,
		key,
		script,
		timeout,
		interval
	)
VALUES
	($1, $2, $3, $4, $5, $6)
`

// InsertWorkspaceAgentMetadataParams carries one metadata definition row for
// an agent, in the positional order the query binds them ($1..$6).
type InsertWorkspaceAgentMetadataParams struct {
	WorkspaceAgentID uuid.UUID `db:"workspace_agent_id" json:"workspace_agent_id"`
	DisplayName      string    `db:"display_name" json:"display_name"`
	Key              string    `db:"key" json:"key"`
	Script           string    `db:"script" json:"script"`
	Timeout          int64     `db:"timeout" json:"timeout"`
	Interval         int64     `db:"interval" json:"interval"`
}

// InsertWorkspaceAgentMetadata inserts a workspace agent metadata row. It
// returns nothing on success (:exec query).
func (q *sqlQuerier) InsertWorkspaceAgentMetadata(ctx context.Context, arg InsertWorkspaceAgentMetadataParams) error {
	_, err := q.db.ExecContext(ctx, insertWorkspaceAgentMetadata,
		arg.WorkspaceAgentID,
		arg.DisplayName,
		arg.Key,
		arg.Script,
		arg.Timeout,
		arg.Interval,
	)
	return err
}
|
|
|
|
const insertWorkspaceAgentStartupLogs = `-- name: InsertWorkspaceAgentStartupLogs :many
WITH new_length AS (
	UPDATE workspace_agents SET
	startup_logs_length = startup_logs_length + $5 WHERE workspace_agents.id = $1
)
INSERT INTO
	workspace_agent_startup_logs (agent_id, created_at, output, level)
SELECT
	$1 :: uuid AS agent_id,
	unnest($2 :: timestamptz [ ]) AS created_at,
	unnest($3 :: VARCHAR(1024) [ ]) AS output,
	unnest($4 :: log_level [ ]) AS level
RETURNING workspace_agent_startup_logs.agent_id, workspace_agent_startup_logs.created_at, workspace_agent_startup_logs.output, workspace_agent_startup_logs.id, workspace_agent_startup_logs.level
`

// InsertWorkspaceAgentStartupLogsParams carries parallel arrays (CreatedAt,
// Output, Level must be the same length; they are unnested row-wise) plus
// OutputLength, the amount added to the agent's startup_logs_length counter.
type InsertWorkspaceAgentStartupLogsParams struct {
	AgentID      uuid.UUID   `db:"agent_id" json:"agent_id"`
	CreatedAt    []time.Time `db:"created_at" json:"created_at"`
	Output       []string    `db:"output" json:"output"`
	Level        []LogLevel  `db:"level" json:"level"`
	OutputLength int32       `db:"output_length" json:"output_length"`
}

// InsertWorkspaceAgentStartupLogs bulk-inserts startup log lines for an agent
// and, in the same statement (via the CTE), increments the agent's
// startup_logs_length by OutputLength. It returns the inserted rows.
func (q *sqlQuerier) InsertWorkspaceAgentStartupLogs(ctx context.Context, arg InsertWorkspaceAgentStartupLogsParams) ([]WorkspaceAgentStartupLog, error) {
	// pq.Array adapts Go slices to PostgreSQL array parameters.
	rows, err := q.db.QueryContext(ctx, insertWorkspaceAgentStartupLogs,
		arg.AgentID,
		pq.Array(arg.CreatedAt),
		pq.Array(arg.Output),
		pq.Array(arg.Level),
		arg.OutputLength,
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceAgentStartupLog
	for rows.Next() {
		var i WorkspaceAgentStartupLog
		if err := rows.Scan(
			&i.AgentID,
			&i.CreatedAt,
			&i.Output,
			&i.ID,
			&i.Level,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const updateWorkspaceAgentConnectionByID = `-- name: UpdateWorkspaceAgentConnectionByID :exec
UPDATE
	workspace_agents
SET
	first_connected_at = $2,
	last_connected_at = $3,
	last_connected_replica_id = $4,
	disconnected_at = $5,
	updated_at = $6
WHERE
	id = $1
`

// UpdateWorkspaceAgentConnectionByIDParams identifies the agent by ID and
// carries the connection-tracking columns to set.
type UpdateWorkspaceAgentConnectionByIDParams struct {
	ID                     uuid.UUID     `db:"id" json:"id"`
	FirstConnectedAt       sql.NullTime  `db:"first_connected_at" json:"first_connected_at"`
	LastConnectedAt        sql.NullTime  `db:"last_connected_at" json:"last_connected_at"`
	LastConnectedReplicaID uuid.NullUUID `db:"last_connected_replica_id" json:"last_connected_replica_id"`
	DisconnectedAt         sql.NullTime  `db:"disconnected_at" json:"disconnected_at"`
	UpdatedAt              time.Time     `db:"updated_at" json:"updated_at"`
}

// UpdateWorkspaceAgentConnectionByID updates the connection timestamps and
// replica ID of the agent with the given ID.
func (q *sqlQuerier) UpdateWorkspaceAgentConnectionByID(ctx context.Context, arg UpdateWorkspaceAgentConnectionByIDParams) error {
	_, err := q.db.ExecContext(ctx, updateWorkspaceAgentConnectionByID,
		arg.ID,
		arg.FirstConnectedAt,
		arg.LastConnectedAt,
		arg.LastConnectedReplicaID,
		arg.DisconnectedAt,
		arg.UpdatedAt,
	)
	return err
}
|
|
|
|
const updateWorkspaceAgentLifecycleStateByID = `-- name: UpdateWorkspaceAgentLifecycleStateByID :exec
UPDATE
	workspace_agents
SET
	lifecycle_state = $2
WHERE
	id = $1
`

// UpdateWorkspaceAgentLifecycleStateByIDParams identifies the agent by ID and
// carries the new lifecycle state.
type UpdateWorkspaceAgentLifecycleStateByIDParams struct {
	ID             uuid.UUID                    `db:"id" json:"id"`
	LifecycleState WorkspaceAgentLifecycleState `db:"lifecycle_state" json:"lifecycle_state"`
}

// UpdateWorkspaceAgentLifecycleStateByID sets the lifecycle_state of the agent
// with the given ID.
func (q *sqlQuerier) UpdateWorkspaceAgentLifecycleStateByID(ctx context.Context, arg UpdateWorkspaceAgentLifecycleStateByIDParams) error {
	_, err := q.db.ExecContext(ctx, updateWorkspaceAgentLifecycleStateByID, arg.ID, arg.LifecycleState)
	return err
}
|
|
|
|
const updateWorkspaceAgentMetadata = `-- name: UpdateWorkspaceAgentMetadata :exec
UPDATE
	workspace_agent_metadata
SET
	value = $3,
	error = $4,
	collected_at = $5
WHERE
	workspace_agent_id = $1
	AND key = $2
`

// UpdateWorkspaceAgentMetadataParams identifies a metadata row by
// (agent ID, key) and carries the collected value/error/timestamp to store.
type UpdateWorkspaceAgentMetadataParams struct {
	WorkspaceAgentID uuid.UUID `db:"workspace_agent_id" json:"workspace_agent_id"`
	Key              string    `db:"key" json:"key"`
	Value            string    `db:"value" json:"value"`
	Error            string    `db:"error" json:"error"`
	CollectedAt      time.Time `db:"collected_at" json:"collected_at"`
}

// UpdateWorkspaceAgentMetadata stores a collected metadata value (or error)
// for the row keyed by (workspace_agent_id, key).
func (q *sqlQuerier) UpdateWorkspaceAgentMetadata(ctx context.Context, arg UpdateWorkspaceAgentMetadataParams) error {
	_, err := q.db.ExecContext(ctx, updateWorkspaceAgentMetadata,
		arg.WorkspaceAgentID,
		arg.Key,
		arg.Value,
		arg.Error,
		arg.CollectedAt,
	)
	return err
}
|
|
|
|
const updateWorkspaceAgentStartupByID = `-- name: UpdateWorkspaceAgentStartupByID :exec
UPDATE
	workspace_agents
SET
	version = $2,
	expanded_directory = $3,
	subsystem = $4
WHERE
	id = $1
`

// UpdateWorkspaceAgentStartupByIDParams identifies the agent by ID and carries
// the startup-reported fields to set.
type UpdateWorkspaceAgentStartupByIDParams struct {
	ID                uuid.UUID               `db:"id" json:"id"`
	Version           string                  `db:"version" json:"version"`
	ExpandedDirectory string                  `db:"expanded_directory" json:"expanded_directory"`
	Subsystem         WorkspaceAgentSubsystem `db:"subsystem" json:"subsystem"`
}

// UpdateWorkspaceAgentStartupByID sets the version, expanded directory, and
// subsystem of the agent with the given ID.
func (q *sqlQuerier) UpdateWorkspaceAgentStartupByID(ctx context.Context, arg UpdateWorkspaceAgentStartupByIDParams) error {
	_, err := q.db.ExecContext(ctx, updateWorkspaceAgentStartupByID,
		arg.ID,
		arg.Version,
		arg.ExpandedDirectory,
		arg.Subsystem,
	)
	return err
}
|
|
|
|
const updateWorkspaceAgentStartupLogOverflowByID = `-- name: UpdateWorkspaceAgentStartupLogOverflowByID :exec
UPDATE
	workspace_agents
SET
	startup_logs_overflowed = $2
WHERE
	id = $1
`

// UpdateWorkspaceAgentStartupLogOverflowByIDParams identifies the agent by ID
// and carries the overflow flag to set.
type UpdateWorkspaceAgentStartupLogOverflowByIDParams struct {
	ID                    uuid.UUID `db:"id" json:"id"`
	StartupLogsOverflowed bool      `db:"startup_logs_overflowed" json:"startup_logs_overflowed"`
}

// UpdateWorkspaceAgentStartupLogOverflowByID sets the startup_logs_overflowed
// flag of the agent with the given ID.
func (q *sqlQuerier) UpdateWorkspaceAgentStartupLogOverflowByID(ctx context.Context, arg UpdateWorkspaceAgentStartupLogOverflowByIDParams) error {
	_, err := q.db.ExecContext(ctx, updateWorkspaceAgentStartupLogOverflowByID, arg.ID, arg.StartupLogsOverflowed)
	return err
}
|
|
|
|
const deleteOldWorkspaceAgentStats = `-- name: DeleteOldWorkspaceAgentStats :exec
DELETE FROM workspace_agent_stats WHERE created_at < NOW() - INTERVAL '30 days'
`

// DeleteOldWorkspaceAgentStats deletes workspace agent stat rows older than
// 30 days (relative to the database's NOW()).
func (q *sqlQuerier) DeleteOldWorkspaceAgentStats(ctx context.Context) error {
	_, err := q.db.ExecContext(ctx, deleteOldWorkspaceAgentStats)
	return err
}
|
|
|
|
const getDeploymentDAUs = `-- name: GetDeploymentDAUs :many
SELECT
	(created_at at TIME ZONE cast($1::integer as text))::date as date,
	user_id
FROM
	workspace_agent_stats
WHERE
	connection_count > 0
GROUP BY
	date, user_id
ORDER BY
	date ASC
`

// GetDeploymentDAUsRow is one (date, user) pair with at least one connection
// recorded on that date.
type GetDeploymentDAUsRow struct {
	Date   time.Time `db:"date" json:"date"`
	UserID uuid.UUID `db:"user_id" json:"user_id"`
}

// GetDeploymentDAUs returns distinct (date, user_id) pairs from
// workspace_agent_stats with connection_count > 0, ordered by date ascending.
// tzOffset is cast to text and applied via AT TIME ZONE so the date bucketing
// follows the caller's timezone offset.
func (q *sqlQuerier) GetDeploymentDAUs(ctx context.Context, tzOffset int32) ([]GetDeploymentDAUsRow, error) {
	rows, err := q.db.QueryContext(ctx, getDeploymentDAUs, tzOffset)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetDeploymentDAUsRow
	for rows.Next() {
		var i GetDeploymentDAUsRow
		if err := rows.Scan(&i.Date, &i.UserID); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getDeploymentWorkspaceAgentStats = `-- name: GetDeploymentWorkspaceAgentStats :one
WITH agent_stats AS (
	SELECT
		coalesce(SUM(rx_bytes), 0)::bigint AS workspace_rx_bytes,
		coalesce(SUM(tx_bytes), 0)::bigint AS workspace_tx_bytes,
		coalesce((PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY connection_median_latency_ms)), -1)::FLOAT AS workspace_connection_latency_50,
		coalesce((PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY connection_median_latency_ms)), -1)::FLOAT AS workspace_connection_latency_95
	FROM workspace_agent_stats
	-- The greater than 0 is to support legacy agents that don't report connection_median_latency_ms.
	WHERE workspace_agent_stats.created_at > $1 AND connection_median_latency_ms > 0
), latest_agent_stats AS (
	SELECT
		coalesce(SUM(session_count_vscode), 0)::bigint AS session_count_vscode,
		coalesce(SUM(session_count_ssh), 0)::bigint AS session_count_ssh,
		coalesce(SUM(session_count_jetbrains), 0)::bigint AS session_count_jetbrains,
		coalesce(SUM(session_count_reconnecting_pty), 0)::bigint AS session_count_reconnecting_pty
	FROM (
		SELECT id, created_at, user_id, agent_id, workspace_id, template_id, connections_by_proto, connection_count, rx_packets, rx_bytes, tx_packets, tx_bytes, connection_median_latency_ms, session_count_vscode, session_count_jetbrains, session_count_reconnecting_pty, session_count_ssh, ROW_NUMBER() OVER(PARTITION BY agent_id ORDER BY created_at DESC) AS rn
		FROM workspace_agent_stats WHERE created_at > $1
	) AS a WHERE a.rn = 1
)
SELECT workspace_rx_bytes, workspace_tx_bytes, workspace_connection_latency_50, workspace_connection_latency_95, session_count_vscode, session_count_ssh, session_count_jetbrains, session_count_reconnecting_pty FROM agent_stats, latest_agent_stats
`

// GetDeploymentWorkspaceAgentStatsRow aggregates deployment-wide traffic,
// latency percentiles (or -1 when no qualifying rows), and session counts
// taken from each agent's most recent stat row.
type GetDeploymentWorkspaceAgentStatsRow struct {
	WorkspaceRxBytes             int64   `db:"workspace_rx_bytes" json:"workspace_rx_bytes"`
	WorkspaceTxBytes             int64   `db:"workspace_tx_bytes" json:"workspace_tx_bytes"`
	WorkspaceConnectionLatency50 float64 `db:"workspace_connection_latency_50" json:"workspace_connection_latency_50"`
	WorkspaceConnectionLatency95 float64 `db:"workspace_connection_latency_95" json:"workspace_connection_latency_95"`
	SessionCountVSCode           int64   `db:"session_count_vscode" json:"session_count_vscode"`
	SessionCountSSH              int64   `db:"session_count_ssh" json:"session_count_ssh"`
	SessionCountJetBrains        int64   `db:"session_count_jetbrains" json:"session_count_jetbrains"`
	SessionCountReconnectingPTY  int64   `db:"session_count_reconnecting_pty" json:"session_count_reconnecting_pty"`
}

// GetDeploymentWorkspaceAgentStats returns one aggregate row over all stats
// created after createdAt: summed rx/tx bytes, p50/p95 of
// connection_median_latency_ms, and session counts from each agent's latest
// stat row (ROW_NUMBER ... rn = 1).
func (q *sqlQuerier) GetDeploymentWorkspaceAgentStats(ctx context.Context, createdAt time.Time) (GetDeploymentWorkspaceAgentStatsRow, error) {
	row := q.db.QueryRowContext(ctx, getDeploymentWorkspaceAgentStats, createdAt)
	var i GetDeploymentWorkspaceAgentStatsRow
	err := row.Scan(
		&i.WorkspaceRxBytes,
		&i.WorkspaceTxBytes,
		&i.WorkspaceConnectionLatency50,
		&i.WorkspaceConnectionLatency95,
		&i.SessionCountVSCode,
		&i.SessionCountSSH,
		&i.SessionCountJetBrains,
		&i.SessionCountReconnectingPTY,
	)
	return i, err
}
|
|
|
|
const getTemplateDAUs = `-- name: GetTemplateDAUs :many
SELECT
	(created_at at TIME ZONE cast($2::integer as text))::date as date,
	user_id
FROM
	workspace_agent_stats
WHERE
	template_id = $1 AND
	connection_count > 0
GROUP BY
	date, user_id
ORDER BY
	date ASC
`

// GetTemplateDAUsParams selects the template and the timezone offset used for
// date bucketing.
type GetTemplateDAUsParams struct {
	TemplateID uuid.UUID `db:"template_id" json:"template_id"`
	TzOffset   int32     `db:"tz_offset" json:"tz_offset"`
}

// GetTemplateDAUsRow is one (date, user) pair with at least one connection
// recorded on that date for the template.
type GetTemplateDAUsRow struct {
	Date   time.Time `db:"date" json:"date"`
	UserID uuid.UUID `db:"user_id" json:"user_id"`
}

// GetTemplateDAUs returns distinct (date, user_id) pairs for the given
// template from workspace_agent_stats with connection_count > 0, ordered by
// date ascending. The tz offset is applied via AT TIME ZONE for bucketing.
func (q *sqlQuerier) GetTemplateDAUs(ctx context.Context, arg GetTemplateDAUsParams) ([]GetTemplateDAUsRow, error) {
	rows, err := q.db.QueryContext(ctx, getTemplateDAUs, arg.TemplateID, arg.TzOffset)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetTemplateDAUsRow
	for rows.Next() {
		var i GetTemplateDAUsRow
		if err := rows.Scan(&i.Date, &i.UserID); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getWorkspaceAgentStats = `-- name: GetWorkspaceAgentStats :many
WITH agent_stats AS (
	SELECT
		user_id,
		agent_id,
		workspace_id,
		template_id,
		MIN(created_at)::timestamptz AS aggregated_from,
		coalesce(SUM(rx_bytes), 0)::bigint AS workspace_rx_bytes,
		coalesce(SUM(tx_bytes), 0)::bigint AS workspace_tx_bytes,
		coalesce((PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY connection_median_latency_ms)), -1)::FLOAT AS workspace_connection_latency_50,
		coalesce((PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY connection_median_latency_ms)), -1)::FLOAT AS workspace_connection_latency_95
	FROM workspace_agent_stats
	-- The greater than 0 is to support legacy agents that don't report connection_median_latency_ms.
	WHERE workspace_agent_stats.created_at > $1 AND connection_median_latency_ms > 0 GROUP BY user_id, agent_id, workspace_id, template_id
), latest_agent_stats AS (
	SELECT
		a.agent_id,
		coalesce(SUM(session_count_vscode), 0)::bigint AS session_count_vscode,
		coalesce(SUM(session_count_ssh), 0)::bigint AS session_count_ssh,
		coalesce(SUM(session_count_jetbrains), 0)::bigint AS session_count_jetbrains,
		coalesce(SUM(session_count_reconnecting_pty), 0)::bigint AS session_count_reconnecting_pty
	FROM (
		SELECT id, created_at, user_id, agent_id, workspace_id, template_id, connections_by_proto, connection_count, rx_packets, rx_bytes, tx_packets, tx_bytes, connection_median_latency_ms, session_count_vscode, session_count_jetbrains, session_count_reconnecting_pty, session_count_ssh, ROW_NUMBER() OVER(PARTITION BY agent_id ORDER BY created_at DESC) AS rn
		FROM workspace_agent_stats WHERE created_at > $1
	) AS a WHERE a.rn = 1 GROUP BY a.user_id, a.agent_id, a.workspace_id, a.template_id
)
SELECT user_id, agent_stats.agent_id, workspace_id, template_id, aggregated_from, workspace_rx_bytes, workspace_tx_bytes, workspace_connection_latency_50, workspace_connection_latency_95, latest_agent_stats.agent_id, session_count_vscode, session_count_ssh, session_count_jetbrains, session_count_reconnecting_pty FROM agent_stats JOIN latest_agent_stats ON agent_stats.agent_id = latest_agent_stats.agent_id
`

// GetWorkspaceAgentStatsRow is one per-agent aggregate: summed traffic and
// latency percentiles (or -1 when no qualifying rows) plus session counts
// from the agent's most recent stat row. AgentID_2 is the duplicate agent_id
// column produced by the JOIN.
type GetWorkspaceAgentStatsRow struct {
	UserID                       uuid.UUID `db:"user_id" json:"user_id"`
	AgentID                      uuid.UUID `db:"agent_id" json:"agent_id"`
	WorkspaceID                  uuid.UUID `db:"workspace_id" json:"workspace_id"`
	TemplateID                   uuid.UUID `db:"template_id" json:"template_id"`
	AggregatedFrom               time.Time `db:"aggregated_from" json:"aggregated_from"`
	WorkspaceRxBytes             int64     `db:"workspace_rx_bytes" json:"workspace_rx_bytes"`
	WorkspaceTxBytes             int64     `db:"workspace_tx_bytes" json:"workspace_tx_bytes"`
	WorkspaceConnectionLatency50 float64   `db:"workspace_connection_latency_50" json:"workspace_connection_latency_50"`
	WorkspaceConnectionLatency95 float64   `db:"workspace_connection_latency_95" json:"workspace_connection_latency_95"`
	AgentID_2                    uuid.UUID `db:"agent_id_2" json:"agent_id_2"`
	SessionCountVSCode           int64     `db:"session_count_vscode" json:"session_count_vscode"`
	SessionCountSSH              int64     `db:"session_count_ssh" json:"session_count_ssh"`
	SessionCountJetBrains        int64     `db:"session_count_jetbrains" json:"session_count_jetbrains"`
	SessionCountReconnectingPTY  int64     `db:"session_count_reconnecting_pty" json:"session_count_reconnecting_pty"`
}

// GetWorkspaceAgentStats returns per-agent aggregates over stats created after
// createdAt, grouped by (user, agent, workspace, template), joined with
// session counts from each agent's latest stat row.
func (q *sqlQuerier) GetWorkspaceAgentStats(ctx context.Context, createdAt time.Time) ([]GetWorkspaceAgentStatsRow, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaceAgentStats, createdAt)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetWorkspaceAgentStatsRow
	for rows.Next() {
		var i GetWorkspaceAgentStatsRow
		if err := rows.Scan(
			&i.UserID,
			&i.AgentID,
			&i.WorkspaceID,
			&i.TemplateID,
			&i.AggregatedFrom,
			&i.WorkspaceRxBytes,
			&i.WorkspaceTxBytes,
			&i.WorkspaceConnectionLatency50,
			&i.WorkspaceConnectionLatency95,
			&i.AgentID_2,
			&i.SessionCountVSCode,
			&i.SessionCountSSH,
			&i.SessionCountJetBrains,
			&i.SessionCountReconnectingPTY,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getWorkspaceAgentStatsAndLabels = `-- name: GetWorkspaceAgentStatsAndLabels :many
WITH agent_stats AS (
	SELECT
		user_id,
		agent_id,
		workspace_id,
		coalesce(SUM(rx_bytes), 0)::bigint AS rx_bytes,
		coalesce(SUM(tx_bytes), 0)::bigint AS tx_bytes
	FROM workspace_agent_stats
	WHERE workspace_agent_stats.created_at > $1
	GROUP BY user_id, agent_id, workspace_id
), latest_agent_stats AS (
	SELECT
		a.agent_id,
		coalesce(SUM(session_count_vscode), 0)::bigint AS session_count_vscode,
		coalesce(SUM(session_count_ssh), 0)::bigint AS session_count_ssh,
		coalesce(SUM(session_count_jetbrains), 0)::bigint AS session_count_jetbrains,
		coalesce(SUM(session_count_reconnecting_pty), 0)::bigint AS session_count_reconnecting_pty,
		coalesce(SUM(connection_count), 0)::bigint AS connection_count,
		coalesce(MAX(connection_median_latency_ms), 0)::float AS connection_median_latency_ms
	FROM (
		SELECT id, created_at, user_id, agent_id, workspace_id, template_id, connections_by_proto, connection_count, rx_packets, rx_bytes, tx_packets, tx_bytes, connection_median_latency_ms, session_count_vscode, session_count_jetbrains, session_count_reconnecting_pty, session_count_ssh, ROW_NUMBER() OVER(PARTITION BY agent_id ORDER BY created_at DESC) AS rn
		FROM workspace_agent_stats
		-- The greater than 0 is to support legacy agents that don't report connection_median_latency_ms.
		WHERE created_at > $1 AND connection_median_latency_ms > 0
	) AS a
	WHERE a.rn = 1
	GROUP BY a.user_id, a.agent_id, a.workspace_id
)
SELECT
	users.username, workspace_agents.name AS agent_name, workspaces.name AS workspace_name, rx_bytes, tx_bytes,
	session_count_vscode, session_count_ssh, session_count_jetbrains, session_count_reconnecting_pty,
	connection_count, connection_median_latency_ms
FROM
	agent_stats
JOIN
	latest_agent_stats
ON
	agent_stats.agent_id = latest_agent_stats.agent_id
JOIN
	users
ON
	users.id = agent_stats.user_id
JOIN
	workspace_agents
ON
	workspace_agents.id = agent_stats.agent_id
JOIN
	workspaces
ON
	workspaces.id = agent_stats.workspace_id
`

// GetWorkspaceAgentStatsAndLabelsRow is a per-agent aggregate annotated with
// human-readable labels (username, agent name, workspace name) resolved by
// joining users, workspace_agents, and workspaces.
type GetWorkspaceAgentStatsAndLabelsRow struct {
	Username                    string  `db:"username" json:"username"`
	AgentName                   string  `db:"agent_name" json:"agent_name"`
	WorkspaceName               string  `db:"workspace_name" json:"workspace_name"`
	RxBytes                     int64   `db:"rx_bytes" json:"rx_bytes"`
	TxBytes                     int64   `db:"tx_bytes" json:"tx_bytes"`
	SessionCountVSCode          int64   `db:"session_count_vscode" json:"session_count_vscode"`
	SessionCountSSH             int64   `db:"session_count_ssh" json:"session_count_ssh"`
	SessionCountJetBrains       int64   `db:"session_count_jetbrains" json:"session_count_jetbrains"`
	SessionCountReconnectingPTY int64   `db:"session_count_reconnecting_pty" json:"session_count_reconnecting_pty"`
	ConnectionCount             int64   `db:"connection_count" json:"connection_count"`
	ConnectionMedianLatencyMS   float64 `db:"connection_median_latency_ms" json:"connection_median_latency_ms"`
}

// GetWorkspaceAgentStatsAndLabels returns per-agent traffic/session aggregates
// over stats created after createdAt, labeled with username, agent name, and
// workspace name.
func (q *sqlQuerier) GetWorkspaceAgentStatsAndLabels(ctx context.Context, createdAt time.Time) ([]GetWorkspaceAgentStatsAndLabelsRow, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaceAgentStatsAndLabels, createdAt)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetWorkspaceAgentStatsAndLabelsRow
	for rows.Next() {
		var i GetWorkspaceAgentStatsAndLabelsRow
		if err := rows.Scan(
			&i.Username,
			&i.AgentName,
			&i.WorkspaceName,
			&i.RxBytes,
			&i.TxBytes,
			&i.SessionCountVSCode,
			&i.SessionCountSSH,
			&i.SessionCountJetBrains,
			&i.SessionCountReconnectingPTY,
			&i.ConnectionCount,
			&i.ConnectionMedianLatencyMS,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertWorkspaceAgentStat = `-- name: InsertWorkspaceAgentStat :one
INSERT INTO
	workspace_agent_stats (
		id,
		created_at,
		user_id,
		workspace_id,
		template_id,
		agent_id,
		connections_by_proto,
		connection_count,
		rx_packets,
		rx_bytes,
		tx_packets,
		tx_bytes,
		session_count_vscode,
		session_count_jetbrains,
		session_count_reconnecting_pty,
		session_count_ssh,
		connection_median_latency_ms
	)
VALUES
	($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17) RETURNING id, created_at, user_id, agent_id, workspace_id, template_id, connections_by_proto, connection_count, rx_packets, rx_bytes, tx_packets, tx_bytes, connection_median_latency_ms, session_count_vscode, session_count_jetbrains, session_count_reconnecting_pty, session_count_ssh
`

// InsertWorkspaceAgentStatParams carries one stat sample for an agent, in the
// positional order the query binds them ($1..$17).
type InsertWorkspaceAgentStatParams struct {
	ID                          uuid.UUID       `db:"id" json:"id"`
	CreatedAt                   time.Time       `db:"created_at" json:"created_at"`
	UserID                      uuid.UUID       `db:"user_id" json:"user_id"`
	WorkspaceID                 uuid.UUID       `db:"workspace_id" json:"workspace_id"`
	TemplateID                  uuid.UUID       `db:"template_id" json:"template_id"`
	AgentID                     uuid.UUID       `db:"agent_id" json:"agent_id"`
	ConnectionsByProto          json.RawMessage `db:"connections_by_proto" json:"connections_by_proto"`
	ConnectionCount             int64           `db:"connection_count" json:"connection_count"`
	RxPackets                   int64           `db:"rx_packets" json:"rx_packets"`
	RxBytes                     int64           `db:"rx_bytes" json:"rx_bytes"`
	TxPackets                   int64           `db:"tx_packets" json:"tx_packets"`
	TxBytes                     int64           `db:"tx_bytes" json:"tx_bytes"`
	SessionCountVSCode          int64           `db:"session_count_vscode" json:"session_count_vscode"`
	SessionCountJetBrains       int64           `db:"session_count_jetbrains" json:"session_count_jetbrains"`
	SessionCountReconnectingPTY int64           `db:"session_count_reconnecting_pty" json:"session_count_reconnecting_pty"`
	SessionCountSSH             int64           `db:"session_count_ssh" json:"session_count_ssh"`
	ConnectionMedianLatencyMS   float64         `db:"connection_median_latency_ms" json:"connection_median_latency_ms"`
}

// InsertWorkspaceAgentStat inserts a workspace agent stat row and returns the
// inserted row via RETURNING.
func (q *sqlQuerier) InsertWorkspaceAgentStat(ctx context.Context, arg InsertWorkspaceAgentStatParams) (WorkspaceAgentStat, error) {
	row := q.db.QueryRowContext(ctx, insertWorkspaceAgentStat,
		arg.ID,
		arg.CreatedAt,
		arg.UserID,
		arg.WorkspaceID,
		arg.TemplateID,
		arg.AgentID,
		arg.ConnectionsByProto,
		arg.ConnectionCount,
		arg.RxPackets,
		arg.RxBytes,
		arg.TxPackets,
		arg.TxBytes,
		arg.SessionCountVSCode,
		arg.SessionCountJetBrains,
		arg.SessionCountReconnectingPTY,
		arg.SessionCountSSH,
		arg.ConnectionMedianLatencyMS,
	)
	var i WorkspaceAgentStat
	// Scan order must match the RETURNING column list in the query above
	// (note: it differs from the insert column order).
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UserID,
		&i.AgentID,
		&i.WorkspaceID,
		&i.TemplateID,
		&i.ConnectionsByProto,
		&i.ConnectionCount,
		&i.RxPackets,
		&i.RxBytes,
		&i.TxPackets,
		&i.TxBytes,
		&i.ConnectionMedianLatencyMS,
		&i.SessionCountVSCode,
		&i.SessionCountJetBrains,
		&i.SessionCountReconnectingPTY,
		&i.SessionCountSSH,
	)
	return i, err
}
|
|
|
|
const getWorkspaceAppByAgentIDAndSlug = `-- name: GetWorkspaceAppByAgentIDAndSlug :one
SELECT id, created_at, agent_id, display_name, icon, command, url, healthcheck_url, healthcheck_interval, healthcheck_threshold, health, subdomain, sharing_level, slug, external FROM workspace_apps WHERE agent_id = $1 AND slug = $2
`

// GetWorkspaceAppByAgentIDAndSlugParams is the composite lookup key for a
// single app: the owning agent plus the app's slug.
type GetWorkspaceAppByAgentIDAndSlugParams struct {
	AgentID uuid.UUID `db:"agent_id" json:"agent_id"`
	Slug    string    `db:"slug" json:"slug"`
}

// GetWorkspaceAppByAgentIDAndSlug fetches exactly one workspace app by
// (agent_id, slug). Scan returns sql.ErrNoRows when no app matches.
func (q *sqlQuerier) GetWorkspaceAppByAgentIDAndSlug(ctx context.Context, arg GetWorkspaceAppByAgentIDAndSlugParams) (WorkspaceApp, error) {
	row := q.db.QueryRowContext(ctx, getWorkspaceAppByAgentIDAndSlug, arg.AgentID, arg.Slug)
	var i WorkspaceApp
	// Scan order mirrors the SELECT column list above.
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.AgentID,
		&i.DisplayName,
		&i.Icon,
		&i.Command,
		&i.Url,
		&i.HealthcheckUrl,
		&i.HealthcheckInterval,
		&i.HealthcheckThreshold,
		&i.Health,
		&i.Subdomain,
		&i.SharingLevel,
		&i.Slug,
		&i.External,
	)
	return i, err
}
|
|
|
|
const getWorkspaceAppsByAgentID = `-- name: GetWorkspaceAppsByAgentID :many
SELECT id, created_at, agent_id, display_name, icon, command, url, healthcheck_url, healthcheck_interval, healthcheck_threshold, health, subdomain, sharing_level, slug, external FROM workspace_apps WHERE agent_id = $1 ORDER BY slug ASC
`

// GetWorkspaceAppsByAgentID lists every app belonging to a single agent,
// ordered by slug for deterministic output. Returns a nil slice (not an
// error) when the agent has no apps.
func (q *sqlQuerier) GetWorkspaceAppsByAgentID(ctx context.Context, agentID uuid.UUID) ([]WorkspaceApp, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaceAppsByAgentID, agentID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceApp
	for rows.Next() {
		var i WorkspaceApp
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.AgentID,
			&i.DisplayName,
			&i.Icon,
			&i.Command,
			&i.Url,
			&i.HealthcheckUrl,
			&i.HealthcheckInterval,
			&i.HealthcheckThreshold,
			&i.Health,
			&i.Subdomain,
			&i.SharingLevel,
			&i.Slug,
			&i.External,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	// Explicit Close/Err checks surface deferred driver errors that the
	// row loop alone would swallow.
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getWorkspaceAppsByAgentIDs = `-- name: GetWorkspaceAppsByAgentIDs :many
SELECT id, created_at, agent_id, display_name, icon, command, url, healthcheck_url, healthcheck_interval, healthcheck_threshold, health, subdomain, sharing_level, slug, external FROM workspace_apps WHERE agent_id = ANY($1 :: uuid [ ]) ORDER BY slug ASC
`

// GetWorkspaceAppsByAgentIDs lists all apps for a batch of agents in one
// round trip. The id slice is bound as a Postgres uuid[] via pq.Array.
func (q *sqlQuerier) GetWorkspaceAppsByAgentIDs(ctx context.Context, ids []uuid.UUID) ([]WorkspaceApp, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaceAppsByAgentIDs, pq.Array(ids))
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceApp
	for rows.Next() {
		var i WorkspaceApp
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.AgentID,
			&i.DisplayName,
			&i.Icon,
			&i.Command,
			&i.Url,
			&i.HealthcheckUrl,
			&i.HealthcheckInterval,
			&i.HealthcheckThreshold,
			&i.Health,
			&i.Subdomain,
			&i.SharingLevel,
			&i.Slug,
			&i.External,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getWorkspaceAppsCreatedAfter = `-- name: GetWorkspaceAppsCreatedAfter :many
SELECT id, created_at, agent_id, display_name, icon, command, url, healthcheck_url, healthcheck_interval, healthcheck_threshold, health, subdomain, sharing_level, slug, external FROM workspace_apps WHERE created_at > $1 ORDER BY slug ASC
`

// GetWorkspaceAppsCreatedAfter lists apps created strictly after createdAt
// (exclusive bound), ordered by slug.
func (q *sqlQuerier) GetWorkspaceAppsCreatedAfter(ctx context.Context, createdAt time.Time) ([]WorkspaceApp, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaceAppsCreatedAfter, createdAt)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceApp
	for rows.Next() {
		var i WorkspaceApp
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.AgentID,
			&i.DisplayName,
			&i.Icon,
			&i.Command,
			&i.Url,
			&i.HealthcheckUrl,
			&i.HealthcheckInterval,
			&i.HealthcheckThreshold,
			&i.Health,
			&i.Subdomain,
			&i.SharingLevel,
			&i.Slug,
			&i.External,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertWorkspaceApp = `-- name: InsertWorkspaceApp :one
INSERT INTO
	workspace_apps (
		id,
		created_at,
		agent_id,
		slug,
		display_name,
		icon,
		command,
		url,
		external,
		subdomain,
		sharing_level,
		healthcheck_url,
		healthcheck_interval,
		healthcheck_threshold,
		health
	)
VALUES
	($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15) RETURNING id, created_at, agent_id, display_name, icon, command, url, healthcheck_url, healthcheck_interval, healthcheck_threshold, health, subdomain, sharing_level, slug, external
`

// InsertWorkspaceAppParams carries one row for workspace_apps. Command and
// Url are nullable; Subdomain selects subdomain- vs path-based routing.
type InsertWorkspaceAppParams struct {
	ID                   uuid.UUID          `db:"id" json:"id"`
	CreatedAt            time.Time          `db:"created_at" json:"created_at"`
	AgentID              uuid.UUID          `db:"agent_id" json:"agent_id"`
	Slug                 string             `db:"slug" json:"slug"`
	DisplayName          string             `db:"display_name" json:"display_name"`
	Icon                 string             `db:"icon" json:"icon"`
	Command              sql.NullString     `db:"command" json:"command"`
	Url                  sql.NullString     `db:"url" json:"url"`
	External             bool               `db:"external" json:"external"`
	Subdomain            bool               `db:"subdomain" json:"subdomain"`
	SharingLevel         AppSharingLevel    `db:"sharing_level" json:"sharing_level"`
	HealthcheckUrl       string             `db:"healthcheck_url" json:"healthcheck_url"`
	HealthcheckInterval  int32              `db:"healthcheck_interval" json:"healthcheck_interval"`
	HealthcheckThreshold int32              `db:"healthcheck_threshold" json:"healthcheck_threshold"`
	Health               WorkspaceAppHealth `db:"health" json:"health"`
}

// InsertWorkspaceApp inserts one workspace app and returns it as stored.
// Bind order follows the INSERT column list; Scan order follows the
// RETURNING list (which uses the table's physical column order, so slug and
// external come last there).
func (q *sqlQuerier) InsertWorkspaceApp(ctx context.Context, arg InsertWorkspaceAppParams) (WorkspaceApp, error) {
	row := q.db.QueryRowContext(ctx, insertWorkspaceApp,
		arg.ID,
		arg.CreatedAt,
		arg.AgentID,
		arg.Slug,
		arg.DisplayName,
		arg.Icon,
		arg.Command,
		arg.Url,
		arg.External,
		arg.Subdomain,
		arg.SharingLevel,
		arg.HealthcheckUrl,
		arg.HealthcheckInterval,
		arg.HealthcheckThreshold,
		arg.Health,
	)
	var i WorkspaceApp
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.AgentID,
		&i.DisplayName,
		&i.Icon,
		&i.Command,
		&i.Url,
		&i.HealthcheckUrl,
		&i.HealthcheckInterval,
		&i.HealthcheckThreshold,
		&i.Health,
		&i.Subdomain,
		&i.SharingLevel,
		&i.Slug,
		&i.External,
	)
	return i, err
}
|
|
|
|
const updateWorkspaceAppHealthByID = `-- name: UpdateWorkspaceAppHealthByID :exec
UPDATE
	workspace_apps
SET
	health = $2
WHERE
	id = $1
`

// UpdateWorkspaceAppHealthByIDParams identifies the app and the new health
// state to record.
type UpdateWorkspaceAppHealthByIDParams struct {
	ID     uuid.UUID          `db:"id" json:"id"`
	Health WorkspaceAppHealth `db:"health" json:"health"`
}

// UpdateWorkspaceAppHealthByID sets the health column for a single app.
// It is a no-op (no error) if the id does not exist.
func (q *sqlQuerier) UpdateWorkspaceAppHealthByID(ctx context.Context, arg UpdateWorkspaceAppHealthByIDParams) error {
	_, err := q.db.ExecContext(ctx, updateWorkspaceAppHealthByID, arg.ID, arg.Health)
	return err
}
|
|
|
|
const getWorkspaceBuildParameters = `-- name: GetWorkspaceBuildParameters :many
SELECT
	workspace_build_id, name, value
FROM
	workspace_build_parameters
WHERE
	workspace_build_id = $1
`

// GetWorkspaceBuildParameters returns every (name, value) parameter recorded
// for the given workspace build.
func (q *sqlQuerier) GetWorkspaceBuildParameters(ctx context.Context, workspaceBuildID uuid.UUID) ([]WorkspaceBuildParameter, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaceBuildParameters, workspaceBuildID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceBuildParameter
	for rows.Next() {
		var i WorkspaceBuildParameter
		if err := rows.Scan(&i.WorkspaceBuildID, &i.Name, &i.Value); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertWorkspaceBuildParameters = `-- name: InsertWorkspaceBuildParameters :exec
INSERT INTO
	workspace_build_parameters (workspace_build_id, name, value)
SELECT
	$1 :: uuid AS workspace_build_id,
	unnest($2 :: text[]) AS name,
	unnest($3 :: text[]) AS value
RETURNING workspace_build_id, name, value
`

// InsertWorkspaceBuildParametersParams bulk-inserts parameters for one build.
// Name and Value are parallel arrays: Name[i] pairs with Value[i], so both
// slices must have the same length.
type InsertWorkspaceBuildParametersParams struct {
	WorkspaceBuildID uuid.UUID `db:"workspace_build_id" json:"workspace_build_id"`
	Name             []string  `db:"name" json:"name"`
	Value            []string  `db:"value" json:"value"`
}

// InsertWorkspaceBuildParameters inserts all (name, value) pairs for one
// build in a single statement using unnest over text[] arrays. The RETURNING
// clause in the SQL is unused here since this is an :exec query.
func (q *sqlQuerier) InsertWorkspaceBuildParameters(ctx context.Context, arg InsertWorkspaceBuildParametersParams) error {
	_, err := q.db.ExecContext(ctx, insertWorkspaceBuildParameters, arg.WorkspaceBuildID, pq.Array(arg.Name), pq.Array(arg.Value))
	return err
}
|
|
|
|
const getLatestWorkspaceBuildByWorkspaceID = `-- name: GetLatestWorkspaceBuildByWorkspaceID :one
SELECT
	id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline
FROM
	workspace_builds
WHERE
	workspace_id = $1
ORDER BY
	build_number desc
LIMIT
	1
`

// GetLatestWorkspaceBuildByWorkspaceID returns the build with the highest
// build_number for a workspace, i.e. its current build. Scan returns
// sql.ErrNoRows when the workspace has no builds.
func (q *sqlQuerier) GetLatestWorkspaceBuildByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (WorkspaceBuild, error) {
	row := q.db.QueryRowContext(ctx, getLatestWorkspaceBuildByWorkspaceID, workspaceID)
	var i WorkspaceBuild
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.WorkspaceID,
		&i.TemplateVersionID,
		&i.BuildNumber,
		&i.Transition,
		&i.InitiatorID,
		&i.ProvisionerState,
		&i.JobID,
		&i.Deadline,
		&i.Reason,
		&i.DailyCost,
		&i.MaxDeadline,
	)
	return i, err
}
|
|
|
|
const getLatestWorkspaceBuilds = `-- name: GetLatestWorkspaceBuilds :many
SELECT wb.id, wb.created_at, wb.updated_at, wb.workspace_id, wb.template_version_id, wb.build_number, wb.transition, wb.initiator_id, wb.provisioner_state, wb.job_id, wb.deadline, wb.reason, wb.daily_cost, wb.max_deadline
FROM (
	SELECT
		workspace_id, MAX(build_number) as max_build_number
	FROM
		workspace_builds
	GROUP BY
		workspace_id
) m
JOIN
	workspace_builds wb
ON m.workspace_id = wb.workspace_id AND m.max_build_number = wb.build_number
`

// GetLatestWorkspaceBuilds returns the most recent build (highest
// build_number) of every workspace: a GROUP BY picks each workspace's max
// build number, then a self-join fetches those full rows.
func (q *sqlQuerier) GetLatestWorkspaceBuilds(ctx context.Context) ([]WorkspaceBuild, error) {
	rows, err := q.db.QueryContext(ctx, getLatestWorkspaceBuilds)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceBuild
	for rows.Next() {
		var i WorkspaceBuild
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.WorkspaceID,
			&i.TemplateVersionID,
			&i.BuildNumber,
			&i.Transition,
			&i.InitiatorID,
			&i.ProvisionerState,
			&i.JobID,
			&i.Deadline,
			&i.Reason,
			&i.DailyCost,
			&i.MaxDeadline,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getLatestWorkspaceBuildsByWorkspaceIDs = `-- name: GetLatestWorkspaceBuildsByWorkspaceIDs :many
SELECT wb.id, wb.created_at, wb.updated_at, wb.workspace_id, wb.template_version_id, wb.build_number, wb.transition, wb.initiator_id, wb.provisioner_state, wb.job_id, wb.deadline, wb.reason, wb.daily_cost, wb.max_deadline
FROM (
	SELECT
		workspace_id, MAX(build_number) as max_build_number
	FROM
		workspace_builds
	WHERE
		workspace_id = ANY($1 :: uuid [ ])
	GROUP BY
		workspace_id
) m
JOIN
	workspace_builds wb
ON m.workspace_id = wb.workspace_id AND m.max_build_number = wb.build_number
`

// GetLatestWorkspaceBuildsByWorkspaceIDs is the batched variant of
// GetLatestWorkspaceBuilds: it returns the highest-numbered build for each of
// the supplied workspace IDs (bound as a Postgres uuid[] via pq.Array).
func (q *sqlQuerier) GetLatestWorkspaceBuildsByWorkspaceIDs(ctx context.Context, ids []uuid.UUID) ([]WorkspaceBuild, error) {
	rows, err := q.db.QueryContext(ctx, getLatestWorkspaceBuildsByWorkspaceIDs, pq.Array(ids))
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceBuild
	for rows.Next() {
		var i WorkspaceBuild
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.WorkspaceID,
			&i.TemplateVersionID,
			&i.BuildNumber,
			&i.Transition,
			&i.InitiatorID,
			&i.ProvisionerState,
			&i.JobID,
			&i.Deadline,
			&i.Reason,
			&i.DailyCost,
			&i.MaxDeadline,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getWorkspaceBuildByID = `-- name: GetWorkspaceBuildByID :one
SELECT
	id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline
FROM
	workspace_builds
WHERE
	id = $1
LIMIT
	1
`

// GetWorkspaceBuildByID fetches a single workspace build by primary key.
// Scan returns sql.ErrNoRows when the id does not exist.
func (q *sqlQuerier) GetWorkspaceBuildByID(ctx context.Context, id uuid.UUID) (WorkspaceBuild, error) {
	row := q.db.QueryRowContext(ctx, getWorkspaceBuildByID, id)
	var i WorkspaceBuild
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.WorkspaceID,
		&i.TemplateVersionID,
		&i.BuildNumber,
		&i.Transition,
		&i.InitiatorID,
		&i.ProvisionerState,
		&i.JobID,
		&i.Deadline,
		&i.Reason,
		&i.DailyCost,
		&i.MaxDeadline,
	)
	return i, err
}
|
|
|
|
const getWorkspaceBuildByJobID = `-- name: GetWorkspaceBuildByJobID :one
SELECT
	id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline
FROM
	workspace_builds
WHERE
	job_id = $1
LIMIT
	1
`

// GetWorkspaceBuildByJobID fetches the workspace build associated with a
// provisioner job. Scan returns sql.ErrNoRows when no build references the
// job.
func (q *sqlQuerier) GetWorkspaceBuildByJobID(ctx context.Context, jobID uuid.UUID) (WorkspaceBuild, error) {
	row := q.db.QueryRowContext(ctx, getWorkspaceBuildByJobID, jobID)
	var i WorkspaceBuild
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.WorkspaceID,
		&i.TemplateVersionID,
		&i.BuildNumber,
		&i.Transition,
		&i.InitiatorID,
		&i.ProvisionerState,
		&i.JobID,
		&i.Deadline,
		&i.Reason,
		&i.DailyCost,
		&i.MaxDeadline,
	)
	return i, err
}
|
|
|
|
const getWorkspaceBuildByWorkspaceIDAndBuildNumber = `-- name: GetWorkspaceBuildByWorkspaceIDAndBuildNumber :one
SELECT
	id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline
FROM
	workspace_builds
WHERE
	workspace_id = $1
	AND build_number = $2
`

// GetWorkspaceBuildByWorkspaceIDAndBuildNumberParams is the composite key
// (workspace, build number) identifying one build.
type GetWorkspaceBuildByWorkspaceIDAndBuildNumberParams struct {
	WorkspaceID uuid.UUID `db:"workspace_id" json:"workspace_id"`
	BuildNumber int32     `db:"build_number" json:"build_number"`
}

// GetWorkspaceBuildByWorkspaceIDAndBuildNumber fetches one build by its
// workspace-scoped build number. Scan returns sql.ErrNoRows when no such
// build exists.
func (q *sqlQuerier) GetWorkspaceBuildByWorkspaceIDAndBuildNumber(ctx context.Context, arg GetWorkspaceBuildByWorkspaceIDAndBuildNumberParams) (WorkspaceBuild, error) {
	row := q.db.QueryRowContext(ctx, getWorkspaceBuildByWorkspaceIDAndBuildNumber, arg.WorkspaceID, arg.BuildNumber)
	var i WorkspaceBuild
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.WorkspaceID,
		&i.TemplateVersionID,
		&i.BuildNumber,
		&i.Transition,
		&i.InitiatorID,
		&i.ProvisionerState,
		&i.JobID,
		&i.Deadline,
		&i.Reason,
		&i.DailyCost,
		&i.MaxDeadline,
	)
	return i, err
}
|
|
|
|
const getWorkspaceBuildsByWorkspaceID = `-- name: GetWorkspaceBuildsByWorkspaceID :many
SELECT
	id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline
FROM
	workspace_builds
WHERE
	workspace_builds.workspace_id = $1
	AND workspace_builds.created_at > $2
	AND CASE
		-- This allows using the last element on a page as effectively a cursor.
		-- This is an important option for scripts that need to paginate without
		-- duplicating or missing data.
		WHEN $3 :: uuid != '00000000-0000-0000-0000-000000000000'::uuid THEN (
			-- The pagination cursor is the last ID of the previous page.
			-- The query is ordered by the build_number field, so select all
			-- rows after the cursor.
			build_number > (
				SELECT
					build_number
				FROM
					workspace_builds
				WHERE
					id = $3
			)
		)
		ELSE true
	END
ORDER BY
	build_number desc OFFSET $4
LIMIT
	-- A null limit means "no limit", so 0 means return all
	NULLIF($5 :: int, 0)
`

// GetWorkspaceBuildsByWorkspaceIDParams configures the paginated listing:
// Since is an exclusive created_at lower bound; AfterID is an optional cursor
// (the zero UUID disables it); OffsetOpt/LimitOpt are classic offset
// pagination, with LimitOpt == 0 meaning "no limit" (via NULLIF in the SQL).
type GetWorkspaceBuildsByWorkspaceIDParams struct {
	WorkspaceID uuid.UUID `db:"workspace_id" json:"workspace_id"`
	Since       time.Time `db:"since" json:"since"`
	AfterID     uuid.UUID `db:"after_id" json:"after_id"`
	OffsetOpt   int32     `db:"offset_opt" json:"offset_opt"`
	LimitOpt    int32     `db:"limit_opt" json:"limit_opt"`
}

// GetWorkspaceBuildsByWorkspaceID lists a workspace's builds newest-first
// (build_number desc) with cursor- or offset-based pagination as described on
// the params struct.
func (q *sqlQuerier) GetWorkspaceBuildsByWorkspaceID(ctx context.Context, arg GetWorkspaceBuildsByWorkspaceIDParams) ([]WorkspaceBuild, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaceBuildsByWorkspaceID,
		arg.WorkspaceID,
		arg.Since,
		arg.AfterID,
		arg.OffsetOpt,
		arg.LimitOpt,
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceBuild
	for rows.Next() {
		var i WorkspaceBuild
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.WorkspaceID,
			&i.TemplateVersionID,
			&i.BuildNumber,
			&i.Transition,
			&i.InitiatorID,
			&i.ProvisionerState,
			&i.JobID,
			&i.Deadline,
			&i.Reason,
			&i.DailyCost,
			&i.MaxDeadline,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getWorkspaceBuildsCreatedAfter = `-- name: GetWorkspaceBuildsCreatedAfter :many
SELECT id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline FROM workspace_builds WHERE created_at > $1
`

// GetWorkspaceBuildsCreatedAfter lists all builds created strictly after
// createdAt (exclusive bound), across all workspaces, in no guaranteed order.
func (q *sqlQuerier) GetWorkspaceBuildsCreatedAfter(ctx context.Context, createdAt time.Time) ([]WorkspaceBuild, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaceBuildsCreatedAfter, createdAt)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceBuild
	for rows.Next() {
		var i WorkspaceBuild
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.WorkspaceID,
			&i.TemplateVersionID,
			&i.BuildNumber,
			&i.Transition,
			&i.InitiatorID,
			&i.ProvisionerState,
			&i.JobID,
			&i.Deadline,
			&i.Reason,
			&i.DailyCost,
			&i.MaxDeadline,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertWorkspaceBuild = `-- name: InsertWorkspaceBuild :one
INSERT INTO
	workspace_builds (
		id,
		created_at,
		updated_at,
		workspace_id,
		template_version_id,
		"build_number",
		transition,
		initiator_id,
		job_id,
		provisioner_state,
		deadline,
		max_deadline,
		reason
	)
VALUES
	($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13) RETURNING id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline
`

// InsertWorkspaceBuildParams carries one workspace_builds row. daily_cost is
// intentionally absent: it is not part of the INSERT and is populated later
// via UpdateWorkspaceBuildCostByID.
type InsertWorkspaceBuildParams struct {
	ID                uuid.UUID           `db:"id" json:"id"`
	CreatedAt         time.Time           `db:"created_at" json:"created_at"`
	UpdatedAt         time.Time           `db:"updated_at" json:"updated_at"`
	WorkspaceID       uuid.UUID           `db:"workspace_id" json:"workspace_id"`
	TemplateVersionID uuid.UUID           `db:"template_version_id" json:"template_version_id"`
	BuildNumber       int32               `db:"build_number" json:"build_number"`
	Transition        WorkspaceTransition `db:"transition" json:"transition"`
	InitiatorID       uuid.UUID           `db:"initiator_id" json:"initiator_id"`
	JobID             uuid.UUID           `db:"job_id" json:"job_id"`
	ProvisionerState  []byte              `db:"provisioner_state" json:"provisioner_state"`
	Deadline          time.Time           `db:"deadline" json:"deadline"`
	MaxDeadline       time.Time           `db:"max_deadline" json:"max_deadline"`
	Reason            BuildReason         `db:"reason" json:"reason"`
}

// InsertWorkspaceBuild inserts one build and returns it as stored (including
// the default daily_cost). Bind order follows the INSERT column list; Scan
// order follows the RETURNING list, where provisioner_state precedes job_id.
func (q *sqlQuerier) InsertWorkspaceBuild(ctx context.Context, arg InsertWorkspaceBuildParams) (WorkspaceBuild, error) {
	row := q.db.QueryRowContext(ctx, insertWorkspaceBuild,
		arg.ID,
		arg.CreatedAt,
		arg.UpdatedAt,
		arg.WorkspaceID,
		arg.TemplateVersionID,
		arg.BuildNumber,
		arg.Transition,
		arg.InitiatorID,
		arg.JobID,
		arg.ProvisionerState,
		arg.Deadline,
		arg.MaxDeadline,
		arg.Reason,
	)
	var i WorkspaceBuild
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.WorkspaceID,
		&i.TemplateVersionID,
		&i.BuildNumber,
		&i.Transition,
		&i.InitiatorID,
		&i.ProvisionerState,
		&i.JobID,
		&i.Deadline,
		&i.Reason,
		&i.DailyCost,
		&i.MaxDeadline,
	)
	return i, err
}
|
|
|
|
const updateWorkspaceBuildByID = `-- name: UpdateWorkspaceBuildByID :one
UPDATE
	workspace_builds
SET
	updated_at = $2,
	provisioner_state = $3,
	deadline = $4,
	max_deadline = $5
WHERE
	id = $1 RETURNING id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline
`

// UpdateWorkspaceBuildByIDParams holds the mutable build fields: bookkeeping
// timestamp, opaque provisioner state blob, and the two shutdown deadlines.
type UpdateWorkspaceBuildByIDParams struct {
	ID               uuid.UUID `db:"id" json:"id"`
	UpdatedAt        time.Time `db:"updated_at" json:"updated_at"`
	ProvisionerState []byte    `db:"provisioner_state" json:"provisioner_state"`
	Deadline         time.Time `db:"deadline" json:"deadline"`
	MaxDeadline      time.Time `db:"max_deadline" json:"max_deadline"`
}

// UpdateWorkspaceBuildByID updates a build's provisioner state and deadlines
// and returns the full updated row. Scan returns sql.ErrNoRows when the id
// does not exist (no row is updated).
func (q *sqlQuerier) UpdateWorkspaceBuildByID(ctx context.Context, arg UpdateWorkspaceBuildByIDParams) (WorkspaceBuild, error) {
	row := q.db.QueryRowContext(ctx, updateWorkspaceBuildByID,
		arg.ID,
		arg.UpdatedAt,
		arg.ProvisionerState,
		arg.Deadline,
		arg.MaxDeadline,
	)
	var i WorkspaceBuild
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.WorkspaceID,
		&i.TemplateVersionID,
		&i.BuildNumber,
		&i.Transition,
		&i.InitiatorID,
		&i.ProvisionerState,
		&i.JobID,
		&i.Deadline,
		&i.Reason,
		&i.DailyCost,
		&i.MaxDeadline,
	)
	return i, err
}
|
|
|
|
const updateWorkspaceBuildCostByID = `-- name: UpdateWorkspaceBuildCostByID :one
UPDATE
	workspace_builds
SET
	daily_cost = $2
WHERE
	id = $1 RETURNING id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline
`

// UpdateWorkspaceBuildCostByIDParams pairs a build id with its new
// daily_cost value.
type UpdateWorkspaceBuildCostByIDParams struct {
	ID        uuid.UUID `db:"id" json:"id"`
	DailyCost int32     `db:"daily_cost" json:"daily_cost"`
}

// UpdateWorkspaceBuildCostByID sets only the daily_cost column of a build and
// returns the full updated row. Scan returns sql.ErrNoRows when the id does
// not exist.
func (q *sqlQuerier) UpdateWorkspaceBuildCostByID(ctx context.Context, arg UpdateWorkspaceBuildCostByIDParams) (WorkspaceBuild, error) {
	row := q.db.QueryRowContext(ctx, updateWorkspaceBuildCostByID, arg.ID, arg.DailyCost)
	var i WorkspaceBuild
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.WorkspaceID,
		&i.TemplateVersionID,
		&i.BuildNumber,
		&i.Transition,
		&i.InitiatorID,
		&i.ProvisionerState,
		&i.JobID,
		&i.Deadline,
		&i.Reason,
		&i.DailyCost,
		&i.MaxDeadline,
	)
	return i, err
}
|
|
|
|
const getWorkspaceResourceByID = `-- name: GetWorkspaceResourceByID :one
SELECT
	id, created_at, job_id, transition, type, name, hide, icon, instance_type, daily_cost
FROM
	workspace_resources
WHERE
	id = $1
`

// GetWorkspaceResourceByID fetches a single workspace resource by primary
// key. Scan returns sql.ErrNoRows when the id does not exist.
func (q *sqlQuerier) GetWorkspaceResourceByID(ctx context.Context, id uuid.UUID) (WorkspaceResource, error) {
	row := q.db.QueryRowContext(ctx, getWorkspaceResourceByID, id)
	var i WorkspaceResource
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.JobID,
		&i.Transition,
		&i.Type,
		&i.Name,
		&i.Hide,
		&i.Icon,
		&i.InstanceType,
		&i.DailyCost,
	)
	return i, err
}
|
|
|
|
const getWorkspaceResourceMetadataByResourceIDs = `-- name: GetWorkspaceResourceMetadataByResourceIDs :many
SELECT
	workspace_resource_id, key, value, sensitive, id
FROM
	workspace_resource_metadata
WHERE
	workspace_resource_id = ANY($1 :: uuid [ ]) ORDER BY id ASC
`

// GetWorkspaceResourceMetadataByResourceIDs lists the metadata key/value rows
// for a batch of resources, ordered by row id (which is insertion order for a
// serial id). IDs are bound as a Postgres uuid[] via pq.Array.
func (q *sqlQuerier) GetWorkspaceResourceMetadataByResourceIDs(ctx context.Context, ids []uuid.UUID) ([]WorkspaceResourceMetadatum, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaceResourceMetadataByResourceIDs, pq.Array(ids))
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceResourceMetadatum
	for rows.Next() {
		var i WorkspaceResourceMetadatum
		if err := rows.Scan(
			&i.WorkspaceResourceID,
			&i.Key,
			&i.Value,
			&i.Sensitive,
			&i.ID,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getWorkspaceResourceMetadataCreatedAfter = `-- name: GetWorkspaceResourceMetadataCreatedAfter :many
SELECT workspace_resource_id, key, value, sensitive, id FROM workspace_resource_metadata WHERE workspace_resource_id = ANY(
	SELECT id FROM workspace_resources WHERE created_at > $1
)
`

// GetWorkspaceResourceMetadataCreatedAfter lists metadata for all resources
// created strictly after createdAt. Note the cutoff applies to the parent
// resource's created_at (via the subquery), not to the metadata rows
// themselves, which carry no timestamp here.
func (q *sqlQuerier) GetWorkspaceResourceMetadataCreatedAfter(ctx context.Context, createdAt time.Time) ([]WorkspaceResourceMetadatum, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaceResourceMetadataCreatedAfter, createdAt)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceResourceMetadatum
	for rows.Next() {
		var i WorkspaceResourceMetadatum
		if err := rows.Scan(
			&i.WorkspaceResourceID,
			&i.Key,
			&i.Value,
			&i.Sensitive,
			&i.ID,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getWorkspaceResourcesByJobID = `-- name: GetWorkspaceResourcesByJobID :many
SELECT
	id, created_at, job_id, transition, type, name, hide, icon, instance_type, daily_cost
FROM
	workspace_resources
WHERE
	job_id = $1
`

// GetWorkspaceResourcesByJobID lists all resources produced by a single
// provisioner job, in no guaranteed order.
func (q *sqlQuerier) GetWorkspaceResourcesByJobID(ctx context.Context, jobID uuid.UUID) ([]WorkspaceResource, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaceResourcesByJobID, jobID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceResource
	for rows.Next() {
		var i WorkspaceResource
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.JobID,
			&i.Transition,
			&i.Type,
			&i.Name,
			&i.Hide,
			&i.Icon,
			&i.InstanceType,
			&i.DailyCost,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getWorkspaceResourcesByJobIDs = `-- name: GetWorkspaceResourcesByJobIDs :many
SELECT
	id, created_at, job_id, transition, type, name, hide, icon, instance_type, daily_cost
FROM
	workspace_resources
WHERE
	job_id = ANY($1 :: uuid [ ])
`

// GetWorkspaceResourcesByJobIDs is the batched variant of
// GetWorkspaceResourcesByJobID: one round trip for many jobs, ids bound as a
// Postgres uuid[] via pq.Array.
func (q *sqlQuerier) GetWorkspaceResourcesByJobIDs(ctx context.Context, ids []uuid.UUID) ([]WorkspaceResource, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaceResourcesByJobIDs, pq.Array(ids))
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceResource
	for rows.Next() {
		var i WorkspaceResource
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.JobID,
			&i.Transition,
			&i.Type,
			&i.Name,
			&i.Hide,
			&i.Icon,
			&i.InstanceType,
			&i.DailyCost,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getWorkspaceResourcesCreatedAfter = `-- name: GetWorkspaceResourcesCreatedAfter :many
SELECT id, created_at, job_id, transition, type, name, hide, icon, instance_type, daily_cost FROM workspace_resources WHERE created_at > $1
`

// GetWorkspaceResourcesCreatedAfter lists resources created strictly after
// createdAt (exclusive bound), in no guaranteed order.
func (q *sqlQuerier) GetWorkspaceResourcesCreatedAfter(ctx context.Context, createdAt time.Time) ([]WorkspaceResource, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaceResourcesCreatedAfter, createdAt)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceResource
	for rows.Next() {
		var i WorkspaceResource
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.JobID,
			&i.Transition,
			&i.Type,
			&i.Name,
			&i.Hide,
			&i.Icon,
			&i.InstanceType,
			&i.DailyCost,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertWorkspaceResource = `-- name: InsertWorkspaceResource :one
INSERT INTO
	workspace_resources (id, created_at, job_id, transition, type, name, hide, icon, instance_type, daily_cost)
VALUES
	($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) RETURNING id, created_at, job_id, transition, type, name, hide, icon, instance_type, daily_cost
`

// InsertWorkspaceResourceParams carries one workspace_resources row.
// InstanceType is nullable.
type InsertWorkspaceResourceParams struct {
	ID           uuid.UUID           `db:"id" json:"id"`
	CreatedAt    time.Time           `db:"created_at" json:"created_at"`
	JobID        uuid.UUID           `db:"job_id" json:"job_id"`
	Transition   WorkspaceTransition `db:"transition" json:"transition"`
	Type         string              `db:"type" json:"type"`
	Name         string              `db:"name" json:"name"`
	Hide         bool                `db:"hide" json:"hide"`
	Icon         string              `db:"icon" json:"icon"`
	InstanceType sql.NullString      `db:"instance_type" json:"instance_type"`
	DailyCost    int32               `db:"daily_cost" json:"daily_cost"`
}

// InsertWorkspaceResource inserts one resource row and returns it as stored;
// bind and Scan orders both follow the column list above.
func (q *sqlQuerier) InsertWorkspaceResource(ctx context.Context, arg InsertWorkspaceResourceParams) (WorkspaceResource, error) {
	row := q.db.QueryRowContext(ctx, insertWorkspaceResource,
		arg.ID,
		arg.CreatedAt,
		arg.JobID,
		arg.Transition,
		arg.Type,
		arg.Name,
		arg.Hide,
		arg.Icon,
		arg.InstanceType,
		arg.DailyCost,
	)
	var i WorkspaceResource
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.JobID,
		&i.Transition,
		&i.Type,
		&i.Name,
		&i.Hide,
		&i.Icon,
		&i.InstanceType,
		&i.DailyCost,
	)
	return i, err
}
|
|
|
|
const insertWorkspaceResourceMetadata = `-- name: InsertWorkspaceResourceMetadata :many
INSERT INTO
	workspace_resource_metadata
SELECT
	$1 :: uuid AS workspace_resource_id,
	unnest($2 :: text [ ]) AS key,
	unnest($3 :: text [ ]) AS value,
	unnest($4 :: boolean [ ]) AS sensitive RETURNING workspace_resource_id, key, value, sensitive, id
`

// InsertWorkspaceResourceMetadataParams holds parallel slices; Key, Value and
// Sensitive are unnest()ed together so element i of each slice forms one row.
type InsertWorkspaceResourceMetadataParams struct {
	WorkspaceResourceID uuid.UUID `db:"workspace_resource_id" json:"workspace_resource_id"`
	Key                 []string  `db:"key" json:"key"`
	Value               []string  `db:"value" json:"value"`
	Sensitive           []bool    `db:"sensitive" json:"sensitive"`
}

// InsertWorkspaceResourceMetadata bulk-inserts metadata rows for a single
// workspace resource and returns the inserted rows.
func (q *sqlQuerier) InsertWorkspaceResourceMetadata(ctx context.Context, arg InsertWorkspaceResourceMetadataParams) ([]WorkspaceResourceMetadatum, error) {
	// pq.Array adapts the Go slices to PostgreSQL array parameters.
	rows, err := q.db.QueryContext(ctx, insertWorkspaceResourceMetadata,
		arg.WorkspaceResourceID,
		pq.Array(arg.Key),
		pq.Array(arg.Value),
		pq.Array(arg.Sensitive),
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []WorkspaceResourceMetadatum
	for rows.Next() {
		var i WorkspaceResourceMetadatum
		if err := rows.Scan(
			&i.WorkspaceResourceID,
			&i.Key,
			&i.Value,
			&i.Sensitive,
			&i.ID,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getDeploymentWorkspaceStats = `-- name: GetDeploymentWorkspaceStats :one
WITH workspaces_with_jobs AS (
	SELECT
	latest_build.transition, latest_build.provisioner_job_id, latest_build.started_at, latest_build.updated_at, latest_build.canceled_at, latest_build.completed_at, latest_build.error FROM workspaces
	LEFT JOIN LATERAL (
		SELECT
			workspace_builds.transition,
			provisioner_jobs.id AS provisioner_job_id,
			provisioner_jobs.started_at,
			provisioner_jobs.updated_at,
			provisioner_jobs.canceled_at,
			provisioner_jobs.completed_at,
			provisioner_jobs.error
		FROM
			workspace_builds
		LEFT JOIN
			provisioner_jobs
		ON
			provisioner_jobs.id = workspace_builds.job_id
		WHERE
			workspace_builds.workspace_id = workspaces.id
		ORDER BY
			build_number DESC
		LIMIT
			1
	) latest_build ON TRUE WHERE deleted = false
), pending_workspaces AS (
	SELECT COUNT(*) AS count FROM workspaces_with_jobs WHERE
		started_at IS NULL
), building_workspaces AS (
	SELECT COUNT(*) AS count FROM workspaces_with_jobs WHERE
		started_at IS NOT NULL AND
		canceled_at IS NULL AND
		completed_at IS NULL AND
		updated_at - INTERVAL '30 seconds' < NOW()
), running_workspaces AS (
	SELECT COUNT(*) AS count FROM workspaces_with_jobs WHERE
		completed_at IS NOT NULL AND
		canceled_at IS NULL AND
		error IS NULL AND
		transition = 'start'::workspace_transition
), failed_workspaces AS (
	SELECT COUNT(*) AS count FROM workspaces_with_jobs WHERE
		(canceled_at IS NOT NULL AND
			error IS NOT NULL) OR
		(completed_at IS NOT NULL AND
			error IS NOT NULL)
), stopped_workspaces AS (
	SELECT COUNT(*) AS count FROM workspaces_with_jobs WHERE
		completed_at IS NOT NULL AND
		canceled_at IS NULL AND
		error IS NULL AND
		transition = 'stop'::workspace_transition
)
SELECT
	pending_workspaces.count AS pending_workspaces,
	building_workspaces.count AS building_workspaces,
	running_workspaces.count AS running_workspaces,
	failed_workspaces.count AS failed_workspaces,
	stopped_workspaces.count AS stopped_workspaces
FROM pending_workspaces, building_workspaces, running_workspaces, failed_workspaces, stopped_workspaces
`

// GetDeploymentWorkspaceStatsRow holds deployment-wide workspace counts
// bucketed by the state of each workspace's latest build.
type GetDeploymentWorkspaceStatsRow struct {
	PendingWorkspaces  int64 `db:"pending_workspaces" json:"pending_workspaces"`
	BuildingWorkspaces int64 `db:"building_workspaces" json:"building_workspaces"`
	RunningWorkspaces  int64 `db:"running_workspaces" json:"running_workspaces"`
	FailedWorkspaces   int64 `db:"failed_workspaces" json:"failed_workspaces"`
	StoppedWorkspaces  int64 `db:"stopped_workspaces" json:"stopped_workspaces"`
}

// GetDeploymentWorkspaceStats computes counts of non-deleted workspaces per
// latest-build state (pending/building/running/failed/stopped) in one query.
func (q *sqlQuerier) GetDeploymentWorkspaceStats(ctx context.Context) (GetDeploymentWorkspaceStatsRow, error) {
	row := q.db.QueryRowContext(ctx, getDeploymentWorkspaceStats)
	var i GetDeploymentWorkspaceStatsRow
	err := row.Scan(
		&i.PendingWorkspaces,
		&i.BuildingWorkspaces,
		&i.RunningWorkspaces,
		&i.FailedWorkspaces,
		&i.StoppedWorkspaces,
	)
	return i, err
}
|
|
|
|
const getWorkspaceByAgentID = `-- name: GetWorkspaceByAgentID :one
SELECT
	id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at
FROM
	workspaces
WHERE
	workspaces.id = (
		SELECT
			workspace_id
		FROM
			workspace_builds
		WHERE
			workspace_builds.job_id = (
				SELECT
					job_id
				FROM
					workspace_resources
				WHERE
					workspace_resources.id = (
						SELECT
							resource_id
						FROM
							workspace_agents
						WHERE
							workspace_agents.id = $1
					)
			)
	)
`

// GetWorkspaceByAgentID resolves a workspace from an agent ID by walking
// agent -> resource -> build job -> workspace via nested subqueries.
func (q *sqlQuerier) GetWorkspaceByAgentID(ctx context.Context, agentID uuid.UUID) (Workspace, error) {
	row := q.db.QueryRowContext(ctx, getWorkspaceByAgentID, agentID)
	var i Workspace
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.OwnerID,
		&i.OrganizationID,
		&i.TemplateID,
		&i.Deleted,
		&i.Name,
		&i.AutostartSchedule,
		&i.Ttl,
		&i.LastUsedAt,
	)
	return i, err
}
|
|
|
|
const getWorkspaceByID = `-- name: GetWorkspaceByID :one
SELECT
	id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at
FROM
	workspaces
WHERE
	id = $1
LIMIT
	1
`

// GetWorkspaceByID fetches a single workspace by primary key. Deleted
// workspaces are not filtered out here.
func (q *sqlQuerier) GetWorkspaceByID(ctx context.Context, id uuid.UUID) (Workspace, error) {
	row := q.db.QueryRowContext(ctx, getWorkspaceByID, id)
	var i Workspace
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.OwnerID,
		&i.OrganizationID,
		&i.TemplateID,
		&i.Deleted,
		&i.Name,
		&i.AutostartSchedule,
		&i.Ttl,
		&i.LastUsedAt,
	)
	return i, err
}
|
|
|
|
const getWorkspaceByOwnerIDAndName = `-- name: GetWorkspaceByOwnerIDAndName :one
SELECT
	id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at
FROM
	workspaces
WHERE
	owner_id = $1
	AND deleted = $2
	AND LOWER("name") = LOWER($3)
ORDER BY created_at DESC
`

// GetWorkspaceByOwnerIDAndNameParams filters by owner, deleted flag, and a
// case-insensitive workspace name match.
type GetWorkspaceByOwnerIDAndNameParams struct {
	OwnerID uuid.UUID `db:"owner_id" json:"owner_id"`
	Deleted bool      `db:"deleted" json:"deleted"`
	Name    string    `db:"name" json:"name"`
}

// GetWorkspaceByOwnerIDAndName returns the matching workspace; with
// ORDER BY created_at DESC the most recently created match wins when
// several rows qualify (QueryRow reads only the first row).
func (q *sqlQuerier) GetWorkspaceByOwnerIDAndName(ctx context.Context, arg GetWorkspaceByOwnerIDAndNameParams) (Workspace, error) {
	row := q.db.QueryRowContext(ctx, getWorkspaceByOwnerIDAndName, arg.OwnerID, arg.Deleted, arg.Name)
	var i Workspace
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.OwnerID,
		&i.OrganizationID,
		&i.TemplateID,
		&i.Deleted,
		&i.Name,
		&i.AutostartSchedule,
		&i.Ttl,
		&i.LastUsedAt,
	)
	return i, err
}
|
|
|
|
const getWorkspaceByWorkspaceAppID = `-- name: GetWorkspaceByWorkspaceAppID :one
SELECT
	id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at
FROM
	workspaces
WHERE
	workspaces.id = (
		SELECT
			workspace_id
		FROM
			workspace_builds
		WHERE
			workspace_builds.job_id = (
				SELECT
					job_id
				FROM
					workspace_resources
				WHERE
					workspace_resources.id = (
						SELECT
							resource_id
						FROM
							workspace_agents
						WHERE
							workspace_agents.id = (
								SELECT
									agent_id
								FROM
									workspace_apps
								WHERE
									workspace_apps.id = $1
							)
					)
			)
	)
`

// GetWorkspaceByWorkspaceAppID resolves a workspace from an app ID by walking
// app -> agent -> resource -> build job -> workspace via nested subqueries.
func (q *sqlQuerier) GetWorkspaceByWorkspaceAppID(ctx context.Context, workspaceAppID uuid.UUID) (Workspace, error) {
	row := q.db.QueryRowContext(ctx, getWorkspaceByWorkspaceAppID, workspaceAppID)
	var i Workspace
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.OwnerID,
		&i.OrganizationID,
		&i.TemplateID,
		&i.Deleted,
		&i.Name,
		&i.AutostartSchedule,
		&i.Ttl,
		&i.LastUsedAt,
	)
	return i, err
}
|
|
|
|
const getWorkspaces = `-- name: GetWorkspaces :many
SELECT
	workspaces.id, workspaces.created_at, workspaces.updated_at, workspaces.owner_id, workspaces.organization_id, workspaces.template_id, workspaces.deleted, workspaces.name, workspaces.autostart_schedule, workspaces.ttl, workspaces.last_used_at, COUNT(*) OVER () as count
FROM
	workspaces
JOIN
	users
ON
	workspaces.owner_id = users.id
LEFT JOIN LATERAL (
	SELECT
		workspace_builds.transition,
		provisioner_jobs.id AS provisioner_job_id,
		provisioner_jobs.started_at,
		provisioner_jobs.updated_at,
		provisioner_jobs.canceled_at,
		provisioner_jobs.completed_at,
		provisioner_jobs.error
	FROM
		workspace_builds
	LEFT JOIN
		provisioner_jobs
	ON
		provisioner_jobs.id = workspace_builds.job_id
	WHERE
		workspace_builds.workspace_id = workspaces.id
	ORDER BY
		build_number DESC
	LIMIT
		1
) latest_build ON TRUE
WHERE
	-- Optionally include deleted workspaces
	workspaces.deleted = $1
	AND CASE
		WHEN $2 :: text != '' THEN
			CASE
				WHEN $2 = 'pending' THEN
					latest_build.started_at IS NULL
				WHEN $2 = 'starting' THEN
					latest_build.started_at IS NOT NULL AND
					latest_build.canceled_at IS NULL AND
					latest_build.completed_at IS NULL AND
					latest_build.updated_at - INTERVAL '30 seconds' < NOW() AND
					latest_build.transition = 'start'::workspace_transition

				WHEN $2 = 'running' THEN
					latest_build.completed_at IS NOT NULL AND
					latest_build.canceled_at IS NULL AND
					latest_build.error IS NULL AND
					latest_build.transition = 'start'::workspace_transition

				WHEN $2 = 'stopping' THEN
					latest_build.started_at IS NOT NULL AND
					latest_build.canceled_at IS NULL AND
					latest_build.completed_at IS NULL AND
					latest_build.updated_at - INTERVAL '30 seconds' < NOW() AND
					latest_build.transition = 'stop'::workspace_transition

				WHEN $2 = 'stopped' THEN
					latest_build.completed_at IS NOT NULL AND
					latest_build.canceled_at IS NULL AND
					latest_build.error IS NULL AND
					latest_build.transition = 'stop'::workspace_transition

				WHEN $2 = 'failed' THEN
					(latest_build.canceled_at IS NOT NULL AND
						latest_build.error IS NOT NULL) OR
					(latest_build.completed_at IS NOT NULL AND
						latest_build.error IS NOT NULL)

				WHEN $2 = 'canceling' THEN
					latest_build.canceled_at IS NOT NULL AND
					latest_build.completed_at IS NULL

				WHEN $2 = 'canceled' THEN
					latest_build.canceled_at IS NOT NULL AND
					latest_build.completed_at IS NOT NULL

				WHEN $2 = 'deleted' THEN
					latest_build.started_at IS NOT NULL AND
					latest_build.canceled_at IS NULL AND
					latest_build.completed_at IS NOT NULL AND
					latest_build.updated_at - INTERVAL '30 seconds' < NOW() AND
					latest_build.transition = 'delete'::workspace_transition

				WHEN $2 = 'deleting' THEN
					latest_build.completed_at IS NOT NULL AND
					latest_build.canceled_at IS NULL AND
					latest_build.error IS NULL AND
					latest_build.transition = 'delete'::workspace_transition

				ELSE
					true
			END
		ELSE true
	END
	-- Filter by owner_id
	AND CASE
		WHEN $3 :: uuid != '00000000-0000-0000-0000-000000000000'::uuid THEN
			owner_id = $3
		ELSE true
	END
	-- Filter by owner_name
	AND CASE
		WHEN $4 :: text != '' THEN
			owner_id = (SELECT id FROM users WHERE lower(username) = lower($4) AND deleted = false)
		ELSE true
	END
	-- Filter by template_name
	-- There can be more than 1 template with the same name across organizations.
	-- Use the organization filter to restrict to 1 org if needed.
	AND CASE
		WHEN $5 :: text != '' THEN
			template_id = ANY(SELECT id FROM templates WHERE lower(name) = lower($5) AND deleted = false)
		ELSE true
	END
	-- Filter by template_ids
	AND CASE
		WHEN array_length($6 :: uuid[], 1) > 0 THEN
			template_id = ANY($6)
		ELSE true
	END
	-- Filter by name, matching on substring
	AND CASE
		WHEN $7 :: text != '' THEN
			name ILIKE '%' || $7 || '%'
		ELSE true
	END
	-- Filter by agent status
	-- has-agent: is only applicable for workspaces in "start" transition. Stopped and deleted workspaces don't have agents.
	AND CASE
		WHEN $8 :: text != '' THEN
			(
				SELECT COUNT(*)
				FROM
					workspace_resources
				JOIN
					workspace_agents
				ON
					workspace_agents.resource_id = workspace_resources.id
				WHERE
					workspace_resources.job_id = latest_build.provisioner_job_id AND
					latest_build.transition = 'start'::workspace_transition AND
					$8 = (
						CASE
							WHEN workspace_agents.first_connected_at IS NULL THEN
								CASE
									WHEN workspace_agents.connection_timeout_seconds > 0 AND NOW() - workspace_agents.created_at > workspace_agents.connection_timeout_seconds * INTERVAL '1 second' THEN
										'timeout'
									ELSE
										'connecting'
								END
							WHEN workspace_agents.disconnected_at > workspace_agents.last_connected_at THEN
								'disconnected'
							WHEN NOW() - workspace_agents.last_connected_at > INTERVAL '1 second' * $9 :: bigint THEN
								'disconnected'
							WHEN workspace_agents.last_connected_at IS NOT NULL THEN
								'connected'
							ELSE
								NULL
						END
					)
			) > 0
		ELSE true
	END
	-- Authorize Filter clause will be injected below in GetAuthorizedWorkspaces
	-- @authorize_filter
ORDER BY
	(latest_build.completed_at IS NOT NULL AND
		latest_build.canceled_at IS NULL AND
		latest_build.error IS NULL AND
		latest_build.transition = 'start'::workspace_transition) DESC,
	LOWER(users.username) ASC,
	LOWER(name) ASC
LIMIT
	CASE
		WHEN $11 :: integer > 0 THEN
			$11
	END
OFFSET
	$10
`

// GetWorkspacesParams mirrors the positional parameters of the getWorkspaces
// query. Zero values (empty string, zero UUID, empty slice, zero limit)
// disable the corresponding filter.
type GetWorkspacesParams struct {
	Deleted                               bool        `db:"deleted" json:"deleted"`
	Status                                string      `db:"status" json:"status"`
	OwnerID                               uuid.UUID   `db:"owner_id" json:"owner_id"`
	OwnerUsername                         string      `db:"owner_username" json:"owner_username"`
	TemplateName                          string      `db:"template_name" json:"template_name"`
	TemplateIds                           []uuid.UUID `db:"template_ids" json:"template_ids"`
	Name                                  string      `db:"name" json:"name"`
	HasAgent                              string      `db:"has_agent" json:"has_agent"`
	AgentInactiveDisconnectTimeoutSeconds int64       `db:"agent_inactive_disconnect_timeout_seconds" json:"agent_inactive_disconnect_timeout_seconds"`
	Offset                                int32       `db:"offset_" json:"offset_"`
	Limit                                 int32       `db:"limit_" json:"limit_"`
}

// GetWorkspacesRow is a workspace row plus Count, the total number of rows
// matching the filters (COUNT(*) OVER ()) regardless of limit/offset.
type GetWorkspacesRow struct {
	ID                uuid.UUID      `db:"id" json:"id"`
	CreatedAt         time.Time      `db:"created_at" json:"created_at"`
	UpdatedAt         time.Time      `db:"updated_at" json:"updated_at"`
	OwnerID           uuid.UUID      `db:"owner_id" json:"owner_id"`
	OrganizationID    uuid.UUID      `db:"organization_id" json:"organization_id"`
	TemplateID        uuid.UUID      `db:"template_id" json:"template_id"`
	Deleted           bool           `db:"deleted" json:"deleted"`
	Name              string         `db:"name" json:"name"`
	AutostartSchedule sql.NullString `db:"autostart_schedule" json:"autostart_schedule"`
	Ttl               sql.NullInt64  `db:"ttl" json:"ttl"`
	LastUsedAt        time.Time      `db:"last_used_at" json:"last_used_at"`
	Count             int64          `db:"count" json:"count"`
}

// GetWorkspaces lists workspaces with optional filtering by latest-build
// status, owner, template, name substring, and agent status, plus pagination.
func (q *sqlQuerier) GetWorkspaces(ctx context.Context, arg GetWorkspacesParams) ([]GetWorkspacesRow, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspaces,
		arg.Deleted,
		arg.Status,
		arg.OwnerID,
		arg.OwnerUsername,
		arg.TemplateName,
		pq.Array(arg.TemplateIds),
		arg.Name,
		arg.HasAgent,
		arg.AgentInactiveDisconnectTimeoutSeconds,
		arg.Offset,
		arg.Limit,
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetWorkspacesRow
	for rows.Next() {
		var i GetWorkspacesRow
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.OwnerID,
			&i.OrganizationID,
			&i.TemplateID,
			&i.Deleted,
			&i.Name,
			&i.AutostartSchedule,
			&i.Ttl,
			&i.LastUsedAt,
			&i.Count,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const getWorkspacesEligibleForAutoStartStop = `-- name: GetWorkspacesEligibleForAutoStartStop :many
SELECT
	workspaces.id, workspaces.created_at, workspaces.updated_at, workspaces.owner_id, workspaces.organization_id, workspaces.template_id, workspaces.deleted, workspaces.name, workspaces.autostart_schedule, workspaces.ttl, workspaces.last_used_at
FROM
	workspaces
LEFT JOIN
	workspace_builds ON workspace_builds.workspace_id = workspaces.id
WHERE
	workspace_builds.build_number = (
		SELECT
			MAX(build_number)
		FROM
			workspace_builds
		WHERE
			workspace_builds.workspace_id = workspaces.id
	) AND

	(
		-- If the workspace build was a start transition, the workspace is
		-- potentially eligible for autostop if it's past the deadline. The
		-- deadline is computed at build time upon success and is bumped based
		-- on activity (up the max deadline if set). We don't need to check
		-- license here since that's done when the values are written to the build.
		(
			workspace_builds.transition = 'start'::workspace_transition AND
			workspace_builds.deadline IS NOT NULL AND
			workspace_builds.deadline < $1 :: timestamptz
		) OR

		-- If the workspace build was a stop transition, the workspace is
		-- potentially eligible for autostart if it has a schedule set. The
		-- caller must check if the template allows autostart in a license-aware
		-- fashion as we cannot check it here.
		(
			workspace_builds.transition = 'stop'::workspace_transition AND
			workspaces.autostart_schedule IS NOT NULL
		)
	)
`

// GetWorkspacesEligibleForAutoStartStop returns workspaces whose latest build
// makes them candidates for autostop (started and past deadline at "now") or
// autostart (stopped with an autostart schedule). See the SQL comments for
// the license caveats the caller must handle.
func (q *sqlQuerier) GetWorkspacesEligibleForAutoStartStop(ctx context.Context, now time.Time) ([]Workspace, error) {
	rows, err := q.db.QueryContext(ctx, getWorkspacesEligibleForAutoStartStop, now)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []Workspace
	for rows.Next() {
		var i Workspace
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.OwnerID,
			&i.OrganizationID,
			&i.TemplateID,
			&i.Deleted,
			&i.Name,
			&i.AutostartSchedule,
			&i.Ttl,
			&i.LastUsedAt,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
const insertWorkspace = `-- name: InsertWorkspace :one
INSERT INTO
	workspaces (
		id,
		created_at,
		updated_at,
		owner_id,
		organization_id,
		template_id,
		name,
		autostart_schedule,
		ttl,
		last_used_at
	)
VALUES
	($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) RETURNING id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at
`

// InsertWorkspaceParams carries the column values for InsertWorkspace, in
// positional-argument order. The deleted column is not set here.
type InsertWorkspaceParams struct {
	ID                uuid.UUID      `db:"id" json:"id"`
	CreatedAt         time.Time      `db:"created_at" json:"created_at"`
	UpdatedAt         time.Time      `db:"updated_at" json:"updated_at"`
	OwnerID           uuid.UUID      `db:"owner_id" json:"owner_id"`
	OrganizationID    uuid.UUID      `db:"organization_id" json:"organization_id"`
	TemplateID        uuid.UUID      `db:"template_id" json:"template_id"`
	Name              string         `db:"name" json:"name"`
	AutostartSchedule sql.NullString `db:"autostart_schedule" json:"autostart_schedule"`
	Ttl               sql.NullInt64  `db:"ttl" json:"ttl"`
	LastUsedAt        time.Time      `db:"last_used_at" json:"last_used_at"`
}

// InsertWorkspace inserts one workspaces row and returns the inserted row
// (including the deleted column) via the RETURNING clause.
func (q *sqlQuerier) InsertWorkspace(ctx context.Context, arg InsertWorkspaceParams) (Workspace, error) {
	row := q.db.QueryRowContext(ctx, insertWorkspace,
		arg.ID,
		arg.CreatedAt,
		arg.UpdatedAt,
		arg.OwnerID,
		arg.OrganizationID,
		arg.TemplateID,
		arg.Name,
		arg.AutostartSchedule,
		arg.Ttl,
		arg.LastUsedAt,
	)
	var i Workspace
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.OwnerID,
		&i.OrganizationID,
		&i.TemplateID,
		&i.Deleted,
		&i.Name,
		&i.AutostartSchedule,
		&i.Ttl,
		&i.LastUsedAt,
	)
	return i, err
}
|
|
|
|
const updateWorkspace = `-- name: UpdateWorkspace :one
UPDATE
	workspaces
SET
	name = $2
WHERE
	id = $1
	AND deleted = false
RETURNING id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at
`

// UpdateWorkspaceParams identifies the workspace and its new name.
type UpdateWorkspaceParams struct {
	ID   uuid.UUID `db:"id" json:"id"`
	Name string    `db:"name" json:"name"`
}

// UpdateWorkspace renames a non-deleted workspace and returns the updated
// row; deleted workspaces are excluded by the WHERE clause.
func (q *sqlQuerier) UpdateWorkspace(ctx context.Context, arg UpdateWorkspaceParams) (Workspace, error) {
	row := q.db.QueryRowContext(ctx, updateWorkspace, arg.ID, arg.Name)
	var i Workspace
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.OwnerID,
		&i.OrganizationID,
		&i.TemplateID,
		&i.Deleted,
		&i.Name,
		&i.AutostartSchedule,
		&i.Ttl,
		&i.LastUsedAt,
	)
	return i, err
}
|
|
|
|
const updateWorkspaceAutostart = `-- name: UpdateWorkspaceAutostart :exec
UPDATE
	workspaces
SET
	autostart_schedule = $2
WHERE
	id = $1
`

// UpdateWorkspaceAutostartParams identifies the workspace and the new
// autostart schedule; a NULL schedule clears it.
type UpdateWorkspaceAutostartParams struct {
	ID                uuid.UUID      `db:"id" json:"id"`
	AutostartSchedule sql.NullString `db:"autostart_schedule" json:"autostart_schedule"`
}

// UpdateWorkspaceAutostart sets a workspace's autostart_schedule. It does
// not report whether any row matched.
func (q *sqlQuerier) UpdateWorkspaceAutostart(ctx context.Context, arg UpdateWorkspaceAutostartParams) error {
	_, err := q.db.ExecContext(ctx, updateWorkspaceAutostart, arg.ID, arg.AutostartSchedule)
	return err
}
|
|
|
|
const updateWorkspaceDeletedByID = `-- name: UpdateWorkspaceDeletedByID :exec
UPDATE
	workspaces
SET
	deleted = $2
WHERE
	id = $1
`

// UpdateWorkspaceDeletedByIDParams identifies the workspace and the desired
// deleted flag (soft delete / undelete).
type UpdateWorkspaceDeletedByIDParams struct {
	ID      uuid.UUID `db:"id" json:"id"`
	Deleted bool      `db:"deleted" json:"deleted"`
}

// UpdateWorkspaceDeletedByID sets a workspace's deleted flag. It does not
// report whether any row matched.
func (q *sqlQuerier) UpdateWorkspaceDeletedByID(ctx context.Context, arg UpdateWorkspaceDeletedByIDParams) error {
	_, err := q.db.ExecContext(ctx, updateWorkspaceDeletedByID, arg.ID, arg.Deleted)
	return err
}
|
|
|
|
const updateWorkspaceLastUsedAt = `-- name: UpdateWorkspaceLastUsedAt :exec
UPDATE
	workspaces
SET
	last_used_at = $2
WHERE
	id = $1
`

// UpdateWorkspaceLastUsedAtParams identifies the workspace and the new
// last-used timestamp.
type UpdateWorkspaceLastUsedAtParams struct {
	ID         uuid.UUID `db:"id" json:"id"`
	LastUsedAt time.Time `db:"last_used_at" json:"last_used_at"`
}

// UpdateWorkspaceLastUsedAt sets a workspace's last_used_at. It does not
// report whether any row matched.
func (q *sqlQuerier) UpdateWorkspaceLastUsedAt(ctx context.Context, arg UpdateWorkspaceLastUsedAtParams) error {
	_, err := q.db.ExecContext(ctx, updateWorkspaceLastUsedAt, arg.ID, arg.LastUsedAt)
	return err
}
|
|
|
|
const updateWorkspaceTTL = `-- name: UpdateWorkspaceTTL :exec
UPDATE
	workspaces
SET
	ttl = $2
WHERE
	id = $1
`

// UpdateWorkspaceTTLParams identifies the workspace and the new TTL; a NULL
// TTL clears it.
type UpdateWorkspaceTTLParams struct {
	ID  uuid.UUID     `db:"id" json:"id"`
	Ttl sql.NullInt64 `db:"ttl" json:"ttl"`
}

// UpdateWorkspaceTTL sets a workspace's ttl column. It does not report
// whether any row matched.
func (q *sqlQuerier) UpdateWorkspaceTTL(ctx context.Context, arg UpdateWorkspaceTTLParams) error {
	_, err := q.db.ExecContext(ctx, updateWorkspaceTTL, arg.ID, arg.Ttl)
	return err
}
|
|
|
|
const updateWorkspaceTTLToBeWithinTemplateMax = `-- name: UpdateWorkspaceTTLToBeWithinTemplateMax :exec
UPDATE
	workspaces
SET
	ttl = LEAST(ttl, $1::bigint)
WHERE
	template_id = $2
	-- LEAST() does not pick NULL, so filter it out as we don't want to set a
	-- TTL on the workspace if it's unset.
	--
	-- During build time, the template max TTL will still be used if the
	-- workspace TTL is NULL.
	AND ttl IS NOT NULL
`

// UpdateWorkspaceTTLToBeWithinTemplateMaxParams provides the template's max
// TTL cap and the template whose workspaces should be clamped.
type UpdateWorkspaceTTLToBeWithinTemplateMaxParams struct {
	TemplateMaxTTL int64     `db:"template_max_ttl" json:"template_max_ttl"`
	TemplateID     uuid.UUID `db:"template_id" json:"template_id"`
}

// UpdateWorkspaceTTLToBeWithinTemplateMax clamps the TTL of every workspace
// on the given template down to at most the template max TTL, leaving
// NULL TTLs untouched (see the SQL comment for why).
func (q *sqlQuerier) UpdateWorkspaceTTLToBeWithinTemplateMax(ctx context.Context, arg UpdateWorkspaceTTLToBeWithinTemplateMaxParams) error {
	_, err := q.db.ExecContext(ctx, updateWorkspaceTTLToBeWithinTemplateMax, arg.TemplateMaxTTL, arg.TemplateID)
	return err
}
|