Queries gettin' craAaaAazy

Kyle Carberry 2024-05-01 00:06:24 +00:00
parent 9d81d7991d
commit 19ad836e01
24 changed files with 1464 additions and 77 deletions

coderd/apidoc/docs.go generated

@@ -1154,15 +1154,59 @@ const docTemplate = `{
"application/json"
],
"tags": [
"Insights"
"Intel"
],
"summary": "List intel machines",
"operationId": "list-intel-machines",
"parameters": [
{
"type": "integer",
"description": "Page limit",
"name": "limit",
"in": "query"
},
{
"type": "integer",
"description": "Page offset",
"name": "offset",
"in": "query"
},
{
"type": "string",
"description": "Regex to match a machine operating system against",
"name": "operating_system",
"in": "query"
},
{
"type": "string",
"description": "Regex to match a machine operating system platform against",
"name": "operating_system_platform",
"in": "query"
},
{
"type": "string",
"description": "Regex to match a machine operating system version against",
"name": "operating_system_version",
"in": "query"
},
{
"type": "string",
"description": "Regex to match a machine architecture against",
"name": "architecture",
"in": "query"
},
{
"type": "string",
"description": "Regex to match a machine instance ID against",
"name": "instance_id",
"in": "query"
}
],
"summary": "Get deployment DAUs",
"operationId": "get-deployment-daus",
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/codersdk.DAUsResponse"
"$ref": "#/definitions/codersdk.IntelMachinesResponse"
}
}
}
@@ -9850,6 +9894,69 @@ const docTemplate = `{
"InsightsReportIntervalWeek"
]
},
"codersdk.IntelMachine": {
"type": "object",
"properties": {
"architecture": {
"type": "string"
},
"cpu_cores": {
"type": "integer"
},
"created_at": {
"type": "string",
"format": "date-time"
},
"hostname": {
"type": "string"
},
"id": {
"type": "string",
"format": "uuid"
},
"instance_id": {
"type": "string"
},
"memory_mb_total": {
"type": "integer"
},
"operating_system": {
"type": "string"
},
"operating_system_platform": {
"type": "string"
},
"operating_system_version": {
"type": "string"
},
"organization_id": {
"type": "string",
"format": "uuid"
},
"updated_at": {
"type": "string",
"format": "date-time"
},
"user_id": {
"type": "string",
"format": "uuid"
}
}
},
"codersdk.IntelMachinesResponse": {
"type": "object",
"properties": {
"count": {
"type": "integer"
},
"intel_machines": {
"type": "array",
"items": {
"$ref": "#/definitions/codersdk.IntelMachine"
}
}
}
},
"codersdk.IssueReconnectingPTYSignedTokenRequest": {
"type": "object",
"required": [


@@ -995,14 +995,58 @@
}
],
"produces": ["application/json"],
"tags": ["Insights"],
"summary": "Get deployment DAUs",
"operationId": "get-deployment-daus",
"tags": ["Intel"],
"summary": "List intel machines",
"operationId": "list-intel-machines",
"parameters": [
{
"type": "integer",
"description": "Page limit",
"name": "limit",
"in": "query"
},
{
"type": "integer",
"description": "Page offset",
"name": "offset",
"in": "query"
},
{
"type": "string",
"description": "Regex to match a machine operating system against",
"name": "operating_system",
"in": "query"
},
{
"type": "string",
"description": "Regex to match a machine operating system platform against",
"name": "operating_system_platform",
"in": "query"
},
{
"type": "string",
"description": "Regex to match a machine operating system version against",
"name": "operating_system_version",
"in": "query"
},
{
"type": "string",
"description": "Regex to match a machine architecture against",
"name": "architecture",
"in": "query"
},
{
"type": "string",
"description": "Regex to match a machine instance ID against",
"name": "instance_id",
"in": "query"
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/codersdk.DAUsResponse"
"$ref": "#/definitions/codersdk.IntelMachinesResponse"
}
}
}
@@ -8838,6 +8882,69 @@
"InsightsReportIntervalWeek"
]
},
"codersdk.IntelMachine": {
"type": "object",
"properties": {
"architecture": {
"type": "string"
},
"cpu_cores": {
"type": "integer"
},
"created_at": {
"type": "string",
"format": "date-time"
},
"hostname": {
"type": "string"
},
"id": {
"type": "string",
"format": "uuid"
},
"instance_id": {
"type": "string"
},
"memory_mb_total": {
"type": "integer"
},
"operating_system": {
"type": "string"
},
"operating_system_platform": {
"type": "string"
},
"operating_system_version": {
"type": "string"
},
"organization_id": {
"type": "string",
"format": "uuid"
},
"updated_at": {
"type": "string",
"format": "date-time"
},
"user_id": {
"type": "string",
"format": "uuid"
}
}
},
"codersdk.IntelMachinesResponse": {
"type": "object",
"properties": {
"count": {
"type": "integer"
},
"intel_machines": {
"type": "array",
"items": {
"$ref": "#/definitions/codersdk.IntelMachine"
}
}
}
},
"codersdk.IssueReconnectingPTYSignedTokenRequest": {
"type": "object",
"required": ["agentID", "url"],


@@ -90,6 +90,7 @@ func testSQLDB(t testing.TB) *sql.DB {
connection, closeFn, err := dbtestutil.Open()
require.NoError(t, err)
t.Cleanup(closeFn)
t.Logf("pgurl %s", connection)
db, err := sql.Open("postgres", connection)
require.NoError(t, err)


@@ -1181,11 +1181,24 @@ func (q *querier) GetIntelCohortsMatchedByMachineIDs(ctx context.Context, ids []
return q.db.GetIntelCohortsMatchedByMachineIDs(ctx, ids)
}
func (q *querier) GetIntelInvocationSummaries(ctx context.Context) ([]database.IntelInvocationSummary, error) {
// No authz checks - it'd be too slow
return q.db.GetIntelInvocationSummaries(ctx)
}
func (q *querier) GetIntelMachinesMatchingFilters(ctx context.Context, arg database.GetIntelMachinesMatchingFiltersParams) ([]database.GetIntelMachinesMatchingFiltersRow, error) {
// No authz checks possible. It's too weird
return q.db.GetIntelMachinesMatchingFilters(ctx, arg)
}
func (q *querier) GetIntelReportCommands(ctx context.Context, startsAt database.GetIntelReportCommandsParams) ([]database.GetIntelReportCommandsRow, error) {
panic("not implemented")
}
func (q *querier) GetIntelReportGitRemotes(ctx context.Context, startsAt database.GetIntelReportGitRemotesParams) ([]database.GetIntelReportGitRemotesRow, error) {
panic("not implemented")
}
func (q *querier) GetJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, arg database.GetJFrogXrayScanByWorkspaceAndAgentIDParams) (database.JfrogXrayScan, error) {
if _, err := fetch(q.log, q.auth, q.db.GetWorkspaceByID)(ctx, arg.WorkspaceID); err != nil {
return database.JfrogXrayScan{}, err
@@ -3362,6 +3375,10 @@ func (q *querier) UpsertIntelCohort(ctx context.Context, arg database.UpsertInte
return insert(q.log, q.auth, rbac.ResourceGroup.InOrg(arg.OrganizationID), q.db.UpsertIntelCohort)(ctx, arg)
}
func (q *querier) UpsertIntelInvocationSummaries(ctx context.Context) error {
panic("not implemented")
}
func (q *querier) UpsertIntelMachine(ctx context.Context, arg database.UpsertIntelMachineParams) (database.IntelMachine, error) {
// No authz
return q.db.UpsertIntelMachine(ctx, arg)


@@ -2442,6 +2442,10 @@ func (q *FakeQuerier) GetIntelCohortsMatchedByMachineIDs(_ context.Context, ids
return rows, nil
}
func (q *FakeQuerier) GetIntelInvocationSummaries(ctx context.Context) ([]database.IntelInvocationSummary, error) {
panic("not implemented")
}
func (q *FakeQuerier) GetIntelMachinesMatchingFilters(_ context.Context, arg database.GetIntelMachinesMatchingFiltersParams) ([]database.GetIntelMachinesMatchingFiltersRow, error) {
err := validateDatabaseType(arg)
if err != nil {
@@ -2504,6 +2508,14 @@ func (q *FakeQuerier) GetIntelMachinesMatchingFilters(_ context.Context, arg dat
return machines, nil
}
func (q *FakeQuerier) GetIntelReportCommands(ctx context.Context, startsAt database.GetIntelReportCommandsParams) ([]database.GetIntelReportCommandsRow, error) {
panic("not implemented")
}
func (q *FakeQuerier) GetIntelReportGitRemotes(ctx context.Context, startsAt database.GetIntelReportGitRemotesParams) ([]database.GetIntelReportGitRemotesRow, error) {
panic("not implemented")
}
func (q *FakeQuerier) GetJFrogXrayScanByWorkspaceAndAgentID(_ context.Context, arg database.GetJFrogXrayScanByWorkspaceAndAgentIDParams) (database.JfrogXrayScan, error) {
err := validateDatabaseType(arg)
if err != nil {
@@ -6007,6 +6019,7 @@ func (q *FakeQuerier) InsertIntelInvocations(_ context.Context, arg database.Ins
CreatedAt: arg.CreatedAt,
MachineID: arg.MachineID,
UserID: arg.UserID,
BinaryName: arg.BinaryName[i],
BinaryHash: arg.BinaryHash[i],
BinaryPath: arg.BinaryPath[i],
BinaryArgs: binaryArgs[i],
@@ -8416,6 +8429,10 @@ func (q *FakeQuerier) UpsertIntelCohort(_ context.Context, arg database.UpsertIn
return cohort, nil
}
func (q *FakeQuerier) UpsertIntelInvocationSummaries(ctx context.Context) error {
panic("not implemented")
}
func (q *FakeQuerier) UpsertIntelMachine(_ context.Context, arg database.UpsertIntelMachineParams) (database.IntelMachine, error) {
err := validateDatabaseType(arg)
if err != nil {


@@ -618,6 +618,13 @@ func (m metricsStore) GetIntelCohortsMatchedByMachineIDs(ctx context.Context, id
return r0, r1
}
func (m metricsStore) GetIntelInvocationSummaries(ctx context.Context) ([]database.IntelInvocationSummary, error) {
start := time.Now()
r0, r1 := m.s.GetIntelInvocationSummaries(ctx)
m.queryLatencies.WithLabelValues("GetIntelInvocationSummaries").Observe(time.Since(start).Seconds())
return r0, r1
}
func (m metricsStore) GetIntelMachinesMatchingFilters(ctx context.Context, arg database.GetIntelMachinesMatchingFiltersParams) ([]database.GetIntelMachinesMatchingFiltersRow, error) {
start := time.Now()
r0, r1 := m.s.GetIntelMachinesMatchingFilters(ctx, arg)
@@ -625,6 +632,20 @@ func (m metricsStore) GetIntelMachinesMatchingFilters(ctx context.Context, arg d
return r0, r1
}
func (m metricsStore) GetIntelReportCommands(ctx context.Context, startsAt database.GetIntelReportCommandsParams) ([]database.GetIntelReportCommandsRow, error) {
start := time.Now()
r0, r1 := m.s.GetIntelReportCommands(ctx, startsAt)
m.queryLatencies.WithLabelValues("GetIntelReportCommands").Observe(time.Since(start).Seconds())
return r0, r1
}
func (m metricsStore) GetIntelReportGitRemotes(ctx context.Context, startsAt database.GetIntelReportGitRemotesParams) ([]database.GetIntelReportGitRemotesRow, error) {
start := time.Now()
r0, r1 := m.s.GetIntelReportGitRemotes(ctx, startsAt)
m.queryLatencies.WithLabelValues("GetIntelReportGitRemotes").Observe(time.Since(start).Seconds())
return r0, r1
}
func (m metricsStore) GetJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, arg database.GetJFrogXrayScanByWorkspaceAndAgentIDParams) (database.JfrogXrayScan, error) {
start := time.Now()
r0, r1 := m.s.GetJFrogXrayScanByWorkspaceAndAgentID(ctx, arg)
@@ -2214,6 +2235,13 @@ func (m metricsStore) UpsertIntelCohort(ctx context.Context, arg database.Upsert
return r0, r1
}
func (m metricsStore) UpsertIntelInvocationSummaries(ctx context.Context) error {
start := time.Now()
r0 := m.s.UpsertIntelInvocationSummaries(ctx)
m.queryLatencies.WithLabelValues("UpsertIntelInvocationSummaries").Observe(time.Since(start).Seconds())
return r0
}
func (m metricsStore) UpsertIntelMachine(ctx context.Context, arg database.UpsertIntelMachineParams) (database.IntelMachine, error) {
start := time.Now()
r0, r1 := m.s.UpsertIntelMachine(ctx, arg)


@@ -1214,6 +1214,21 @@ func (mr *MockStoreMockRecorder) GetIntelCohortsMatchedByMachineIDs(arg0, arg1 a
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetIntelCohortsMatchedByMachineIDs", reflect.TypeOf((*MockStore)(nil).GetIntelCohortsMatchedByMachineIDs), arg0, arg1)
}
// GetIntelInvocationSummaries mocks base method.
func (m *MockStore) GetIntelInvocationSummaries(arg0 context.Context) ([]database.IntelInvocationSummary, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetIntelInvocationSummaries", arg0)
ret0, _ := ret[0].([]database.IntelInvocationSummary)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetIntelInvocationSummaries indicates an expected call of GetIntelInvocationSummaries.
func (mr *MockStoreMockRecorder) GetIntelInvocationSummaries(arg0 any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetIntelInvocationSummaries", reflect.TypeOf((*MockStore)(nil).GetIntelInvocationSummaries), arg0)
}
// GetIntelMachinesMatchingFilters mocks base method.
func (m *MockStore) GetIntelMachinesMatchingFilters(arg0 context.Context, arg1 database.GetIntelMachinesMatchingFiltersParams) ([]database.GetIntelMachinesMatchingFiltersRow, error) {
m.ctrl.T.Helper()
@@ -1229,6 +1244,36 @@ func (mr *MockStoreMockRecorder) GetIntelMachinesMatchingFilters(arg0, arg1 any)
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetIntelMachinesMatchingFilters", reflect.TypeOf((*MockStore)(nil).GetIntelMachinesMatchingFilters), arg0, arg1)
}
// GetIntelReportCommands mocks base method.
func (m *MockStore) GetIntelReportCommands(arg0 context.Context, arg1 database.GetIntelReportCommandsParams) ([]database.GetIntelReportCommandsRow, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetIntelReportCommands", arg0, arg1)
ret0, _ := ret[0].([]database.GetIntelReportCommandsRow)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetIntelReportCommands indicates an expected call of GetIntelReportCommands.
func (mr *MockStoreMockRecorder) GetIntelReportCommands(arg0, arg1 any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetIntelReportCommands", reflect.TypeOf((*MockStore)(nil).GetIntelReportCommands), arg0, arg1)
}
// GetIntelReportGitRemotes mocks base method.
func (m *MockStore) GetIntelReportGitRemotes(arg0 context.Context, arg1 database.GetIntelReportGitRemotesParams) ([]database.GetIntelReportGitRemotesRow, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetIntelReportGitRemotes", arg0, arg1)
ret0, _ := ret[0].([]database.GetIntelReportGitRemotesRow)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetIntelReportGitRemotes indicates an expected call of GetIntelReportGitRemotes.
func (mr *MockStoreMockRecorder) GetIntelReportGitRemotes(arg0, arg1 any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetIntelReportGitRemotes", reflect.TypeOf((*MockStore)(nil).GetIntelReportGitRemotes), arg0, arg1)
}
// GetJFrogXrayScanByWorkspaceAndAgentID mocks base method.
func (m *MockStore) GetJFrogXrayScanByWorkspaceAndAgentID(arg0 context.Context, arg1 database.GetJFrogXrayScanByWorkspaceAndAgentIDParams) (database.JfrogXrayScan, error) {
m.ctrl.T.Helper()
@@ -4638,6 +4683,20 @@ func (mr *MockStoreMockRecorder) UpsertIntelCohort(arg0, arg1 any) *gomock.Call
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertIntelCohort", reflect.TypeOf((*MockStore)(nil).UpsertIntelCohort), arg0, arg1)
}
// UpsertIntelInvocationSummaries mocks base method.
func (m *MockStore) UpsertIntelInvocationSummaries(arg0 context.Context) error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "UpsertIntelInvocationSummaries", arg0)
ret0, _ := ret[0].(error)
return ret0
}
// UpsertIntelInvocationSummaries indicates an expected call of UpsertIntelInvocationSummaries.
func (mr *MockStoreMockRecorder) UpsertIntelInvocationSummaries(arg0 any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertIntelInvocationSummaries", reflect.TypeOf((*MockStore)(nil).UpsertIntelInvocationSummaries), arg0)
}
// UpsertIntelMachine mocks base method.
func (m *MockStore) UpsertIntelMachine(arg0 context.Context, arg1 database.UpsertIntelMachineParams) (database.IntelMachine, error) {
m.ctrl.T.Helper()


@@ -498,12 +498,29 @@ CREATE TABLE intel_cohorts (
tracked_executables text[] NOT NULL
);
CREATE TABLE intel_invocations (
CREATE TABLE intel_invocation_summaries (
id uuid NOT NULL,
cohort_id uuid NOT NULL,
starts_at timestamp with time zone NOT NULL,
ends_at timestamp with time zone NOT NULL,
binary_name text NOT NULL,
binary_args jsonb NOT NULL,
binary_paths jsonb NOT NULL,
working_directories jsonb NOT NULL,
git_remote_urls jsonb NOT NULL,
exit_codes jsonb NOT NULL,
unique_machines bigint NOT NULL,
total_invocations bigint NOT NULL,
median_duration_ms bigint NOT NULL
);
CREATE UNLOGGED TABLE intel_invocations (
id uuid NOT NULL,
created_at timestamp with time zone NOT NULL,
machine_id uuid NOT NULL,
user_id uuid NOT NULL,
binary_hash text NOT NULL,
binary_name text NOT NULL,
binary_path text NOT NULL,
binary_args jsonb NOT NULL,
binary_version text NOT NULL,
@@ -1674,6 +1691,18 @@ CREATE INDEX idx_audit_log_user_id ON audit_logs USING btree (user_id);
CREATE INDEX idx_audit_logs_time_desc ON audit_logs USING btree ("time" DESC);
CREATE INDEX idx_intel_cohorts_id ON intel_cohorts USING btree (id);
CREATE INDEX idx_intel_invocations_binary_args ON intel_invocations USING gin (binary_args);
CREATE INDEX idx_intel_invocations_binary_name ON intel_invocations USING btree (binary_name);
CREATE INDEX idx_intel_invocations_created_at ON intel_invocations USING btree (created_at);
CREATE INDEX idx_intel_invocations_id ON intel_invocations USING btree (id);
CREATE INDEX idx_intel_invocations_machine_id ON intel_invocations USING btree (machine_id);
CREATE INDEX idx_organization_member_organization_id_uuid ON organization_members USING btree (organization_id);
CREATE INDEX idx_organization_member_user_id_uuid ON organization_members USING btree (user_id);
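The new intel_invocations indexes above include a GIN index on the jsonb binary_args column. As a rough sketch (assuming the default jsonb_ops operator class), that index accelerates containment lookups such as:

-- Count invocations whose args contain "build"; served by
-- idx_intel_invocations_binary_args rather than a sequential scan.
SELECT count(*)
FROM intel_invocations
WHERE binary_args @> '["build"]'::jsonb;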
@@ -1785,6 +1814,9 @@ ALTER TABLE ONLY group_members
ALTER TABLE ONLY groups
ADD CONSTRAINT groups_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE;
ALTER TABLE ONLY intel_invocation_summaries
ADD CONSTRAINT intel_invocation_summaries_cohort_id_fkey FOREIGN KEY (cohort_id) REFERENCES intel_cohorts(id) ON DELETE CASCADE;
ALTER TABLE ONLY intel_invocations
ADD CONSTRAINT intel_invocations_machine_id_fkey FOREIGN KEY (machine_id) REFERENCES intel_machines(id) ON DELETE CASCADE;


@@ -13,6 +13,7 @@ const (
ForeignKeyGroupMembersGroupID ForeignKeyConstraint = "group_members_group_id_fkey" // ALTER TABLE ONLY group_members ADD CONSTRAINT group_members_group_id_fkey FOREIGN KEY (group_id) REFERENCES groups(id) ON DELETE CASCADE;
ForeignKeyGroupMembersUserID ForeignKeyConstraint = "group_members_user_id_fkey" // ALTER TABLE ONLY group_members ADD CONSTRAINT group_members_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
ForeignKeyGroupsOrganizationID ForeignKeyConstraint = "groups_organization_id_fkey" // ALTER TABLE ONLY groups ADD CONSTRAINT groups_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE;
ForeignKeyIntelInvocationSummariesCohortID ForeignKeyConstraint = "intel_invocation_summaries_cohort_id_fkey" // ALTER TABLE ONLY intel_invocation_summaries ADD CONSTRAINT intel_invocation_summaries_cohort_id_fkey FOREIGN KEY (cohort_id) REFERENCES intel_cohorts(id) ON DELETE CASCADE;
ForeignKeyIntelInvocationsMachineID ForeignKeyConstraint = "intel_invocations_machine_id_fkey" // ALTER TABLE ONLY intel_invocations ADD CONSTRAINT intel_invocations_machine_id_fkey FOREIGN KEY (machine_id) REFERENCES intel_machines(id) ON DELETE CASCADE;
ForeignKeyJfrogXrayScansAgentID ForeignKeyConstraint = "jfrog_xray_scans_agent_id_fkey" // ALTER TABLE ONLY jfrog_xray_scans ADD CONSTRAINT jfrog_xray_scans_agent_id_fkey FOREIGN KEY (agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE;
ForeignKeyJfrogXrayScansWorkspaceID ForeignKeyConstraint = "jfrog_xray_scans_workspace_id_fkey" // ALTER TABLE ONLY jfrog_xray_scans ADD CONSTRAINT jfrog_xray_scans_workspace_id_fkey FOREIGN KEY (workspace_id) REFERENCES workspaces(id) ON DELETE CASCADE;


@@ -18,6 +18,8 @@ CREATE TABLE intel_cohorts (
tracked_executables TEXT[] NOT NULL
);
CREATE INDEX idx_intel_cohorts_id ON intel_cohorts (id);
CREATE TABLE intel_machines (
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
created_at TIMESTAMPTZ NOT NULL,
@@ -42,12 +44,15 @@ COMMENT ON COLUMN intel_machines.memory_mb_total IS 'in MB';
COMMENT ON COLUMN intel_machines.architecture IS 'GOARCH. e.g. amd64';
COMMENT ON COLUMN intel_machines.daemon_version IS 'Version of the daemon running on the machine';
CREATE TABLE intel_invocations (
-- UNLOGGED because it is extremely update-heavy and the data is not valuable enough to justify
-- the overhead of WAL logging.
CREATE UNLOGGED TABLE intel_invocations (
id uuid NOT NULL,
created_at TIMESTAMPTZ NOT NULL,
machine_id uuid NOT NULL REFERENCES intel_machines(id) ON DELETE CASCADE,
user_id uuid NOT NULL,
binary_hash TEXT NOT NULL,
binary_name TEXT NOT NULL,
binary_path TEXT NOT NULL,
binary_args jsonb NOT NULL,
binary_version TEXT NOT NULL,
@@ -56,3 +61,31 @@ CREATE TABLE intel_invocations (
exit_code INT NOT NULL,
duration_ms INT NOT NULL
);
CREATE INDEX idx_intel_invocations_id ON intel_invocations (id);
CREATE INDEX idx_intel_invocations_created_at ON intel_invocations (created_at);
CREATE INDEX idx_intel_invocations_machine_id ON intel_invocations (machine_id);
CREATE INDEX idx_intel_invocations_binary_name ON intel_invocations (binary_name);
CREATE INDEX idx_intel_invocations_binary_args ON intel_invocations USING gin (binary_args);
-- Stores summaries for hour intervals of invocations.
-- There are so many invocations that we need to summarize them to make querying them feasible.
-- Summarize invocations by timestamp and cohort.
-- There are so many invocations that we need to summarize them to make querying feasible.
CREATE TABLE intel_invocation_summaries (
id uuid NOT NULL,
cohort_id uuid NOT NULL REFERENCES intel_cohorts(id) ON DELETE CASCADE,
starts_at TIMESTAMPTZ NOT NULL,
ends_at TIMESTAMPTZ NOT NULL,
binary_name TEXT NOT NULL,
binary_args jsonb NOT NULL,
binary_paths jsonb NOT NULL,
working_directories jsonb NOT NULL,
git_remote_urls jsonb NOT NULL,
exit_codes jsonb NOT NULL,
unique_machines BIGINT NOT NULL,
total_invocations BIGINT NOT NULL,
median_duration_ms BIGINT NOT NULL
);
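The UNLOGGED choice above skips write-ahead logging entirely: inserts are cheaper, but the table is truncated after a crash and is not replicated. Should the data ever become valuable enough to keep, PostgreSQL (9.5+) can flip the setting in place at the cost of a full table rewrite; a minimal sketch:

-- Make the table crash-safe and replicated again:
ALTER TABLE intel_invocations SET LOGGED;
-- Or drop durability for cheaper writes:
ALTER TABLE intel_invocations SET UNLOGGED;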


@@ -1870,6 +1870,7 @@ type IntelInvocation struct {
MachineID uuid.UUID `db:"machine_id" json:"machine_id"`
UserID uuid.UUID `db:"user_id" json:"user_id"`
BinaryHash string `db:"binary_hash" json:"binary_hash"`
BinaryName string `db:"binary_name" json:"binary_name"`
BinaryPath string `db:"binary_path" json:"binary_path"`
BinaryArgs json.RawMessage `db:"binary_args" json:"binary_args"`
BinaryVersion string `db:"binary_version" json:"binary_version"`
@@ -1879,6 +1880,22 @@ type IntelInvocation struct {
DurationMs int32 `db:"duration_ms" json:"duration_ms"`
}
type IntelInvocationSummary struct {
ID uuid.UUID `db:"id" json:"id"`
CohortID uuid.UUID `db:"cohort_id" json:"cohort_id"`
StartsAt time.Time `db:"starts_at" json:"starts_at"`
EndsAt time.Time `db:"ends_at" json:"ends_at"`
BinaryName string `db:"binary_name" json:"binary_name"`
BinaryArgs json.RawMessage `db:"binary_args" json:"binary_args"`
BinaryPaths json.RawMessage `db:"binary_paths" json:"binary_paths"`
WorkingDirectories json.RawMessage `db:"working_directories" json:"working_directories"`
GitRemoteUrls json.RawMessage `db:"git_remote_urls" json:"git_remote_urls"`
ExitCodes json.RawMessage `db:"exit_codes" json:"exit_codes"`
UniqueMachines int64 `db:"unique_machines" json:"unique_machines"`
TotalInvocations int64 `db:"total_invocations" json:"total_invocations"`
MedianDurationMs int64 `db:"median_duration_ms" json:"median_duration_ms"`
}
type IntelMachine struct {
ID uuid.UUID `db:"id" json:"id"`
CreatedAt time.Time `db:"created_at" json:"created_at"`


@@ -132,7 +132,12 @@ type sqlcQuerier interface {
GetIntelCohortsByOrganizationID(ctx context.Context, organizationID uuid.UUID) ([]IntelCohort, error)
// Obtains a list of cohorts that a user can track invocations for.
GetIntelCohortsMatchedByMachineIDs(ctx context.Context, ids []uuid.UUID) ([]GetIntelCohortsMatchedByMachineIDsRow, error)
GetIntelInvocationSummaries(ctx context.Context) ([]IntelInvocationSummary, error)
GetIntelMachinesMatchingFilters(ctx context.Context, arg GetIntelMachinesMatchingFiltersParams) ([]GetIntelMachinesMatchingFiltersRow, error)
GetIntelReportCommands(ctx context.Context, arg GetIntelReportCommandsParams) ([]GetIntelReportCommandsRow, error)
// Get the invocation counts and median durations of commands
// run in directories associated with a given git remote URL.
GetIntelReportGitRemotes(ctx context.Context, arg GetIntelReportGitRemotesParams) ([]GetIntelReportGitRemotesRow, error)
GetJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, arg GetJFrogXrayScanByWorkspaceAndAgentIDParams) (JfrogXrayScan, error)
GetLastUpdateCheck(ctx context.Context) (string, error)
GetLatestWorkspaceBuildByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (WorkspaceBuild, error)
@@ -427,6 +432,7 @@ type sqlcQuerier interface {
UpsertDefaultProxy(ctx context.Context, arg UpsertDefaultProxyParams) error
UpsertHealthSettings(ctx context.Context, value string) error
UpsertIntelCohort(ctx context.Context, arg UpsertIntelCohortParams) (IntelCohort, error)
UpsertIntelInvocationSummaries(ctx context.Context) error
UpsertIntelMachine(ctx context.Context, arg UpsertIntelMachineParams) (IntelMachine, error)
UpsertJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, arg UpsertJFrogXrayScanByWorkspaceAndAgentIDParams) error
UpsertLastUpdateCheck(ctx context.Context, value string) error


@@ -6,12 +6,15 @@ import (
"context"
"database/sql"
"encoding/json"
"errors"
"fmt"
"net"
"sort"
"testing"
"time"
"github.com/google/uuid"
"github.com/lib/pq"
"github.com/sqlc-dev/pqtype"
"github.com/stretchr/testify/require"
@@ -691,13 +694,53 @@ func TestIntel(t *testing.T) {
db := database.New(sqlDB)
ctx := context.Background()
machine, err := db.UpsertIntelMachine(ctx, database.UpsertIntelMachineParams{
cohort, err := db.UpsertIntelCohort(ctx, database.UpsertIntelCohortParams{
ID: uuid.New(),
CreatedAt: dbtime.Now(),
UpdatedAt: dbtime.Now(),
Name: "cohort",
OrganizationID: uuid.New(),
CreatedBy: uuid.New(),
DisplayName: "cohort",
TrackedExecutables: []string{"go"},
RegexOperatingSystem: ".*",
RegexOperatingSystemVersion: ".*",
RegexOperatingSystemPlatform: ".*",
RegexArchitecture: ".*",
RegexInstanceID: ".*",
})
require.NoError(t, err)
machine1, err := db.UpsertIntelMachine(ctx, database.UpsertIntelMachineParams{
ID: uuid.New(),
CreatedAt: dbtime.Now(),
UpdatedAt: dbtime.Now(),
InstanceID: "some-id",
OrganizationID: uuid.New(),
UserID: uuid.New(),
OrganizationID: cohort.OrganizationID,
UserID: cohort.CreatedBy,
IPAddress: pqtype.Inet{
IPNet: net.IPNet{
IP: net.IPv4(127, 0, 0, 1),
Mask: net.IPv4Mask(255, 255, 255, 255),
},
Valid: true,
},
Hostname: "host",
OperatingSystem: "linux",
CPUCores: 4,
MemoryMBTotal: 16 * 1024,
Architecture: "amd64",
DaemonVersion: "1.0.0",
})
require.NoError(t, err)
machine2, err := db.UpsertIntelMachine(ctx, database.UpsertIntelMachineParams{
ID: uuid.New(),
CreatedAt: dbtime.Now(),
UpdatedAt: dbtime.Now(),
InstanceID: "some-id-2",
OrganizationID: cohort.OrganizationID,
UserID: cohort.CreatedBy,
IPAddress: pqtype.Inet{
IPNet: net.IPNet{
IP: net.IPv4(127, 0, 0, 1),
@@ -715,8 +758,8 @@ func TestIntel(t *testing.T) {
require.NoError(t, err)
rows, err := db.GetIntelMachinesMatchingFilters(ctx, database.GetIntelMachinesMatchingFiltersParams{
OrganizationID: machine.OrganizationID,
RegexOperatingSystem: "linux|windows",
OrganizationID: machine1.OrganizationID,
RegexOperatingSystem: ".*",
RegexOperatingSystemVersion: ".*",
RegexOperatingSystemPlatform: ".*",
RegexArchitecture: ".*",
@@ -725,21 +768,107 @@ func TestIntel(t *testing.T) {
OffsetOpt: 0,
})
require.NoError(t, err)
require.Len(t, rows, 1)
require.Len(t, rows, 2)
err = db.InsertIntelInvocations(ctx, database.InsertIntelInvocationsParams{
CreatedAt: dbtime.Now(),
MachineID: machine.ID,
UserID: machine.UserID,
ID: []uuid.UUID{uuid.New()},
BinaryHash: []string{"hash"},
BinaryPath: []string{"/go"},
BinaryArgs: json.RawMessage("[[\"some\",\"arg\"]]"),
BinaryVersion: []string{"1"},
WorkingDirectory: []string{"/"},
GitRemoteUrl: []string{""},
ExitCode: []int32{0},
DurationMs: []int32{0},
})
chunkSize := 100
numberOfChunks := 100
for n := 0; n < numberOfChunks; n++ {
i := chunkSize
ids := make([]uuid.UUID, 0)
binaryNames := make([]string, 0, i)
binaryHashes := make([]string, 0, i)
binaryPaths := make([]string, 0, i)
binaryArgs := make([]json.RawMessage, 0, i)
binaryVersions := make([]string, 0, i)
workingDirs := make([]string, 0, i)
gitRemoteURLs := make([]string, 0, i)
exitCodes := make([]int32, 0, i)
durationsMS := make([]int32, 0, i)
for z := 0; z < i; z++ {
ids = append(ids, uuid.New())
binaryNames = append(binaryNames, "go")
binaryHashes = append(binaryHashes, "my-hash")
binaryPaths = append(binaryPaths, "/usr/bin/go")
args := []string{"test"}
workingDir := "/home/coder"
durationMS := int32(15)
if z%2 == 0 {
args = []string{"build"}
if z%3 == 0 {
workingDir = "/home/moo"
}
durationMS = 5
}
argsData, _ := json.Marshal(args)
binaryArgs = append(binaryArgs, argsData)
binaryVersions = append(binaryVersions, "version")
workingDirs = append(workingDirs, workingDir)
gitRemoteURLs = append(gitRemoteURLs, "remote")
exitCodes = append(exitCodes, 0)
durationsMS = append(durationsMS, durationMS)
}
machineID := machine1.ID
if n%2 == 0 {
machineID = machine2.ID
}
binaryArgsData, _ := json.Marshal(binaryArgs)
err = db.InsertIntelInvocations(ctx, database.InsertIntelInvocationsParams{
CreatedAt: dbtime.Now(),
MachineID: machineID,
UserID: machine1.UserID,
ID: ids,
BinaryName: binaryNames,
BinaryHash: binaryHashes,
BinaryPath: binaryPaths,
BinaryArgs: binaryArgsData,
BinaryVersion: binaryVersions,
WorkingDirectory: workingDirs,
GitRemoteUrl: gitRemoteURLs,
ExitCode: exitCodes,
DurationMs: durationsMS,
})
require.NoError(t, err)
t.Logf("inserted %d (%d/%d) invocations", chunkSize, n, numberOfChunks)
}
start := time.Now()
err = db.UpsertIntelInvocationSummaries(ctx)
if err != nil {
var pqErr *pq.Error
if errors.As(err, &pqErr) {
t.Fatalf("failed: %+v", pqErr.Message)
}
}
require.NoError(t, err)
end := time.Now()
fmt.Printf("UpsertIntelInvocationSummaries took %s\n", end.Sub(start))
summaries, err := db.GetIntelInvocationSummaries(ctx)
require.NoError(t, err)
fmt.Printf("Summaries %d\n", len(summaries))
for _, summary := range summaries {
fmt.Printf("%+v\n", summary)
binaryPathCounts := map[string]int{}
err = json.Unmarshal(summary.BinaryPaths, &binaryPathCounts)
require.NoError(t, err)
totalBinaryPaths := int64(0)
for _, count := range binaryPathCounts {
totalBinaryPaths += int64(count)
}
// require.Equal(t, summary.TotalInvocations, totalBinaryPaths)
}
// got, err := db.GetIntelInvocationSummariesByBinaryAndCohort(ctx)
// require.NoError(t, err)
// for _, row := range got {
// fmt.Printf("%+v\n", row)
// }
}


@@ -3056,6 +3056,47 @@ func (q *sqlQuerier) GetIntelCohortsMatchedByMachineIDs(ctx context.Context, ids
return items, nil
}
const getIntelInvocationSummaries = `-- name: GetIntelInvocationSummaries :many
SELECT id, cohort_id, starts_at, ends_at, binary_name, binary_args, binary_paths, working_directories, git_remote_urls, exit_codes, unique_machines, total_invocations, median_duration_ms FROM intel_invocation_summaries
`
func (q *sqlQuerier) GetIntelInvocationSummaries(ctx context.Context) ([]IntelInvocationSummary, error) {
rows, err := q.db.QueryContext(ctx, getIntelInvocationSummaries)
if err != nil {
return nil, err
}
defer rows.Close()
var items []IntelInvocationSummary
for rows.Next() {
var i IntelInvocationSummary
if err := rows.Scan(
&i.ID,
&i.CohortID,
&i.StartsAt,
&i.EndsAt,
&i.BinaryName,
&i.BinaryArgs,
&i.BinaryPaths,
&i.WorkingDirectories,
&i.GitRemoteUrls,
&i.ExitCodes,
&i.UniqueMachines,
&i.TotalInvocations,
&i.MedianDurationMs,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const getIntelMachinesMatchingFilters = `-- name: GetIntelMachinesMatchingFilters :many
WITH filtered_machines AS (
SELECT
@@ -3139,25 +3180,172 @@ func (q *sqlQuerier) GetIntelMachinesMatchingFilters(ctx context.Context, arg Ge
return items, nil
}
const getIntelReportCommands = `-- name: GetIntelReportCommands :many
SELECT
starts_at,
ends_at,
cohort_id,
binary_name,
binary_args,
SUM(total_invocations) AS total_invocations,
PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY median_duration_ms) AS median_duration_ms,
array_agg(working_directories):: jsonb [] AS aggregated_working_directories,
array_agg(binary_paths):: jsonb [] AS aggregated_binary_paths,
array_agg(git_remote_urls):: jsonb [] AS aggregated_git_remote_urls,
array_agg(exit_codes):: jsonb [] AS aggregated_exit_codes
FROM
intel_invocation_summaries
WHERE
starts_at >= $1
AND
(CARDINALITY($2 :: uuid []) = 0 OR cohort_id = ANY($2 :: uuid []))
GROUP BY
starts_at, ends_at, cohort_id, binary_name, binary_args
`
type GetIntelReportCommandsParams struct {
StartsAt time.Time `db:"starts_at" json:"starts_at"`
CohortIds []uuid.UUID `db:"cohort_ids" json:"cohort_ids"`
}
type GetIntelReportCommandsRow struct {
StartsAt time.Time `db:"starts_at" json:"starts_at"`
EndsAt time.Time `db:"ends_at" json:"ends_at"`
CohortID uuid.UUID `db:"cohort_id" json:"cohort_id"`
BinaryName string `db:"binary_name" json:"binary_name"`
BinaryArgs json.RawMessage `db:"binary_args" json:"binary_args"`
TotalInvocations int64 `db:"total_invocations" json:"total_invocations"`
MedianDurationMs float64 `db:"median_duration_ms" json:"median_duration_ms"`
AggregatedWorkingDirectories []json.RawMessage `db:"aggregated_working_directories" json:"aggregated_working_directories"`
AggregatedBinaryPaths []json.RawMessage `db:"aggregated_binary_paths" json:"aggregated_binary_paths"`
AggregatedGitRemoteUrls []json.RawMessage `db:"aggregated_git_remote_urls" json:"aggregated_git_remote_urls"`
AggregatedExitCodes []json.RawMessage `db:"aggregated_exit_codes" json:"aggregated_exit_codes"`
}
func (q *sqlQuerier) GetIntelReportCommands(ctx context.Context, arg GetIntelReportCommandsParams) ([]GetIntelReportCommandsRow, error) {
rows, err := q.db.QueryContext(ctx, getIntelReportCommands, arg.StartsAt, pq.Array(arg.CohortIds))
if err != nil {
return nil, err
}
defer rows.Close()
var items []GetIntelReportCommandsRow
for rows.Next() {
var i GetIntelReportCommandsRow
if err := rows.Scan(
&i.StartsAt,
&i.EndsAt,
&i.CohortID,
&i.BinaryName,
&i.BinaryArgs,
&i.TotalInvocations,
&i.MedianDurationMs,
pq.Array(&i.AggregatedWorkingDirectories),
pq.Array(&i.AggregatedBinaryPaths),
pq.Array(&i.AggregatedGitRemoteUrls),
pq.Array(&i.AggregatedExitCodes),
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const getIntelReportGitRemotes = `-- name: GetIntelReportGitRemotes :many
SELECT
starts_at,
ends_at,
cohort_id,
git_remote_url::text,
PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY median_duration_ms) AS median_duration_ms,
SUM(total_invocations) AS total_invocations
FROM
intel_invocation_summaries,
LATERAL jsonb_each_text(git_remote_urls) AS git_urls(git_remote_url, invocations)
WHERE
starts_at >= $1
AND
(CARDINALITY($2 :: uuid []) = 0 OR cohort_id = ANY($2 :: uuid []))
GROUP BY
starts_at,
ends_at,
cohort_id,
git_remote_url
`
type GetIntelReportGitRemotesParams struct {
StartsAt time.Time `db:"starts_at" json:"starts_at"`
CohortIds []uuid.UUID `db:"cohort_ids" json:"cohort_ids"`
}
type GetIntelReportGitRemotesRow struct {
StartsAt time.Time `db:"starts_at" json:"starts_at"`
EndsAt time.Time `db:"ends_at" json:"ends_at"`
CohortID uuid.UUID `db:"cohort_id" json:"cohort_id"`
GitRemoteUrl string `db:"git_remote_url" json:"git_remote_url"`
MedianDurationMs float64 `db:"median_duration_ms" json:"median_duration_ms"`
TotalInvocations int64 `db:"total_invocations" json:"total_invocations"`
}
// Get the invocation counts and median durations of commands
// run in directories associated with a given git remote URL.
func (q *sqlQuerier) GetIntelReportGitRemotes(ctx context.Context, arg GetIntelReportGitRemotesParams) ([]GetIntelReportGitRemotesRow, error) {
rows, err := q.db.QueryContext(ctx, getIntelReportGitRemotes, arg.StartsAt, pq.Array(arg.CohortIds))
if err != nil {
return nil, err
}
defer rows.Close()
var items []GetIntelReportGitRemotesRow
for rows.Next() {
var i GetIntelReportGitRemotesRow
if err := rows.Scan(
&i.StartsAt,
&i.EndsAt,
&i.CohortID,
&i.GitRemoteUrl,
&i.MedianDurationMs,
&i.TotalInvocations,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
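Both report queries share the (CARDINALITY(cohort_ids) = 0 OR cohort_id = ANY(cohort_ids)) guard, which is what lets an empty filter mean "all cohorts". A standalone sketch of the idiom:

-- An empty uuid[] disables the predicate entirely:
SELECT CARDINALITY(ARRAY[]::uuid[]) = 0 AS no_filter; -- => true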
const insertIntelInvocations = `-- name: InsertIntelInvocations :exec
INSERT INTO intel_invocations (
created_at, machine_id, user_id, id, binary_hash, binary_path, binary_args,
created_at, machine_id, user_id, id, binary_name, binary_hash, binary_path, binary_args,
binary_version, working_directory, git_remote_url, exit_code, duration_ms)
SELECT
$1 :: timestamptz as created_at,
$2 :: uuid as machine_id,
$3 :: uuid as user_id,
unnest($4 :: uuid[ ]) as id,
unnest($5 :: text[ ]) as binary_hash,
unnest($6 :: text[ ]) as binary_path,
unnest($5 :: text[ ]) as binary_name,
unnest($6 :: text[ ]) as binary_hash,
unnest($7 :: text[ ]) as binary_path,
-- This has to be jsonb because PostgreSQL does not support parsing
-- multi-dimensional multi-length arrays!
jsonb_array_elements($7 :: jsonb) as binary_args,
unnest($8 :: text[ ]) as binary_version,
unnest($9 :: text[ ]) as working_directory,
unnest($10 :: text[ ]) as git_remote_url,
unnest($11 :: int [ ]) as exit_code,
unnest($12 :: int[ ]) as duration_ms
jsonb_array_elements($8 :: jsonb) as binary_args,
unnest($9 :: text[ ]) as binary_version,
unnest($10 :: text[ ]) as working_directory,
unnest($11 :: text[ ]) as git_remote_url,
unnest($12 :: int [ ]) as exit_code,
unnest($13 :: int[ ]) as duration_ms
`
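The insert above leans on two PostgreSQL behaviors: since v10, multiple set-returning functions in a SELECT list expand in lockstep, and jsonb_array_elements lets the ragged binary_args arrays ride alongside the flat unnest() columns. A minimal sketch:

-- Flat columns come from unnest(); ragged args come from jsonb:
SELECT
    unnest(ARRAY['go', 'git']) AS binary_name,
    jsonb_array_elements('[["build"], ["commit", "-m", "hi"]]'::jsonb) AS binary_args;
-- row 1: go  | ["build"]
-- row 2: git | ["commit", "-m", "hi"]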
type InsertIntelInvocationsParams struct {
@@ -3165,6 +3353,7 @@ type InsertIntelInvocationsParams struct {
MachineID uuid.UUID `db:"machine_id" json:"machine_id"`
UserID uuid.UUID `db:"user_id" json:"user_id"`
ID []uuid.UUID `db:"id" json:"id"`
BinaryName []string `db:"binary_name" json:"binary_name"`
BinaryHash []string `db:"binary_hash" json:"binary_hash"`
BinaryPath []string `db:"binary_path" json:"binary_path"`
BinaryArgs json.RawMessage `db:"binary_args" json:"binary_args"`
@@ -3182,6 +3371,7 @@ func (q *sqlQuerier) InsertIntelInvocations(ctx context.Context, arg InsertIntel
arg.MachineID,
arg.UserID,
pq.Array(arg.ID),
pq.Array(arg.BinaryName),
pq.Array(arg.BinaryHash),
pq.Array(arg.BinaryPath),
arg.BinaryArgs,
@@ -3269,6 +3459,124 @@ func (q *sqlQuerier) UpsertIntelCohort(ctx context.Context, arg UpsertIntelCohor
return i, err
}
const upsertIntelInvocationSummaries = `-- name: UpsertIntelInvocationSummaries :exec
WITH machine_cohorts AS (
SELECT
m.id AS machine_id,
c.id AS cohort_id
FROM intel_machines m
JOIN intel_cohorts c ON
m.operating_system ~ c.regex_operating_system AND
m.operating_system_platform ~ c.regex_operating_system_platform AND
m.operating_system_version ~ c.regex_operating_system_version AND
m.architecture ~ c.regex_architecture AND
m.instance_id ~ c.regex_instance_id
),
invocations_with_cohorts AS (
SELECT
i.id, i.created_at, i.machine_id, i.user_id, i.binary_hash, i.binary_name, i.binary_path, i.binary_args, i.binary_version, i.working_directory, i.git_remote_url, i.exit_code, i.duration_ms,
-- Truncate the created_at timestamp to the nearest 15 minute interval
date_trunc('minute', i.created_at)
- INTERVAL '1 minute' * (EXTRACT(MINUTE FROM i.created_at)::integer % 15) AS truncated_created_at,
mc.cohort_id
FROM intel_invocations i
JOIN machine_cohorts mc ON i.machine_id = mc.machine_id
),
invocation_working_dirs AS (
SELECT
truncated_created_at,
cohort_id,
binary_name,
binary_args,
working_directory,
COUNT(*) as count
FROM invocations_with_cohorts
GROUP BY truncated_created_at, cohort_id, binary_name, binary_args, working_directory
),
invocation_binary_paths AS (
SELECT
truncated_created_at,
cohort_id,
binary_name,
binary_args,
binary_path,
COUNT(*) as count
FROM invocations_with_cohorts
GROUP BY truncated_created_at, cohort_id, binary_name, binary_args, binary_path
),
invocation_git_remote_urls AS (
SELECT
truncated_created_at,
cohort_id,
binary_name,
binary_args,
git_remote_url,
COUNT(*) as count
FROM invocations_with_cohorts
GROUP BY truncated_created_at, cohort_id, binary_name, binary_args, git_remote_url
),
invocation_exit_codes AS (
SELECT
truncated_created_at,
cohort_id,
binary_name,
binary_args,
exit_code,
COUNT(*) as count
FROM invocations_with_cohorts
GROUP BY truncated_created_at, cohort_id, binary_name, binary_args, exit_code
),
aggregated AS (
SELECT
invocations_with_cohorts.truncated_created_at,
invocations_with_cohorts.cohort_id,
invocations_with_cohorts.binary_name,
invocations_with_cohorts.binary_args,
jsonb_object_agg(invocation_working_dirs.working_directory, invocation_working_dirs.count) AS working_directories,
jsonb_object_agg(invocation_git_remote_urls.git_remote_url, invocation_git_remote_urls.count) AS git_remote_urls,
jsonb_object_agg(invocation_exit_codes.exit_code, invocation_exit_codes.count) AS exit_codes,
jsonb_object_agg(invocation_binary_paths.binary_path, invocation_binary_paths.count) AS binary_paths,
COUNT(DISTINCT machine_id) as unique_machines,
COUNT(DISTINCT id) as total_invocations,
PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY duration_ms) AS median_duration_ms
FROM invocations_with_cohorts
JOIN invocation_working_dirs USING (truncated_created_at, cohort_id, binary_name, binary_args)
JOIN invocation_git_remote_urls USING (truncated_created_at, cohort_id, binary_name, binary_args)
JOIN invocation_exit_codes USING (truncated_created_at, cohort_id, binary_name, binary_args)
JOIN invocation_binary_paths USING (truncated_created_at, cohort_id, binary_name, binary_args)
GROUP BY
invocations_with_cohorts.truncated_created_at,
invocations_with_cohorts.cohort_id,
invocations_with_cohorts.binary_name,
invocations_with_cohorts.binary_args
),
saved AS (
INSERT INTO intel_invocation_summaries (id, cohort_id, starts_at, ends_at, binary_name, binary_args, binary_paths, working_directories, git_remote_urls, exit_codes, unique_machines, total_invocations, median_duration_ms)
SELECT
gen_random_uuid(),
cohort_id,
truncated_created_at,
truncated_created_at + INTERVAL '15 minutes' AS ends_at, -- Add 15 minutes to starts_at
binary_name,
binary_args,
binary_paths,
working_directories,
git_remote_urls,
exit_codes,
unique_machines,
total_invocations,
median_duration_ms
FROM aggregated
)
DELETE FROM intel_invocations
WHERE id IN (SELECT id FROM invocations_with_cohorts)
`
func (q *sqlQuerier) UpsertIntelInvocationSummaries(ctx context.Context) error {
_, err := q.db.ExecContext(ctx, upsertIntelInvocationSummaries)
return err
}
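The machine_cohorts CTE in the query above pairs machines with cohorts via POSIX regex matching (~), which is why the test cohort uses '.*' for every field to capture all machines. In isolation:

SELECT 'linux' ~ '.*'              AS any_os,       -- => true
       'amd64' ~ '^(amd64|arm64)$' AS allowed_arch; -- => true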
const upsertIntelMachine = `-- name: UpsertIntelMachine :one
INSERT INTO intel_machines (id, created_at, updated_at, instance_id, organization_id, user_id, ip_address, hostname, operating_system, operating_system_platform, operating_system_version, cpu_cores, memory_mb_total, architecture, daemon_version)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15)


@@ -40,13 +40,14 @@ INSERT INTO intel_machines (id, created_at, updated_at, instance_id, organizatio
-- name: InsertIntelInvocations :exec
-- Insert many invocations using unnest
INSERT INTO intel_invocations (
created_at, machine_id, user_id, id, binary_hash, binary_path, binary_args,
created_at, machine_id, user_id, id, binary_name, binary_hash, binary_path, binary_args,
binary_version, working_directory, git_remote_url, exit_code, duration_ms)
SELECT
@created_at :: timestamptz as created_at,
@machine_id :: uuid as machine_id,
@user_id :: uuid as user_id,
unnest(@id :: uuid[ ]) as id,
unnest(@binary_name :: text[ ]) as binary_name,
unnest(@binary_hash :: text[ ]) as binary_hash,
unnest(@binary_path :: text[ ]) as binary_path,
-- This has to be jsonb because PostgreSQL does not support parsing
@@ -102,3 +103,164 @@ GROUP BY
binary_path, binary_args
ORDER BY
median_duration DESC;
-- name: UpsertIntelInvocationSummaries :exec
WITH machine_cohorts AS (
SELECT
m.id AS machine_id,
c.id AS cohort_id
FROM intel_machines m
JOIN intel_cohorts c ON
m.operating_system ~ c.regex_operating_system AND
m.operating_system_platform ~ c.regex_operating_system_platform AND
m.operating_system_version ~ c.regex_operating_system_version AND
m.architecture ~ c.regex_architecture AND
m.instance_id ~ c.regex_instance_id
),
invocations_with_cohorts AS (
SELECT
i.*,
-- Truncate the created_at timestamp to the nearest 15 minute interval
date_trunc('minute', i.created_at)
- INTERVAL '1 minute' * (EXTRACT(MINUTE FROM i.created_at)::integer % 15) AS truncated_created_at,
mc.cohort_id
FROM intel_invocations i
JOIN machine_cohorts mc ON i.machine_id = mc.machine_id
),
invocation_working_dirs AS (
SELECT
truncated_created_at,
cohort_id,
binary_name,
binary_args,
working_directory,
COUNT(*) as count
FROM invocations_with_cohorts
GROUP BY truncated_created_at, cohort_id, binary_name, binary_args, working_directory
),
invocation_binary_paths AS (
SELECT
truncated_created_at,
cohort_id,
binary_name,
binary_args,
binary_path,
COUNT(*) as count
FROM invocations_with_cohorts
GROUP BY truncated_created_at, cohort_id, binary_name, binary_args, binary_path
),
invocation_git_remote_urls AS (
SELECT
truncated_created_at,
cohort_id,
binary_name,
binary_args,
git_remote_url,
COUNT(*) as count
FROM invocations_with_cohorts
GROUP BY truncated_created_at, cohort_id, binary_name, binary_args, git_remote_url
),
invocation_exit_codes AS (
SELECT
truncated_created_at,
cohort_id,
binary_name,
binary_args,
exit_code,
COUNT(*) as count
FROM invocations_with_cohorts
GROUP BY truncated_created_at, cohort_id, binary_name, binary_args, exit_code
),
aggregated AS (
SELECT
invocations_with_cohorts.truncated_created_at,
invocations_with_cohorts.cohort_id,
invocations_with_cohorts.binary_name,
invocations_with_cohorts.binary_args,
jsonb_object_agg(invocation_working_dirs.working_directory, invocation_working_dirs.count) AS working_directories,
jsonb_object_agg(invocation_git_remote_urls.git_remote_url, invocation_git_remote_urls.count) AS git_remote_urls,
jsonb_object_agg(invocation_exit_codes.exit_code, invocation_exit_codes.count) AS exit_codes,
jsonb_object_agg(invocation_binary_paths.binary_path, invocation_binary_paths.count) AS binary_paths,
COUNT(DISTINCT machine_id) as unique_machines,
COUNT(DISTINCT id) as total_invocations,
PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY duration_ms) AS median_duration_ms
FROM invocations_with_cohorts
JOIN invocation_working_dirs USING (truncated_created_at, cohort_id, binary_name, binary_args)
JOIN invocation_git_remote_urls USING (truncated_created_at, cohort_id, binary_name, binary_args)
JOIN invocation_exit_codes USING (truncated_created_at, cohort_id, binary_name, binary_args)
JOIN invocation_binary_paths USING (truncated_created_at, cohort_id, binary_name, binary_args)
GROUP BY
invocations_with_cohorts.truncated_created_at,
invocations_with_cohorts.cohort_id,
invocations_with_cohorts.binary_name,
invocations_with_cohorts.binary_args
),
saved AS (
INSERT INTO intel_invocation_summaries (id, cohort_id, starts_at, ends_at, binary_name, binary_args, binary_paths, working_directories, git_remote_urls, exit_codes, unique_machines, total_invocations, median_duration_ms)
SELECT
gen_random_uuid(),
cohort_id,
truncated_created_at,
truncated_created_at + INTERVAL '15 minutes' AS ends_at, -- Add 15 minutes to starts_at
binary_name,
binary_args,
binary_paths,
working_directories,
git_remote_urls,
exit_codes,
unique_machines,
total_invocations,
median_duration_ms
FROM aggregated
)
DELETE FROM intel_invocations
WHERE id IN (SELECT id FROM invocations_with_cohorts);
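The truncation arithmetic above rounds each created_at down to its 15-minute window: date_trunc drops the seconds, and the modulo subtracts the minutes past the quarter hour. A standalone sketch:

SELECT date_trunc('minute', ts)
     - INTERVAL '1 minute' * (EXTRACT(MINUTE FROM ts)::integer % 15) AS bucket
FROM (VALUES (TIMESTAMPTZ '2024-05-01 00:06:24+00')) AS t(ts);
-- => 2024-05-01 00:00:00+00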
-- name: GetIntelInvocationSummaries :many
SELECT * FROM intel_invocation_summaries;
-- name: GetIntelReportGitRemotes :many
-- Get the invocation counts and median durations of commands
-- run in directories associated with a given git remote URL.
SELECT
starts_at,
ends_at,
cohort_id,
git_remote_url::text,
PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY median_duration_ms) AS median_duration_ms,
SUM(total_invocations) AS total_invocations
FROM
intel_invocation_summaries,
LATERAL jsonb_each_text(git_remote_urls) AS git_urls(git_remote_url, invocations)
WHERE
starts_at >= @starts_at
AND
(CARDINALITY(@cohort_ids :: uuid []) = 0 OR cohort_id = ANY(@cohort_ids :: uuid []))
GROUP BY
starts_at,
ends_at,
cohort_id,
git_remote_url;
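Because git_remote_urls is stored as a {url: count} jsonb object, the LATERAL jsonb_each_text above fans each summary row out into one row per URL. In isolation:

SELECT key AS git_remote_url, value::bigint AS invocations
FROM jsonb_each_text('{"https://github.com/coder/coder": 42}'::jsonb);
-- => https://github.com/coder/coder | 42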
-- name: GetIntelReportCommands :many
SELECT
starts_at,
ends_at,
cohort_id,
binary_name,
binary_args,
SUM(total_invocations) AS total_invocations,
PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY median_duration_ms) AS median_duration_ms,
array_agg(working_directories):: jsonb [] AS aggregated_working_directories,
array_agg(binary_paths):: jsonb [] AS aggregated_binary_paths,
array_agg(git_remote_urls):: jsonb [] AS aggregated_git_remote_urls,
array_agg(exit_codes):: jsonb [] AS aggregated_exit_codes
FROM
intel_invocation_summaries
WHERE
starts_at >= @starts_at
AND
(CARDINALITY(@cohort_ids :: uuid []) = 0 OR cohort_id = ANY(@cohort_ids :: uuid []))
GROUP BY
starts_at, ends_at, cohort_id, binary_name, binary_args;
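Both report queries compute their duration with an ordered-set aggregate over the per-interval medians; note this is a median of medians, an approximation of the true per-command median. PERCENTILE_CONT interpolates between the two middle values when a group has an even count. A tiny example:

SELECT PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY v) AS median
FROM unnest(ARRAY[5, 15, 15, 40]) AS t(v);
-- => 15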


@@ -2,14 +2,17 @@ package coderd
import (
"context"
"encoding/json"
"io"
"net"
"net/http"
"strconv"
"time"
"github.com/google/uuid"
"github.com/hashicorp/yamux"
"github.com/sqlc-dev/pqtype"
"golang.org/x/sync/errgroup"
"golang.org/x/xerrors"
"nhooyr.io/websocket"
"storj.io/drpc/drpcmux"
@@ -25,6 +28,174 @@ import (
"github.com/coder/coder/v2/inteld/proto"
)
func (api *API) intelReport(rw http.ResponseWriter, r *http.Request) {
ctx := r.Context()
var req codersdk.IntelReportRequest
q := r.URL.Query()
rawCohortIDs := q["cohort_id"]
req.CohortIDs = make([]uuid.UUID, 0, len(rawCohortIDs))
for _, rawCohortID := range rawCohortIDs {
cohortID, err := uuid.Parse(rawCohortID)
if err != nil {
httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
Message: "Invalid cohort ID.",
Detail: err.Error(),
})
return
}
req.CohortIDs = append(req.CohortIDs, cohortID)
}
var err error
req.StartsAt, err = time.Parse(time.DateOnly, q.Get("starts_at"))
if err != nil {
httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
Message: "Invalid starts_at.",
Detail: err.Error(),
})
return
}
var eg errgroup.Group
var report codersdk.IntelReport
eg.Go(func() error {
rows, err := api.Database.GetIntelReportGitRemotes(ctx, database.GetIntelReportGitRemotesParams{
StartsAt: req.StartsAt,
CohortIds: req.CohortIDs,
})
if err != nil {
return err
}
reportByRemote := make(map[string]codersdk.IntelReportGitRemote, len(rows))
for _, row := range rows {
gitRemote, ok := reportByRemote[row.GitRemoteUrl]
if !ok {
var externalAuthConfigID *string
for _, extAuth := range api.ExternalAuthConfigs {
if extAuth.Regex.MatchString(row.GitRemoteUrl) {
externalAuthConfigID = &extAuth.ID
break
}
}
gitRemote = codersdk.IntelReportGitRemote{
URL: row.GitRemoteUrl,
ExternalAuthProviderID: externalAuthConfigID,
}
}
gitRemote.Invocations += row.TotalInvocations
gitRemote.Intervals = append(gitRemote.Intervals, codersdk.IntelReportInvocationInterval{
StartsAt: row.StartsAt,
EndsAt: row.EndsAt,
Invocations: row.TotalInvocations,
MedianDurationMS: row.MedianDurationMs,
CohortID: row.CohortID,
})
reportByRemote[row.GitRemoteUrl] = gitRemote
}
for _, gitRemote := range reportByRemote {
report.GitRemotes = append(report.GitRemotes, gitRemote)
}
return nil
})
eg.Go(func() error {
rows, err := api.Database.GetIntelReportCommands(ctx, database.GetIntelReportCommandsParams{
StartsAt: req.StartsAt,
CohortIds: req.CohortIDs,
})
if err != nil {
return err
}
reportByBinary := make(map[string]codersdk.IntelReportCommand, len(rows))
for _, row := range rows {
// Just index by this for simplicity on lookup.
binaryID := string(append([]byte(row.BinaryName), row.BinaryArgs...))
command, ok := reportByBinary[binaryID]
if !ok {
command = codersdk.IntelReportCommand{
BinaryName: row.BinaryName,
// Initialize the maps so the merge loops below don't write to nil maps.
ExitCodes: map[int]int64{},
BinaryPaths: map[string]int64{},
WorkingDirectories: map[string]int64{},
GitRemoteURLs: map[string]int64{},
}
err = json.Unmarshal(row.BinaryArgs, &command.BinaryArgs)
if err != nil {
return err
}
}
command.Invocations += row.TotalInvocations
// Merge exit codes
for _, exitCodeRaw := range row.AggregatedExitCodes {
// Decode each chunk into a fresh map so keys from earlier
// chunks are not counted again.
exitCodes := map[string]int64{}
err = json.Unmarshal(exitCodeRaw, &exitCodes)
if err != nil {
return err
}
for exitCodeRaw, invocations := range exitCodes {
exitCode, err := strconv.Atoi(exitCodeRaw)
if err != nil {
return err
}
command.ExitCodes[exitCode] += invocations
}
}
// Merge binary paths
for _, binaryPathRaw := range row.AggregatedBinaryPaths {
binaryPaths := map[string]int64{}
err = json.Unmarshal(binaryPathRaw, &binaryPaths)
if err != nil {
return err
}
for binaryPath, invocations := range binaryPaths {
command.BinaryPaths[binaryPath] += invocations
}
}
// Merge working directories
for _, workingDirectoryRaw := range row.AggregatedWorkingDirectories {
workingDirectories := map[string]int64{}
err = json.Unmarshal(workingDirectoryRaw, &workingDirectories)
if err != nil {
return err
}
for workingDirectory, invocations := range workingDirectories {
command.WorkingDirectories[workingDirectory] += invocations
}
}
// Merge git remote URLs
for _, gitRemoteURLRaw := range row.AggregatedGitRemoteUrls {
gitRemoteURLs := map[string]int64{}
err = json.Unmarshal(gitRemoteURLRaw, &gitRemoteURLs)
if err != nil {
return err
}
for gitRemoteURL, invocations := range gitRemoteURLs {
command.GitRemoteURLs[gitRemoteURL] += invocations
}
}
command.Intervals = append(command.Intervals, codersdk.IntelReportInvocationInterval{
StartsAt: row.StartsAt,
EndsAt: row.EndsAt,
Invocations: row.TotalInvocations,
MedianDurationMS: row.MedianDurationMs,
CohortID: row.CohortID,
})
reportByBinary[binaryID] = command
}
for _, command := range reportByBinary {
report.Commands = append(report.Commands, command)
report.Invocations += command.Invocations
}
return nil
})
err = eg.Wait()
if err != nil {
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
Message: "Internal error getting intel report.",
Detail: err.Error(),
})
return
}
httpapi.Write(ctx, rw, http.StatusOK, report)
}
// intelMachines returns all machines that match the given filters.
//
// @Summary List intel machines


@@ -4,6 +4,7 @@ import (
"context"
"encoding/json"
"errors"
"path/filepath"
"sync"
"time"
@@ -91,6 +92,7 @@ func (s *server) invocationQueueLoop() {
for {
err := s.invocationQueue.startFlushLoop(s.closeContext, func(i []*proto.Invocation) error {
ids := make([]uuid.UUID, 0)
binaryNames := make([]string, 0, len(i))
binaryHashes := make([]string, 0, len(i))
binaryPaths := make([]string, 0, len(i))
binaryArgs := make([]json.RawMessage, 0, len(i))
@@ -102,6 +104,8 @@
for _, invocation := range i {
ids = append(ids, uuid.New())
binaryNames = append(binaryNames, filepath.Base(invocation.Executable.Path))
binaryHashes = append(binaryHashes, invocation.Executable.Hash)
binaryPaths = append(binaryPaths, invocation.Executable.Path)
argsData, _ := json.Marshal(invocation.Arguments)
@@ -119,6 +123,7 @@
CreatedAt: dbtime.Now(),
MachineID: s.MachineID,
UserID: s.UserID,
BinaryName: binaryNames,
BinaryHash: binaryHashes,
BinaryPath: binaryPaths,
BinaryArgs: binaryArgsData,


@@ -197,3 +197,49 @@ func (c *Client) ServeIntelDaemon(ctx context.Context, req ServeIntelDaemonReque
}
return proto.NewDRPCIntelDaemonClient(drpc.MultiplexedConn(session)), nil
}
// IntelReportRequest returns a report of invocations for a cohort.
type IntelReportRequest struct {
StartsAt time.Time `json:"starts_at" format:"date-time"`
// CohortIDs is a list of cohort IDs to report on.
// If empty, all cohorts will be reported on.
CohortIDs []uuid.UUID `json:"cohort_ids"`
}
type IntelReport struct {
Invocations int64 `json:"invocations"`
Commands []IntelReportCommand `json:"commands"`
GitRemotes []IntelReportGitRemote `json:"git_remotes"`
}
// IntelReportInvocationInterval reports the invocation interval for a duration.
type IntelReportInvocationInterval struct {
CohortID uuid.UUID `json:"cohort_id" format:"uuid"`
StartsAt time.Time `json:"starts_at" format:"date-time"`
EndsAt time.Time `json:"ends_at" format:"date-time"`
Invocations int64 `json:"invocations"`
MedianDurationMS float64 `json:"median_duration_ms"`
}
// IntelReportGitRemote reports the Git remote URL execution time
// across all invocations.
type IntelReportGitRemote struct {
URL string `json:"url"`
ExternalAuthProviderID *string `json:"external_auth_provider_id"`
Invocations int64 `json:"invocations"`
Intervals []IntelReportInvocationInterval `json:"intervals"`
}
type IntelReportCommand struct {
BinaryName string `json:"binary_name"`
BinaryArgs []string `json:"binary_args"`
Invocations int64 `json:"invocations"`
Intervals []IntelReportInvocationInterval `json:"intervals"`
// ExitCodes maps exit codes to the number of invocations.
ExitCodes map[int]int64 `json:"exit_codes"`
GitRemoteURLs map[string]int64 `json:"git_remote_urls"`
WorkingDirectories map[string]int64 `json:"working_directories"`
BinaryPaths map[string]int64 `json:"binary_paths"`
}
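For illustration, a sketch that walks an IntelReport and prints per-command totals. Only the types above are confirmed by this diff; the import path is an assumption, and the sample report is constructed inline rather than fetched from the API:

```go
package main

import (
	"fmt"

	"github.com/coder/coder/v2/codersdk" // assumed import path for the types above
)

func summarize(report codersdk.IntelReport) {
	fmt.Printf("total invocations: %d\n", report.Invocations)
	for _, cmd := range report.Commands {
		fmt.Printf("%s: %d invocations\n", cmd.BinaryName, cmd.Invocations)
		// ExitCodes maps each exit code to its invocation count.
		for code, count := range cmd.ExitCodes {
			fmt.Printf("  exit %d: %d\n", code, count)
		}
	}
}

func main() {
	summarize(codersdk.IntelReport{
		Invocations: 5,
		Commands: []codersdk.IntelReportCommand{{
			BinaryName:  "git",
			Invocations: 5,
			ExitCodes:   map[int]int64{0: 4, 1: 1},
		}},
	})
}
```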

docs/api/insights.md generated

@ -1,42 +1,5 @@
# Insights
## Get deployment DAUs
### Code samples
```shell
# Example request using curl
curl -X GET http://coder-server:8080/api/v2/insights/daus \
-H 'Accept: application/json' \
-H 'Coder-Session-Token: API_KEY'
```
`GET /insights/daus`
### Example responses
> 200 Response
```json
{
"entries": [
{
"amount": 0,
"date": "string"
}
],
"tz_hour_offset": 0
}
```
### Responses
| Status | Meaning | Description | Schema |
| ------ | ------------------------------------------------------- | ----------- | -------------------------------------------------------- |
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.DAUsResponse](schemas.md#codersdkdausresponse) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
## Get insights about templates
### Code samples

docs/api/intel.md generated Normal file
@ -0,0 +1,61 @@
# Intel
## List intel machines
### Code samples
```shell
# Example request using curl
curl -X GET http://coder-server:8080/api/v2/insights/daus \
-H 'Accept: application/json' \
-H 'Coder-Session-Token: API_KEY'
```
`GET /insights/daus`
### Parameters
| Name | In | Type | Required | Description |
| --------------------------- | ----- | ------- | -------- | ---------------------------------------------------------- |
| `limit` | query | integer | false | Page limit |
| `offset` | query | integer | false | Page offset |
| `operating_system` | query | string | false | Regex to match a machine operating system against |
| `operating_system_platform` | query | string | false | Regex to match a machine operating system platform against |
| `operating_system_version` | query | string | false | Regex to match a machine operating system version against |
| `architecture` | query | string | false | Regex to match a machine architecture against |
| `instance_id` | query | string | false | Regex to match a machine instance ID against |
### Example responses
> 200 Response
```json
{
"count": 0,
"intel_machines": [
{
"architecture": "string",
"cpu_cores": 0,
"created_at": "2019-08-24T14:15:22Z",
"hostname": "string",
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
"instance_id": "string",
"memory_mb_total": 0,
"operating_system": "string",
"operating_system_platform": "string",
"operating_system_version": "string",
"organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6",
"updated_at": "2019-08-24T14:15:22Z",
"user_id": "a169451c-8525-4352-b8ca-070dd449a1a5"
}
]
}
```
### Responses
| Status | Meaning | Description | Schema |
| ------ | ------------------------------------------------------- | ----------- | -------------------------------------------------------------------------- |
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.IntelMachinesResponse](schemas.md#codersdkintelmachinesresponse) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
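As a rough client-side sketch using only the Go standard library: the host and session token are placeholders, the route and query parameters are the ones documented above, and the regex values are examples.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	// Route as documented above; host and token are placeholders.
	u, err := url.Parse("http://coder-server:8080/api/v2/insights/daus")
	if err != nil {
		panic(err)
	}
	q := u.Query()
	q.Set("limit", "25")
	q.Set("offset", "0")
	// Filters are regular expressions matched against machine fields.
	q.Set("operating_system", "^linux$")
	q.Set("architecture", "amd64|arm64")
	u.RawQuery = q.Encode()

	req, err := http.NewRequest(http.MethodGet, u.String(), nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Accept", "application/json")
	req.Header.Set("Coder-Session-Token", "API_KEY")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}
```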

docs/api/schemas.md generated

@ -3040,6 +3040,76 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o
| `day` |
| `week` |
## codersdk.IntelMachine
```json
{
"architecture": "string",
"cpu_cores": 0,
"created_at": "2019-08-24T14:15:22Z",
"hostname": "string",
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
"instance_id": "string",
"memory_mb_total": 0,
"operating_system": "string",
"operating_system_platform": "string",
"operating_system_version": "string",
"organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6",
"updated_at": "2019-08-24T14:15:22Z",
"user_id": "a169451c-8525-4352-b8ca-070dd449a1a5"
}
```
### Properties
| Name | Type | Required | Restrictions | Description |
| --------------------------- | ------- | -------- | ------------ | ----------- |
| `architecture` | string | false | | |
| `cpu_cores` | integer | false | | |
| `created_at` | string | false | | |
| `hostname` | string | false | | |
| `id` | string | false | | |
| `instance_id` | string | false | | |
| `memory_mb_total` | integer | false | | |
| `operating_system` | string | false | | |
| `operating_system_platform` | string | false | | |
| `operating_system_version` | string | false | | |
| `organization_id` | string | false | | |
| `updated_at` | string | false | | |
| `user_id` | string | false | | |
## codersdk.IntelMachinesResponse
```json
{
"count": 0,
"intel_machines": [
{
"architecture": "string",
"cpu_cores": 0,
"created_at": "2019-08-24T14:15:22Z",
"hostname": "string",
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
"instance_id": "string",
"memory_mb_total": 0,
"operating_system": "string",
"operating_system_platform": "string",
"operating_system_version": "string",
"organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6",
"updated_at": "2019-08-24T14:15:22Z",
"user_id": "a169451c-8525-4352-b8ca-070dd449a1a5"
}
]
}
```
### Properties
| Name | Type | Required | Restrictions | Description |
| ---------------- | ------------------------------------------------------- | -------- | ------------ | ----------- |
| `count` | integer | false | | |
| `intel_machines` | array of [codersdk.IntelMachine](#codersdkintelmachine) | false | | |
## codersdk.IssueReconnectingPTYSignedTokenRequest
```json


@ -572,6 +572,10 @@
"title": "Insights",
"path": "./api/insights.md"
},
{
"title": "Intel",
"path": "./api/intel.md"
},
{
"title": "Members",
"path": "./api/members.md"


@ -649,6 +649,7 @@ export interface IntelMachine {
readonly instance_id: string;
readonly hostname: string;
readonly operating_system: string;
readonly operating_system_platform: string;
readonly operating_system_version: string;
readonly cpu_cores: number;
readonly memory_mb_total: number;
@ -667,6 +668,48 @@ export interface IntelMachinesResponse {
readonly count: number;
}
// From codersdk/intel.go
export interface IntelReport {
readonly invocations: number;
readonly commands: readonly IntelReportCommand[];
readonly git_remotes: readonly IntelReportGitRemote[];
}
// From codersdk/intel.go
export interface IntelReportCommand {
readonly binary_name: string;
readonly binary_args: readonly string[];
readonly invocations: number;
readonly intervals: readonly IntelReportInvocationInterval[];
readonly exit_codes: Record<number, number>;
readonly git_remote_urls: Record<string, number>;
readonly working_directories: Record<string, number>;
readonly binary_paths: Record<string, number>;
}
// From codersdk/intel.go
export interface IntelReportGitRemote {
readonly url: string;
readonly external_auth_provider_id?: string;
readonly invocations: number;
readonly intervals: readonly IntelReportInvocationInterval[];
}
// From codersdk/intel.go
export interface IntelReportInvocationInterval {
readonly cohort_id: string;
readonly starts_at: string;
readonly ends_at: string;
readonly invocations: number;
readonly median_duration_ms: number;
}
// From codersdk/intel.go
export interface IntelReportRequest {
readonly starts_at: string;
readonly cohort_ids: readonly string[];
}
// From codersdk/workspaceagents.go
export interface IssueReconnectingPTYSignedTokenRequest {
readonly url: string;