From c39b9b157a516171f20417e8fed9a08916605ca9 Mon Sep 17 00:00:00 2001 From: Nick Gerakines Date: Thu, 26 Mar 2020 10:36:43 -0400 Subject: [PATCH] Refreshed init migrations. Created docker-compose. Added migrations to binary. Added translations to binary. --- .gitignore | 6 + CHANGELOG.md | 6 + Dockerfile | 3 +- README.md | 11 + config/i18n.go | 25 ++- docker-compose.yml | 61 ++++++ errors/errors_generated.go | 2 +- errors/errors_generated_test.go | 2 +- gennonprod.go | 2 + genprod.go | 2 + init.sql | 2 + migrations/20200308144825_init.down.sql | 2 +- migrations/20200308144825_init.up.sql | 268 +++++++++++++----------- migrations/command.go | 15 +- schema.sql | 259 +++++++++++++++++++++++ start/command.go | 177 ++++++++-------- storage/actor.go | 24 +-- storage/actor_id.go | 4 - storage/network.go | 14 +- web/handler_actor.go | 26 +-- web/handler_feed.go | 2 +- web/handler_network.go | 11 +- 22 files changed, 663 insertions(+), 261 deletions(-) create mode 100644 docker-compose.yml create mode 100644 init.sql create mode 100644 schema.sql diff --git a/.gitignore b/.gitignore index dd955cf..0728bac 100644 --- a/.gitignore +++ b/.gitignore @@ -16,6 +16,8 @@ # generated files templates/assets.go +migrations/assets.go +translations/assets.go # uploaded files assets/ @@ -47,3 +49,7 @@ public/fa-solid-900.woff2 # release files dist/ + +# local development environment files +common.env +tavern-town.env \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index aa45949..8e46328 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), ## [Unreleased] +### Fixed + +- [#53 About page](https://gitlab.com/ngerakines/tavern/-/issues/53) +- [#54 Terms page](https://gitlab.com/ngerakines/tavern/-/issues/54) +- [#55 Usage page](https://gitlab.com/ngerakines/tavern/-/issues/55) + ## [0.1.0-rc1] Project created diff --git a/Dockerfile b/Dockerfile index 410f111..c4c8adf 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,6 +4,7 @@ WORKDIR /src COPY go.mod go.sum ./ RUN go mod download COPY . . +RUN go generate -tags prod ./... RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go install -ldflags "-w -s -extldflags '-static' -X main.GitCommit=$GIT_COMMIT -X main.ReleaseCode=$RELEASE_CODE -X 'main.BuildTime=$BUILD_TIME'" github.com/ngerakines/tavern/... FROM alpine:3.11 as tavern @@ -11,8 +12,6 @@ RUN apk add --no-cache --update ca-certificates tzdata RUN mkdir -p /app WORKDIR /app COPY --from=tavern-build /src/public /app/public -COPY --from=tavern-build /src/templates /app/templates -COPY --from=tavern-build /src/translations /app/translations COPY --from=tavern-build /go/bin/tavern /go/bin/ EXPOSE 5000 # HEALTHCHECK --interval=5m --timeout=3s CMD curl -f http://localhost:5000/ || exit 1 diff --git a/README.md b/README.md index 0ef0c35..9120ff3 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,17 @@ Roadmap: https://gitlab.com/ngerakines/tavern/-/wikis/Roadmap Milestones: https://gitlab.com/ngerakines/tavern/-/milestones +# Quick Start + +The quickest way to get up and running is with docker-compose. + +If you are building from source, be sure to run `docker-compose build`. + +1. docker-compose up -d db svger +2. docker-compose run web migrate +3. docker-compose run web init --admin-email=nick.gerakines@gmail.com --admin-password=asd123 --admin-name=nick --database=postgresql://postgres:password@db:5432/tavern?sslmode=disable +4. 
docker-compose up -d + # License MIT License diff --git a/config/i18n.go b/config/i18n.go index fda186b..abeccf7 100644 --- a/config/i18n.go +++ b/config/i18n.go @@ -1,23 +1,27 @@ package config import ( + "bytes" + "github.com/go-playground/locales/en" ut "github.com/go-playground/universal-translator" "github.com/urfave/cli/v2" + + "github.com/ngerakines/tavern/translations" ) var TranslationsFlag = cli.StringFlag{ - Name: "translations", - Usage: "The path translations are located", + Name: "translations", + Usage: "The path translations are located", EnvVars: []string{"TRANSLATIONS"}, - Value: "translations", + Value: "translations", } func Trans(cliCtx *cli.Context) (*ut.UniversalTranslator, error) { english := en.New() utrans := ut.New(english, english) - err := utrans.Import(ut.FormatJSON, cliCtx.String("translations")) + err := loadTranslations(utrans) if err != nil { return nil, err } @@ -29,3 +33,16 @@ func Trans(cliCtx *cli.Context) (*ut.UniversalTranslator, error) { return utrans, nil } + +func loadTranslations(utrans *ut.UniversalTranslator) error { + for _, source := range translations.AssetNames() { + data, err := translations.Asset(source) + if err != nil { + return err + } + if err = utrans.ImportByReader(ut.FormatJSON, bytes.NewReader(data)); err != nil { + return err + } + } + return nil +} diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..132222e --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,61 @@ +version: '3' +services: + + db: + restart: on-failure + image: postgres:12-alpine + networks: + - internal_network + healthcheck: + test: ["CMD", "pg_isready", "-U", "postgres"] + volumes: + - ./init.sql:/docker-entrypoint-initdb.d/10-init.sql + - ./postgres:/var/lib/postgresql/data + environment: + - POSTGRES_PASSWORD=password + + svger: + image: svger + restart: on-failure + networks: + - external_network + - internal_network + healthcheck: + test: ["CMD-SHELL", "wget -q --spider --proxy=off localhost:9100 || exit 1"] + interval: 5m + timeout: 10s + ports: + - "9100:9100" + environment: + - PORT=9100 + + web: + build: . + image: ngerakines/tavern + restart: on-failure + networks: + - external_network + - internal_network + healthcheck: + test: ["CMD-SHELL", "wget -q --spider --proxy=off localhost:9000 || exit 1"] + interval: 5m + timeout: 10s + ports: + - "127.0.0.1:9000:9000" + depends_on: + - db + - svger + env_file: + - ./common.env + - ./off-the-clock.env + environment: + - LISTEN=0.0.0.0:9000 + - DATABASE=postgresql://postgres:password@db:5432/tavern?sslmode=disable + - SVGER=http://svger:9100/ + - ASSET_STORAGE_REMOTE_DENY=* + - ALLOW_REPLY_COLLECTION_UPDATES=true + +networks: + external_network: + internal_network: + internal: true diff --git a/errors/errors_generated.go b/errors/errors_generated.go index 99aa099..7c090d8 100644 --- a/errors/errors_generated.go +++ b/errors/errors_generated.go @@ -1,5 +1,5 @@ // Code generated by go generate; DO NOT EDIT. -// This file was generated by herr at 2020-03-25 10:47:33.846856734 -0400 EDT m=+0.008849068 +// This file was generated by herr at 2020-03-26 10:29:28.140563266 -0400 EDT m=+0.008827289 package errors import ( diff --git a/errors/errors_generated_test.go b/errors/errors_generated_test.go index 17f75b9..7f59b44 100644 --- a/errors/errors_generated_test.go +++ b/errors/errors_generated_test.go @@ -1,5 +1,5 @@ // Code generated by go generate; DO NOT EDIT. 
-// This file was generated by herr at 2020-03-25 10:47:33.873538567 -0400 EDT m=+0.035530881 +// This file was generated by herr at 2020-03-26 10:29:28.16759766 -0400 EDT m=+0.035861612 package errors import ( diff --git a/gennonprod.go b/gennonprod.go index c8c9e45..00aa048 100644 --- a/gennonprod.go +++ b/gennonprod.go @@ -1,4 +1,6 @@ // +build !prod //go:generate go run github.com/go-bindata/go-bindata/go-bindata -debug -pkg templates -o templates/assets.go -ignore=.*.go templates/... +//go:generate go run github.com/go-bindata/go-bindata/go-bindata -debug -pkg migrations -o migrations/assets.go -ignore=.*.go -prefix "migrations/" migrations/... +//go:generate go run github.com/go-bindata/go-bindata/go-bindata -debug -pkg translations -o translations/assets.go -ignore=.*.go translations/... package main diff --git a/genprod.go b/genprod.go index d8c1bab..cd08a61 100644 --- a/genprod.go +++ b/genprod.go @@ -1,4 +1,6 @@ // +build prod //go:generate go run github.com/go-bindata/go-bindata/go-bindata -pkg templates -o templates/assets.go -ignore=.*.go templates/... +//go:generate go run github.com/go-bindata/go-bindata/go-bindata -pkg migrations -o migrations/assets.go -ignore=.*.go -prefix "migrations/" migrations/... +//go:generate go run github.com/go-bindata/go-bindata/go-bindata -pkg translations -o translations/assets.go -ignore=.*.go translations/... package main diff --git a/init.sql b/init.sql new file mode 100644 index 0000000..de53238 --- /dev/null +++ b/init.sql @@ -0,0 +1,2 @@ +CREATE DATABASE tavern; +CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; \ No newline at end of file diff --git a/migrations/20200308144825_init.down.sql b/migrations/20200308144825_init.down.sql index 2f22054..7dd5813 100644 --- a/migrations/20200308144825_init.down.sql +++ b/migrations/20200308144825_init.down.sql @@ -1 +1 @@ -DROP TABLE actors, followers, following, image_aliases, images, keys, object_events, objects, peers, user_activities, user_feed, user_threads, users; +DROP TABLE users, actors, peers, images, image_aliases, schema_migrations, network_graph, actor_aliases, actor_keys, objects, object_events, user_object_events, object_tags, user_feed, object_replies, object_boosts; diff --git a/migrations/20200308144825_init.up.sql b/migrations/20200308144825_init.up.sql index 7540111..1c96610 100644 --- a/migrations/20200308144825_init.up.sql +++ b/migrations/20200308144825_init.up.sql @@ -1,4 +1,32 @@ -create table if not exists actors +create table if not exists public.users +( + id uuid not null + constraint users_pkey + primary key, + email varchar(100) + constraint users_email_key + unique, + password bytea, + created_at timestamp with time zone not null, + updated_at timestamp with time zone not null, + last_auth_at timestamp with time zone, + location varchar default 'UTC'::character varying not null, + mute_email boolean default false not null, + locale varchar default 'en'::character varying not null, + public_key text not null, + private_key text not null, + name varchar not null, + display_name varchar not null, + about text not null, + accept_followers boolean default true not null, + actor_id uuid not null, + reply_collection_updates boolean default false not null +); + +create unique index if not exists users_username_uindex + on public.users (name); + +create table if not exists public.actors ( id uuid not null constraint actors_pk @@ -11,22 +39,61 @@ create table if not exists actors updated_at timestamp with time zone default now() not null ); -create table if not exists 
actor_aliases +create table if not exists public.peers ( id uuid not null - constraint actor_subjects_pk + constraint peers_pk primary key, - actor_id uuid not null, - alias varchar not null, - created_at timestamp with time zone not null, - updated_at timestamp with time zone not null, - alias_type integer default 0 not null + inbox varchar(100) not null, + created_at timestamp with time zone not null ); -create unique index if not exists actor_aliases_alias_uindex - on actor_aliases (alias); +create unique index if not exists peers_inbox_uindex + on public.peers (inbox); -create table if not exists network_graph +create table if not exists public.images +( + id uuid not null + constraint asset_image_pk + primary key, + location varchar(200) not null, + checksum varchar(200) not null, + content_type integer default 0 not null, + size integer default 0 not null, + height integer default 0 not null, + width integer default 0 not null, + blur varchar(100) not null, + created_at timestamp with time zone +); + +create unique index if not exists asset_image_checksum_uindex + on public.images (checksum); + +create unique index if not exists asset_image_location_uindex + on public.images (location); + +create table if not exists public.image_aliases +( + id uuid not null + constraint image_aliases_pk + primary key, + image_id uuid not null, + alias varchar(200) not null, + created_at timestamp with time zone not null +); + +create unique index if not exists image_aliases_pairs + on public.image_aliases (image_id, alias); + +create table if not exists public.schema_migrations +( + version bigint not null + constraint schema_migrations_pkey + primary key, + dirty boolean not null +); + +create table if not exists public.network_graph ( id uuid not null constraint network_graph_pk @@ -42,9 +109,22 @@ create table if not exists network_graph unique (user_id, actor_id, relationship_type) ); +create table if not exists public.actor_aliases +( + id uuid not null + constraint actor_subjects_pk + primary key, + actor_id uuid not null, + alias varchar not null, + created_at timestamp with time zone not null, + updated_at timestamp with time zone not null, + alias_type integer default 0 not null +); +create unique index if not exists actor_aliases_alias_uindex + on public.actor_aliases (alias); -create table if not exists actor_keys +create table if not exists public.actor_keys ( id uuid not null constraint actor_keys_pk @@ -58,148 +138,98 @@ create table if not exists actor_keys unique (actor_id, key_id) ); - - -create table if not exists image_aliases +create table if not exists public.objects ( id uuid not null - constraint image_aliases_pk primary key, - image_id uuid not null, - -- TODO: make alias a sha256 value of the alias - alias varchar(200) not null, - created_at timestamp with time zone not null -); - -create unique index if not exists image_aliases_pairs on image_aliases (image_id, alias); - -create table if not exists images -( - id uuid not null - constraint asset_image_pk primary key, - location varchar(200) not null, - checksum varchar(200) not null, - content_type integer default 0 not null, - size integer default 0 not null, - height integer default 0 not null, - width integer default 0 not null, - blur varchar(100) not null, - created_at timestamp with time zone -); - -create unique index if not exists asset_image_checksum_uindex on images (checksum); - -create unique index if not exists asset_image_location_uindex on images (location); - -create table if not exists peers -( - id uuid 
not null - constraint peers_pk primary key, - inbox varchar(100) not null, - created_at timestamp with time zone not null -); - -create unique index if not exists peers_inbox_uindex on peers (inbox); - -create table if not exists users -( - id uuid not null - constraint users_pkey primary key, - email varchar(100) - constraint users_email_key unique, - password bytea, - created_at timestamp with time zone not null, - updated_at timestamp with time zone not null, - last_auth_at timestamp with time zone, - location varchar(48) default 'UTC'::character varying not null, -- user time zone - locale varchar(8) default 'en'::character varying not null, -- user language - public_key text not null, - private_key text not null, - name varchar(36) not null, - display_name varchar(100) not null, - about text not null, - accept_followers boolean default true not null -); - -create unique index if not exists users_username_uindex on users (name); - -create table objects -( - id uuid constraint objects_pk primary key, created_at timestamp with time zone not null, updated_at timestamp with time zone not null, payload jsonb not null, object_id varchar not null + constraint objects_object_uindex + unique ); -create unique index objects_object_uindex on objects (object_id); - -ALTER TABLE objects - ADD CONSTRAINT objects_object_uindex UNIQUE USING INDEX objects_object_uindex; - -create table object_events +create table if not exists public.object_events ( - id uuid - constraint object_events_pk primary key, + id uuid not null + constraint object_events_pk + primary key, created_at timestamp with time zone not null, updated_at timestamp with time zone not null, - activity_id varchar not null, + activity_id varchar not null + constraint object_events_activity_uindex + unique, object_id uuid not null, payload jsonb not null ); -create unique index object_events_activity_uindex on object_events (activity_id); - -ALTER TABLE object_events - ADD CONSTRAINT object_events_activity_uindex UNIQUE USING INDEX object_events_activity_uindex; - -create table user_object_events +create table if not exists public.user_object_events ( - id uuid - constraint user_object_events_pk primary key, + id uuid not null + constraint user_object_events_pk + primary key, created_at timestamp with time zone not null, updated_at timestamp with time zone not null, user_id uuid not null, activity_id uuid not null, object_id uuid not null, - public bool not null default false + public boolean default false not null, + constraint user_object_events_user_activity_uindex + unique (user_id, activity_id, object_id) ); -create unique index user_object_events_user_activity_uindex on user_object_events (user_id, activity_id, object_id); - -ALTER TABLE user_object_events - ADD CONSTRAINT user_object_events_user_activity_uindex UNIQUE USING INDEX user_object_events_user_activity_uindex; - -create table user_object_tags +create table if not exists public.object_tags ( - id uuid - constraint user_object_tags_pk primary key, - created_at timestamp with time zone not null, - updated_at timestamp with time zone not null, - user_id uuid not null, - activity_id uuid not null, - object_id uuid not null, - tag varchar not null + id uuid not null + constraint object_tags_pk + primary key, + created_at timestamp with time zone not null, + updated_at timestamp with time zone not null, + object_id uuid not null, + tag varchar not null, + constraint object_tags_tagged_uindex + unique (object_id, tag) ); -create unique index user_object_tags_tagged_uindex on 
user_object_tags (user_id, object_id, tag); - -ALTER TABLE user_object_tags - ADD CONSTRAINT user_object_tags_tagged_uindex UNIQUE USING INDEX user_object_tags_tagged_uindex; - -create table user_feed +create table if not exists public.user_feed ( id uuid not null constraint user_feed_pk primary key, + created_at timestamp with time zone not null, + updated_at timestamp with time zone not null, activity_id uuid not null, object_id uuid not null, user_id uuid not null, - created_at timestamp with time zone not null + constraint user_feed_activity_uindex + unique (user_id, activity_id, object_id) ); -create unique index user_feed_activity_uindex on user_feed (user_id, activity_id, object_id); +create table if not exists public.object_replies +( + id uuid not null + constraint object_replies_pk + primary key, + created_at timestamp with time zone not null, + updated_at timestamp with time zone not null, + object_id uuid not null, + parent_object_id uuid not null, + constraint object_replies_reply_uindex + unique (object_id, parent_object_id) +); + + +create table if not exists public.object_boosts +( + id uuid not null + constraint object_boosts_pk + primary key, + created_at timestamp with time zone not null, + updated_at timestamp with time zone not null, + actor_id uuid not null, + activity_id uuid not null, + object_id uuid not null +); -ALTER TABLE user_feed - ADD CONSTRAINT user_feed_activity_uindex UNIQUE USING INDEX user_feed_activity_uindex; \ No newline at end of file diff --git a/migrations/command.go b/migrations/command.go index d9b8aa1..0a097b6 100644 --- a/migrations/command.go +++ b/migrations/command.go @@ -8,6 +8,7 @@ import ( "github.com/golang-migrate/migrate/v4" "github.com/golang-migrate/migrate/v4/database/postgres" _ "github.com/golang-migrate/migrate/v4/source/file" + bindata "github.com/golang-migrate/migrate/v4/source/go_bindata" _ "github.com/lib/pq" "github.com/urfave/cli/v2" "go.uber.org/zap" @@ -22,7 +23,6 @@ var Command = cli.Command{ Flags: []cli.Flag{ &config.EnvironmentFlag, &config.DatabaseFlag, - &config.MigrationsPathFlag, }, Action: serverCommandAction, } @@ -58,6 +58,15 @@ func serverCommandAction(cliCtx *cli.Context) error { defer sentry.Recover() } + migrationSources := bindata.Resource(AssetNames(), + func(name string) ([]byte, error) { + return Asset(name) + }) + migrationData, err := bindata.WithInstance(migrationSources) + if err != nil { + return err + } + db, dbClose, err := config.DB(cliCtx, logger) if err != nil { return err @@ -68,9 +77,11 @@ func serverCommandAction(cliCtx *cli.Context) error { if err != nil { return err } - m, err := migrate.NewWithDatabaseInstance(cliCtx.String("migrations"), "postgres", driver) + + m, err := migrate.NewWithInstance("go-bindata", migrationData, "postgres", driver) if err != nil { return err } + return m.Up() } diff --git a/schema.sql b/schema.sql new file mode 100644 index 0000000..7c2ab27 --- /dev/null +++ b/schema.sql @@ -0,0 +1,259 @@ +create table if not exists public.users +( + id uuid not null + constraint users_pkey + primary key, + email varchar(100) + constraint users_email_key + unique, + password bytea, + created_at timestamp with time zone not null, + updated_at timestamp with time zone not null, + last_auth_at timestamp with time zone, + location varchar default 'UTC'::character varying not null, + mute_email boolean default false not null, + locale varchar default 'en'::character varying not null, + public_key text not null, + private_key text not null, + name varchar not null, + display_name 
varchar not null, + about text not null, + accept_followers boolean default true not null, + actor_id uuid not null, + reply_collection_updates boolean default false not null +); + +create unique index if not exists users_username_uindex + on public.users (name); + +create table if not exists public.actors +( + id uuid not null + constraint actors_pk + primary key, + actor_id varchar not null + constraint actors_actor_id + unique, + payload jsonb not null, + created_at timestamp with time zone not null, + updated_at timestamp with time zone default now() not null +); + +create table if not exists public.peers +( + id uuid not null + constraint peers_pk + primary key, + inbox varchar(100) not null, + created_at timestamp with time zone not null +); + +create unique index if not exists peers_inbox_uindex + on public.peers (inbox); + +create table if not exists public.images +( + id uuid not null + constraint asset_image_pk + primary key, + location varchar(200) not null, + checksum varchar(200) not null, + content_type integer default 0 not null, + size integer default 0 not null, + height integer default 0 not null, + width integer default 0 not null, + blur varchar(100) not null, + created_at timestamp with time zone +); + +create unique index if not exists asset_image_checksum_uindex + on public.images (checksum); + +create unique index if not exists asset_image_location_uindex + on public.images (location); + +create table if not exists public.image_aliases +( + id uuid not null + constraint image_aliases_pk + primary key, + image_id uuid not null, + alias varchar(200) not null, + created_at timestamp with time zone not null +); + +create unique index if not exists image_aliases_pairs + on public.image_aliases (image_id, alias); + +create table if not exists public.schema_migrations +( + version bigint not null + constraint schema_migrations_pkey + primary key, + dirty boolean not null +); + +create table if not exists public.network_graph +( + id uuid not null + constraint network_graph_pk + primary key, + user_id uuid not null, + actor_id uuid not null, + activity jsonb not null, + relationship_type integer default 0 not null, + relationship_status integer default 0 not null, + created_at timestamp with time zone not null, + updated_at timestamp with time zone not null, + constraint network_graph_user_actor_rel + unique (user_id, actor_id, relationship_type) +); + +create table if not exists public.actor_aliases +( + id uuid not null + constraint actor_subjects_pk + primary key, + actor_id uuid not null, + alias varchar not null, + created_at timestamp with time zone not null, + updated_at timestamp with time zone not null, + alias_type integer default 0 not null +); + +create unique index if not exists actor_aliases_alias_uindex + on public.actor_aliases (alias); + +create table if not exists public.actor_keys +( + id uuid not null + constraint actor_keys_pk + primary key, + actor_id uuid not null, + key_id varchar not null, + pem text not null, + created_at timestamp with time zone not null, + updated_at timestamp with time zone not null, + constraint actor_keys_lookup + unique (actor_id, key_id) +); + +create table if not exists public.objects +( + id uuid not null + constraint objects_pk + primary key, + created_at timestamp with time zone not null, + updated_at timestamp with time zone not null, + payload jsonb not null, + object_id varchar not null + constraint objects_object_uindex + unique +); + +create table if not exists public.object_events +( + id uuid not null + constraint 
object_events_pk + primary key, + created_at timestamp with time zone not null, + updated_at timestamp with time zone not null, + activity_id varchar not null + constraint object_events_activity_uindex + unique, + object_id uuid not null, + payload jsonb not null +); + +create table if not exists public.user_object_events +( + id uuid not null + constraint user_object_events_pk + primary key, + created_at timestamp with time zone not null, + updated_at timestamp with time zone not null, + user_id uuid not null, + activity_id uuid not null, + object_id uuid not null, + public boolean default false not null, + constraint user_object_events_user_activity_uindex + unique (user_id, activity_id, object_id) +); + +create table if not exists public.object_tags +( + id uuid not null + constraint object_tags_pk + primary key, + created_at timestamp with time zone not null, + updated_at timestamp with time zone not null, + object_id uuid not null, + tag varchar not null, + constraint object_tags_tagged_uindex + unique (object_id, tag) +); + +create table if not exists public.user_feed +( + id uuid not null + constraint user_feed_pk + primary key, + created_at timestamp with time zone not null, + updated_at timestamp with time zone not null, + activity_id uuid not null, + object_id uuid not null, + user_id uuid not null, + constraint user_feed_activity_uindex + unique (user_id, activity_id, object_id) +); + +create table if not exists public.object_replies +( + id uuid not null + constraint object_replies_pk + primary key, + created_at timestamp with time zone not null, + updated_at timestamp with time zone not null, + object_id uuid not null, + parent_object_id uuid not null, + constraint object_replies_reply_uindex + unique (object_id, parent_object_id) +); + +create table if not exists public.threads +( + id uuid not null + constraint threads_pk + primary key, + created_at timestamp with time zone not null, + updated_at timestamp with time zone not null, + thread_hash varchar not null + constraint threads_uindex + unique +); + +create table if not exists public.object_threads +( + id uuid not null + constraint object_threads_pk + primary key, + created_at timestamp with time zone not null, + updated_at timestamp with time zone not null, + object_id uuid not null, + thread_id uuid not null, + constraint object_threads_uindex + unique (object_id, thread_id) +); + +create table if not exists public.object_boosts +( + id uuid not null + constraint object_boosts_pk + primary key, + created_at timestamp with time zone not null, + updated_at timestamp with time zone not null, + actor_id uuid not null, + activity_id uuid not null, + object_id uuid not null +); + diff --git a/start/command.go b/start/command.go index acfe314..5b9d994 100644 --- a/start/command.go +++ b/start/command.go @@ -16,6 +16,7 @@ import ( "golang.org/x/crypto/bcrypt" "github.com/ngerakines/tavern/config" + "github.com/ngerakines/tavern/errors" "github.com/ngerakines/tavern/g" "github.com/ngerakines/tavern/storage" ) @@ -102,93 +103,97 @@ func serverCommandAction(cliCtx *cli.Context) error { } defer dbClose() - s := storage.DefaultStorage(db, logger) - - userID := storage.NewV4() - - name := cliCtx.String("admin-name") - displayName := cliCtx.String("admin-displayname") - if len(displayName) == 0 { - displayName = name - } - about := cliCtx.String("admin-about") - if len(about) == 0 { - about = "Just a user" - } - - encPassword, err := bcrypt.GenerateFromPassword([]byte(cliCtx.String("admin-password")), bcrypt.DefaultCost) - if err != 
nil { - return err - } - - key, err := rsa.GenerateKey(rand.Reader, 2048) - if err != nil { - return err - } - - privateKeyBytes := x509.MarshalPKCS1PrivateKey(key) - var privateKeyBuffer bytes.Buffer - if err := pem.Encode(&privateKeyBuffer, &pem.Block{ - Type: "PRIVATE KEY", - Bytes: privateKeyBytes, - }); err != nil { - return err - } - privateKey := string(privateKeyBuffer.Bytes()) - - publicKeyBytes, err := x509.MarshalPKIXPublicKey(key.Public()) - if err != nil { - return err - } - - var publicKeyBuffer bytes.Buffer - if err = pem.Encode(&publicKeyBuffer, &pem.Block{ - Type: "PUBLIC KEY", - Bytes: publicKeyBytes, - }); err != nil { - return err - } - publicKey := string(publicKeyBuffer.Bytes()) - ctx := context.Background() - err = s.CreateUser(ctx, userID, cliCtx.String("admin-email"), cliCtx.String("admin-locale"), name, displayName, about, publicKey, privateKey, encPassword) - if err != nil { - logger.Error("unable to create user", zap.Error(err)) - return err + txErr := storage.TransactionalStorage(ctx, storage.DefaultStorage(db, logger), func(s storage.Storage) error { + + name := cliCtx.String("admin-name") + displayName := cliCtx.String("admin-displayname") + if len(displayName) == 0 { + displayName = name + } + about := cliCtx.String("admin-about") + if len(about) == 0 { + about = "Just a user" + } + + encPassword, err := bcrypt.GenerateFromPassword([]byte(cliCtx.String("admin-password")), bcrypt.DefaultCost) + if err != nil { + return err + } + + key, err := rsa.GenerateKey(rand.Reader, 2048) + if err != nil { + return err + } + + privateKeyBytes := x509.MarshalPKCS1PrivateKey(key) + var privateKeyBuffer bytes.Buffer + if err := pem.Encode(&privateKeyBuffer, &pem.Block{ + Type: "PRIVATE KEY", + Bytes: privateKeyBytes, + }); err != nil { + return err + } + privateKey := string(privateKeyBuffer.Bytes()) + + publicKeyBytes, err := x509.MarshalPKIXPublicKey(key.Public()) + if err != nil { + return err + } + + var publicKeyBuffer bytes.Buffer + if err = pem.Encode(&publicKeyBuffer, &pem.Block{ + Type: "PUBLIC KEY", + Bytes: publicKeyBytes, + }); err != nil { + return err + } + publicKey := string(publicKeyBuffer.Bytes()) + + // ActorFromUserInfo(name, displayName, domain, publicKey string, privateKey *rsa.PrivateKey) Payload { + userActor := storage.ActorFromUserInfo(name, displayName, domain, publicKey, key) + + actorID, _ := storage.JSONString(userActor, "id") + keyID, _ := storage.JSONDeepString(userActor, "publicKey", "id") + + err = s.CreateActor(ctx, actorID, userActor) + if err != nil { + return err + } + + actorRowID, err := s.ActorRowIDForActorID(ctx, actorID) + if err != nil { + return err + } + + err = s.CreateUser(ctx, actorRowID, cliCtx.String("admin-email"), cliCtx.String("admin-locale"), name, displayName, about, publicKey, privateKey, encPassword) + if err != nil { + logger.Error("unable to create user", zap.Error(err)) + return err + } + + user, err := s.GetUserByName(ctx, name) + if err != nil { + return err + } + + err = s.RecordActorKey(ctx, actorRowID, keyID, publicKey) + if err != nil { + return err + } + + if err = s.RecordActorAlias(ctx, actorRowID, fmt.Sprintf("acct:%s@%s", user.Name, domain), storage.ActorAliasSubject); err != nil { + return err + } + if err = s.RecordActorAlias(ctx, actorRowID, actorID, storage.ActorAliasSelf); err != nil { + return err + } + + return nil + }) + if txErr != nil { + logger.Error("error creating user", zap.Error(err), zap.Strings("error_chain", errors.ErrorChain(err))) } - - user, err := s.GetUser(ctx, userID) - if err != 
nil { - return err - } - - userActor := storage.ActorFromUser(user, domain, key) - - actorID, _ := storage.JSONString(userActor, "id") - keyID, _ := storage.JSONDeepString(userActor, "publicKey", "id") - - err = s.CreateActor(ctx, actorID, userActor) - if err != nil { - return err - } - - actorRowID, err := s.ActorRowIDForActorID(ctx, actorID) - if err != nil { - return err - } - - err = s.RecordActorKey(ctx, actorRowID, keyID, publicKey) - if err != nil { - return err - } - - if err = s.RecordActorAlias(ctx, actorRowID, fmt.Sprintf("acct:%s@%s", user.Name, domain), storage.ActorAliasSubject); err != nil { - return err - } - if err = s.RecordActorAlias(ctx, actorRowID, actorID, storage.ActorAliasSelf); err != nil { - return err - } - - return nil + return txErr } diff --git a/storage/actor.go b/storage/actor.go index fb658e0..52e040f 100644 --- a/storage/actor.go +++ b/storage/actor.go @@ -302,19 +302,19 @@ func CollectActorSubjectsActorToSubject(actorSubjects []ActorAlias) map[uuid.UUI return results } -func ActorFromUser(user *User, domain string, privateKey *rsa.PrivateKey) Payload { +func ActorFromUserInfo(name, displayName, domain, publicKey string, privateKey *rsa.PrivateKey) Payload { actor := EmptyPayload() actor["@context"] = "https://www.w3.org/ns/activitystreams" - actor["id"] = fmt.Sprintf("https://%s/user/%s", domain, user.Name) - actor["inbox"] = fmt.Sprintf("https://%s/user/%s/inbox", domain, user.Name) - actor["outbox"] = fmt.Sprintf("https://%s/user/%s/outbox", domain, user.Name) - actor["name"] = user.DisplayName - actor["preferredUsername"] = user.Name + actor["id"] = common.ActorURL(domain, name) + actor["inbox"] = fmt.Sprintf("%s/inbox", common.ActorURL(domain, name)) + actor["outbox"] = fmt.Sprintf("%s/outbox", common.ActorURL(domain, name)) + actor["name"] = displayName + actor["preferredUsername"] = name actor["summary"] = "" actor["type"] = "Person" - actor["url"] = fmt.Sprintf("https://%s/user/%s", domain, user.Name) - actor["followers"] = fmt.Sprintf("https://%s/user/%s/followers", domain, user.Name) - actor["following"] = fmt.Sprintf("https://%s/user/%s/following", domain, user.Name) + actor["url"] = common.ActorURL(domain, name) + actor["followers"] = fmt.Sprintf("%s/followers", common.ActorURL(domain, name)) + actor["following"] = fmt.Sprintf("%s/following", common.ActorURL(domain, name)) n := privateKey.PublicKey.N.Bytes() e := big.NewInt(int64(privateKey.PublicKey.E)).Bytes() @@ -325,9 +325,9 @@ func ActorFromUser(user *User, domain string, privateKey *rsa.PrivateKey) Payloa keyID := hex.EncodeToString(fingerPrint.Sum(nil)) key := EmptyPayload() - key["id"] = fmt.Sprintf("https://%s/user/%s#%s", domain, user.Name, keyID) - key["owner"] = fmt.Sprintf("https://%s/user/%s", domain, user.Name) - key["publicKeyPem"] = user.PublicKey + key["id"] = fmt.Sprintf("%s#%s", common.ActorURL(domain, name), keyID) + key["owner"] = common.ActorURL(domain, name) + key["publicKeyPem"] = publicKey actor["publicKey"] = key return actor diff --git a/storage/actor_id.go b/storage/actor_id.go index 962a16b..9fbe2a9 100644 --- a/storage/actor_id.go +++ b/storage/actor_id.go @@ -107,10 +107,6 @@ func (ID ActorID) Inbox() string { return fmt.Sprintf("%s/inbox", ID) } -func (ID ActorID) MainKey() string { - return fmt.Sprintf("%s#main-key", ID) -} - func DecodePublicKey(data string) (*rsa.PublicKey, error) { block, _ := pem.Decode([]byte(data)) if block == nil { diff --git a/storage/network.go b/storage/network.go index 793f7f6..383b280 100644 --- a/storage/network.go +++ 
b/storage/network.go @@ -120,22 +120,24 @@ func (s pgStorage) ListPendingFollowing(ctx context.Context, userID uuid.UUID, l return s.networkGraphQuery(s.db, ctx, userID, UserFollowsRelationship, PendingRelationshipStatus, limit, offset) } -func (s pgStorage) createRelationshipGraphRecord(ec QueryExecute, ctx context.Context, userID, actorID uuid.UUID, activity Payload, relationshipType RelationshipType, relationshipStatus RelationshipStatus) error { +func (s pgStorage) createRelationshipGraphRecord(ctx context.Context, userID, actorID uuid.UUID, activity Payload, relationshipType RelationshipType, relationshipStatus RelationshipStatus) error { fields := strings.Join(relationshipGraphFields, ",") valuesPlaceholder := strings.Join(common.DollarForEach(len(relationshipGraphFields)), ",") query := fmt.Sprintf(`INSERT INTO network_graph (%s) VALUES (%s) ON CONFLICT ON CONSTRAINT network_graph_user_actor_rel DO UPDATE SET activity = $4, relationship_status = $6`, fields, valuesPlaceholder) rowID := NewV4() now := s.now() + fmt.Println(query) + fmt.Println(rowID, userID, actorID, activity, relationshipType, relationshipStatus, now, now) _, err := s.db.ExecContext(ctx, query, rowID, userID, actorID, activity, relationshipType, relationshipStatus, now, now) return errors.WrapNetworkRelationshipInsertFailedError(err) } func (s pgStorage) CreatePendingFollowing(ctx context.Context, userID, actorID uuid.UUID, activity Payload) error { - return s.createRelationshipGraphRecord(s.db, ctx, userID, actorID, activity, UserFollowsRelationship, PendingRelationshipStatus) + return s.createRelationshipGraphRecord(ctx, userID, actorID, activity, UserFollowsRelationship, PendingRelationshipStatus) } func (s pgStorage) CreatePendingFollower(ctx context.Context, userID, actorID uuid.UUID, activity Payload) error { - return s.createRelationshipGraphRecord(s.db, ctx, userID, actorID, activity, UserFollowedByRelationship, PendingRelationshipStatus) + return s.createRelationshipGraphRecord(ctx, userID, actorID, activity, UserFollowedByRelationship, PendingRelationshipStatus) } func (s pgStorage) ActivityForFollowing(ctx context.Context, userID, actorID uuid.UUID) (Payload, error) { @@ -147,6 +149,8 @@ func (s pgStorage) ActivityForFollower(ctx context.Context, userID, actorID uuid } func (s pgStorage) networkGraphActivity(qc QueryExecute, ctx context.Context, userID, actorID uuid.UUID, relationshipType RelationshipType) (Payload, error) { + fmt.Println(`SELECT activity FROM network_graph WHERE user_id = $1 AND actor_id = $2 AND relationship_type = $3`) + fmt.Println(userID, actorID, relationshipType) var payload Payload err := qc.QueryRowContext(ctx, `SELECT activity FROM network_graph WHERE user_id = $1 AND actor_id = $2 AND relationship_type = $3`, userID, actorID, relationshipType). 
Scan(&payload) @@ -157,21 +161,25 @@ func (s pgStorage) networkGraphActivity(qc QueryExecute, ctx context.Context, us } func (s pgStorage) UpdateFollowingAccepted(ctx context.Context, userID, actorID uuid.UUID) error { + fmt.Println(`UPDATE network_graph SET relationship_status = $4 WHERE user_id = $1 AND actor_id = $2 AND relationship_type = $3`, userID, actorID, UserFollowsRelationship, AcceptRelationshipStatus) _, err := s.db.ExecContext(ctx, `UPDATE network_graph SET relationship_status = $4 WHERE user_id = $1 AND actor_id = $2 AND relationship_type = $3`, userID, actorID, UserFollowsRelationship, AcceptRelationshipStatus) return errors.WrapNetworkRelationshipUpdateFailedError(err) } func (s pgStorage) UpdateFollowingRejected(ctx context.Context, userID, actorID uuid.UUID) error { + fmt.Println(`UPDATE network_graph SET relationship_status = $4 WHERE user_id = $1 AND actor_id = $2 AND relationship_type = $3`, userID, actorID, UserFollowsRelationship, RejectRelationshipStatus) _, err := s.db.ExecContext(ctx, `UPDATE network_graph SET relationship_status = $4 WHERE user_id = $1 AND actor_id = $2 AND relationship_type = $3`, userID, actorID, UserFollowsRelationship, RejectRelationshipStatus) return errors.WrapNetworkRelationshipUpdateFailedError(err) } func (s pgStorage) UpdateFollowerApproved(ctx context.Context, userID, actorID uuid.UUID) error { + fmt.Println(`UPDATE network_graph SET relationship_status = $4 WHERE user_id = $1 AND actor_id = $2 AND relationship_type = $3`, userID, actorID, UserFollowedByRelationship, AcceptRelationshipStatus) _, err := s.db.ExecContext(ctx, `UPDATE network_graph SET relationship_status = $4 WHERE user_id = $1 AND actor_id = $2 AND relationship_type = $3`, userID, actorID, UserFollowedByRelationship, AcceptRelationshipStatus) return errors.WrapNetworkRelationshipUpdateFailedError(err) } func (s pgStorage) RemoveFollowing(ctx context.Context, userID, actorID uuid.UUID) error { + fmt.Println(`DELETE FROM network_graph WHERE user_id = $1 AND actor_id = $2 AND relationship_type = $3`, userID, actorID, UserFollowsRelationship) _, err := s.db.ExecContext(ctx, `DELETE FROM network_graph WHERE user_id = $1 AND actor_id = $2 AND relationship_type = $3`, userID, actorID, UserFollowsRelationship) return errors.WrapNetworkRelationshipUpdateFailedError(err) } diff --git a/web/handler_actor.go b/web/handler_actor.go index f72cd1d..c9228bc 100644 --- a/web/handler_actor.go +++ b/web/handler_actor.go @@ -31,27 +31,13 @@ func (h handler) actorInfo(c *gin.Context) { return } - actorID := storage.NewActorID(user.Name, h.domain) + userActor, err := h.storage.GetActor(c.Request.Context(), user.ActorID) + if err != nil { + h.hardFail(c, err) + return + } - response := storage.EmptyPayload() - response["@context"] = "https://www.w3.org/ns/activitystreams" - response["id"] = actorID - response["inbox"] = actorID.Inbox() - response["outbox"] = actorID.Outbox() - response["name"] = user.DisplayName - response["preferredUsername"] = user.Name - response["summary"] = "" - response["type"] = "Person" - response["url"] = actorID - response["followers"] = actorID.Followers() - response["following"] = actorID.Following() - key := storage.EmptyPayload() - key["id"] = actorID.MainKey() - key["owner"] = actorID - key["publicKeyPem"] = user.PublicKey - response["publicKey"] = key - - h.writeJSONLDProfile(c, http.StatusOK, response) + h.writeJSONLDProfile(c, http.StatusOK, userActor.Payload) } func (h handler) actorFollowers(c *gin.Context) { diff --git a/web/handler_feed.go 
b/web/handler_feed.go index b197fdd..3c7a773 100644 --- a/web/handler_feed.go +++ b/web/handler_feed.go @@ -86,7 +86,7 @@ func (h handler) displayObjectFeed(c *gin.Context, requireUser bool, vars map[st if cont = h.saveSession(c, session); !cont { return } - c.HTML(http.StatusOK, "feed", data) + c.HTML(http.StatusOK, "objects", data) return } diff --git a/web/handler_network.go b/web/handler_network.go index fc1c170..22eaf64 100644 --- a/web/handler_network.go +++ b/web/handler_network.go @@ -142,6 +142,12 @@ func (h handler) networkFollow(c *gin.Context) { payload := follow.Bytes() + err = h.storage.CreatePendingFollowing(ctx, user.ID, actor.ID, follow) + if err != nil { + h.flashErrorOrFail(c, h.url("network"), err) + return + } + nc := fed.ActorClient{ HTTPClient: h.httpClient, Logger: h.logger, @@ -152,11 +158,6 @@ func (h handler) networkFollow(c *gin.Context) { return } - err = h.storage.CreatePendingFollowing(ctx, user.ID, actor.ID, follow) - if err != nil { - h.flashErrorOrFail(c, h.url("network"), err) - return - } c.Redirect(http.StatusFound, h.url("network")) }
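
Because this patch makes both the migration runner and the translation loader depend on go-bindata output (migrations/assets.go and translations/assets.go are generated, not checked in), a missing `go generate -tags prod ./...` step now surfaces only at runtime. Below is a minimal, hypothetical sanity check, not part of the patch, that exercises the same `AssetNames`/`Asset` calls the migrate command and config.Trans use; the test name and file placement are assumptions.

package migrations

import "testing"

// TestEmbeddedAssets is a sketch of a check (assumed, not in this commit) that
// the go-bindata generate step ran and every embedded migration is readable.
func TestEmbeddedAssets(t *testing.T) {
	names := AssetNames()
	if len(names) == 0 {
		t.Fatal("no embedded migrations found; run `go generate -tags prod ./...` first")
	}
	for _, name := range names {
		if _, err := Asset(name); err != nil {
			t.Errorf("embedded migration %q could not be read: %v", name, err)
		}
	}
}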