Added api-key auth
This commit is contained in:
parent
3569e689a9
commit
78464fdd4c
|
@ -0,0 +1,6 @@
|
|||
schema: 'packages/api/graphql/schema.graphql'
|
||||
documents: 'packages/api/**/*.ts'
|
||||
extensions:
|
||||
languageService:
|
||||
cacheSchemaFileForLookup: true
|
||||
enableValidation: false
|
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,5 @@
|
|||
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
|
||||
# yarn lockfile v1
|
||||
|
||||
|
||||
yarn-path ".yarn/releases/yarn-1.18.0.cjs"
|
|
@ -9,6 +9,7 @@
|
|||
"devDependencies": {
|
||||
"@typescript-eslint/eslint-plugin": "^5.54.1",
|
||||
"@typescript-eslint/parser": "^5.54.1",
|
||||
"@typescript-eslint/utils": "^5.59.0",
|
||||
"eslint": "^8.35.0",
|
||||
"eslint-config-prettier": "^8.7.0",
|
||||
"husky": "^8.0.2",
|
||||
|
|
|
@ -4,3 +4,5 @@ export * from './revoke'
|
|||
export * from './signin'
|
||||
export * from './introspect'
|
||||
export * from './middleware'
|
||||
export * from './key/apikey'
|
||||
export * from './key/keyware'
|
||||
|
|
|
@ -0,0 +1,103 @@
|
|||
import crypto from 'node:crypto'
|
||||
import { client } from '../../helpers'
|
||||
import { gql } from 'graphql-request'
|
||||
|
||||
export class APIKey {
|
||||
public generateKey(): string {
|
||||
const key = crypto.randomBytes(32).toString('hex')
|
||||
return key
|
||||
}
|
||||
|
||||
public async fetchKeyS(userSub: string): Promise<any> {
|
||||
const initQuery = gql`
|
||||
query findUser($userSub: uuid!) {
|
||||
apikey_by_pk(user_id: $userSub) {
|
||||
api_key
|
||||
user_id
|
||||
created_at
|
||||
updated_at
|
||||
}
|
||||
}
|
||||
`
|
||||
const data: any = await client.request(initQuery, { userSub })
|
||||
return data.apikey_by_pk
|
||||
}
|
||||
|
||||
public async createKeyS(userSub: string): Promise<any> {
|
||||
const serchKey = await this.fetchKeyS(userSub)
|
||||
const key = this.generateKey()
|
||||
if (serchKey) {
|
||||
const updateQuery = gql`
|
||||
mutation updateApiKey($userSub: uuid!, $key: String!) {
|
||||
update_apikey_by_pk(
|
||||
pk_columns: { user_id: $userSub }
|
||||
_set: { api_key: $key }
|
||||
) {
|
||||
api_key
|
||||
user_id
|
||||
created_at
|
||||
updated_at
|
||||
}
|
||||
}
|
||||
`
|
||||
const data: any = await client.request(updateQuery, {
|
||||
userSub,
|
||||
key,
|
||||
})
|
||||
return data.update_apikey_by_pk
|
||||
}
|
||||
const createQuery = gql`
|
||||
mutation insertApiKey($userSub: uuid!, $key: String!) {
|
||||
insert_apikey_one(
|
||||
object: { user_id: $userSub, api_key: $key }
|
||||
) {
|
||||
api_key
|
||||
user_id
|
||||
created_at
|
||||
updated_at
|
||||
}
|
||||
}
|
||||
`
|
||||
const data: any = await client.request(createQuery, { userSub, key })
|
||||
return data.insert_apikey_one
|
||||
}
|
||||
|
||||
public async deleteKeyS(userSub: string): Promise<any> {
|
||||
const deleteQuery = gql`
|
||||
mutation deleteApiKey($userSub: uuid!) {
|
||||
delete_apikey_by_pk(user_id: $userSub) {
|
||||
user_id
|
||||
created_at
|
||||
updated_at
|
||||
}
|
||||
}
|
||||
`
|
||||
const data: any = await client.request(deleteQuery, { userSub })
|
||||
return data.delete_apikey_by_pk
|
||||
}
|
||||
|
||||
public async validateKeyS(key: string): Promise<any> {
|
||||
const validateQuery = gql`
|
||||
query validateKey($key: String!) {
|
||||
apikey(where: { api_key: { _eq: $key } }) {
|
||||
created_at
|
||||
updated_at
|
||||
user_id
|
||||
}
|
||||
}
|
||||
`
|
||||
|
||||
const data: any = await client.request(validateQuery, { key })
|
||||
|
||||
if (data.apikey.length === 0) {
|
||||
return {
|
||||
isValid: false,
|
||||
}
|
||||
} else {
|
||||
return {
|
||||
isValid: true,
|
||||
userSub: data.apikey[0].user_id,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,54 @@
|
|||
import { NextFunction, Response } from 'express'
|
||||
import { ModRequest } from '../../types'
|
||||
import { CustomError } from '../../libs/error'
|
||||
import { APIKey } from './apikey'
|
||||
import ServiceAccount from '../server/service'
|
||||
|
||||
export const serviceAccount = new ServiceAccount()
|
||||
|
||||
export const keyware = async (
|
||||
req: ModRequest | any,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
) => {
|
||||
const authClient = new APIKey()
|
||||
try {
|
||||
const authHeader = req.headers?.['x-api-key']
|
||||
if (!authHeader) {
|
||||
throw new Error('No x-api-key header found !!')
|
||||
}
|
||||
const decoded = await authClient.validateKeyS(authHeader)
|
||||
|
||||
if (!decoded.isValid) {
|
||||
throw new Error('Invalid token')
|
||||
}
|
||||
|
||||
const { service_creds } = await serviceAccount.serviceAccount()
|
||||
const user = await serviceAccount.fetchUser(
|
||||
service_creds,
|
||||
decoded.userSub
|
||||
)
|
||||
|
||||
if (!user) {
|
||||
throw new Error('No user')
|
||||
}
|
||||
|
||||
const { attributes } = user
|
||||
Object.keys(attributes).forEach((key) => {
|
||||
user[key] = attributes[key][0]
|
||||
})
|
||||
|
||||
req.user = {
|
||||
userData: user,
|
||||
tokenData: decoded,
|
||||
}
|
||||
next()
|
||||
} catch (err: any) {
|
||||
next(
|
||||
new CustomError({
|
||||
message: err.message,
|
||||
statusCode: 401,
|
||||
})
|
||||
)
|
||||
}
|
||||
}
|
|
@ -0,0 +1,45 @@
|
|||
import axios from 'axios'
|
||||
import { configKeys } from '../..'
|
||||
import qs from 'qs'
|
||||
|
||||
export default class ServiceAccount {
|
||||
public serviceAccount = async () => {
|
||||
const rdata = qs.stringify({
|
||||
grant_type: 'client_credentials',
|
||||
})
|
||||
|
||||
const config = {
|
||||
method: 'post',
|
||||
maxBodyLength: Infinity,
|
||||
url: `${configKeys.KEYCLOAK_AUTH_SERVER_URL}/realms/${configKeys.KEYCLOAK_REALM}/protocol/openid-connect/token`,
|
||||
headers: {
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
Authorization: `Basic ${Buffer.from(
|
||||
`${configKeys.KEYCLOAK_CLIENT_ID}:${configKeys.KEYCLOAK_CLIENT_SECRET}`
|
||||
).toString('base64')}`,
|
||||
},
|
||||
data: rdata,
|
||||
}
|
||||
|
||||
const { data } = await axios(config)
|
||||
|
||||
return {
|
||||
service_creds: data.access_token,
|
||||
}
|
||||
}
|
||||
|
||||
public fetchUser = async (token: string, userSub: string) => {
|
||||
const config = {
|
||||
method: 'get',
|
||||
maxBodyLength: Infinity,
|
||||
url: `${configKeys.KEYCLOAK_AUTH_SERVER_URL}/admin/realms/${configKeys.KEYCLOAK_REALM}/users/${userSub}`,
|
||||
headers: {
|
||||
Authorization: `Bearer ${token}`,
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
},
|
||||
}
|
||||
|
||||
const { data } = await axios(config)
|
||||
return data
|
||||
}
|
||||
}
|
|
@ -0,0 +1,7 @@
|
|||
#!/bin/bash
# Fetch the Hasura GraphQL schema via introspection and write it to
# graphql/schema.graphql (consumed by .graphqlrc / editor tooling).
#
# Requires HASURA_GRAPHQL_ENDPOINT and HASURA_GRAPHQL_ADMIN_SECRET in .env.

# Abort on any failure so a failed introspection cannot leave a
# truncated schema behind; treat unset variables as errors.
set -euo pipefail

source .env

FILE=graphql/schema.graphql

# Plain file, not a directory tree: -f is sufficient (and safer than -rf).
rm -f "$FILE"

# graphqurl introspection dump; its first output line is a tool banner.
yarn gq "$HASURA_GRAPHQL_ENDPOINT/v1/graphql" -H "X-Hasura-Admin-Secret: $HASURA_GRAPHQL_ADMIN_SECRET" --introspect > "$FILE"

# Strip the banner line so the file starts at the schema itself.
tail -n +2 "$FILE" > "$FILE.tmp" && mv "$FILE.tmp" "$FILE"
|
|
@ -3,7 +3,9 @@ import { parse as parseFile } from 'envfile'
|
|||
import { Issuer } from 'openid-client'
|
||||
|
||||
const keyCloakIssuer: Issuer = await Issuer.discover(
|
||||
process.env.KEYCLOAK_AUTH_SERVER_URL!
|
||||
process.env.KEYCLOAK_AUTH_SERVER_URL +
|
||||
'/realms/' +
|
||||
process.env.KEYCLOAK_REALM
|
||||
)
|
||||
console.log('🔐 Connected to Keycloak')
|
||||
|
||||
|
@ -45,6 +47,7 @@ export interface IConfigKeys {
|
|||
KEYCLOAK_CLIENT_SECRET: string
|
||||
KEYCLOAK_REDIRECT_URI: string
|
||||
KEYCLOAK_AUTH_SERVER_URL: string
|
||||
KEYCLOAK_REALM: string
|
||||
}
|
||||
|
||||
export default class ConfigStoreFactory {
|
||||
|
|
|
@ -1,9 +1,16 @@
|
|||
import { NextFunction, Request, Response } from 'express'
|
||||
import { makeResponse } from '../libs'
|
||||
import { ModRequest } from '../types'
|
||||
import { callback, introspect, refresh, signon, revoke } from '../auth/index'
|
||||
import {
|
||||
callback,
|
||||
introspect,
|
||||
refresh,
|
||||
signon,
|
||||
revoke,
|
||||
APIKey,
|
||||
} from '../auth/index'
|
||||
|
||||
export default class AuthController {
|
||||
export default class AuthController extends APIKey {
|
||||
public signin = (req: Request, res: Response) => {
|
||||
const { authurl } = signon()
|
||||
res.redirect(authurl)
|
||||
|
@ -40,4 +47,40 @@ export default class AuthController {
|
|||
) => {
|
||||
res.send(makeResponse(await introspect(req, res, next)))
|
||||
}
|
||||
|
||||
// Create (or rotate) the authenticated caller's API key.
// Responds 201 with the new key row.
public createKey = async (req: ModRequest | any, res: Response) => {
  // BUG FIX: the status must be set BEFORE send(). The previous
  // `res.send(...).status(201)` ran after the response was already
  // written, so clients actually received 200.
  res.status(201).send(
    makeResponse(await this.createKeyS(req.user.userData.sub))
  )
}
|
||||
|
||||
// Return the authenticated caller's stored API key row (null when none).
public fetchKey = async (req: ModRequest | any, res: Response) => {
  const userSub = req.user.userData.sub
  const keyRow = await this.fetchKeyS(userSub)
  res.send(makeResponse(keyRow))
}
|
||||
|
||||
// Delete the authenticated caller's API key and echo the deleted row.
public deleteKey = async (req: ModRequest | any, res: Response) => {
  // The original chained `.status(204)` AFTER send(), which is a no-op:
  // clients have always received 200 plus this JSON body. A real 204
  // would suppress the body, so the effective 200-with-body contract is
  // kept here explicitly and the dead status call removed.
  res.status(200).send(
    makeResponse(await this.deleteKeyS(req.user.userData.sub))
  )
}
|
||||
|
||||
// Validate an API key supplied either in the JSON body (POST: `api_key`)
// or in the `x-api-key` header (GET and any other method).
public validateKey = async (req: ModRequest, res: Response) => {
  const api_key =
    req.method === 'POST' ? req.body.api_key : req.headers?.['x-api-key']

  // Single guard for both transports. This also fixes the previous
  // fall-through: a request that was neither POST nor GET used to reach
  // validateKeyS(undefined) instead of being rejected.
  if (!api_key) {
    return res.status(400).send(makeResponse(null, 'Invalid api_key'))
  }

  res.send(makeResponse(await this.validateKeyS(api_key)))
}
|
||||
}
|
||||
|
|
|
@ -6,7 +6,6 @@ export default class LastFMController extends LastfmService {
|
|||
public fetchUser = async (req: Request, res: Response) => {
|
||||
try {
|
||||
const data = await this.user()
|
||||
console.log(data)
|
||||
res.send(makeResponse(data))
|
||||
} catch (err: any) {
|
||||
res.send(makeResponse(err.message, {}, 'Failed', true))
|
||||
|
|
|
@ -0,0 +1,487 @@
|
|||
schema {
|
||||
query: query_root
|
||||
mutation: mutation_root
|
||||
subscription: subscription_root
|
||||
}
|
||||
|
||||
"""whether this query should be cached (Hasura Cloud only)"""
|
||||
directive @cached(
|
||||
"""measured in seconds"""
|
||||
ttl: Int! = 60
|
||||
|
||||
"""refresh the cache entry"""
|
||||
refresh: Boolean! = false
|
||||
) on QUERY
|
||||
|
||||
"""
|
||||
Boolean expression to compare columns of type "String". All fields are combined with logical 'AND'.
|
||||
"""
|
||||
input String_comparison_exp {
|
||||
_eq: String
|
||||
_gt: String
|
||||
_gte: String
|
||||
|
||||
"""does the column match the given case-insensitive pattern"""
|
||||
_ilike: String
|
||||
_in: [String!]
|
||||
|
||||
"""
|
||||
does the column match the given POSIX regular expression, case insensitive
|
||||
"""
|
||||
_iregex: String
|
||||
_is_null: Boolean
|
||||
|
||||
"""does the column match the given pattern"""
|
||||
_like: String
|
||||
_lt: String
|
||||
_lte: String
|
||||
_neq: String
|
||||
|
||||
"""does the column NOT match the given case-insensitive pattern"""
|
||||
_nilike: String
|
||||
_nin: [String!]
|
||||
|
||||
"""
|
||||
does the column NOT match the given POSIX regular expression, case insensitive
|
||||
"""
|
||||
_niregex: String
|
||||
|
||||
"""does the column NOT match the given pattern"""
|
||||
_nlike: String
|
||||
|
||||
"""
|
||||
does the column NOT match the given POSIX regular expression, case sensitive
|
||||
"""
|
||||
_nregex: String
|
||||
|
||||
"""does the column NOT match the given SQL regular expression"""
|
||||
_nsimilar: String
|
||||
|
||||
"""
|
||||
does the column match the given POSIX regular expression, case sensitive
|
||||
"""
|
||||
_regex: String
|
||||
|
||||
"""does the column match the given SQL regular expression"""
|
||||
_similar: String
|
||||
}
|
||||
|
||||
"""
|
||||
columns and relationships of "apikey"
|
||||
"""
|
||||
type apikey {
|
||||
api_key: String!
|
||||
created_at: timestamptz!
|
||||
updated_at: timestamptz!
|
||||
user_id: uuid!
|
||||
}
|
||||
|
||||
"""
|
||||
aggregated selection of "apikey"
|
||||
"""
|
||||
type apikey_aggregate {
|
||||
aggregate: apikey_aggregate_fields
|
||||
nodes: [apikey!]!
|
||||
}
|
||||
|
||||
"""
|
||||
aggregate fields of "apikey"
|
||||
"""
|
||||
type apikey_aggregate_fields {
|
||||
count(columns: [apikey_select_column!], distinct: Boolean): Int!
|
||||
max: apikey_max_fields
|
||||
min: apikey_min_fields
|
||||
}
|
||||
|
||||
"""
|
||||
Boolean expression to filter rows from the table "apikey". All fields are combined with a logical 'AND'.
|
||||
"""
|
||||
input apikey_bool_exp {
|
||||
_and: [apikey_bool_exp!]
|
||||
_not: apikey_bool_exp
|
||||
_or: [apikey_bool_exp!]
|
||||
api_key: String_comparison_exp
|
||||
created_at: timestamptz_comparison_exp
|
||||
updated_at: timestamptz_comparison_exp
|
||||
user_id: uuid_comparison_exp
|
||||
}
|
||||
|
||||
"""
|
||||
unique or primary key constraints on table "apikey"
|
||||
"""
|
||||
enum apikey_constraint {
|
||||
"""
|
||||
unique or primary key constraint on columns "api_key"
|
||||
"""
|
||||
apikey_api_key_key
|
||||
|
||||
"""
|
||||
unique or primary key constraint on columns "user_id"
|
||||
"""
|
||||
apikey_pkey
|
||||
}
|
||||
|
||||
"""
|
||||
input type for inserting data into table "apikey"
|
||||
"""
|
||||
input apikey_insert_input {
|
||||
api_key: String
|
||||
created_at: timestamptz
|
||||
updated_at: timestamptz
|
||||
user_id: uuid
|
||||
}
|
||||
|
||||
"""aggregate max on columns"""
|
||||
type apikey_max_fields {
|
||||
api_key: String
|
||||
created_at: timestamptz
|
||||
updated_at: timestamptz
|
||||
user_id: uuid
|
||||
}
|
||||
|
||||
"""aggregate min on columns"""
|
||||
type apikey_min_fields {
|
||||
api_key: String
|
||||
created_at: timestamptz
|
||||
updated_at: timestamptz
|
||||
user_id: uuid
|
||||
}
|
||||
|
||||
"""
|
||||
response of any mutation on the table "apikey"
|
||||
"""
|
||||
type apikey_mutation_response {
|
||||
"""number of rows affected by the mutation"""
|
||||
affected_rows: Int!
|
||||
|
||||
"""data from the rows affected by the mutation"""
|
||||
returning: [apikey!]!
|
||||
}
|
||||
|
||||
"""
|
||||
on_conflict condition type for table "apikey"
|
||||
"""
|
||||
input apikey_on_conflict {
|
||||
constraint: apikey_constraint!
|
||||
update_columns: [apikey_update_column!]! = []
|
||||
where: apikey_bool_exp
|
||||
}
|
||||
|
||||
"""Ordering options when selecting data from "apikey"."""
|
||||
input apikey_order_by {
|
||||
api_key: order_by
|
||||
created_at: order_by
|
||||
updated_at: order_by
|
||||
user_id: order_by
|
||||
}
|
||||
|
||||
"""primary key columns input for table: apikey"""
|
||||
input apikey_pk_columns_input {
|
||||
user_id: uuid!
|
||||
}
|
||||
|
||||
"""
|
||||
select columns of table "apikey"
|
||||
"""
|
||||
enum apikey_select_column {
|
||||
"""column name"""
|
||||
api_key
|
||||
|
||||
"""column name"""
|
||||
created_at
|
||||
|
||||
"""column name"""
|
||||
updated_at
|
||||
|
||||
"""column name"""
|
||||
user_id
|
||||
}
|
||||
|
||||
"""
|
||||
input type for updating data in table "apikey"
|
||||
"""
|
||||
input apikey_set_input {
|
||||
api_key: String
|
||||
created_at: timestamptz
|
||||
updated_at: timestamptz
|
||||
user_id: uuid
|
||||
}
|
||||
|
||||
"""
|
||||
Streaming cursor of the table "apikey"
|
||||
"""
|
||||
input apikey_stream_cursor_input {
|
||||
"""Stream column input with initial value"""
|
||||
initial_value: apikey_stream_cursor_value_input!
|
||||
|
||||
"""cursor ordering"""
|
||||
ordering: cursor_ordering
|
||||
}
|
||||
|
||||
"""Initial value of the column from where the streaming should start"""
|
||||
input apikey_stream_cursor_value_input {
|
||||
api_key: String
|
||||
created_at: timestamptz
|
||||
updated_at: timestamptz
|
||||
user_id: uuid
|
||||
}
|
||||
|
||||
"""
|
||||
update columns of table "apikey"
|
||||
"""
|
||||
enum apikey_update_column {
|
||||
"""column name"""
|
||||
api_key
|
||||
|
||||
"""column name"""
|
||||
created_at
|
||||
|
||||
"""column name"""
|
||||
updated_at
|
||||
|
||||
"""column name"""
|
||||
user_id
|
||||
}
|
||||
|
||||
input apikey_updates {
|
||||
"""sets the columns of the filtered rows to the given values"""
|
||||
_set: apikey_set_input
|
||||
|
||||
"""filter the rows which have to be updated"""
|
||||
where: apikey_bool_exp!
|
||||
}
|
||||
|
||||
"""ordering argument of a cursor"""
|
||||
enum cursor_ordering {
|
||||
"""ascending ordering of the cursor"""
|
||||
ASC
|
||||
|
||||
"""descending ordering of the cursor"""
|
||||
DESC
|
||||
}
|
||||
|
||||
"""mutation root"""
|
||||
type mutation_root {
|
||||
"""
|
||||
delete data from the table: "apikey"
|
||||
"""
|
||||
delete_apikey(
|
||||
"""filter the rows which have to be deleted"""
|
||||
where: apikey_bool_exp!
|
||||
): apikey_mutation_response
|
||||
|
||||
"""
|
||||
delete single row from the table: "apikey"
|
||||
"""
|
||||
delete_apikey_by_pk(user_id: uuid!): apikey
|
||||
|
||||
"""
|
||||
insert data into the table: "apikey"
|
||||
"""
|
||||
insert_apikey(
|
||||
"""the rows to be inserted"""
|
||||
objects: [apikey_insert_input!]!
|
||||
|
||||
"""upsert condition"""
|
||||
on_conflict: apikey_on_conflict
|
||||
): apikey_mutation_response
|
||||
|
||||
"""
|
||||
insert a single row into the table: "apikey"
|
||||
"""
|
||||
insert_apikey_one(
|
||||
"""the row to be inserted"""
|
||||
object: apikey_insert_input!
|
||||
|
||||
"""upsert condition"""
|
||||
on_conflict: apikey_on_conflict
|
||||
): apikey
|
||||
|
||||
"""
|
||||
update data of the table: "apikey"
|
||||
"""
|
||||
update_apikey(
|
||||
"""sets the columns of the filtered rows to the given values"""
|
||||
_set: apikey_set_input
|
||||
|
||||
"""filter the rows which have to be updated"""
|
||||
where: apikey_bool_exp!
|
||||
): apikey_mutation_response
|
||||
|
||||
"""
|
||||
update single row of the table: "apikey"
|
||||
"""
|
||||
update_apikey_by_pk(
|
||||
"""sets the columns of the filtered rows to the given values"""
|
||||
_set: apikey_set_input
|
||||
pk_columns: apikey_pk_columns_input!
|
||||
): apikey
|
||||
|
||||
"""
|
||||
update multiples rows of table: "apikey"
|
||||
"""
|
||||
update_apikey_many(
|
||||
"""updates to execute, in order"""
|
||||
updates: [apikey_updates!]!
|
||||
): [apikey_mutation_response]
|
||||
}
|
||||
|
||||
"""column ordering options"""
|
||||
enum order_by {
|
||||
"""in ascending order, nulls last"""
|
||||
asc
|
||||
|
||||
"""in ascending order, nulls first"""
|
||||
asc_nulls_first
|
||||
|
||||
"""in ascending order, nulls last"""
|
||||
asc_nulls_last
|
||||
|
||||
"""in descending order, nulls first"""
|
||||
desc
|
||||
|
||||
"""in descending order, nulls first"""
|
||||
desc_nulls_first
|
||||
|
||||
"""in descending order, nulls last"""
|
||||
desc_nulls_last
|
||||
}
|
||||
|
||||
type query_root {
|
||||
"""
|
||||
fetch data from the table: "apikey"
|
||||
"""
|
||||
apikey(
|
||||
"""distinct select on columns"""
|
||||
distinct_on: [apikey_select_column!]
|
||||
|
||||
"""limit the number of rows returned"""
|
||||
limit: Int
|
||||
|
||||
"""skip the first n rows. Use only with order_by"""
|
||||
offset: Int
|
||||
|
||||
"""sort the rows by one or more columns"""
|
||||
order_by: [apikey_order_by!]
|
||||
|
||||
"""filter the rows returned"""
|
||||
where: apikey_bool_exp
|
||||
): [apikey!]!
|
||||
|
||||
"""
|
||||
fetch aggregated fields from the table: "apikey"
|
||||
"""
|
||||
apikey_aggregate(
|
||||
"""distinct select on columns"""
|
||||
distinct_on: [apikey_select_column!]
|
||||
|
||||
"""limit the number of rows returned"""
|
||||
limit: Int
|
||||
|
||||
"""skip the first n rows. Use only with order_by"""
|
||||
offset: Int
|
||||
|
||||
"""sort the rows by one or more columns"""
|
||||
order_by: [apikey_order_by!]
|
||||
|
||||
"""filter the rows returned"""
|
||||
where: apikey_bool_exp
|
||||
): apikey_aggregate!
|
||||
|
||||
"""fetch data from the table: "apikey" using primary key columns"""
|
||||
apikey_by_pk(user_id: uuid!): apikey
|
||||
}
|
||||
|
||||
type subscription_root {
|
||||
"""
|
||||
fetch data from the table: "apikey"
|
||||
"""
|
||||
apikey(
|
||||
"""distinct select on columns"""
|
||||
distinct_on: [apikey_select_column!]
|
||||
|
||||
"""limit the number of rows returned"""
|
||||
limit: Int
|
||||
|
||||
"""skip the first n rows. Use only with order_by"""
|
||||
offset: Int
|
||||
|
||||
"""sort the rows by one or more columns"""
|
||||
order_by: [apikey_order_by!]
|
||||
|
||||
"""filter the rows returned"""
|
||||
where: apikey_bool_exp
|
||||
): [apikey!]!
|
||||
|
||||
"""
|
||||
fetch aggregated fields from the table: "apikey"
|
||||
"""
|
||||
apikey_aggregate(
|
||||
"""distinct select on columns"""
|
||||
distinct_on: [apikey_select_column!]
|
||||
|
||||
"""limit the number of rows returned"""
|
||||
limit: Int
|
||||
|
||||
"""skip the first n rows. Use only with order_by"""
|
||||
offset: Int
|
||||
|
||||
"""sort the rows by one or more columns"""
|
||||
order_by: [apikey_order_by!]
|
||||
|
||||
"""filter the rows returned"""
|
||||
where: apikey_bool_exp
|
||||
): apikey_aggregate!
|
||||
|
||||
"""fetch data from the table: "apikey" using primary key columns"""
|
||||
apikey_by_pk(user_id: uuid!): apikey
|
||||
|
||||
"""
|
||||
fetch data from the table in a streaming manner: "apikey"
|
||||
"""
|
||||
apikey_stream(
|
||||
"""maximum number of rows returned in a single batch"""
|
||||
batch_size: Int!
|
||||
|
||||
"""cursor to stream the results returned by the query"""
|
||||
cursor: [apikey_stream_cursor_input]!
|
||||
|
||||
"""filter the rows returned"""
|
||||
where: apikey_bool_exp
|
||||
): [apikey!]!
|
||||
}
|
||||
|
||||
scalar timestamptz
|
||||
|
||||
"""
|
||||
Boolean expression to compare columns of type "timestamptz". All fields are combined with logical 'AND'.
|
||||
"""
|
||||
input timestamptz_comparison_exp {
|
||||
_eq: timestamptz
|
||||
_gt: timestamptz
|
||||
_gte: timestamptz
|
||||
_in: [timestamptz!]
|
||||
_is_null: Boolean
|
||||
_lt: timestamptz
|
||||
_lte: timestamptz
|
||||
_neq: timestamptz
|
||||
_nin: [timestamptz!]
|
||||
}
|
||||
|
||||
scalar uuid
|
||||
|
||||
"""
|
||||
Boolean expression to compare columns of type "uuid". All fields are combined with logical 'AND'.
|
||||
"""
|
||||
input uuid_comparison_exp {
|
||||
_eq: uuid
|
||||
_gt: uuid
|
||||
_gte: uuid
|
||||
_in: [uuid!]
|
||||
_is_null: Boolean
|
||||
_lt: uuid
|
||||
_lte: uuid
|
||||
_neq: uuid
|
||||
_nin: [uuid!]
|
||||
}
|
||||
|
|
@ -4,7 +4,10 @@ const { KEYCLOAK_ISSUER } = configKeys
|
|||
|
||||
const authConfig = {
|
||||
client_id: configKeys.KEYCLOAK_CLIENT_ID,
|
||||
'auth-server-url': configKeys.KEYCLOAK_AUTH_SERVER_URL,
|
||||
'auth-server-url':
|
||||
configKeys.KEYCLOAK_AUTH_SERVER_URL +
|
||||
'/realms/' +
|
||||
configKeys.KEYCLOAK_REALM,
|
||||
'ssl-required': 'all',
|
||||
resource: configKeys.KEYCLOAK_CLIENT_ID,
|
||||
credentials: {
|
||||
|
|
|
@ -30,6 +30,7 @@
|
|||
"nodemailer": "^6.8.0",
|
||||
"openid-client": "^5.4.0",
|
||||
"osu-api-extended": "^2.5.12",
|
||||
"ping": "^0.4.4",
|
||||
"redis": "^4.5.1",
|
||||
"typescript": "^4.9.3"
|
||||
},
|
||||
|
@ -39,7 +40,8 @@
|
|||
"dev:express": "cross-env NODE_ENV=development nodemon -x node --no-warnings --experimental-specifier-resolution=node --loader ts-node/esm index.ts --signal SIGKILL",
|
||||
"build": "tsc",
|
||||
"start": "node --es-module-specifier-resolution=node --loader ts-node/esm ./build/index.js",
|
||||
"prettier": "prettier --write \"**/*.{ts,tsx,js,jsx,json,css,scss,md}\""
|
||||
"prettier": "prettier --write \"**/*.{ts,tsx,js,jsx,json,css,scss,md}\"",
|
||||
"fetch:schema": "bash ./bin/fetch-gql-schema.sh"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@swc/core": "^1.3.23",
|
||||
|
@ -48,6 +50,7 @@
|
|||
"@types/morgan": "^1.9.3",
|
||||
"concurrently": "^7.6.0",
|
||||
"cross-env": "^7.0.3",
|
||||
"graphqurl": "^1.0.1",
|
||||
"hasura-cli": "^2.15.1",
|
||||
"prettier": "^2.8.2",
|
||||
"ts-node": "^10.9.1"
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import { Router } from 'express'
|
||||
import AuthController from '../controllers/auth.controller'
|
||||
import { middleware } from '../auth'
|
||||
import { middleware, keyware } from '../auth'
|
||||
|
||||
const router = Router()
|
||||
const authController = new AuthController()
|
||||
|
@ -13,10 +13,20 @@ router.get('/me', middleware, authController.me as any)
|
|||
|
||||
router.get('/logout', middleware, authController.logout as any)
|
||||
|
||||
router.get('/refresh', middleware, authController.refresh as any)
|
||||
router.post('/refresh', middleware, authController.refresh as any)
|
||||
|
||||
router.get('/introspect', middleware, authController.introspect as any)
|
||||
|
||||
router.put('/key', middleware, authController.createKey as any)
|
||||
|
||||
router.get('/key', middleware, authController.fetchKey as any)
|
||||
|
||||
router.delete('/key', middleware, authController.deleteKey as any)
|
||||
|
||||
router.post('/key/verify', authController.validateKey as any)
|
||||
|
||||
router.get('/key/verify', keyware, authController.validateKey as any)
|
||||
|
||||
router.get('/', function (req, res) {
|
||||
res.render('pages/auth')
|
||||
})
|
||||
|
|
Loading…
Reference in New Issue