Ready with Base Setup

Jyotirmoy Bandyopadhayaya 2022-12-19 19:15:55 +05:30
parent 3dff3f7bd5
commit 7c59463e58
Signed by: bravo68web
GPG Key ID: F5671FD7BCB9917A
73 changed files with 7483 additions and 400 deletions

3
.vscode/settings.json vendored Normal file

@ -0,0 +1,3 @@
{
"typescript.tsdk": "node_modules/typescript/lib"
}

11
jsconfig.json Normal file

@ -0,0 +1,11 @@
{
"compilerOptions": {
"module": "ESNext",
"moduleResolution": "Node",
"target": "ES2020",
"jsx": "react",
"strictNullChecks": true,
"strictFunctionTypes": true
},
"exclude": ["node_modules", "**/node_modules/*"]
}

6
lerna.json Normal file

@ -0,0 +1,6 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"useWorkspaces": true,
"version": "0.0.0",
"packages": ["packages/*"]
}

@ -1,14 +1,14 @@
{
"name": "bravo68web-api-rewrite",
"name": "b68",
"version": "1.0.0",
"description": "Rewrite for Bravo68web API",
"main": "src/",
"repository": "git@github.com:BRAVO68WEB/api-rewrite.git",
"description": "B68 Module",
"repository": "git@github.com:b68dev/b68.git",
"author": "Jyotirmoy Bandyopadhyaya [Bravo68] <jbandyopadhayaya@gmail.com>",
"license": "MIT",
"private": true,
"devDependencies": {
"husky": "^8.0.2",
"lerna": "^6.1.0",
"lint-staged": "^13.0.4",
"turbo": "^1.6.3"
},
@ -17,5 +17,8 @@
},
"lint-staged": {
"**/*": "prettier --write --ignore-unknown"
}
},
"workspaces": [
"packages/*"
]
}

28
packages/api/.env.example Normal file

@ -0,0 +1,28 @@
PORT=
NODE_ENV=
HASURA_GRAPHQL_ADMIN_SECRET=
HASURA_GRAPHQL_ENDPOINT=
SAFE_TOKEN=
GH_TOKEN=
HASHNODE_API_KEY=
LASTFM_API_KEY=
OSU_API_KEY=
OSU_USERNAME=
OSU_PASSWORD=
YT_API_KEY=
TWITTER_API_KEY=
TWITTER_API_SECRET=
SPOTIFY_CLIENT_ID=
SPOTIFY_CLIENT_SECRET=
S3_CLIENT_ID=
S3_CLIENT_SECRET=
S3_BUCKET_NAME=
S3_BUCKET_REGION=
S3_BUCKET_ENDPOINT=
S3_BUCKET_URL=
S3_BUCKET_FOLDER=
MAL_CLIENT_ID=
MAL_CLIENT_SECRET=
AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
AWS_REGION=

@ -0,0 +1,73 @@
import fs from 'fs'
import { parse as parseFile } from 'envfile'
type IconfigStore = 'development' | 'production'
interface IConfigKeys {
PORT: string | number
NODE_ENV: string
HASURA_GRAPHQL_ADMIN_SECRET: string
HASURA_GRAPHQL_ENDPOINT: string
SAFE_TOKEN: string
GH_TOKEN: string
HASHNODE_API_KEY: string
LASTFM_API_KEY: string
OSU_API_KEY: string
OSU_USERNAME: string
OSU_PASSWORD: string
YT_API_KEY: string
TWITTER_API_KEY: string
TWITTER_API_SECRET: string
SPOTIFY_CLIENT_ID: string
SPOTIFY_CLIENT_SECRET: string
S3_CLIENT_ID: string
S3_CLIENT_SECRET: string
S3_BUCKET_NAME: string
S3_BUCKET_REGION: string
S3_BUCKET_ENDPOINT: string
S3_BUCKET_URL: string
S3_BUCKET_FOLDER: string
MAL_CLIENT_ID: string
MAL_CLIENT_SECRET: string
AWS_ACCESS_KEY_ID: string
AWS_SECRET_ACCESS_KEY: string
AWS_REGION: string
}
export default class configStoreFactory {
public configStoreType: IconfigStore
constructor(isProd: boolean = false) {
if (isProd) {
this.configStoreType = 'production'
} else {
this.configStoreType = 'development'
}
}
public async getConfigStore() {
if (this.configStoreType === 'development') {
const envContent = fs.readFileSync(`./.env`, 'utf8')
const env: Partial<IConfigKeys> = parseFile(envContent)
return env
} else {
let reqEnvContent: any = fs.readFileSync(
'./.env.example',
'utf8'
)
reqEnvContent = reqEnvContent.replaceAll('=', '')
// drop empty lines so a trailing newline is not reported as a missing key
reqEnvContent = reqEnvContent.split('\n').filter(Boolean)
let missingKeys: string[] = []
let env: Partial<IConfigKeys> = {}
for (const line of reqEnvContent) {
if (!process.env[line]) {
missingKeys.push(line)
} else env[line] = process.env[line]
}
if (missingKeys.length > 0) {
throw new Error(`Missing keys: ${missingKeys}`)
}
return env
}
}
}
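
A minimal usage sketch for the factory above, mirroring how index.ts consumes it; the import path and the PORT key come from this commit, the rest is illustrative:

import configStore from './configs'

const isProd = process.env.NODE_ENV === 'production'
const configs = new configStore(isProd)

async function showPort() {
    // Development: keys are parsed from ./.env.
    // Production: process.env is validated against the key names in ./.env.example.
    const keys = await configs.getConfigStore()
    console.log('PORT =', keys.PORT)
}

showPort().catch(console.error)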

@ -0,0 +1,6 @@
version: 3
endpoint: https://hasura.b68web.xyz
metadata_directory: metadata
actions:
kind: synchronous
handler_webhook_baseurl: http://localhost:3000

@ -0,0 +1,6 @@
actions: []
custom_types:
enums: []
input_objects: []
objects: []
scalars: []

@ -0,0 +1 @@
[]

@ -0,0 +1 @@
[]

@ -0,0 +1 @@
[]

@ -0,0 +1 @@
[]

@ -0,0 +1 @@
[]

@ -0,0 +1 @@
version: 3

@ -0,0 +1,34 @@
import axios from 'axios'
export const axiosInstance = axios.create({})
axiosInstance.interceptors.request.use(
(config) => {
const newConfig: any = { ...config }
newConfig.metadata = { startTime: new Date() }
return newConfig
},
(error) => {
return Promise.reject(error)
}
)
axiosInstance.interceptors.response.use(
(response) => {
const newRes: any = { ...response }
newRes.config.metadata.endTime = new Date()
newRes.duration =
newRes.config.metadata.endTime - newRes.config.metadata.startTime
return newRes
},
(error) => {
const newError = { ...error }
newError.config.metadata.endTime = new Date()
newError.duration =
newError.config.metadata.endTime -
newError.config.metadata.startTime
return Promise.reject(newError)
}
)
export default axiosInstance
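
A short sketch of reading the timing metadata the interceptors above attach; the import path is assumed (the service files use '../helpers/axios_client'), and duration is the millisecond delta the response interceptor computes:

import axiosInstance from './axios_client' // path assumed

async function timedRequest() {
    const res: any = await axiosInstance.get('https://api.github.com/zen')
    // duration = config.metadata.endTime - config.metadata.startTime (ms)
    console.log(`GET ${res.config.url} took ${res.duration} ms`)
}

timedRequest().catch(console.error)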

@ -0,0 +1,65 @@
import * as redis from 'redis'
import NodeCache from 'node-cache'
type CacheEnvironment = 'development' | 'production'
export default class CacheClient {
private static _clientMode: CacheEnvironment
private static _redisClient: redis.RedisClientType
private static _nodeClient: NodeCache
static get client() {
return this._clientMode === 'production'
? this._redisClient
: this._nodeClient
}
static get env() {
return this._clientMode
}
static init(forceEnv?: CacheEnvironment) {
const env =
forceEnv ||
process.env.CACHE_ENV ||
process.env.NODE_ENV ||
'development'
if (!['development', 'production'].includes(env))
throw new Error(
"Invalid Caching Environment, expected - ['development', 'production'], received - " +
env
)
this._clientMode = env as CacheEnvironment
const redisUrl = process.env.REDIS_URL || ''
if (env === 'production') {
this._redisClient = redis.createClient({
url: redisUrl,
name: 'currency-exchange-cache',
})
this._redisClient.connect()
}
this._nodeClient = new NodeCache()
console.log(`Caching Client initialized in '${env}' environment`)
}
static async set(key: string, value: any) {
if (this._clientMode === 'production') {
await this._redisClient.set(key, value)
} else {
this._nodeClient.set(key, value)
}
}
// expose single function to handle the client read irrespective of the underlying connections
static async get(key: string): Promise<string | null> {
if (this._clientMode === 'production') {
return await this._redisClient.get(key)
} else {
return (this._nodeClient.get(key) as string) || null
}
}
}
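
A usage sketch for CacheClient; forcing the 'development' backend keeps the example self-contained (no Redis needed), while 'production' would also require REDIS_URL:

import CacheClient from './cache.factory'

async function cacheDemo() {
    CacheClient.init('development') // in-memory NodeCache backend
    await CacheClient.set('greeting', 'hello')
    console.log(await CacheClient.get('greeting')) // "hello"
}

cacheDemo().catch(console.error)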

@ -0,0 +1,17 @@
import { GraphQLClient } from 'graphql-request'
export let client = new GraphQLClient('')
export const hgqlInit = () => {
console.log('\n🚀 GraphQL Client Initialized')
let HASURA_URL: string = process.env.HASURA_GRAPHQL_ENDPOINT || ''
HASURA_URL += HASURA_URL.endsWith('/') ? 'v1/graphql' : '/v1/graphql'
const HASURA_ADMIN: string = process.env.HASURA_GRAPHQL_ADMIN_SECRET || ''
client = new GraphQLClient(HASURA_URL, {
headers: {
'x-hasura-admin-secret': HASURA_ADMIN,
},
})
}
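
A sketch of querying Hasura through the client above; hgqlInit() must run first so the exported client points at HASURA_GRAPHQL_ENDPOINT with the admin-secret header. The query itself is only illustrative:

import { gql } from 'graphql-request'
import { client, hgqlInit } from './gql_clent'

hgqlInit()

const PING = gql`
    query {
        __typename
    }
`

client.request(PING).then((data) => console.log(data))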

@ -0,0 +1,4 @@
export * from './cache.factory'
export * from './gql_clent'
export * from './mailer'
export * from './upload.factory'

@ -0,0 +1,48 @@
export interface MailConfig {
host?: string
port?: number
secure?: boolean
auth?: {
user: string
pass: string
}
logger?: boolean
}
type MailerConfigValues = {
[k: string]: MailConfig & Partial<ExtraMailerConfig>
}
interface ExtraMailerConfig {
from_email: string
from_name: string
}
const ConfigValue: MailerConfigValues = {
development: {
host: process.env.MAIL_HOST,
port: Number(process.env.MAIL_PORT),
secure: false,
auth: {
user: process.env.MAIL_USER!,
pass: process.env.MAIL_PASS!,
},
logger: Boolean(process.env.MAIL_LOGGER),
from_email: process.env.MAIL_FROM_EMAIL,
from_name: process.env.MAIL_FROM_NAME,
},
production: {
host: process.env.MAIL_HOST,
port: Number(process.env.MAIL_PORT),
secure: false,
auth: {
user: process.env.MAIL_USER!,
pass: process.env.MAIL_PASS!,
},
logger: Boolean(process.env.MAIL_LOGGER),
from_email: process.env.MAIL_FROM_EMAIL,
from_name: process.env.MAIL_FROM_NAME,
},
}
export default ConfigValue

@ -0,0 +1,22 @@
import nodemailer from 'nodemailer'
import Mail from 'nodemailer/lib/mailer'
import MailerConfig from './mailer.config'
const mailConfig =
process.env.NODE_ENV === 'production'
? MailerConfig.production
: MailerConfig.development
const FROM_EMAIL = 'B68 API <api@b68.dev>'
const transporter = nodemailer.createTransport(mailConfig)
const sendMailWrapper = async (mail: Mail.Options): Promise<void> => {
try {
await transporter.sendMail(mail)
} catch (err) {
console.log(err)
}
}
export default sendMailWrapper
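
A sketch of sending a message through the wrapper above; the recipient is a placeholder, the from address is the FROM_EMAIL constant defined in this file, and the transport settings come from mailer.config (MAIL_HOST, MAIL_USER, and so on):

import sendMailWrapper from './mailer'

sendMailWrapper({
    from: 'B68 API <api@b68.dev>',
    to: 'someone@example.com', // placeholder recipient
    subject: 'Hello from the mailer',
    text: 'Test message.',
})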

@ -0,0 +1,137 @@
import { S3Client } from '@aws-sdk/client-s3'
import multer from 'multer'
import multerS3 from 'multer-s3'
import path from 'path'
import { nanoid } from 'napi-nanoid'
import axios from 'axios'
import FormData from 'form-data'
import fs from 'fs'
type UploadEnvironment = 's3' | 'safe' | 'local'
type napiNanoId = () => string
export default class LocalUploadFactory {
private _upload: multer.Multer
constructor() {
this._upload = multer({
storage: multer.diskStorage({
destination: (req, file, cb) => {
cb(null, path.join(__dirname, '..', 'uploads'))
},
filename: (req, file, cb) => {
cb(null, nanoid() + path.extname(file.originalname))
},
}),
})
}
get upload() {
return this._upload
}
}
export class S3UploadFactory {
private _upload: multer.Multer
constructor(client: S3Client) {
this._upload = multer({
storage: multerS3({
s3: client,
bucket: process.env.AWS_BUCKET!,
acl: 'public-read',
key: (req, file, cb) => {
cb(null, nanoid() + path.extname(file.originalname))
},
}),
})
}
get upload() {
return this._upload
}
}
export class SafeUploadFactory {
// Safe is a http file upload service at https://safe.b68dev.xyz
// Upload API at https://safe.b68dev.xyz/api/upload
private _upload: multer.Multer
constructor() {
this._upload = multer({
storage: multer.memoryStorage(),
})
this._upload.single('file')
}
get upload() {
return this._upload
}
async uploadFile(file: any) {
const form = new FormData()
form.append('files[]', file.buffer, { filename: file.originalname })
const { data } = await axios.post(
'https://safe.b68dev.xyz/api/upload',
form,
{
headers: {
token: process.env.SAFE_TOKEN!,
...form.getHeaders(),
},
}
)
return data
}
}
export class UploadFactory {
private static _clientMode: UploadEnvironment
private static _s3Client: S3Client
private static _localClient: LocalUploadFactory
private static _safeClient: SafeUploadFactory
static get client() {
return this._clientMode === 's3'
? this._s3Client
: this._clientMode === 'safe'
? this._safeClient
: this._localClient
}
static get env() {
return this._clientMode
}
static init(forceEnv?: UploadEnvironment) {
const env =
forceEnv ||
process.env.UPLOAD_ENV ||
process.env.NODE_ENV ||
'local'
if (!['s3', 'local', 'safe'].includes(env))
throw new Error(
"Invalid Upload Environment, expected - ['s3', 'local', 'safe'], received - " +
env
)
this._clientMode = env as UploadEnvironment
if (env === 'safe') {
this._safeClient = new SafeUploadFactory()
} else if (env === 's3') {
this._s3Client = new S3Client({
region: process.env.AWS_REGION,
credentials: {
accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
},
})
} else {
this._localClient = new LocalUploadFactory()
}
console.log(`Upload Client initialized in '${env}' environment`)
}
}
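
A sketch of wiring the factory into an Express route, assuming UPLOAD_ENV='safe'; the route path and field name are illustrative, and uploadFile() forwards the in-memory buffer to the Safe upload API:

import { Router } from 'express'
import { UploadFactory, SafeUploadFactory } from './upload.factory'

UploadFactory.init('safe')
const safe = UploadFactory.client as SafeUploadFactory

const router = Router()

// multer memory storage places the uploaded file buffer on req.file
router.post('/upload', safe.upload.single('file'), async (req, res, next) => {
    try {
        const result = await safe.uploadFile(req.file)
        res.json(result)
    } catch (err) {
        next(err)
    }
})

export default router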

@ -0,0 +1,46 @@
import 'dotenv/config'
import cors from 'cors'
import express from 'express'
import morgan from 'morgan'
import helmet from 'helmet'
import { hgqlInit } from './helpers'
import routes from './routes'
import { errorHandler, notFoundHandler } from './libs'
import pkg from './package.json'
import configStore from './configs'
export const app: express.Application = express()
hgqlInit()
const isProd: boolean = process.env.NODE_ENV === 'production'
console.log(isProd ? '🚀 Production Mode' : '🚀 Development Mode')
const configs = new configStore(isProd)
app.use(cors())
app.use(helmet())
app.use(morgan('dev'))
app.use(express.json())
app.use(express.urlencoded({ extended: true, limit: '50mb' }))
app.use('/health', (req, res) => {
return res.status(200).json({
app: pkg.name,
request_ip: req.ip,
uptime: process.uptime(),
hrtime: process.hrtime(),
})
})
console.log('☄', 'Base Route', '/')
app.use('/', routes)
app.use(notFoundHandler)
app.use(errorHandler)
app.listen(process.env.PORT, async () => {
console.log(`\nServer running on port ${process.env.PORT}`)
})
export { configs as configKeys }

@ -0,0 +1,31 @@
import { Response } from 'express'
import { ClientError } from 'graphql-request'
import { CustomError } from './error'
import { ValidationError } from 'joi'
const customErrorHandler = async (res: Response, error: any) => {
if (error instanceof ValidationError) {
return res.status(400).json({
success: false,
message: 'Data validation failed',
details: error.details,
})
}
if (error instanceof CustomError) {
return res
.status(error.statusCode)
.send({ success: false, message: error.message, data: error.data })
}
if (error instanceof ClientError) {
const { errors = [] } = error.response
const [err] = errors
if (err?.message) {
return res
.status(422)
.send({ success: false, message: err.message })
}
}
res.status(500).send({ success: false, message: 'Internal Server Error.' })
}
export default customErrorHandler

@ -0,0 +1,24 @@
export class CustomError extends Error {
public statusCode: number
public data: any
constructor(args: { message?: string; statusCode?: number; data?: any }) {
super(args.message)
this.statusCode = args.statusCode || 500
this.data = args.data
}
toString() {
return {
message: this.message,
statusCode: this.statusCode,
data: this.data,
}
}
}
export class NotFoundError extends CustomError {
constructor() {
super({ message: 'NOT_FOUND', statusCode: 404 })
}
}

@ -0,0 +1,2 @@
export * from './middleware'
export * from './utilities'

@ -0,0 +1,72 @@
import { NextFunction, Request, Response } from 'express'
import Joi from 'joi'
import { CustomError, NotFoundError } from './error'
import { pick } from './utilities'
export const errorHandler = async (
err: any,
_req: Request,
res: Response,
// eslint-disable-next-line
_next: NextFunction
) => {
if ('statusCode' in err) {
return res.status(err.statusCode).json({
message: err.message,
error: true,
data: null,
})
}
return res.status(500).json({
message: err.message,
error: true,
data: null,
error_stack:
process.env.NODE_ENV === 'production' ? undefined : err.stack,
})
}
export const notFoundHandler = async (
_req: Request,
_res: Response,
next: NextFunction
) => {
return next(new NotFoundError())
}
export const validate =
(schema: any) => (req: Request, _res: Response, next: NextFunction) => {
// Request body should be JSON, if present
if (Object.keys(req.body).length !== 0 && !req.is('application/json')) {
return next(new Error('Supports JSON request body only'))
}
// cherry-pick from the input schema ["params", "query", "body"] fields
const validSchema = pick(schema, ['params', 'query', 'body'])
// cherry-pick from the request object ["params", "query", "body"] fields
const object = pick(req, Object.keys(validSchema))
// Compile schema to Joi schema object and validate the request object
const { value, error } = Joi.compile(validSchema)
.prefs({ errors: { label: 'key' } })
.validate(object)
// If validation fails throw 400 Bad Request error
if (error) {
const errorMessage = error.details
.map((details) => details.message)
.join(', ')
return next(
new CustomError({
message: errorMessage,
statusCode: 400,
})
)
}
// Update validated fields in request with returned value
Object.assign(req, value)
return next()
}
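
A usage sketch for validate(); the schema keys mirror the fields the middleware cherry-picks from the request (params, query, body), and the route itself is illustrative:

import { Router } from 'express'
import Joi from 'joi'
import { validate } from './middleware'

const router = Router()

const createUserSchema = {
    body: Joi.object({
        name: Joi.string().required(),
        age: Joi.number().integer().min(0),
    }),
}

router.post('/users', validate(createUserSchema), (req, res) => {
    // req.body has already been validated (and coerced) by Joi at this point
    res.json({ message: 'ok', user: req.body })
})

export default router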

@ -0,0 +1,165 @@
import { PaginationType } from '../types'
export const makeResponse = (
data: any,
meta_data: any = null,
message = 'Success'
) => ({
message,
error: false,
meta_data,
data,
})
const joinPrefix = (...keys: string[]) => keys.join('_')
export const flattenObject = (obj: any, prefix = '') => {
let newObj: any = {}
for (const key in obj) {
const pfx = prefix ? joinPrefix(prefix, key) : key
if (obj[key] instanceof Object) {
newObj = { ...newObj, ...flattenObject(obj[key], pfx) }
} else {
newObj = { ...newObj, [pfx]: obj[key] }
}
}
return newObj
}
export const cleanObject = (obj: any) => {
const newObj: any = obj
for (const k in obj) {
if (
(!k || !obj[k] || typeof k === 'undefined') &&
typeof obj[k] !== 'boolean' &&
obj[k] !== 0
)
delete obj[k]
}
return newObj
}
//helpful for update apis, such that an existing field can be updated to null
export const cleanObjectKeepNull = (obj: any) => {
const newObj: any = obj
for (const k in obj) {
if (
(!k || !obj[k] || typeof k === 'undefined') &&
typeof obj[k] !== 'boolean' &&
obj[k] !== 0 &&
obj[k] !== null
)
delete obj[k]
}
return newObj
}
export const paginateRequest = (q: any): PaginationType => {
const filter_keys = Object.keys(q).filter((c) => c.startsWith('filter_'))
const filters = filter_keys.length
? filter_keys
.map((filter_key) => {
const filter_subset = filter_key
.replace('filter_', '')
.split('.')
let mode =
typeof q[filter_key] === 'number' ? '_eq' : '_iregex'
// check if the provided value if uuid - if so, we use the _eq operator to match
if (q[filter_key].includes('-')) {
mode = '_eq'
}
return parseFilter(filter_subset, q[filter_key], 0, mode)
})
.reduceRight((agg, cur) => {
const [cur_key] = Object.keys(cur)
if (cur_key in agg) {
if (Array.isArray(agg[cur_key])) {
agg[cur_key].push(cur)
} else {
cur[cur_key] = [cur[cur_key], agg[cur_key]]
}
return cur
}
return {
...agg,
...cur,
}
}, {})
: undefined
return {
page: parseInt(q.page) || 0,
limit: parseInt(q.limit || q.items) || 50,
sort_by: q.sort_by,
sort_order: q.sort_order || 'asc',
filters,
} as PaginationType
}
// parse the filter from the query string into the filter object with graphql where object format
export const parseFilter = (
filter: string[],
value: string,
index = 0,
filterMode = '_iregex'
) => {
let fx: any = { [filterMode]: value }
if (index < filter.length - 1) {
fx = parseFilter(filter, value, index + 1, filterMode)
}
const key = filter[index]
return { [key]: fx }
}
//returns input timestamp - input hours in datestring
export const subtractHours = (date: Date, hours: number) => {
date.setHours(date.getHours() - hours)
return date.toISOString()
}
export const capitalizeEachWord = (str: string) => {
return str
.split(' ')
.map((word) =>
!word.length
? ''
: word[0].toUpperCase() + word.slice(1).toLowerCase()
)
.join(' ')
}
//filters requested key-values from an object
export const pick = (object: any, keys: any) => {
return keys.reduce((obj: any, key: any) => {
// removed hasOwnProperty check because of typescript compiler error
if (object && key in object) {
obj[key] = object[key]
}
return obj
}, {})
}
/**
* Get the sort column from the parsed query params and validate them based on a provided list of options
* @param pg_sort_by Query parameter from which the sort column needs to be located.
* @param def Default value if the parse fails
* @param options List of valid columns that can be used as filters
*/
export const getSortColumn = (
pg_sort_by?: string,
def = 'id',
options: string[] = []
) => {
pg_sort_by ||= def
return options.includes(pg_sort_by) ? pg_sort_by : def
}
/**
* Check whether a string matches the uuid format for postgres or not.
*/
export const is_uuid = (value: string) => {
// storing as a separate regex object for future modifications and code readability
const regex = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i
return regex.test(value)
}
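
A worked example of the filter parsing above (values are illustrative): a request like GET /things?filter_user.name=alice&page=2&limit=10 is turned into a Hasura-style where fragment by paginateRequest:

import { paginateRequest } from './utilities' // path assumed

const pg = paginateRequest({
    'filter_user.name': 'alice',
    page: '2',
    limit: '10',
})

console.log(JSON.stringify(pg.filters))
// => {"user":{"name":{"_iregex":"alice"}}}
console.log(pg.page, pg.limit, pg.sort_order)
// => 2 10 asc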

48
packages/api/package.json Normal file

@ -0,0 +1,48 @@
{
"name": "api",
"version": "1.0.0",
"description": "Rewrite for Bravo68web API",
"main": "index.ts",
"repository": "git@github.com:BRAVO68WEB/api-rewrite.git",
"author": "Jyotirmoy Bandyopadhyaya [Bravo68] <jbandyopadhayaya@gmail.com>",
"license": "MIT",
"private": true,
"dependencies": {
"@aws-sdk/client-s3": "^3.226.0",
"@types/express": "^4.17.14",
"axios": "^1.2.1",
"cors": "^2.8.5",
"dotenv": "^16.0.3",
"envfile": "^6.18.0",
"express": "^4.18.2",
"form-data": "^4.0.0",
"graphql": "^16.6.0",
"graphql-request": "^5.0.0",
"helmet": "^6.0.1",
"joi": "^17.7.0",
"morgan": "^1.10.0",
"multer": "^1.4.5-lts.1",
"multer-s3": "^3.0.1",
"napi-nanoid": "^0.0.4",
"node-cache": "^5.1.2",
"nodemailer": "^6.8.0",
"redis": "^4.5.1",
"typescript": "^4.9.3"
},
"scripts": {
"dev": "concurrently \"npm run dev:express\" \"npm run dev:hasura\"",
"dev:hasura": "cd hasura && hasura --skip-update-check --envfile ../.env console",
"dev:express": "cross-env NODE_ENV=development nodemon -r dotenv/config --watch \"*/**/*.ts\" --exec \"ts-node\" --files \"index.ts\" --signal SIGKILL",
"build": "tsc",
"start": "node dist/index.js"
},
"devDependencies": {
"@swc/core": "^1.3.23",
"@swc/wasm": "^1.3.23",
"@types/cors": "^2.8.13",
"@types/morgan": "^1.9.3",
"concurrently": "^7.6.0",
"cross-env": "^7.0.3",
"hasura-cli": "^2.15.1"
}
}

@ -0,0 +1,18 @@
import { Router } from 'express'
import { makeResponse } from '../../libs'
const router = Router()
router.get('/', (req, res) => {
res.send(makeResponse({ message: 'Hello World me!' }))
})
router.all('/err', async (req, res, next) => {
try {
throw new Error('This is an error')
} catch (err) {
next(err)
}
})
export default router

@ -0,0 +1,18 @@
import { Router } from 'express'
import { makeResponse } from '../libs'
const router = Router()
router.get('/', (req, res) => {
res.send(makeResponse({ message: 'Hello World auth!' }))
})
router.all('/err', async (req, res, next) => {
try {
throw new Error('This is an error')
} catch (err) {
next(err)
}
})
export default router

@ -0,0 +1,18 @@
import { Router } from 'express'
import { makeResponse } from '../libs'
const router = Router()
router.get('/', (req, res) => {
res.send(makeResponse({ message: 'Hello World!' }))
})
router.all('/err', async (req, res, next) => {
try {
throw new Error('This is an error')
} catch (err) {
next(err)
}
})
export default router

@ -0,0 +1,45 @@
import path from 'path'
import { readdirSync } from 'fs'
import { Router } from 'express'
const router = Router()
const isCompiled = path.extname(__filename) === '.js'
const thisFileName = path.basename(__filename)
const loadRoutes = async (dirPath: string, prefix = '/') => {
readdirSync(dirPath, {
withFileTypes: true,
}).forEach(async (f) => {
if (f.isFile()) {
if (f.name == thisFileName) return
const isRouteMod = f.name.endsWith(
`.routes.${isCompiled ? 'js' : 'ts'}`
)
if (isRouteMod) {
const route = f.name.replace(
`.routes.${isCompiled ? 'js' : 'ts'}`,
''
)
const modRoute = path.join(prefix, route)
console.log('🛰️', 'Loaded', modRoute)
const mod = await import(path.join(baseDir, prefix + f.name))
router.use(modRoute, mod.default)
}
} else if (f.isDirectory()) {
await loadRoutes(
path.resolve(dirPath, f.name),
prefix + f.name + '/'
)
}
})
}
let baseDir = path.dirname(__filename)
baseDir = path.resolve(baseDir)
loadRoutes(baseDir)
export default router
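
A sketch of the convention the loader above implies: any sibling file ending in .routes.ts is imported and mounted under its base name, so a hypothetical ping.routes.ts next to this file would be served at /ping:

// ping.routes.ts (hypothetical)
import { Router } from 'express'
import { makeResponse } from '../libs'

const router = Router()

router.get('/', (_req, res) => {
    res.send(makeResponse({ message: 'pong' }))
})

export default router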

@ -0,0 +1,18 @@
import { Router } from 'express'
import { makeResponse } from '../../libs'
const router = Router()
router.get('/', (req, res) => {
res.send(makeResponse({ message: 'Hello World!' }))
})
router.all('/err', async (req, res, next) => {
try {
throw new Error('This is an error')
} catch (err) {
next(err)
}
})
export default router

@ -0,0 +1,18 @@
import { Router } from 'express'
import { makeResponse } from '../../../libs'
const router = Router()
router.get('/', (req, res) => {
res.send(makeResponse({ message: 'Hello World!' }))
})
router.all('/err', async (req, res, next) => {
try {
throw new Error('This is an error')
} catch (err) {
next(err)
}
})
export default router

@ -0,0 +1,18 @@
import { Router } from 'express'
import { makeResponse } from '../../../libs'
const router = Router()
router.get('/', (req, res) => {
res.send(makeResponse({ message: 'Hello World!' }))
})
router.all('/err', async (req, res, next) => {
try {
throw new Error('This is an error')
} catch (err) {
next(err)
}
})
export default router

@ -0,0 +1,18 @@
import { Router } from 'express'
import { makeResponse } from '../../../libs'
const router = Router()
router.get('/', (req, res) => {
res.send(makeResponse({ message: 'Hello World!' }))
})
router.all('/err', async (req, res, next) => {
try {
throw new Error('This is an error')
} catch (err) {
next(err)
}
})
export default router

@ -0,0 +1,18 @@
import { Router } from 'express'
import { makeResponse } from '../../libs'
const router = Router()
router.get('/', (req, res) => {
res.send(makeResponse({ message: 'Hello World!' }))
})
router.all('/err', async (req, res, next) => {
try {
throw new Error('This is an error')
} catch (err) {
next(err)
}
})
export default router

@ -0,0 +1,18 @@
import { Router } from 'express'
import { makeResponse } from '../../libs'
const router = Router()
router.get('/', (req, res) => {
res.send(makeResponse({ message: 'Hello World!' }))
})
router.all('/err', async (req, res, next) => {
try {
throw new Error('This is an error')
} catch (err) {
next(err)
}
})
export default router

@ -0,0 +1,18 @@
import { Router } from 'express'
import { makeResponse } from '../../libs'
const router = Router()
router.get('/', (req, res) => {
res.send(makeResponse({ message: 'Hello World!' }))
})
router.all('/err', async (req, res, next) => {
try {
throw new Error('This is an error')
} catch (err) {
next(err)
}
})
export default router

@ -0,0 +1,18 @@
import { Router } from 'express'
import { makeResponse } from '../../libs'
const router = Router()
router.get('/', (req, res) => {
res.send(makeResponse({ message: 'Hello World!' }))
})
router.all('/err', async (req, res, next) => {
try {
throw new Error('This is an error')
} catch (err) {
next(err)
}
})
export default router

@ -0,0 +1,18 @@
import { Router } from 'express'
import { makeResponse } from '../../libs'
const router = Router()
router.get('/', (req, res) => {
res.send(makeResponse({ message: 'Hello World!' }))
})
router.all('/err', async (req, res, next) => {
try {
throw new Error('This is an error')
} catch (err) {
next(err)
}
})
export default router

@ -0,0 +1,18 @@
import { Router } from 'express'
import { makeResponse } from '../../libs'
const router = Router()
router.get('/', (req, res) => {
res.send(makeResponse({ message: 'Hello World!' }))
})
router.all('/err', async (req, res, next) => {
try {
throw new Error('This is an error')
} catch (err) {
next(err)
}
})
export default router

@ -0,0 +1,18 @@
import { Router } from 'express'
import { makeResponse } from '../../libs'
const router = Router()
router.get('/', (req, res) => {
res.send(makeResponse({ message: 'Hello World!' }))
})
router.all('/err', async (req, res, next) => {
try {
throw new Error('This is an error')
} catch (err) {
next(err)
}
})
export default router

@ -0,0 +1,48 @@
import axios from '../helpers/axios_client'
export const getGithubUser = async (username: string) => {
const { data } = await axios.get(`https://api.github.com/users/${username}`)
return data
}
export const getGithubUserRepos = async (username: string) => {
const { data } = await axios.get(
`https://api.github.com/users/${username}/repos`
)
return data
}
export const getGithubUserGists = async (username: string) => {
const { data } = await axios.get(
`https://api.github.com/users/${username}/gists`
)
return data
}
export const getGithubUserFollowers = async (username: string) => {
const { data } = await axios.get(
`https://api.github.com/users/${username}/followers`
)
return data
}
export const getGithubUserFollowing = async (username: string) => {
const { data } = await axios.get(
`https://api.github.com/users/${username}/following`
)
return data
}
export const getGithubUserStarred = async (username: string) => {
const { data } = await axios.get(
`https://api.github.com/users/${username}/starred`
)
return data
}
export const getGithubUserEvents = async (username: string) => {
const { data } = await axios.get(
`https://api.github.com/users/${username}/events`
)
return data
}

@ -0,0 +1,24 @@
{
"compilerOptions": {
"lib": ["es2018", "es5", "dom"],
"typeRoots": ["node_modules/@types", "./types"],
"resolveJsonModule": true,
"esModuleInterop": true,
"target": "es6",
"strict": true,
"module": "commonjs",
"moduleResolution": "node",
"outDir": "./build",
"emitDecoratorMetadata": true,
"experimentalDecorators": true,
"declaration": true,
"sourceMap": false,
"noImplicitAny": false
},
"exclude": ["./node_modules/**/*", "./build/**/*"],
"include": ["./**/*.ts", "./**/*.tsx", "./**/*.json", "./**/*.js"],
"ts-node": {
"swc": true
},
"files": ["types/index.d.ts"]
}

7
packages/api/types/index.d.ts vendored Normal file

@ -0,0 +1,7 @@
export interface PaginationType {
page: number
limit: number
sort_order?: 'asc' | 'desc'
sort_by?: string
filters?: { [k: string]: any }
}

Binary file not shown. (before: 892 KiB)

Binary file not shown. (before: 988 KiB)

@ -1,6 +1,7 @@
{
"name": "b68-cli",
"version": "1.0.2",
"name": "cli",
"private": true,
"version": "2.0.0",
"description": "A Command Line Application to interact with B68WEB API",
"main": "bin/app.js",
"scripts": {
@ -11,10 +12,6 @@
"bin": {
"b68": "./bin/app.js"
},
"repository": {
"type": "git",
"url": "git+https://github.com/BRAVO68WEB/b68-cli.git"
},
"keywords": [
"b68",
"b68cli",
@ -29,10 +26,6 @@
"author": "BRAVO68WEB",
"license": "ISC",
"type": "module",
"bugs": {
"url": "https://github.com/BRAVO68WEB/b68-cli/issues"
},
"homepage": "https://github.com/BRAVO68WEB/b68-cli#readme",
"dependencies": {
"axios": "^0.26.1",
"chalk": "^5.0.1",

@ -1,365 +0,0 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
ajv-formats@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520"
integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==
dependencies:
ajv "^8.0.0"
ajv@^8.0.0, ajv@^8.6.3:
version "8.11.0"
resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.11.0.tgz#977e91dd96ca669f54a11e23e378e33b884a565f"
integrity sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==
dependencies:
fast-deep-equal "^3.1.1"
json-schema-traverse "^1.0.0"
require-from-string "^2.0.2"
uri-js "^4.2.2"
atomically@^1.7.0:
version "1.7.0"
resolved "https://registry.yarnpkg.com/atomically/-/atomically-1.7.0.tgz#c07a0458432ea6dbc9a3506fffa424b48bccaafe"
integrity sha512-Xcz9l0z7y9yQ9rdDaxlmaI4uJHf/T8g9hOEzJcsEqX2SjCj4J20uK7+ldkDHMbpJDK76wF7xEIgxc/vSlsfw5w==
axios@^0.26.1:
version "0.26.1"
resolved "https://registry.yarnpkg.com/axios/-/axios-0.26.1.tgz#1ede41c51fcf51bbbd6fd43669caaa4f0495aaa9"
integrity sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==
dependencies:
follow-redirects "^1.14.8"
chalk@^5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.0.1.tgz#ca57d71e82bb534a296df63bbacc4a1c22b2a4b6"
integrity sha512-Fo07WOYGqMfCWHOzSXOt2CxDbC6skS/jO9ynEcmpANMoPrD+W1r1K6Vx7iNm+AQmETU1Xr2t+n8nzkV9t6xh3w==
commander@^9.2.0:
version "9.2.0"
resolved "https://registry.yarnpkg.com/commander/-/commander-9.2.0.tgz#6e21014b2ed90d8b7c9647230d8b7a94a4a419a9"
integrity sha512-e2i4wANQiSXgnrBlIatyHtP1odfUp0BbV5Y5nEGbxtIrStkEOAAzCUirvLBNXHLr7kwLvJl6V+4V3XV9x7Wd9w==
conf@^10.1.2:
version "10.1.2"
resolved "https://registry.yarnpkg.com/conf/-/conf-10.1.2.tgz#50132158f388756fa9dea3048f6b47935315c14e"
integrity sha512-o9Fv1Mv+6A0JpoayQ8JleNp3hhkbOJP/Re/Q+QqxMPHPkABVsRjQGWZn9A5GcqLiTNC6d89p2PB5ZhHVDSMwyg==
dependencies:
ajv "^8.6.3"
ajv-formats "^2.1.1"
atomically "^1.7.0"
debounce-fn "^4.0.0"
dot-prop "^6.0.1"
env-paths "^2.2.1"
json-schema-typed "^7.0.3"
onetime "^5.1.2"
pkg-up "^3.1.0"
semver "^7.3.5"
configstore@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/configstore/-/configstore-6.0.0.tgz#49eca2ebc80983f77e09394a1a56e0aca8235566"
integrity sha512-cD31W1v3GqUlQvbBCGcXmd2Nj9SvLDOP1oQ0YFuLETufzSPaKp11rYBsSOm7rCsW3OnIRAFM3OxRhceaXNYHkA==
dependencies:
dot-prop "^6.0.1"
graceful-fs "^4.2.6"
unique-string "^3.0.0"
write-file-atomic "^3.0.3"
xdg-basedir "^5.0.1"
crypto-random-string@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-4.0.0.tgz#5a3cc53d7dd86183df5da0312816ceeeb5bb1fc2"
integrity sha512-x8dy3RnvYdlUcPOjkEHqozhiwzKNSq7GcPuXFbnyMOCHxX8V3OgIg/pYuabl2sbUPfIJaeAQB7PMOK8DFIdoRA==
dependencies:
type-fest "^1.0.1"
data-uri-to-buffer@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/data-uri-to-buffer/-/data-uri-to-buffer-4.0.0.tgz#b5db46aea50f6176428ac05b73be39a57701a64b"
integrity sha512-Vr3mLBA8qWmcuschSLAOogKgQ/Jwxulv3RNE4FXnYWRGujzrRWQI4m12fQqRkwX06C0KanhLr4hK+GydchZsaA==
debounce-fn@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/debounce-fn/-/debounce-fn-4.0.0.tgz#ed76d206d8a50e60de0dd66d494d82835ffe61c7"
integrity sha512-8pYCQiL9Xdcg0UPSD3d+0KMlOjp+KGU5EPwYddgzQ7DATsg4fuUDjQtsYLmWjnk2obnNHgV3vE2Y4jejSOJVBQ==
dependencies:
mimic-fn "^3.0.0"
decode-uri-component@^0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545"
integrity sha512-hjf+xovcEn31w/EUYdTXQh/8smFL/dzYjohQGEIgjyNavaJfBY2p5F527Bo1VPATxv0VYTUC2bOcXvqFwk78Og==
dot-prop@^6.0.1:
version "6.0.1"
resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-6.0.1.tgz#fc26b3cf142b9e59b74dbd39ed66ce620c681083"
integrity sha512-tE7ztYzXHIeyvc7N+hR3oi7FIbf/NIjVP9hmAt3yMXzrQ072/fpjGLx2GxNxGxUl5V73MEqYzioOMoVhGMJ5cA==
dependencies:
is-obj "^2.0.0"
env-paths@^2.2.1:
version "2.2.1"
resolved "https://registry.yarnpkg.com/env-paths/-/env-paths-2.2.1.tgz#420399d416ce1fbe9bc0a07c62fa68d67fd0f8f2"
integrity sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==
fast-deep-equal@^3.1.1:
version "3.1.3"
resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525"
integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==
fetch-blob@^3.1.2, fetch-blob@^3.1.4:
version "3.1.5"
resolved "https://registry.yarnpkg.com/fetch-blob/-/fetch-blob-3.1.5.tgz#0077bf5f3fcdbd9d75a0b5362f77dbb743489863"
integrity sha512-N64ZpKqoLejlrwkIAnb9iLSA3Vx/kjgzpcDhygcqJ2KKjky8nCgUQ+dzXtbrLaWZGZNmNfQTsiQ0weZ1svglHg==
dependencies:
node-domexception "^1.0.0"
web-streams-polyfill "^3.0.3"
filter-obj@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/filter-obj/-/filter-obj-1.1.0.tgz#9b311112bc6c6127a16e016c6c5d7f19e0805c5b"
integrity sha512-8rXg1ZnX7xzy2NGDVkBVaAy+lSlPNwad13BtgSlLuxfIslyt5Vg64U7tFcCt4WS1R0hvtnQybT/IyCkGZ3DpXQ==
find-up@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73"
integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==
dependencies:
locate-path "^3.0.0"
follow-redirects@^1.14.8:
version "1.14.9"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.9.tgz#dd4ea157de7bfaf9ea9b3fbd85aa16951f78d8d7"
integrity sha512-MQDfihBQYMcyy5dhRDJUHcw7lb2Pv/TuE6xP1vyraLukNDHKbDxDNaOE3NbCAdKQApno+GPRyo1YAp89yCjK4w==
formdata-polyfill@^4.0.10:
version "4.0.10"
resolved "https://registry.yarnpkg.com/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz#24807c31c9d402e002ab3d8c720144ceb8848423"
integrity sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==
dependencies:
fetch-blob "^3.1.2"
graceful-fs@^4.2.6:
version "4.2.10"
resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c"
integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==
imurmurhash@^0.1.4:
version "0.1.4"
resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"
integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==
is-obj@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-2.0.0.tgz#473fb05d973705e3fd9620545018ca8e22ef4982"
integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==
is-typedarray@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==
json-schema-traverse@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2"
integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==
json-schema-typed@^7.0.3:
version "7.0.3"
resolved "https://registry.yarnpkg.com/json-schema-typed/-/json-schema-typed-7.0.3.tgz#23ff481b8b4eebcd2ca123b4fa0409e66469a2d9"
integrity sha512-7DE8mpG+/fVw+dTpjbxnx47TaMnDfOI1jwft9g1VybltZCduyRQPJPvc+zzKY9WPHxhPWczyFuYa6I8Mw4iU5A==
locate-path@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e"
integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==
dependencies:
p-locate "^3.0.0"
path-exists "^3.0.0"
lru-cache@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94"
integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==
dependencies:
yallist "^4.0.0"
mimic-fn@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b"
integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==
mimic-fn@^3.0.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-3.1.0.tgz#65755145bbf3e36954b949c16450427451d5ca74"
integrity sha512-Ysbi9uYW9hFyfrThdDEQuykN4Ey6BuwPD2kpI5ES/nFTDn/98yxYNLZJcgUAKPT/mcrLLKaGzJR9YVxJrIdASQ==
moment@^2.29.3:
version "2.29.3"
resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.3.tgz#edd47411c322413999f7a5940d526de183c031f3"
integrity sha512-c6YRvhEo//6T2Jz/vVtYzqBzwvPT95JBQ+smCytzf7c50oMZRsR/a4w88aD34I+/QVSfnoAnSBFPJHItlOMJVw==
node-domexception@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5"
integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==
node-fetch@^3.2.3:
version "3.2.3"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-3.2.3.tgz#a03c9cc2044d21d1a021566bd52f080f333719a6"
integrity sha512-AXP18u4pidSZ1xYXRDPY/8jdv3RAozIt/WLNR/MBGZAz+xjtlr90RvCnsvHQRiXyWliZF/CpytExp32UU67/SA==
dependencies:
data-uri-to-buffer "^4.0.0"
fetch-blob "^3.1.4"
formdata-polyfill "^4.0.10"
onetime@^5.1.2:
version "5.1.2"
resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e"
integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==
dependencies:
mimic-fn "^2.1.0"
p-limit@^2.0.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1"
integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==
dependencies:
p-try "^2.0.0"
p-locate@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4"
integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==
dependencies:
p-limit "^2.0.0"
p-try@^2.0.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==
path-exists@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515"
integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=
pkg-up@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-3.1.0.tgz#100ec235cc150e4fd42519412596a28512a0def5"
integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA==
dependencies:
find-up "^3.0.0"
punycode@^2.1.0:
version "2.1.1"
resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==
query-string@^7.1.1:
version "7.1.1"
resolved "https://registry.yarnpkg.com/query-string/-/query-string-7.1.1.tgz#754620669db978625a90f635f12617c271a088e1"
integrity sha512-MplouLRDHBZSG9z7fpuAAcI7aAYjDLhtsiVZsevsfaHWDS2IDdORKbSd1kWUA+V4zyva/HZoSfpwnYMMQDhb0w==
dependencies:
decode-uri-component "^0.2.0"
filter-obj "^1.1.0"
split-on-first "^1.0.0"
strict-uri-encode "^2.0.0"
querystringify@^2.1.1:
version "2.2.0"
resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6"
integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==
require-from-string@^2.0.2:
version "2.0.2"
resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909"
integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==
requires-port@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff"
integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==
semver@^7.3.5:
version "7.3.7"
resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f"
integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==
dependencies:
lru-cache "^6.0.0"
signal-exit@^3.0.2:
version "3.0.7"
resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9"
integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==
split-on-first@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/split-on-first/-/split-on-first-1.1.0.tgz#f610afeee3b12bce1d0c30425e76398b78249a5f"
integrity sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw==
strict-uri-encode@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz#b9c7330c7042862f6b142dc274bbcc5866ce3546"
integrity sha512-QwiXZgpRcKkhTj2Scnn++4PKtWsH0kpzZ62L2R6c/LUVYv7hVnZqcg2+sMuT6R7Jusu1vviK/MFsu6kNJfWlEQ==
type-fest@^1.0.1:
version "1.4.0"
resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-1.4.0.tgz#e9fb813fe3bf1744ec359d55d1affefa76f14be1"
integrity sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==
typedarray-to-buffer@^3.1.5:
version "3.1.5"
resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080"
integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==
dependencies:
is-typedarray "^1.0.0"
unique-string@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-3.0.0.tgz#84a1c377aff5fd7a8bc6b55d8244b2bd90d75b9a"
integrity sha512-VGXBUVwxKMBUznyffQweQABPRRW1vHZAbadFZud4pLFAqRGvv/96vafgjWFqzourzr8YonlQiPgH0YCJfawoGQ==
dependencies:
crypto-random-string "^4.0.0"
uri-js@^4.2.2:
version "4.4.1"
resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e"
integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==
dependencies:
punycode "^2.1.0"
url-parse@^1.5.10:
version "1.5.10"
resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1"
integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==
dependencies:
querystringify "^2.1.1"
requires-port "^1.0.0"
web-streams-polyfill@^3.0.3:
version "3.2.1"
resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz#71c2718c52b45fd49dbeee88634b3a60ceab42a6"
integrity sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==
write-file-atomic@^3.0.3:
version "3.0.3"
resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8"
integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==
dependencies:
imurmurhash "^0.1.4"
is-typedarray "^1.0.0"
signal-exit "^3.0.2"
typedarray-to-buffer "^3.1.5"
xdg-basedir@^5.0.1:
version "5.1.0"
resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-5.1.0.tgz#1efba19425e73be1bc6f2a6ceb52a3d2c884c0c9"
integrity sha512-GCPAHLvrIH13+c0SuacwvRYj2SxJXQ4kaVTT5xgL3kPrz56XxkF21IGhjSE1+W0aw7gpBWRGXLCPnPby6lSpmQ==
yallist@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72"
integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==

6262
yarn.lock

File diff suppressed because it is too large