Bsky short link service (#4542)
* bskylink: scaffold service w/ initial config and schema
* bskylink: implement link creation and redirects
* bskylink: tidy
* bskylink: tests
* bskylink: tidy, add error handler
* bskylink: add dockerfile
* bskylink: add build
* bskylink: fix some express plumbing
* bskyweb: proxy fallthrough routes to link service redirects
* bskyweb: build w/ link proxy
* Add AASA to bskylink (#4588)

---------

Co-authored-by: Hailey <me@haileyok.com>
parent ba21fddd78
commit 55812b0394
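In short: bskylink stores a short random id for each normalized app path and serves permanent redirects, and bskyweb proxies unmatched top-level GET routes through to it. As a client sees it (a sketch based on the routes and tests in this diff; the localhost base URL and the returned id are illustrative):

    // Create (or look up) a short link for an app path.
    const res = await fetch('http://localhost:3000/link', {
      method: 'POST',
      headers: {'content-type': 'application/json'},
      body: JSON.stringify({path: '/start/did:example:alice/xxx'}),
    })
    const {url} = await res.json()
    // -> {url: 'https://<first LINK_HOSTNAME>/<5-7 char id>'}

    // A GET on that URL answers 301 with
    // Location: https://<LINK_APP_HOSTNAME>/start/did:example:alice/xxx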
@@ -4,6 +4,7 @@ on:
   push:
     branches:
       - main
+      - divy/bskylink

 env:
   REGISTRY: ${{ secrets.AWS_ECR_REGISTRY_USEAST2_PACKAGES_REGISTRY }}
@@ -0,0 +1,55 @@
name: build-and-push-link-aws
on:
  push:
    branches:
      - divy/bskylink

env:
  REGISTRY: ${{ secrets.AWS_ECR_REGISTRY_USEAST2_PACKAGES_REGISTRY }}
  USERNAME: ${{ secrets.AWS_ECR_REGISTRY_USEAST2_PACKAGES_USERNAME }}
  PASSWORD: ${{ secrets.AWS_ECR_REGISTRY_USEAST2_PACKAGES_PASSWORD }}
  IMAGE_NAME: bskylink

jobs:
  link-container-aws:
    if: github.repository == 'bluesky-social/social-app'
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
      id-token: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Setup Docker buildx
        uses: docker/setup-buildx-action@v1

      - name: Log into registry ${{ env.REGISTRY }}
        uses: docker/login-action@v2
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ env.USERNAME }}
          password: ${{ env.PASSWORD }}

      - name: Extract Docker metadata
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: |
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=sha,enable=true,priority=100,prefix=,suffix=,format=long

      - name: Build and push Docker image
        id: build-and-push
        uses: docker/build-push-action@v4
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }}
          file: ./Dockerfile.bskylink
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
@@ -0,0 +1,41 @@
FROM node:20.11-alpine3.18 AS build

# Move files into the image and install
WORKDIR /app

COPY ./bskylink/package.json ./
COPY ./bskylink/yarn.lock ./
RUN yarn install --frozen-lockfile

COPY ./bskylink ./

# build then prune dev deps
RUN yarn build
RUN yarn install --production --ignore-scripts --prefer-offline

# Uses assets from build stage to reduce build size
FROM node:20.11-alpine3.18

RUN apk add --update dumb-init

# Avoid zombie processes, handle signal forwarding
ENTRYPOINT ["dumb-init", "--"]

WORKDIR /app
COPY --from=build /app /app
RUN mkdir /app/data && chown node /app/data

VOLUME /app/data
EXPOSE 3000
ENV LINK_PORT=3000
ENV NODE_ENV=production
# potential perf issues w/ io_uring on this version of node
ENV UV_USE_IO_URING=0

# https://github.com/nodejs/docker-node/blob/master/docs/BestPractices.md#non-root-user
USER node
CMD ["node", "--heapsnapshot-signal=SIGUSR2", "--enable-source-maps", "dist/bin.js"]

LABEL org.opencontainers.image.source=https://github.com/bluesky-social/social-app
LABEL org.opencontainers.image.description="Bsky Link Service"
LABEL org.opencontainers.image.licenses=UNLICENSED
@@ -0,0 +1,26 @@
{
  "name": "bskylink",
  "version": "0.0.0",
  "type": "module",
  "main": "index.ts",
  "scripts": {
    "test": "./tests/infra/with-test-db.sh node --loader ts-node/esm --test ./tests/index.ts",
    "build": "tsc"
  },
  "dependencies": {
    "@atproto/common": "^0.4.0",
    "body-parser": "^1.20.2",
    "cors": "^2.8.5",
    "express": "^4.19.2",
    "http-terminator": "^3.2.0",
    "kysely": "^0.27.3",
    "pg": "^8.12.0",
    "pino": "^9.2.0",
    "uint8arrays": "^5.1.0"
  },
  "devDependencies": {
    "@types/cors": "^2.8.17",
    "@types/pg": "^8.11.6",
    "typescript": "^5.4.5"
  }
}
@@ -0,0 +1,24 @@
import {Database, envToCfg, httpLogger, LinkService, readEnv} from './index.js'

async function main() {
  const env = readEnv()
  const cfg = envToCfg(env)
  if (cfg.db.migrationUrl) {
    const migrateDb = Database.postgres({
      url: cfg.db.migrationUrl,
      schema: cfg.db.schema,
    })
    await migrateDb.migrateToLatestOrThrow()
    await migrateDb.close()
  }
  const link = await LinkService.create(cfg)
  await link.start()
  httpLogger.info('link service is running')
  process.on('SIGTERM', async () => {
    httpLogger.info('link service is stopping')
    await link.destroy()
    httpLogger.info('link service is stopped')
  })
}

main()
@@ -0,0 +1,82 @@
import {envInt, envList, envStr} from '@atproto/common'

export type Config = {
  service: ServiceConfig
  db: DbConfig
}

export type ServiceConfig = {
  port: number
  version?: string
  hostnames: string[]
  appHostname: string
}

export type DbConfig = {
  url: string
  migrationUrl?: string
  pool: DbPoolConfig
  schema?: string
}

export type DbPoolConfig = {
  size: number
  maxUses: number
  idleTimeoutMs: number
}

export type Environment = {
  port?: number
  version?: string
  hostnames: string[]
  appHostname?: string
  dbPostgresUrl?: string
  dbPostgresMigrationUrl?: string
  dbPostgresSchema?: string
  dbPostgresPoolSize?: number
  dbPostgresPoolMaxUses?: number
  dbPostgresPoolIdleTimeoutMs?: number
}

export const readEnv = (): Environment => {
  return {
    port: envInt('LINK_PORT'),
    version: envStr('LINK_VERSION'),
    hostnames: envList('LINK_HOSTNAMES'),
    appHostname: envStr('LINK_APP_HOSTNAME'),
    dbPostgresUrl: envStr('LINK_DB_POSTGRES_URL'),
    dbPostgresMigrationUrl: envStr('LINK_DB_POSTGRES_MIGRATION_URL'),
    dbPostgresSchema: envStr('LINK_DB_POSTGRES_SCHEMA'),
    dbPostgresPoolSize: envInt('LINK_DB_POSTGRES_POOL_SIZE'),
    dbPostgresPoolMaxUses: envInt('LINK_DB_POSTGRES_POOL_MAX_USES'),
    dbPostgresPoolIdleTimeoutMs: envInt(
      'LINK_DB_POSTGRES_POOL_IDLE_TIMEOUT_MS',
    ),
  }
}

export const envToCfg = (env: Environment): Config => {
  const serviceCfg: ServiceConfig = {
    port: env.port ?? 3000,
    version: env.version,
    hostnames: env.hostnames,
    appHostname: env.appHostname || 'bsky.app',
  }
  if (!env.dbPostgresUrl) {
    throw new Error('Must configure postgres url (LINK_DB_POSTGRES_URL)')
  }
  const dbCfg: DbConfig = {
    url: env.dbPostgresUrl,
    migrationUrl: env.dbPostgresMigrationUrl,
    schema: env.dbPostgresSchema,
    pool: {
      idleTimeoutMs: env.dbPostgresPoolIdleTimeoutMs ?? 10000,
      maxUses: env.dbPostgresPoolMaxUses ?? Infinity,
      size: env.dbPostgresPoolSize ?? 10,
    },
  }
  return {
    service: serviceCfg,
    db: dbCfg,
  }
}
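To make the defaults concrete, here is a hypothetical minimal invocation of `envToCfg`: only the postgres URL is required, and every other field falls back to the defaults above (the connection string is a placeholder).

    import {envToCfg} from './config.js'

    const cfg = envToCfg({
      hostnames: [], // LINK_HOSTNAMES unset
      dbPostgresUrl: 'postgresql://pg:password@localhost:5432/postgres',
    })
    // cfg.service -> {port: 3000, hostnames: [], appHostname: 'bsky.app', version: undefined}
    // cfg.db.pool -> {size: 10, maxUses: Infinity, idleTimeoutMs: 10000}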
@@ -0,0 +1,33 @@
import {Config} from './config.js'
import Database from './db/index.js'

export type AppContextOptions = {
  cfg: Config
  db: Database
}

export class AppContext {
  cfg: Config
  db: Database
  abortController = new AbortController()

  constructor(private opts: AppContextOptions) {
    this.cfg = this.opts.cfg
    this.db = this.opts.db
  }

  static async fromConfig(cfg: Config, overrides?: Partial<AppContextOptions>) {
    const db = Database.postgres({
      url: cfg.db.url,
      schema: cfg.db.schema,
      poolSize: cfg.db.pool.size,
      poolMaxUses: cfg.db.pool.maxUses,
      poolIdleTimeoutMs: cfg.db.pool.idleTimeoutMs,
    })
    return new AppContext({
      cfg,
      db,
      ...overrides,
    })
  }
}
@@ -0,0 +1,174 @@
import assert from 'assert'
import {
  Kysely,
  KyselyPlugin,
  Migrator,
  PluginTransformQueryArgs,
  PluginTransformResultArgs,
  PostgresDialect,
  QueryResult,
  RootOperationNode,
  UnknownRow,
} from 'kysely'
import {default as Pg} from 'pg'

import {dbLogger as log} from '../logger.js'
import {default as migrations} from './migrations/index.js'
import {DbMigrationProvider} from './migrations/provider.js'
import {DbSchema} from './schema.js'

export class Database {
  migrator: Migrator
  destroyed = false

  constructor(public db: Kysely<DbSchema>, public cfg: PgConfig) {
    this.migrator = new Migrator({
      db,
      migrationTableSchema: cfg.schema,
      provider: new DbMigrationProvider(migrations),
    })
  }

  static postgres(opts: PgOptions): Database {
    const {schema, url, txLockNonce} = opts
    const pool =
      opts.pool ??
      new Pg.Pool({
        connectionString: url,
        max: opts.poolSize,
        maxUses: opts.poolMaxUses,
        idleTimeoutMillis: opts.poolIdleTimeoutMs,
      })

    // Select count(*) and other pg bigints as js integer
    Pg.types.setTypeParser(Pg.types.builtins.INT8, n => parseInt(n, 10))

    // Setup schema usage, primarily for test parallelism (each test suite runs in its own pg schema)
    if (schema && !/^[a-z_]+$/i.test(schema)) {
      throw new Error(`Postgres schema must only contain [A-Za-z_]: ${schema}`)
    }

    pool.on('error', onPoolError)

    const db = new Kysely<DbSchema>({
      dialect: new PostgresDialect({pool}),
    })

    return new Database(db, {
      pool,
      schema,
      url,
      txLockNonce,
    })
  }

  async transaction<T>(fn: (db: Database) => Promise<T>): Promise<T> {
    const leakyTxPlugin = new LeakyTxPlugin()
    return this.db
      .withPlugin(leakyTxPlugin)
      .transaction()
      .execute(txn => {
        const dbTxn = new Database(txn, this.cfg)
        return fn(dbTxn)
          .catch(async err => {
            leakyTxPlugin.endTx()
            // ensure that all in-flight queries are flushed & the connection is open
            await dbTxn.db.getExecutor().provideConnection(async () => {})
            throw err
          })
          .finally(() => leakyTxPlugin.endTx())
      })
  }

  get schema(): string | undefined {
    return this.cfg.schema
  }

  get isTransaction() {
    return this.db.isTransaction
  }

  assertTransaction() {
    assert(this.isTransaction, 'Transaction required')
  }

  assertNotTransaction() {
    assert(!this.isTransaction, 'Cannot be in a transaction')
  }

  async close(): Promise<void> {
    if (this.destroyed) return
    await this.db.destroy()
    this.destroyed = true
  }

  async migrateToOrThrow(migration: string) {
    if (this.schema) {
      await this.db.schema.createSchema(this.schema).ifNotExists().execute()
    }
    const {error, results} = await this.migrator.migrateTo(migration)
    if (error) {
      throw error
    }
    if (!results) {
      throw new Error('An unknown failure occurred while migrating')
    }
    return results
  }

  async migrateToLatestOrThrow() {
    if (this.schema) {
      await this.db.schema.createSchema(this.schema).ifNotExists().execute()
    }
    const {error, results} = await this.migrator.migrateToLatest()
    if (error) {
      throw error
    }
    if (!results) {
      throw new Error('An unknown failure occurred while migrating')
    }
    return results
  }
}

export default Database

export type PgConfig = {
  pool: Pg.Pool
  url: string
  schema?: string
  txLockNonce?: string
}

type PgOptions = {
  url: string
  pool?: Pg.Pool
  schema?: string
  poolSize?: number
  poolMaxUses?: number
  poolIdleTimeoutMs?: number
  txLockNonce?: string
}

class LeakyTxPlugin implements KyselyPlugin {
  private txOver = false

  endTx() {
    this.txOver = true
  }

  transformQuery(args: PluginTransformQueryArgs): RootOperationNode {
    if (this.txOver) {
      throw new Error('tx already failed')
    }
    return args.node
  }

  async transformResult(
    args: PluginTransformResultArgs,
  ): Promise<QueryResult<UnknownRow>> {
    return args.result
  }
}

const onPoolError = (err: Error) => log.error({err}, 'db pool error')
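The `LeakyTxPlugin` above guards against a transaction handle "leaking" out of its callback: once the transaction settles, any further query through that handle throws instead of silently running outside the transaction. A hypothetical sketch of that behavior:

    import Database from './db/index.js'
    import {LinkType} from './db/schema.js'

    const db = Database.postgres({url: 'postgresql://pg:password@localhost:5432/postgres'})
    let leaked: Database | undefined
    await db.transaction(async dbTxn => {
      leaked = dbTxn // escaping the callback is the bug being guarded against
      await dbTxn.db
        .insertInto('link')
        .values({id: 'abc12', type: LinkType.StarterPack, path: '/start/did:example:alice/xxx'})
        .execute()
    })
    // The tx is over, so the leaked handle now throws 'tx already failed'
    // rather than issuing the query on some unrelated connection.
    await leaked!.db.selectFrom('link').selectAll().execute() // throws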
@@ -0,0 +1,15 @@
import {Kysely} from 'kysely'

export async function up(db: Kysely<unknown>): Promise<void> {
  await db.schema
    .createTable('link')
    .addColumn('id', 'varchar', col => col.primaryKey())
    .addColumn('type', 'smallint', col => col.notNull()) // integer enum: 1->starterpack
    .addColumn('path', 'varchar', col => col.notNull())
    .addUniqueConstraint('link_path_unique', ['path'])
    .execute()
}

export async function down(db: Kysely<unknown>): Promise<void> {
  await db.schema.dropTable('link').execute()
}
@@ -0,0 +1,5 @@
import * as init from './001-init.js'

export default {
  '001': init,
}
@@ -0,0 +1,8 @@
import {Migration, MigrationProvider} from 'kysely'

export class DbMigrationProvider implements MigrationProvider {
  constructor(private migrations: Record<string, Migration>) {}
  async getMigrations(): Promise<Record<string, Migration>> {
    return this.migrations
  }
}
@@ -0,0 +1,17 @@
import {Selectable} from 'kysely'

export type DbSchema = {
  link: Link
}

export interface Link {
  id: string
  type: LinkType
  path: string
}

export enum LinkType {
  StarterPack = 1,
}

export type LinkEntry = Selectable<Link>
@@ -0,0 +1,45 @@
import events from 'node:events'
import http from 'node:http'

import cors from 'cors'
import express from 'express'
import {createHttpTerminator, HttpTerminator} from 'http-terminator'

import {Config} from './config.js'
import {AppContext} from './context.js'
import {default as routes, errorHandler} from './routes/index.js'

export * from './config.js'
export * from './db/index.js'
export * from './logger.js'

export class LinkService {
  public server?: http.Server
  private terminator?: HttpTerminator

  constructor(public app: express.Application, public ctx: AppContext) {}

  static async create(cfg: Config): Promise<LinkService> {
    let app = express()
    app.use(cors())

    const ctx = await AppContext.fromConfig(cfg)
    app = routes(ctx, app)
    app.use(errorHandler)

    return new LinkService(app, ctx)
  }

  async start() {
    this.server = this.app.listen(this.ctx.cfg.service.port)
    this.server.keepAliveTimeout = 90000
    this.terminator = createHttpTerminator({server: this.server})
    await events.once(this.server, 'listening')
  }

  async destroy() {
    this.ctx.abortController.abort()
    await this.terminator?.terminate()
    await this.ctx.db.close()
  }
}
@@ -0,0 +1,4 @@
import {subsystemLogger} from '@atproto/common'

export const httpLogger = subsystemLogger('bskylink')
export const dbLogger = subsystemLogger('bskylink:db')
@@ -0,0 +1,111 @@
import assert from 'node:assert'

import bodyParser from 'body-parser'
import {Express, Request} from 'express'

import {AppContext} from '../context.js'
import {LinkType} from '../db/schema.js'
import {randomId} from '../util.js'
import {handler} from './util.js'

export default function (ctx: AppContext, app: Express) {
  return app.post(
    '/link',
    bodyParser.json(),
    handler(async (req, res) => {
      let path: string
      if (typeof req.body?.path === 'string') {
        path = req.body.path
      } else {
        return res.status(400).json({
          error: 'InvalidPath',
          message: '"path" parameter is missing or not a string',
        })
      }
      if (!path.startsWith('/')) {
        return res.status(400).json({
          error: 'InvalidPath',
          message:
            '"path" parameter must be formatted as a path, starting with a "/"',
        })
      }
      const parts = getPathParts(path)
      if (parts.length === 3 && parts[0] === 'start') {
        // link pattern: /start/{did}/{rkey}
        if (!parts[1].startsWith('did:')) {
          // enforce strong links
          return res.status(400).json({
            error: 'InvalidPath',
            message:
              '"path" parameter for starter pack must contain the actor\'s DID',
          })
        }
        const id = await ensureLink(ctx, LinkType.StarterPack, parts)
        return res.json({url: getUrl(ctx, req, id)})
      }
      return res.status(400).json({
        error: 'InvalidPath',
        message: '"path" parameter does not have a known format',
      })
    }),
  )
}

const ensureLink = async (ctx: AppContext, type: LinkType, parts: string[]) => {
  const normalizedPath = normalizedPathFromParts(parts)
  const created = await ctx.db.db
    .insertInto('link')
    .values({
      id: randomId(),
      type,
      path: normalizedPath,
    })
    .onConflict(oc => oc.column('path').doNothing())
    .returningAll()
    .executeTakeFirst()
  if (created) {
    return created.id
  }
  const found = await ctx.db.db
    .selectFrom('link')
    .selectAll()
    .where('path', '=', normalizedPath)
    .executeTakeFirstOrThrow()
  return found.id
}

const getUrl = (ctx: AppContext, req: Request, id: string) => {
  if (!ctx.cfg.service.hostnames.length) {
    assert(req.headers.host, 'request must be made with host header')
    const baseUrl =
      req.protocol === 'http' && req.headers.host.startsWith('localhost:')
        ? `http://${req.headers.host}`
        : `https://${req.headers.host}`
    return `${baseUrl}/${id}`
  }
  const baseUrl = ctx.cfg.service.hostnames.includes(req.headers.host)
    ? `https://${req.headers.host}`
    : `https://${ctx.cfg.service.hostnames[0]}`
  return `${baseUrl}/${id}`
}

const normalizedPathFromParts = (parts: string[]): string => {
  return (
    '/' +
    parts
      .map(encodeURIComponent)
      .map(part => part.replaceAll('%3A', ':')) // preserve colons
      .join('/')
  )
}

const getPathParts = (path: string): string[] => {
  if (path === '/') return []
  if (path.endsWith('/')) {
    path = path.slice(0, -1) // ignore trailing slash
  }
  return path
    .slice(1) // remove leading slash
    .split('/')
    .map(decodeURIComponent)
}
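The two helpers at the bottom make link creation idempotent across equivalent spellings of a path: `getPathParts` decodes percent-encoding and drops a trailing slash, and `normalizedPathFromParts` re-encodes while keeping colons readable. Using values from the tests further down:

    getPathParts('/start/did%3Aexample%3Abob/yyy/')
    // -> ['start', 'did:example:bob', 'yyy']
    normalizedPathFromParts(['start', 'did:example:bob', 'yyy'])
    // -> '/start/did:example:bob/yyy'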
@@ -0,0 +1,20 @@
import {Express} from 'express'
import {sql} from 'kysely'

import {AppContext} from '../context.js'
import {handler} from './util.js'

export default function (ctx: AppContext, app: Express) {
  return app.get(
    '/_health',
    handler(async (_req, res) => {
      const {version} = ctx.cfg.service
      try {
        await sql`select 1`.execute(ctx.db.db)
        return res.send({version})
      } catch (err) {
        return res.status(503).send({version, error: 'Service Unavailable'})
      }
    }),
  )
}
@@ -0,0 +1,17 @@
import {Express} from 'express'

import {AppContext} from '../context.js'
import {default as create} from './create.js'
import {default as health} from './health.js'
import {default as redirect} from './redirect.js'
import {default as siteAssociation} from './siteAssociation.js'

export * from './util.js'

export default function (ctx: AppContext, app: Express) {
  app = health(ctx, app) // GET /_health
  app = siteAssociation(ctx, app) // GET /.well-known/apple-app-site-association
  app = create(ctx, app) // POST /link
  app = redirect(ctx, app) // GET /:linkId (should go last due to permissive matching)
  return app
}
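Registration order matters because Express matches in order and `/:linkId` captures any single-segment GET. A hypothetical illustration of the shadowing the comment above warns about:

    import express from 'express'

    const app = express()
    app.get('/:linkId', (req, res) => res.send(`link ${req.params.linkId}`))
    app.get('/_health', (_req, res) => res.send('ok'))
    // GET /_health now responds 'link _health': the permissive pattern,
    // registered first, wins. routes/index.ts avoids this by registering
    // the redirect route last.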
@@ -0,0 +1,40 @@
import assert from 'node:assert'

import {DAY, SECOND} from '@atproto/common'
import {Express} from 'express'

import {AppContext} from '../context.js'
import {handler} from './util.js'

export default function (ctx: AppContext, app: Express) {
  return app.get(
    '/:linkId',
    handler(async (req, res) => {
      const linkId = req.params.linkId
      assert(
        typeof linkId === 'string',
        'express guarantees id parameter is a string',
      )
      const found = await ctx.db.db
        .selectFrom('link')
        .selectAll()
        .where('id', '=', linkId)
        .executeTakeFirst()
      if (!found) {
        // potentially broken or mistyped link: send the user to the app
        res.setHeader('Location', `https://${ctx.cfg.service.appHostname}`)
        res.setHeader('Cache-Control', 'no-store')
        return res.status(302).end()
      }
      // build url from original url in order to preserve query params
      const url = new URL(
        req.originalUrl,
        `https://${ctx.cfg.service.appHostname}`,
      )
      url.pathname = found.path
      res.setHeader('Location', url.href)
      res.setHeader('Cache-Control', `max-age=${(7 * DAY) / SECOND}`)
      return res.status(301).end()
    }),
  )
}
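Putting the two branches together: a hit is a week-long cacheable 301 to the app path with the query string preserved, a miss is an uncached 302 to the app's front page. A hypothetical exchange, using the hostnames from the tests below:

    const res = await fetch('https://test.bsky.link/abc12?a=b', {redirect: 'manual'})
    res.status                       // 301 (or 302 to the app root if the id is unknown)
    res.headers.get('location')      // 'https://test.bsky.app/start/did:example:carol/zzz?a=b'
    res.headers.get('cache-control') // 'max-age=604800', i.e. (7 * DAY) / SECOND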
@@ -0,0 +1,13 @@
import {Express} from 'express'

import {AppContext} from '../context.js'

export default function (ctx: AppContext, app: Express) {
  return app.get('/.well-known/apple-app-site-association', (req, res) => {
    res.json({
      appclips: {
        apps: ['B3LX46C5HS.xyz.blueskyweb.app.AppClip'],
      },
    })
  })
}
@@ -0,0 +1,23 @@
import {ErrorRequestHandler, Request, RequestHandler, Response} from 'express'

import {httpLogger} from '../logger.js'

export type Handler = (req: Request, res: Response) => Awaited<void>

export const handler = (runHandler: Handler): RequestHandler => {
  return async (req, res, next) => {
    try {
      await runHandler(req, res)
    } catch (err) {
      next(err)
    }
  }
}

export const errorHandler: ErrorRequestHandler = (err, _req, res, next) => {
  httpLogger.error({err}, 'request error')
  if (res.headersSent) {
    return next(err)
  }
  return res.status(500).end('server error')
}
@@ -0,0 +1,8 @@
import {randomBytes} from 'node:crypto'

import {toString} from 'uint8arrays'

// 40bit random id of 5-7 characters
export const randomId = () => {
  return toString(randomBytes(5), 'base58btc')
}
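For intuition on the "5-7 characters" comment: 5 bytes is a 40-bit value, and 58^6 < 2^40 <= 58^7, so most ids take 7 base58 digits, while base58btc encodes each leading zero byte as a literal '1', shrinking the string to as few as 5 characters. A small check using the same library:

    import {fromString, toString} from 'uint8arrays'

    // Largest 5-byte value needs 7 digits.
    toString(fromString('ffffffffff', 'base16'), 'base58btc').length // 7
    // All-zero bytes collapse to five '1's.
    toString(new Uint8Array(5), 'base58btc') // '11111'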
@@ -0,0 +1,84 @@
import assert from 'node:assert'
import {AddressInfo} from 'node:net'
import {after, before, describe, it} from 'node:test'

import {Database, envToCfg, LinkService, readEnv} from '../src/index.js'

describe('link service', async () => {
  let linkService: LinkService
  let baseUrl: string
  before(async () => {
    const env = readEnv()
    const cfg = envToCfg({
      ...env,
      hostnames: ['test.bsky.link'],
      appHostname: 'test.bsky.app',
      dbPostgresSchema: 'link_test',
      dbPostgresUrl: process.env.DB_POSTGRES_URL,
    })
    const migrateDb = Database.postgres({
      url: cfg.db.url,
      schema: cfg.db.schema,
    })
    await migrateDb.migrateToLatestOrThrow()
    await migrateDb.close()
    linkService = await LinkService.create(cfg)
    await linkService.start()
    const {port} = linkService.server?.address() as AddressInfo
    baseUrl = `http://localhost:${port}`
  })

  after(async () => {
    await linkService?.destroy()
  })

  it('creates a starter pack link', async () => {
    const link = await getLink('/start/did:example:alice/xxx')
    const url = new URL(link)
    assert.strictEqual(url.origin, 'https://test.bsky.link')
    assert.match(url.pathname, /^\/[a-z0-9]+$/i)
  })

  it('normalizes input paths and provides same link each time.', async () => {
    const link1 = await getLink('/start/did%3Aexample%3Abob/yyy')
    const link2 = await getLink('/start/did:example:bob/yyy/')
    assert.strictEqual(link1, link2)
  })

  it('serves permanent redirect, preserving query params.', async () => {
    const link = await getLink('/start/did:example:carol/zzz/')
    const [status, location] = await getRedirect(`${link}?a=b`)
    assert.strictEqual(status, 301)
    const locationUrl = new URL(location)
    assert.strictEqual(
      locationUrl.pathname + locationUrl.search,
      '/start/did:example:carol/zzz?a=b',
    )
  })

  async function getRedirect(link: string): Promise<[number, string]> {
    const url = new URL(link)
    const base = new URL(baseUrl)
    url.protocol = base.protocol
    url.host = base.host
    const res = await fetch(url, {redirect: 'manual'})
    await res.arrayBuffer() // drain
    assert(
      res.status === 301 || res.status === 303,
      'response was not a redirect',
    )
    return [res.status, res.headers.get('location') ?? '']
  }

  async function getLink(path: string): Promise<string> {
    const res = await fetch(new URL('/link', baseUrl), {
      method: 'post',
      headers: {'content-type': 'application/json'},
      body: JSON.stringify({path}),
    })
    assert.strictEqual(res.status, 200)
    const payload = await res.json()
    assert(typeof payload.url === 'string')
    return payload.url
  }
})
@@ -0,0 +1,157 @@
#!/usr/bin/env sh

# Exit if any command fails
set -e

get_container_id() {
  local compose_file=$1
  local service=$2
  if [ -z "${compose_file}" ] || [ -z "${service}" ]; then
    echo "usage: get_container_id <compose_file> <service>"
    exit 1
  fi

  # first line of jq normalizes for docker compose breaking change, see docker/compose#10958
  docker compose --file $compose_file ps --format json --status running \
    | jq -sc '.[] | if type=="array" then .[] else . end' | jq -s \
    | jq -r '.[]? | select(.Service == "'${service}'") | .ID'
}

# Exports all environment variables
export_env() {
  export_pg_env
}

# Exports postgres environment variables
export_pg_env() {
  # Based on creds in compose.yaml
  export PGPORT=5433
  export PGHOST=localhost
  export PGUSER=pg
  export PGPASSWORD=password
  export PGDATABASE=postgres
  export DB_POSTGRES_URL="postgresql://pg:password@127.0.0.1:5433/postgres"
}

pg_clear() {
  local pg_uri=$1

  for schema_name in `psql "${pg_uri}" -c "SELECT schema_name FROM information_schema.schemata WHERE schema_name NOT LIKE 'pg_%' AND schema_name NOT LIKE 'information_schema';" -t`; do
    psql "${pg_uri}" -c "DROP SCHEMA \"${schema_name}\" CASCADE;"
  done
}

pg_init() {
  local pg_uri=$1

  psql "${pg_uri}" -c "CREATE SCHEMA IF NOT EXISTS \"public\";"
}

main_native() {
  local services=${SERVICES}
  local postgres_url_env_var=`[[ $services == *"db_test"* ]] && echo "DB_TEST_POSTGRES_URL" || echo "DB_POSTGRES_URL"`

  postgres_url="${!postgres_url_env_var}"

  if [ -n "${postgres_url}" ]; then
    echo "Using ${postgres_url_env_var} (${postgres_url}) to connect to postgres."
    pg_init "${postgres_url}"
  else
    echo "Postgres connection string missing. Did you set ${postgres_url_env_var}?"
    exit 1
  fi

  cleanup() {
    local services=$@

    if [ -n "${postgres_url}" ] && [[ $services == *"db_test"* ]]; then
      pg_clear "${postgres_url}" &> /dev/null
    fi
  }

  # trap SIGINT and perform cleanup
  trap "on_sigint ${services}" INT
  on_sigint() {
    cleanup $@
    exit $?
  }

  # Run the arguments as a command
  DB_POSTGRES_URL="${postgres_url}" \
    "$@"
  code=$?

  cleanup ${services}

  exit ${code}
}

main_docker() {
  # Expect a SERVICES env var to be set with the docker service names
  local services=${SERVICES}

  dir=$(dirname $0)
  compose_file="${dir}/docker-compose.yaml"

  # whether this particular script started the container(s)
  started_container=false

  # performs cleanup as necessary, i.e. taking down containers
  # if this script started them
  cleanup() {
    local services=$@
    echo # newline
    if $started_container; then
      docker compose --file $compose_file rm --force --stop --volumes ${services}
    fi
  }

  # trap SIGINT and perform cleanup
  trap "on_sigint ${services}" INT
  on_sigint() {
    cleanup $@
    exit $?
  }

  # check if all services are running already
  not_running=false
  for service in $services; do
    container_id=$(get_container_id $compose_file $service)
    if [ -z $container_id ]; then
      not_running=true
      break
    fi
  done

  # if any are missing, recreate all services
  if $not_running; then
    started_container=true
    docker compose --file $compose_file up --wait --force-recreate ${services}
  else
    echo "all services ${services} are already running"
  fi

  # do not exit when following commands fail, so we can intercept exit code & tear down docker
  set +e

  # setup environment variables and run args
  export_env
  "$@"
  # save return code for later
  code=$?

  # performs cleanup as necessary
  cleanup ${services}
  exit ${code}
}

# Main entry point
main() {
  if ! docker ps >/dev/null 2>&1; then
    echo "Docker unavailable. Running on host."
    main_native $@
  else
    main_docker $@
  fi
}
@@ -0,0 +1,27 @@
version: '3.8'
services:
  # An ephemerally-stored postgres database for single-use test runs
  db_test: &db_test
    image: postgres:14.11-alpine
    environment:
      - POSTGRES_USER=pg
      - POSTGRES_PASSWORD=password
    ports:
      - '5433:5432'
    # Healthcheck ensures db is queryable when `docker-compose up --wait` completes
    healthcheck:
      test: 'pg_isready -U pg'
      interval: 500ms
      timeout: 10s
      retries: 20
  # A persistently-stored postgres database
  db:
    <<: *db_test
    ports:
      - '5432:5432'
    healthcheck:
      disable: true
    volumes:
      - link_db:/var/lib/postgresql/data
volumes:
  link_db:
@@ -0,0 +1,9 @@
#!/usr/bin/env sh

# Example usage:
# ./with-test-db.sh psql postgresql://pg:password@localhost:5433/postgres -c 'select 1;'

dir=$(dirname $0)
. ${dir}/_common.sh

SERVICES="db_test" main "$@"
@@ -0,0 +1,10 @@
{
  "compilerOptions": {
    "module": "NodeNext",
    "esModuleInterop": true,
    "moduleResolution": "NodeNext",
    "outDir": "dist",
    "lib": ["ES2021.String"]
  },
  "include": ["./src/index.ts", "./src/bin.ts"]
}
File diff suppressed because it is too large
@@ -35,7 +35,7 @@ func run(args []string) {
 		Flags: []cli.Flag{
 			&cli.StringFlag{
 				Name:  "appview-host",
-				Usage: "method, hostname, and port of PDS instance",
+				Usage: "scheme, hostname, and port of PDS instance",
 				Value: "http://localhost:2584",
 				// retain old PDS env var for easy transition
 				EnvVars: []string{"ATP_APPVIEW_HOST", "ATP_PDS_HOST"},
@@ -47,6 +47,13 @@ func run(args []string) {
 				Value:   ":8100",
 				EnvVars: []string{"HTTP_ADDRESS"},
 			},
+			&cli.StringFlag{
+				Name:     "link-host",
+				Usage:    "scheme, hostname, and port of link service",
+				Required: false,
+				Value:    "",
+				EnvVars:  []string{"LINK_HOST"},
+			},
 			&cli.BoolFlag{
 				Name:  "debug",
 				Usage: "Enable debug mode",
@@ -6,6 +6,7 @@ import (
 	"fmt"
 	"io/fs"
 	"net/http"
+	"net/url"
 	"os"
 	"os/signal"
 	"strings"
@@ -36,6 +37,7 @@ func serve(cctx *cli.Context) error {
 	debug := cctx.Bool("debug")
 	httpAddress := cctx.String("http-address")
 	appviewHost := cctx.String("appview-host")
+	linkHost := cctx.String("link-host")

 	// Echo
 	e := echo.New()
@@ -221,6 +223,14 @@ func serve(cctx *cli.Context) error {
 	e.GET("/profile/:handleOrDID/post/:rkey/liked-by", server.WebGeneric)
 	e.GET("/profile/:handleOrDID/post/:rkey/reposted-by", server.WebGeneric)

+	if linkHost != "" {
+		linkUrl, err := url.Parse(linkHost)
+		if err != nil {
+			return err
+		}
+		e.Group("/:linkId", server.LinkProxyMiddleware(linkUrl))
+	}
+
 	// Start the server.
 	log.Infof("starting server address=%s", httpAddress)
 	go func() {
@@ -292,6 +302,30 @@ func (srv *Server) Download(c echo.Context) error {
 	return c.Redirect(http.StatusFound, "/")
 }

+// Handler for proxying top-level paths to link service, which ends up serving a redirect
+func (srv *Server) LinkProxyMiddleware(url *url.URL) echo.MiddlewareFunc {
+	return middleware.ProxyWithConfig(
+		middleware.ProxyConfig{
+			Balancer: middleware.NewRoundRobinBalancer(
+				[]*middleware.ProxyTarget{{URL: url}},
+			),
+			Skipper: func(c echo.Context) bool {
+				req := c.Request()
+				if req.Method == "GET" &&
+					strings.LastIndex(strings.TrimRight(req.URL.Path, "/"), "/") == 0 && // top-level path
+					!strings.HasPrefix(req.URL.Path, "/_") { // e.g. /_health endpoint
+					return false
+				}
+				return true
+			},
+			RetryCount: 2,
+			ErrorHandler: func(c echo.Context, err error) error {
+				return c.Redirect(302, "/")
+			},
+		},
+	)
+}
+
 // handler for endpoint that have no specific server-side handling
 func (srv *Server) WebGeneric(c echo.Context) error {
 	data := pongo2.Context{}
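Note the Skipper's inverted logic: returning false means "proxy this request". Only GET requests for a single top-level path segment that is not an internal `/_` endpoint are forwarded to the link service; everything else stays with bskyweb. A TypeScript paraphrase of the predicate (an illustration, not the Go code itself):

    const shouldProxy = (method: string, path: string): boolean =>
      method === 'GET' &&
      path.replace(/\/+$/, '').lastIndexOf('/') === 0 && // top-level, e.g. '/abc12'
      !path.startsWith('/_') // skip internals like /_health

    shouldProxy('GET', '/abc12')         // true:  proxied, serves the redirect
    shouldProxy('GET', '/profile/alice') // false: handled by bskyweb routes
    shouldProxy('GET', '/_health')       // false: bskyweb's own health endpoint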