feature: move schema updates into a separate oneshot service

main
Max Ignatenko 2024-09-07 11:18:31 +01:00
parent de51bd9015
commit a8bcde76e2
4 changed files with 198 additions and 12 deletions

View File

@@ -25,8 +25,6 @@ import (
	"gorm.io/gorm"
	"gorm.io/gorm/logger"

	"github.com/uabluerail/indexer/pds"
	"github.com/uabluerail/indexer/repo"
	"github.com/uabluerail/indexer/util/gormzerolog"
)

@@ -55,16 +53,6 @@ func runMain(ctx context.Context) error {
	}
	log.Debug().Msgf("DB connection established")

	for _, f := range []func(*gorm.DB) error{
		pds.AutoMigrate,
		repo.AutoMigrate,
	} {
		if err := f(db); err != nil {
			return fmt.Errorf("auto-migrating DB schema: %w", err)
		}
	}
	log.Debug().Msgf("DB schema updated")

	lister, err := NewLister(ctx, db)
	if err != nil {
		return fmt.Errorf("failed to create lister: %w", err)
View File

@@ -0,0 +1,14 @@
FROM golang:1.22.3 as builder
WORKDIR /app
COPY go.mod go.sum ./
RUN go mod download
COPY . ./
RUN go build -trimpath ./cmd/update-db-schema

FROM alpine:latest as certs
RUN apk --update add ca-certificates

FROM debian:stable-slim
COPY --from=certs /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt
COPY --from=builder /app/update-db-schema .
ENTRYPOINT ["./update-db-schema"]

View File

@@ -0,0 +1,155 @@
package main

import (
	"context"
	"flag"
	"fmt"
	"io"
	"log"
	_ "net/http/pprof"
	"os"
	"os/signal"
	"path/filepath"
	"runtime"
	"runtime/debug"
	"strings"
	"syscall"
	"time"

	_ "github.com/joho/godotenv/autoload"
	"github.com/kelseyhightower/envconfig"
	"github.com/rs/zerolog"
	"gorm.io/driver/postgres"
	"gorm.io/gorm"
	"gorm.io/gorm/logger"

	"github.com/uabluerail/indexer/pds"
	"github.com/uabluerail/indexer/repo"
	"github.com/uabluerail/indexer/util/gormzerolog"
)

type Config struct {
	LogFile   string
	LogFormat string `default:"text"`
	LogLevel  int64  `default:"1"`
	DBUrl     string `envconfig:"POSTGRES_URL"`
}

var config Config

func runMain(ctx context.Context) error {
	ctx = setupLogging(ctx)
	log := zerolog.Ctx(ctx)

	log.Debug().Msgf("Starting up...")
	db, err := gorm.Open(postgres.Open(config.DBUrl), &gorm.Config{
		Logger: gormzerolog.New(&logger.Config{
			SlowThreshold:             1 * time.Second,
			IgnoreRecordNotFoundError: true,
		}, nil),
	})
	if err != nil {
		return fmt.Errorf("connecting to the database: %w", err)
	}
	log.Debug().Msgf("DB connection established")

	for _, f := range []func(*gorm.DB) error{
		pds.AutoMigrate,
		repo.AutoMigrate,
	} {
		if err := f(db); err != nil {
			return fmt.Errorf("auto-migrating DB schema: %w", err)
		}
	}
	log.Debug().Msgf("DB schema updated")

	return nil
}

func main() {
	flag.StringVar(&config.LogFile, "log", "", "Path to the log file. If empty, will log to stderr")
	flag.StringVar(&config.LogFormat, "log-format", "text", "Logging format. 'text' or 'json'")
	flag.Int64Var(&config.LogLevel, "log-level", 1, "Log level. -1 - trace, 0 - debug, 1 - info, 5 - panic")

	if err := envconfig.Process("update-db-schema", &config); err != nil {
		log.Fatalf("envconfig.Process: %s", err)
	}

	flag.Parse()

	ctx, _ := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM)

	if err := runMain(ctx); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}

func setupLogging(ctx context.Context) context.Context {
	logFile := os.Stderr

	if config.LogFile != "" {
		f, err := os.OpenFile(config.LogFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
		if err != nil {
			log.Fatalf("Failed to open the specified log file %q: %s", config.LogFile, err)
		}
		logFile = f
	}

	var output io.Writer

	switch config.LogFormat {
	case "json":
		output = logFile
	case "text":
		prefixList := []string{}
		info, ok := debug.ReadBuildInfo()
		if ok {
			prefixList = append(prefixList, info.Path+"/")
		}

		basedir := ""
		_, sourceFile, _, ok := runtime.Caller(0)
		if ok {
			basedir = filepath.Dir(sourceFile)
		}

		if basedir != "" && strings.HasPrefix(basedir, "/") {
			prefixList = append(prefixList, basedir+"/")
			head, _ := filepath.Split(basedir)
			for head != "/" {
				prefixList = append(prefixList, head)
				head, _ = filepath.Split(strings.TrimSuffix(head, "/"))
			}
		}

		output = zerolog.ConsoleWriter{
			Out:        logFile,
			NoColor:    true,
			TimeFormat: time.RFC3339,
			PartsOrder: []string{
				zerolog.LevelFieldName,
				zerolog.TimestampFieldName,
				zerolog.CallerFieldName,
				zerolog.MessageFieldName,
			},
			FormatFieldName:  func(i interface{}) string { return fmt.Sprintf("%s:", i) },
			FormatFieldValue: func(i interface{}) string { return fmt.Sprintf("%s", i) },
			FormatCaller: func(i interface{}) string {
				s := i.(string)
				for _, p := range prefixList {
					s = strings.TrimPrefix(s, p)
				}
				return s
			},
		}
	default:
		log.Fatalf("Invalid log format specified: %q", config.LogFormat)
	}

	logger := zerolog.New(output).Level(zerolog.Level(config.LogLevel)).With().Caller().Timestamp().Logger()
	ctx = logger.WithContext(ctx)

	zerolog.DefaultContextLogger = &logger
	log.SetOutput(logger)

	return ctx
}
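
The pds.AutoMigrate and repo.AutoMigrate helpers called above are not part of this diff; presumably they are thin wrappers around GORM's auto-migration, roughly along the lines of the hypothetical sketch below (the PDS model and its fields are placeholders for illustration, not the indexer's real definitions):

package pds

import "gorm.io/gorm"

// PDS is a placeholder model for illustration only; the real struct lives
// elsewhere in the indexer codebase.
type PDS struct {
	ID   uint   `gorm:"primarykey"`
	Host string `gorm:"uniqueIndex"`
}

// AutoMigrate brings the tables backing this package's models in line with
// the Go structs. GORM's AutoMigrate is additive: it creates missing tables,
// columns, and indexes, but does not drop existing ones.
func AutoMigrate(db *gorm.DB) error {
	return db.AutoMigrate(&PDS{})
}

Because auto-migration is additive and safe to re-run, performing it once from this oneshot container, rather than from every service at startup, presumably also avoids several processes issuing the same DDL against the database at the same time.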

View File

@@ -28,6 +28,23 @@ services:
    shm_size: '16gb'
    stop_grace_period: 24h

  update-db-schema:
    build:
      context: .
      dockerfile: cmd/update-db-schema/Dockerfile
    extra_hosts:
      - "host.docker.internal:host-gateway"
    restart: on-failure
    image: uabluerail/update-db-schema
    links:
      - postgres:db
    depends_on:
      postgres:
        condition: service_healthy
    environment:
      UPDATE-DB-SCHEMA_POSTGRES_URL: "postgres://postgres:${POSTGRES_PASSWORD}@db/bluesky?sslmode=disable"
    command: [ "--log-level=0" ]

  plc:
    build:
      context: .
@@ -45,6 +62,8 @@ services:
    depends_on:
      postgres:
        condition: service_healthy
      update-db-schema:
        condition: service_completed_successfully
    environment:
      PLC_METRICS_PORT: '8080'
      PLC_POSTGRES_URL: "postgres://postgres:${POSTGRES_PASSWORD}@db/bluesky?sslmode=disable"
@@ -70,6 +89,8 @@ services:
    depends_on:
      postgres:
        condition: service_healthy
      update-db-schema:
        condition: service_completed_successfully
    environment:
      LISTER_METRICS_PORT: '8080'
      LISTER_POSTGRES_URL: "postgres://postgres:${POSTGRES_PASSWORD}@db/bluesky?sslmode=disable"
@@ -95,6 +116,8 @@ services:
    depends_on:
      postgres:
        condition: service_healthy
      update-db-schema:
        condition: service_completed_successfully
    environment:
      CONSUMER_METRICS_PORT: '8080'
      CONSUMER_POSTGRES_URL: "postgres://postgres:${POSTGRES_PASSWORD}@db/bluesky?sslmode=disable"
@@ -122,6 +145,8 @@ services:
    depends_on:
      postgres:
        condition: service_healthy
      update-db-schema:
        condition: service_completed_successfully
    dns:
      - 1.1.1.1
      - 8.8.8.8
@@ -149,4 +174,8 @@ services:
    depends_on:
      postgres:
        condition: service_healthy
      update-db-schema:
        # Not a strict dependency, but it's better to not have it running
        # unnecessary queries during a costly migration.
        condition: service_completed_successfully
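
Each long-running service now waits on the oneshot via the compose condition service_completed_successfully, which only lets a dependent container start after update-db-schema has exited with code 0; combined with restart: on-failure on the oneshot, a failed migration is retried rather than silently skipped. The migration can presumably also be run by hand when needed, e.g. docker compose run --rm update-db-schema --log-level=0 (hypothetical invocation; the flag is forwarded to the binary because the image sets it as the ENTRYPOINT).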