Merge branch 'main' of github.com:uabluerail/indexer

mathan 2024-02-22 18:56:19 -08:00
commit 78a17bf238
11 changed files with 154 additions and 46 deletions


@@ -28,6 +28,7 @@ import (
"github.com/uabluerail/indexer/models"
"github.com/uabluerail/indexer/pds"
"github.com/uabluerail/indexer/repo"
"github.com/uabluerail/indexer/util/resolver"
)
type BadRecord struct {
@@ -245,10 +246,32 @@ func (c *Consumer) processMessage(ctx context.Context, typ string, r io.Reader,
if err != nil {
return fmt.Errorf("repo.EnsureExists(%q): %w", payload.Repo, err)
}
if repoInfo.PDS != models.ID(c.remote.ID) {
log.Error().Str("did", payload.Repo).Str("rev", payload.Rev).
Msgf("Commit from an incorrect PDS, skipping")
return nil
if repoInfo.PDS != c.remote.ID {
u, err := resolver.GetPDSEndpoint(ctx, payload.Repo)
if err == nil {
cur, err := pds.EnsureExists(ctx, c.db, u.String())
if err == nil {
if repoInfo.PDS != cur.ID {
// Repo was migrated, let's update our record.
err := c.db.Model(repoInfo).Where(&repo.Repo{ID: repoInfo.ID}).Updates(&repo.Repo{PDS: cur.ID}).Error
if err != nil {
log.Error().Err(err).Msgf("Repo %q was migrated to %q, but updating the repo has failed: %s", payload.Repo, cur.Host, err)
}
}
repoInfo.PDS = cur.ID
} else {
log.Error().Err(err).Msgf("Failed to get PDS record for %q: %s", u, err)
}
} else {
log.Error().Err(err).Msgf("Failed to get PDS endpoint for repo %q: %s", payload.Repo, err)
}
if repoInfo.PDS != c.remote.ID {
// We checked a recent version of the DID doc and this is still not the correct PDS.
log.Error().Str("did", payload.Repo).Str("rev", payload.Rev).
Msgf("Commit from an incorrect PDS, skipping")
return nil
}
}
if created {
reposDiscovered.WithLabelValues(c.remote.Host).Inc()
@@ -443,8 +466,25 @@ func (c *Consumer) processMessage(ctx context.Context, typ string, r io.Reader,
default:
log.Error().Msgf("Unknown #info message %q: %+v", payload.Name, payload)
}
case "#identity":
payload := &comatproto.SyncSubscribeRepos_Identity{}
if err := payload.UnmarshalCBOR(r); err != nil {
return fmt.Errorf("failed to unmarshal commit: %w", err)
}
exportEventTimestamp(ctx, c.remote.Host, payload.Time)
log.Trace().Str("did", payload.Did).Str("type", typ).Int64("seq", payload.Seq).
Msgf("#identity message: %s seq=%d time=%q", payload.Did, payload.Seq, payload.Time)
resolver.Resolver.FlushCacheFor(payload.Did)
// TODO: fetch DID doc and update PDS field?
default:
log.Warn().Msgf("Unknown message type received: %s", typ)
b, err := io.ReadAll(r)
if err != nil {
log.Error().Err(err).Msgf("Failed to read message payload: %s", err)
}
log.Warn().Msgf("Unknown message type received: %s payload=%q", typ, string(b))
}
return nil
}
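For the TODO on the #identity branch above ("fetch DID doc and update PDS field?"), one possible shape is to reuse the re-resolution path the #commit handler now follows. A minimal sketch, not part of this commit; the helper name and its wiring are assumptions, and it leans on the consumer package's existing imports (fmt, pds, repo, util/resolver):

// Hypothetical helper: on an #identity event, re-resolve the DID's PDS
// endpoint and re-home the repo record, mirroring the #commit handling above.
// Assumes repo.Repo has a DID column, as the worker pool change below suggests.
func (c *Consumer) updateRepoPDS(ctx context.Context, did string) error {
    u, err := resolver.GetPDSEndpoint(ctx, did)
    if err != nil {
        return fmt.Errorf("resolving PDS endpoint for %q: %w", did, err)
    }
    remote, err := pds.EnsureExists(ctx, c.db, u.String())
    if err != nil {
        return fmt.Errorf("getting PDS record for %q: %w", u, err)
    }
    return c.db.Model(&repo.Repo{}).
        Where(&repo.Repo{DID: did}).
        Updates(&repo.Repo{PDS: remote.ID}).Error
}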


@@ -79,6 +79,9 @@ func runMain(ctx context.Context) error {
}
// TODO: check for changes and start/stop consumers as needed
for _, remote := range remotes {
if remote.Disabled {
continue
}
c, err := NewConsumer(ctx, &remote, db)
if err != nil {
return fmt.Errorf("failed to create a consumer for %q: %w", remote.Host, err)


@@ -57,13 +57,22 @@ func (l *Lister) run(ctx context.Context) {
remote := pds.PDS{}
if err := db.Model(&remote).
Where("last_list is null or last_list < ?", time.Now().Add(-l.listRefreshInterval)).
Where("disabled=false and (last_list is null or last_list < ?)", time.Now().Add(-l.listRefreshInterval)).
Take(&remote).Error; err != nil {
if !errors.Is(err, gorm.ErrRecordNotFound) {
log.Error().Err(err).Msgf("Failed to query DB for a PDS to list repos from: %s", err)
}
break
}
if !pds.IsWhitelisted(remote.Host) {
log.Info().Msgf("PDS %q is not whitelisted, disabling it", remote.Host)
if err := db.Model(&remote).Where(&pds.PDS{ID: remote.ID}).Updates(&pds.PDS{Disabled: true}).Error; err != nil {
log.Error().Err(err).Msgf("Failed to disable PDS %q: %s", remote.Host, err)
}
break
}
client := xrpcauth.NewAnonymousClient(ctx)
client.Host = remote.Host


@@ -0,0 +1,26 @@
package main
import (
"testing"
)
func TestPostgresFix(t *testing.T) {
type testCase struct{ input, want string }
cases := []testCase{
{`"a"`, `"a"`},
{`"\u0000"`, `"<0x00>"`},
{`"description":"\u0000"`, `"description":"<0x00>"`},
{`"\\u0000"`, `"\\u0000"`},
{`"\\\u0000"`, `"\\<0x00>"`},
{`\n\n\u0000\u0000 \u0000\u0000\u0000\u0000 \u0000\u0000\u0000\u0000\u0000`,
`\n\n<0x00><0x00> <0x00><0x00><0x00><0x00> <0x00><0x00><0x00><0x00><0x00>`},
}
for _, tc := range cases {
got := escapeNullCharForPostgres([]byte(tc.input))
if string(got) != tc.want {
t.Errorf("escapeNullCharForPostgres(%s) = %s, want %s", tc.input, string(got), tc.want)
}
}
}
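These cases exercise the tightened regexp introduced in the worker pool change further down. As a standalone sketch for illustration, the regexp and replacement below are copied from that diff, while the program wrapper and sample input are made up:

package main

import (
    "bytes"
    "fmt"
    "regexp"
)

// Match a run of escaped NULs together with the non-backslash character
// (and any escaped backslashes) immediately preceding it.
var postgresFixRegexp = regexp.MustCompile(`([^\\](\\\\)*)(\\u0000)+`)

func escapeNullCharForPostgres(b []byte) []byte {
    return postgresFixRegexp.ReplaceAllFunc(b, func(m []byte) []byte {
        return bytes.ReplaceAll(m, []byte(`\u0000`), []byte(`<0x00>`))
    })
}

func main() {
    // Consecutive escaped NULs are rewritten as a whole run, and the
    // character preceding the run (the opening quote here) is kept.
    fmt.Println(string(escapeNullCharForPostgres([]byte(`"description":"\u0000\u0000"`))))
    // prints: "description":"<0x00><0x00>"
}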


@@ -3,6 +3,7 @@ package main
import (
"context"
"fmt"
"slices"
"time"
"github.com/rs/zerolog"
@@ -101,7 +102,11 @@ func (s *Scheduler) fillQueue(ctx context.Context) error {
if err := s.db.Find(&remotes).Error; err != nil {
return fmt.Errorf("failed to get the list of PDSs: %w", err)
}
perPDSLimit := 0
remotes = slices.DeleteFunc(remotes, func(pds pds.PDS) bool {
return pds.Disabled
})
perPDSLimit := maxQueueLen
if len(remotes) > 0 {
perPDSLimit = maxQueueLen * 2 / len(remotes)
}
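For context on the filtering above: slices.DeleteFunc (Go 1.21+) drops every element for which the predicate returns true, so only enabled PDSs share the queue budget, and the new default keeps perPDSLimit at the full queue length when no PDSs remain. A small self-contained illustration; the struct stub, hosts, and maxQueueLen value are made up:

package main

import (
    "fmt"
    "slices"
)

// Stand-in for pds.PDS with only the fields this example needs.
type PDS struct {
    Host     string
    Disabled bool
}

func main() {
    remotes := []PDS{
        {Host: "https://pds-a.example"},
        {Host: "https://pds-b.example", Disabled: true},
        {Host: "https://pds-c.example"},
        {Host: "https://pds-d.example"},
    }
    // DeleteFunc removes the disabled entry and returns the shorter slice.
    remotes = slices.DeleteFunc(remotes, func(p PDS) bool { return p.Disabled })

    const maxQueueLen = 1000 // hypothetical; the real constant lives in the scheduler
    perPDSLimit := maxQueueLen
    if len(remotes) > 0 {
        perPDSLimit = maxQueueLen * 2 / len(remotes)
    }
    fmt.Println(len(remotes), perPDSLimit) // 3 666 (integer division)
}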


@@ -4,7 +4,6 @@ import (
"bytes"
"context"
"fmt"
"net/url"
"regexp"
"strings"
"time"
@@ -133,37 +132,32 @@ func (p *WorkerPool) worker(ctx context.Context, signal chan struct{}) {
}
}
var postgresFixRegexp = regexp.MustCompile(`[^\\](\\\\)*(\\u0000)`)
var postgresFixRegexp = regexp.MustCompile(`([^\\](\\\\)*)(\\u0000)+`)
func escapeNullCharForPostgres(b []byte) []byte {
return postgresFixRegexp.ReplaceAll(b, []byte(`$1<0x00>`))
return postgresFixRegexp.ReplaceAllFunc(b, func(b []byte) []byte {
return bytes.ReplaceAll(b, []byte(`\u0000`), []byte(`<0x00>`))
})
}
func (p *WorkerPool) doWork(ctx context.Context, work WorkItem) error {
log := zerolog.Ctx(ctx)
defer close(work.signal)
doc, err := resolver.GetDocument(ctx, work.Repo.DID)
u, err := resolver.GetPDSEndpoint(ctx, work.Repo.DID)
if err != nil {
return fmt.Errorf("resolving did %q: %w", work.Repo.DID, err)
return err
}
pdsHost := ""
for _, srv := range doc.Service {
if srv.Type != "AtprotoPersonalDataServer" {
continue
}
pdsHost = srv.ServiceEndpoint
}
if pdsHost == "" {
return fmt.Errorf("did not find any PDS in DID Document")
}
u, err := url.Parse(pdsHost)
remote, err := pds.EnsureExists(ctx, p.db, u.String())
if err != nil {
return fmt.Errorf("PDS endpoint (%q) is an invalid URL: %w", pdsHost, err)
return fmt.Errorf("failed to get PDS records for %q: %w", u, err)
}
if u.Host == "" {
return fmt.Errorf("PDS endpoint (%q) doesn't have a host part", pdsHost)
if work.Repo.PDS != remote.ID {
if err := p.db.Model(&work.Repo).Where(&repo.Repo{ID: work.Repo.ID}).Updates(&repo.Repo{PDS: remote.ID}).Error; err != nil {
return fmt.Errorf("failed to update repo's PDS to %q: %w", u, err)
}
work.Repo.PDS = remote.ID
}
client := xrpcauth.NewAnonymousClient(ctx)