remove the latest migration until further notice.
parent 6fe357b54a
commit 64b7533315
@@ -24,13 +24,13 @@ docker compose exec -iT postgres psql -U postgres -d bluesky <<- EOF
 \echo Refreshing follows...
 refresh materialized view export_follows;
 \echo Refreshing like counts...
-refresh materialized view export_likes_ladder;
+refresh materialized view export_likes;
 \echo Refreshing reply counts...
-refresh materialized view export_replies_ladder;
+refresh materialized view export_replies;
 \echo Refreshing block list...
 refresh materialized view export_blocks;
 \echo Refreshing DID list...
-refresh materialized view export_dids_ladder;
+refresh materialized view export_dids;
 \echo Refreshing optout list...
 refresh materialized view export_optouts;
 EOF
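
A plain refresh like the ones above takes an exclusive lock, so reads of a view block while it is being rebuilt. If that matters for this deployment, PostgreSQL can refresh concurrently instead; this is only a sketch under two assumptions not shown in this diff: the view already holds data, and it has (or is given) a unique index, here invented as export_likes_pk over the ":START_ID"/":END_ID" columns.

    -- Assumption: export_likes is already populated and gets a unique index;
    -- both are required for a concurrent refresh.
    create unique index if not exists export_likes_pk
        on export_likes (":START_ID", ":END_ID");
    refresh materialized view concurrently export_likes;
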
@@ -67,7 +67,7 @@ likes_started=$(date -Iseconds --utc)
 docker compose exec -it postgres psql -U postgres -d bluesky \
   -c "insert into incremental_export_log (started, to_tsmp, collection) values ('$likes_started', '$to_timestamp', 'app.bsky.feed.like')"
 docker compose exec -it postgres psql -U postgres -d bluesky \
-  -c "copy (select * from export_likes_ladder) to stdout with csv header;" > ${CSV_DIR}/full/${date}/like_counts.csv
+  -c "copy (select * from export_likes) to stdout with csv header;" > ${CSV_DIR}/full/${date}/like_counts.csv
 echo "Finishing likes export..."
 likes_finished=$(date -Iseconds --utc)
 docker compose exec -it postgres psql -U postgres -d bluesky \
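
Each export step records itself in incremental_export_log. The table's definition is not part of this diff; the following is an inferred sketch of its shape, taken only from the insert and update statements in this script, so the real types and constraints may differ.

    -- Inferred, not authoritative: columns as used by the export script.
    create table if not exists incremental_export_log (
        started    timestamptz,
        finished   timestamptz,
        to_tsmp    timestamptz,
        collection text
    );
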
@@ -78,7 +78,7 @@ posts_started=$(date -Iseconds --utc)
 docker compose exec -it postgres psql -U postgres -d bluesky \
   -c "insert into incremental_export_log (started, to_tsmp, collection) values ('$posts_started', '$to_timestamp', 'app.bsky.feed.post')"
 docker compose exec -it postgres psql -U postgres -d bluesky \
-  -c "copy (select * from export_replies_ladder) to stdout with csv header;" > ${CSV_DIR}/full/${date}/post_counts.csv
+  -c "copy (select * from export_replies) to stdout with csv header;" > ${CSV_DIR}/full/${date}/post_counts.csv
 echo "Finishing posts export..."
 posts_finished=$(date -Iseconds --utc)
 docker compose exec -it postgres psql -U postgres -d bluesky \
@@ -89,7 +89,7 @@ dids_started=$(date -Iseconds --utc)
 docker compose exec -it postgres psql -U postgres -d bluesky \
   -c "insert into incremental_export_log (started, to_tsmp, collection) values ('$dids_started', '$to_timestamp', 'did')"
 docker compose exec -it postgres psql -U postgres -d bluesky \
-  -c "copy (select * from export_dids_ladder) to stdout with csv header;" > ${CSV_DIR}/full/${date}/dids.csv
+  -c "copy (select * from export_dids) to stdout with csv header;" > ${CSV_DIR}/full/${date}/dids.csv
 echo "Finishing dids export..."
 dids_finished=$(date -Iseconds --utc)
 docker compose exec -it postgres psql -U postgres -d bluesky \
@@ -116,3 +116,5 @@ handles_finished=$(date -Iseconds --utc)
 docker compose exec -it postgres psql -U postgres -d bluesky \
   -c "update incremental_export_log set finished='$handles_finished' where started='$handles_started' and to_tsmp='$to_timestamp' and collection = 'handle'"
+
+echo "Export finished."
 
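
Because every step writes both started and finished, the log doubles as a simple timing record. A sketch of a query over it, assuming the table shape inferred above (to_tsmp marks which run a row belongs to):

    -- Duration of each collection's export in the most recent run.
    select collection, finished - started as duration
    from incremental_export_log
    where to_tsmp = (select max(to_tsmp) from incremental_export_log)
    order by started;
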
@@ -115,3 +115,4 @@ handles_finished=$(date -Iseconds --utc)
 docker compose exec -it postgres psql -U postgres -d bluesky \
   -c "update incremental_export_log set finished='$handles_finished' where started='$handles_started' and to_tsmp='$to_timestamp' and collection = 'handle_month'"
+
 echo "Export finished."
@@ -18,7 +18,6 @@ as select repos.did as ":START_ID",
     sum(ladderq(records.created_at::TIMESTAMP)) as "count:long"
 from records join repos on records.repo = repos.id
 where records.collection = 'app.bsky.feed.like'
-    and records.created_at > CURRENT_DATE - INTERVAL '180' DAY
     and repos.did <> split_part(jsonb_extract_path_text(content, 'subject', 'uri'), '/', 3)
 group by repos.did, split_part(jsonb_extract_path_text(content, 'subject', 'uri'), '/', 3)
 with no data;
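
The views above weight each record with ladderq(created_at) rather than counting rows. The function itself is not part of this diff, so the following is a purely hypothetical sketch of what such a step-weighting ("ladder") function could look like; the name and call shape come from the views, but the thresholds and weights are invented for illustration.

    -- Hypothetical only: actual ladderq() is defined elsewhere in the repo.
    create or replace function ladderq(created_at timestamp) returns integer
    language sql stable as $$
        select case
            when created_at > CURRENT_DATE - INTERVAL '30' DAY  then 4
            when created_at > CURRENT_DATE - INTERVAL '90' DAY  then 2
            when created_at > CURRENT_DATE - INTERVAL '180' DAY then 1
            else 0
        end;
    $$;
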
@@ -30,7 +29,6 @@ as select repos.did as ":START_ID",
     sum(ladderq(records.created_at::TIMESTAMP)) as "count:long"
 from records join repos on records.repo = repos.id
 where records.collection = 'app.bsky.feed.post'
-    and records.created_at > CURRENT_DATE - INTERVAL '180' DAY
     and repos.did <> split_part(jsonb_extract_path_text(content, 'reply', 'parent', 'uri'), '/', 3)
 group by repos.did, split_part(jsonb_extract_path_text(content, 'reply', 'parent', 'uri'), '/', 3)
 with no data;
@@ -48,4 +46,7 @@ as select distinct did as "did:ID" from (
     union
     select distinct ":END_ID" as did from export_blocks
 )
 with no data;
+
+create index idx_records_repo on records (repo);
+create index idx_records_created_at on records (created_at);