Lex refactor (#362)

* Remove the hack check for upgrades

* Rename the PostEmbeds folder to match the codebase style

* Update to the latest lex refactor

* Update to use the new BskyAgent (see the agent sketch after the commit metadata)

* Update to use the api package's RichText library (see the RichText sketch after the commit metadata)

* Switch to upsertProfile

* Add TextEncoder/TextDecoder polyfill

* Add Intl.Segmenter polyfill

* Update composer to calculate lengths by grapheme

* Fix Detox

* Fix login in e2e

* Get the create-account e2e test passing

* Implement an e2e mocking framework

* Don't use private methods on MobX models, as MobX can't track them (see the MobX sketch after the commit metadata)

* Add tooling for e2e-specific builds and an e2e media-picker mock

* Add some tests and fix some bugs around profile editing

* Add shell tests

* Add home screen tests

* Add thread screen tests

* Add tests for other users' profile screens

* Add search screen tests

* Implement profile imagery change tools and tests

* Update to new embed behaviors

* Add post tests

* Fix the profile-screen test

* Fix session resumption

* Update web composer to new api

* 1.11.0

* Fix pagination cursor parameters

* Add quote posts to notifications

* Fix embed layouts

* Remove the inline YouTube player and improve tap handling on link cards

* Reset minimal shell mode on all screen loads and feed swipes (close #299)

* Update podfile.lock

* Improve the post not-found UI (close #366)

* Bump atproto packages
Paul Frazee 2023-03-31 13:17:26 -05:00 committed by GitHub
parent 19f3a2fa92
commit a3334a01a2
133 changed files with 3103 additions and 2839 deletions
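
For the agent-related items above ("new BskyAgent", "upsertProfile", "session resumption"), a minimal sketch of how client code talks to the service after this refactor. Only the @atproto/api exports are taken from the diff; the in-memory session store and the profile values are illustrative assumptions, not code from this commit.

import {BskyAgent, AtpSessionData, AtpSessionEvent} from '@atproto/api'

// In-memory session store, purely for illustration.
let storedSession: AtpSessionData | undefined

const agent = new BskyAgent({
  service: 'https://bsky.social',
  // called whenever the session is created, refreshed, or expires
  persistSession: (_evt: AtpSessionEvent, sess?: AtpSessionData) => {
    storedSession = sess
  },
})

export async function setupAgent(identifier: string, password: string) {
  if (storedSession) {
    await agent.resumeSession(storedSession) // session-resumption path
  } else {
    await agent.login({identifier, password})
  }

  // upsertProfile reads the current profile record and writes back
  // whatever the callback returns
  await agent.upsertProfile(existing => ({
    ...existing,
    displayName: existing?.displayName ?? 'New user',
  }))
}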
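
The RichText and grapheme-length items come together in the composer. A minimal sketch of the new flow, assuming an already-authenticated BskyAgent; the 300-grapheme limit is assumed here for illustration. RichText.graphemeLength counts user-perceived characters, which is what the Intl.Segmenter polyfill is needed for on React Native, and detectFacets replaces the removed extractEntities() helper shown further down in this diff.

import {BskyAgent, RichText} from '@atproto/api'

const MAX_GRAPHEME_LENGTH = 300 // assumed limit, for illustration only

export async function buildAndSendPost(agent: BskyAgent, rawText: string) {
  // cleanNewlines collapses excess blank lines, replacing the old
  // hand-rolled rich-text sanitizer this commit deletes
  const rt = new RichText({text: rawText.trim()}, {cleanNewlines: true})

  // length is measured in graphemes, so emoji and combining characters
  // count as one character each
  if (rt.graphemeLength > MAX_GRAPHEME_LENGTH) {
    throw new Error('Post is too long')
  }

  // resolve mentions and links into byte-indexed facets
  await rt.detectFacets(agent)

  return agent.post({text: rt.text, facets: rt.facets})
}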
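
On the MobX note: makeAutoObservable works off the members it can enumerate and annotate, which is awkward or impossible for private methods (JS #-private members are entirely invisible to it), so mutation helpers on the models stay public. A minimal sketch of that convention; the class and method names here are illustrative, not taken from the codebase.

import {makeAutoObservable} from 'mobx'

export class ExampleEditModel {
  isLoading = false
  error = ''

  constructor() {
    // infers observables from fields and actions from the methods it can enumerate
    makeAutoObservable(this)
  }

  async save() {
    this._xLoading()
    try {
      await new Promise(resolve => setTimeout(resolve, 0)) // stand-in for a network call
      this._xIdle()
    } catch (e: any) {
      this._xIdle(e.toString())
    }
  }

  // kept public (underscore-prefixed) rather than private so MobX wraps them as actions
  _xLoading() {
    this.isLoading = true
    this.error = ''
  }

  _xIdle(err = '') {
    this.isLoading = false
    this.error = err
  }
}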

View file

@@ -1,11 +1,11 @@
import AtpAgent from '@atproto/api'
import {BskyAgent, stringifyLex, jsonToLex} from '@atproto/api'
import RNFS from 'react-native-fs'
const GET_TIMEOUT = 15e3 // 15s
const POST_TIMEOUT = 60e3 // 60s
export function doPolyfill() {
AtpAgent.configure({fetch: fetchHandler})
BskyAgent.configure({fetch: fetchHandler})
}
interface FetchHandlerResponse {
@@ -22,7 +22,7 @@ async function fetchHandler(
): Promise<FetchHandlerResponse> {
const reqMimeType = reqHeaders['Content-Type'] || reqHeaders['content-type']
if (reqMimeType && reqMimeType.startsWith('application/json')) {
reqBody = JSON.stringify(reqBody)
reqBody = stringifyLex(reqBody)
} else if (
typeof reqBody === 'string' &&
(reqBody.startsWith('/') || reqBody.startsWith('file:'))
@@ -65,7 +65,7 @@ async function fetchHandler(
let resBody
if (resMimeType) {
if (resMimeType.startsWith('application/json')) {
resBody = await res.json()
resBody = jsonToLex(await res.json())
} else if (resMimeType.startsWith('text/')) {
resBody = await res.text()
} else {

View file

@@ -1,4 +1,3 @@
export function doPolyfill() {
// TODO needed? native fetch may work fine -prf
// AtpApi.xrpc.fetch = fetchHandler
// no polyfill is needed on web
}

View file

@@ -1,9 +1,9 @@
import {RootStoreModel} from 'state/index'
import {
AppBskyFeedFeedViewPost,
AppBskyFeedDefs,
AppBskyFeedGetAuthorFeed as GetAuthorFeed,
} from '@atproto/api'
type ReasonRepost = AppBskyFeedFeedViewPost.ReasonRepost
type ReasonRepost = AppBskyFeedDefs.ReasonRepost
async function getMultipleAuthorsPosts(
rootStore: RootStoreModel,
@@ -12,12 +12,12 @@ async function getMultipleAuthorsPosts(
limit: number = 10,
) {
const responses = await Promise.all(
authors.map((author, index) =>
rootStore.api.app.bsky.feed
authors.map((actor, index) =>
rootStore.agent
.getAuthorFeed({
author,
actor,
limit,
before: cursor ? cursor.split(',')[index] : undefined,
cursor: cursor ? cursor.split(',')[index] : undefined,
})
.catch(_err => ({success: false, headers: {}, data: {feed: []}})),
),
@@ -29,14 +29,14 @@ function mergePosts(
responses: GetAuthorFeed.Response[],
{repostsOnly, bestOfOnly}: {repostsOnly?: boolean; bestOfOnly?: boolean},
) {
let posts: AppBskyFeedFeedViewPost.Main[] = []
let posts: AppBskyFeedDefs.FeedViewPost[] = []
if (bestOfOnly) {
for (const res of responses) {
if (res.success) {
// filter the feed down to the post with the most upvotes
// filter the feed down to the post with the most likes
res.data.feed = res.data.feed.reduce(
(acc: AppBskyFeedFeedViewPost.Main[], v) => {
(acc: AppBskyFeedDefs.FeedViewPost[], v) => {
if (
!acc?.[0] &&
!v.reason &&
@@ -49,7 +49,7 @@ function mergePosts(
acc &&
!v.reason &&
!v.reply &&
v.post.upvoteCount > acc[0]?.post.upvoteCount &&
(v.post.likeCount || 0) > (acc[0]?.post.likeCount || 0) &&
isRecentEnough(v.post.indexedAt)
) {
return [v]
@@ -92,7 +92,7 @@ function mergePosts(
return posts
}
function isARepostOfSomeoneElse(post: AppBskyFeedFeedViewPost.Main): boolean {
function isARepostOfSomeoneElse(post: AppBskyFeedDefs.FeedViewPost): boolean {
return (
post.reason?.$type === 'app.bsky.feed.feedViewPost#reasonRepost' &&
post.post.author.did !== (post.reason as ReasonRepost).by.did

View file

@@ -1,8 +1,8 @@
import {AppBskyFeedFeedViewPost} from '@atproto/api'
import {AppBskyFeedDefs} from '@atproto/api'
import lande from 'lande'
type FeedViewPost = AppBskyFeedFeedViewPost.Main
import {hasProp} from '@atproto/lexicon'
import {hasProp} from 'lib/type-guards'
import {LANGUAGES_MAP_CODE2} from '../../locale/languages'
type FeedViewPost = AppBskyFeedDefs.FeedViewPost
export type FeedTunerFn = (
tuner: FeedTuner,
@@ -174,7 +174,7 @@ export class FeedTuner {
}
const item = slices[i].rootItem
const isRepost = Boolean(item.reason)
if (!isRepost && item.post.upvoteCount < 2) {
if (!isRepost && (item.post.likeCount || 0) < 2) {
slices.splice(i, 1)
}
}

View file

@@ -1,16 +1,16 @@
import {
AppBskyEmbedImages,
AppBskyEmbedExternal,
ComAtprotoBlobUpload,
AppBskyEmbedRecord,
AppBskyEmbedRecordWithMedia,
ComAtprotoRepoUploadBlob,
RichText,
} from '@atproto/api'
import {AtUri} from '../../third-party/uri'
import {RootStoreModel} from 'state/models/root-store'
import {extractEntities} from 'lib/strings/rich-text-detection'
import {isNetworkError} from 'lib/strings/errors'
import {LinkMeta} from '../link-meta/link-meta'
import {Image} from '../media/manip'
import {RichText} from '../strings/rich-text'
import {isWeb} from 'platform/detection'
export interface ExternalEmbedDraft {
@@ -27,7 +27,7 @@ export async function resolveName(store: RootStoreModel, didOrHandle: string) {
if (didOrHandle.startsWith('did:')) {
return didOrHandle
}
const res = await store.api.com.atproto.handle.resolve({
const res = await store.agent.resolveHandle({
handle: didOrHandle,
})
return res.data.did
@@ -37,15 +37,15 @@ export async function uploadBlob(
store: RootStoreModel,
blob: string,
encoding: string,
): Promise<ComAtprotoBlobUpload.Response> {
): Promise<ComAtprotoRepoUploadBlob.Response> {
if (isWeb) {
// `blob` should be a data uri
return store.api.com.atproto.blob.upload(convertDataURIToUint8Array(blob), {
return store.agent.uploadBlob(convertDataURIToUint8Array(blob), {
encoding,
})
} else {
// `blob` should be a path to a file in the local FS
return store.api.com.atproto.blob.upload(
return store.agent.uploadBlob(
blob, // this will be special-cased by the fetch monkeypatch in /src/state/lib/api.ts
{encoding},
)
@@ -70,22 +70,18 @@ export async function post(store: RootStoreModel, opts: PostOpts) {
| AppBskyEmbedImages.Main
| AppBskyEmbedExternal.Main
| AppBskyEmbedRecord.Main
| AppBskyEmbedRecordWithMedia.Main
| undefined
let reply
const text = new RichText(opts.rawText, undefined, {
cleanNewlines: true,
}).text.trim()
const rt = new RichText(
{text: opts.rawText.trim()},
{
cleanNewlines: true,
},
)
opts.onStateChange?.('Processing...')
const entities = extractEntities(text, opts.knownHandles)
if (entities) {
for (const ent of entities) {
if (ent.type === 'mention') {
const prof = await store.profiles.getProfile(ent.value)
ent.value = prof.data.did
}
}
}
await rt.detectFacets(store.agent)
if (opts.quote) {
embed = {
@@ -95,24 +91,37 @@ export async function post(store: RootStoreModel, opts: PostOpts) {
cid: opts.quote.cid,
},
} as AppBskyEmbedRecord.Main
} else if (opts.images?.length) {
embed = {
$type: 'app.bsky.embed.images',
images: [],
} as AppBskyEmbedImages.Main
let i = 1
}
if (opts.images?.length) {
const images: AppBskyEmbedImages.Image[] = []
for (const image of opts.images) {
opts.onStateChange?.(`Uploading image #${i++}...`)
opts.onStateChange?.(`Uploading image #${images.length + 1}...`)
const res = await uploadBlob(store, image, 'image/jpeg')
embed.images.push({
image: {
cid: res.data.cid,
mimeType: 'image/jpeg',
},
images.push({
image: res.data.blob,
alt: '', // TODO supply alt text
})
}
} else if (opts.extLink) {
if (opts.quote) {
embed = {
$type: 'app.bsky.embed.recordWithMedia',
record: embed,
media: {
$type: 'app.bsky.embed.images',
images,
},
} as AppBskyEmbedRecordWithMedia.Main
} else {
embed = {
$type: 'app.bsky.embed.images',
images,
} as AppBskyEmbedImages.Main
}
}
if (opts.extLink && !opts.images?.length) {
let thumb
if (opts.extLink.localThumb) {
opts.onStateChange?.('Uploading link thumbnail...')
@@ -138,27 +147,41 @@ export async function post(store: RootStoreModel, opts: PostOpts) {
opts.extLink.localThumb.path,
encoding,
)
thumb = {
cid: thumbUploadRes.data.cid,
mimeType: encoding,
}
thumb = thumbUploadRes.data.blob
}
}
embed = {
$type: 'app.bsky.embed.external',
external: {
uri: opts.extLink.uri,
title: opts.extLink.meta?.title || '',
description: opts.extLink.meta?.description || '',
thumb,
},
} as AppBskyEmbedExternal.Main
if (opts.quote) {
embed = {
$type: 'app.bsky.embed.recordWithMedia',
record: embed,
media: {
$type: 'app.bsky.embed.external',
external: {
uri: opts.extLink.uri,
title: opts.extLink.meta?.title || '',
description: opts.extLink.meta?.description || '',
thumb,
},
} as AppBskyEmbedExternal.Main,
} as AppBskyEmbedRecordWithMedia.Main
} else {
embed = {
$type: 'app.bsky.embed.external',
external: {
uri: opts.extLink.uri,
title: opts.extLink.meta?.title || '',
description: opts.extLink.meta?.description || '',
thumb,
},
} as AppBskyEmbedExternal.Main
}
}
if (opts.replyTo) {
const replyToUrip = new AtUri(opts.replyTo)
const parentPost = await store.api.app.bsky.feed.post.get({
user: replyToUrip.host,
const parentPost = await store.agent.getPost({
repo: replyToUrip.host,
rkey: replyToUrip.rkey,
})
if (parentPost) {
@@ -175,16 +198,12 @@ export async function post(store: RootStoreModel, opts: PostOpts) {
try {
opts.onStateChange?.('Posting...')
return await store.api.app.bsky.feed.post.create(
{did: store.me.did || ''},
{
text,
reply,
embed,
entities,
createdAt: new Date().toISOString(),
},
)
return await store.agent.post({
text: rt.text,
facets: rt.facets,
reply,
embed,
})
} catch (e: any) {
console.error(`Failed to create post: ${e.toString()}`)
if (isNetworkError(e)) {
@@ -197,49 +216,6 @@ export async function post(store: RootStoreModel, opts: PostOpts) {
}
}
export async function repost(store: RootStoreModel, uri: string, cid: string) {
return await store.api.app.bsky.feed.repost.create(
{did: store.me.did || ''},
{
subject: {uri, cid},
createdAt: new Date().toISOString(),
},
)
}
export async function unrepost(store: RootStoreModel, repostUri: string) {
const repostUrip = new AtUri(repostUri)
return await store.api.app.bsky.feed.repost.delete({
did: repostUrip.hostname,
rkey: repostUrip.rkey,
})
}
export async function follow(
store: RootStoreModel,
subjectDid: string,
subjectDeclarationCid: string,
) {
return await store.api.app.bsky.graph.follow.create(
{did: store.me.did || ''},
{
subject: {
did: subjectDid,
declarationCid: subjectDeclarationCid,
},
createdAt: new Date().toISOString(),
},
)
}
export async function unfollow(store: RootStoreModel, followUri: string) {
const followUrip = new AtUri(followUri)
return await store.api.app.bsky.graph.follow.delete({
did: followUrip.hostname,
rkey: followUrip.rkey,
})
}
// helpers
// =
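
The repost/unrepost/follow/unfollow helpers removed above are superseded by convenience methods on the agent. A minimal sketch of the equivalent calls, assuming BskyAgent's repost/deleteRepost/follow/deleteFollow methods (not shown in this diff); note that the old subjectDeclarationCid argument has no counterpart in the refactored follow record.

import {BskyAgent} from '@atproto/api'

// assumes an already-authenticated agent
export async function repost(agent: BskyAgent, uri: string, cid: string) {
  return agent.repost(uri, cid)
}

export async function unrepost(agent: BskyAgent, repostUri: string) {
  return agent.deleteRepost(repostUri)
}

export async function follow(agent: BskyAgent, subjectDid: string) {
  return agent.follow(subjectDid)
}

export async function unfollow(agent: BskyAgent, followUri: string) {
  return agent.deleteFollow(followUri)
}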

View file

@@ -0,0 +1,116 @@
import {RootStoreModel} from 'state/index'
import {PickerOpts, CameraOpts, CropperOpts, PickedMedia} from './types'
import {
scaleDownDimensions,
Dim,
compressIfNeeded,
moveToPremanantPath,
} from 'lib/media/manip'
export type {PickedMedia} from './types'
import RNFS from 'react-native-fs'
let _imageCounter = 0
async function getFile() {
const files = await RNFS.readDir(
RNFS.LibraryDirectoryPath.split('/')
.slice(0, -5)
.concat(['Media', 'DCIM', '100APPLE'])
.join('/'),
)
return files[_imageCounter++ % files.length]
}
export async function openPicker(
_store: RootStoreModel,
opts: PickerOpts,
): Promise<PickedMedia[]> {
const mediaType = opts.mediaType || 'photo'
const items = await getFile()
const toMedia = (item: RNFS.ReadDirItem) => ({
mediaType,
path: item.path,
mime: 'image/jpeg',
size: item.size,
width: 4288,
height: 2848,
})
if (Array.isArray(items)) {
return items.map(toMedia)
}
return [toMedia(items)]
}
export async function openCamera(
_store: RootStoreModel,
opts: CameraOpts,
): Promise<PickedMedia> {
const mediaType = opts.mediaType || 'photo'
const item = await getFile()
return {
mediaType,
path: item.path,
mime: 'image/jpeg',
size: item.size,
width: 4288,
height: 2848,
}
}
export async function openCropper(
_store: RootStoreModel,
opts: CropperOpts,
): Promise<PickedMedia> {
const mediaType = opts.mediaType || 'photo'
const item = await getFile()
return {
mediaType,
path: item.path,
mime: 'image/jpeg',
size: item.size,
width: 4288,
height: 2848,
}
}
export async function pickImagesFlow(
store: RootStoreModel,
maxFiles: number,
maxDim: Dim,
maxSize: number,
) {
const items = await openPicker(store, {
multiple: true,
maxFiles,
mediaType: 'photo',
})
const result = []
for (const image of items) {
result.push(
await cropAndCompressFlow(store, image.path, image, maxDim, maxSize),
)
}
return result
}
export async function cropAndCompressFlow(
store: RootStoreModel,
path: string,
imgDim: Dim,
maxDim: Dim,
maxSize: number,
) {
// choose target dimensions based on the original
// this causes the photo cropper to start with the full image "selected"
const {width, height} = scaleDownDimensions(imgDim, maxDim)
const cropperRes = await openCropper(store, {
mediaType: 'photo',
path,
freeStyleCropEnabled: true,
width,
height,
})
const img = await compressIfNeeded(cropperRes, maxSize)
const permanentPath = await moveToPremanantPath(img.path)
return permanentPath
}

View file

@@ -45,7 +45,7 @@ export function displayNotificationFromModel(
let author = notif.author.displayName || notif.author.handle
let title: string
let body: string = ''
if (notif.isUpvote) {
if (notif.isLike) {
title = `${author} liked your post`
body = notif.additionalPost?.thread?.postRecord?.text || ''
} else if (notif.isRepost) {
@@ -65,7 +65,7 @@ export function displayNotificationFromModel(
}
let image
if (
AppBskyEmbedImages.isPresented(notif.additionalPost?.thread?.post.embed) &&
AppBskyEmbedImages.isView(notif.additionalPost?.thread?.post.embed) &&
notif.additionalPost?.thread?.post.embed.images[0]?.thumb
) {
image = notif.additionalPost.thread.post.embed.images[0].thumb

View file

@@ -10,7 +10,7 @@ export type CommonNavigatorParams = {
ProfileFollowers: {name: string}
ProfileFollows: {name: string}
PostThread: {name: string; rkey: string}
PostUpvotedBy: {name: string; rkey: string}
PostLikedBy: {name: string; rkey: string}
PostRepostedBy: {name: string; rkey: string}
Debug: undefined
Log: undefined

View file

@@ -1,64 +1,5 @@
import {AppBskyFeedPost} from '@atproto/api'
type Entity = AppBskyFeedPost.Entity
import {isValidDomain} from './url-helpers'
export function extractEntities(
text: string,
knownHandles?: Set<string>,
): Entity[] | undefined {
let match
let ents: Entity[] = []
{
// mentions
const re = /(^|\s|\()(@)([a-zA-Z0-9.-]+)(\b)/g
while ((match = re.exec(text))) {
if (knownHandles && !knownHandles.has(match[3])) {
continue // not a known handle
} else if (!match[3].includes('.')) {
continue // probably not a handle
}
const start = text.indexOf(match[3], match.index) - 1
ents.push({
type: 'mention',
value: match[3],
index: {start, end: start + match[3].length + 1},
})
}
}
{
// links
const re =
/(^|\s|\()((https?:\/\/[\S]+)|((?<domain>[a-z][a-z0-9]*(\.[a-z0-9]+)+)[\S]*))/gim
while ((match = re.exec(text))) {
let value = match[2]
if (!value.startsWith('http')) {
const domain = match.groups?.domain
if (!domain || !isValidDomain(domain)) {
continue
}
value = `https://${value}`
}
const start = text.indexOf(match[2], match.index)
const index = {start, end: start + match[2].length}
// strip ending puncuation
if (/[.,;!?]$/.test(value)) {
value = value.slice(0, -1)
index.end--
}
if (/[)]$/.test(value) && !value.includes('(')) {
value = value.slice(0, -1)
index.end--
}
ents.push({
type: 'link',
value,
index,
})
}
}
return ents.length > 0 ? ents : undefined
}
interface DetectedLink {
link: string
}

View file

@@ -1,32 +0,0 @@
import {RichText} from './rich-text'
const EXCESS_SPACE_RE = /[\r\n]([\u00AD\u2060\u200D\u200C\u200B\s]*[\r\n]){2,}/
const REPLACEMENT_STR = '\n\n'
export function removeExcessNewlines(richText: RichText): RichText {
return clean(richText, EXCESS_SPACE_RE, REPLACEMENT_STR)
}
// TODO: check on whether this works correctly with multi-byte codepoints
export function clean(
richText: RichText,
targetRegexp: RegExp,
replacementString: string,
): RichText {
richText = richText.clone()
let match = richText.text.match(targetRegexp)
while (match && typeof match.index !== 'undefined') {
const oldText = richText.text
const removeStartIndex = match.index
const removeEndIndex = removeStartIndex + match[0].length
richText.delete(removeStartIndex, removeEndIndex)
if (richText.text === oldText) {
break // sanity check
}
richText.insert(removeStartIndex, replacementString)
match = richText.text.match(targetRegexp)
}
return richText
}

View file

@@ -1,216 +0,0 @@
/*
= Rich Text Manipulation
When we sanitize rich text, we have to update the entity indices as the
text is modified. This can be modeled as inserts() and deletes() of the
rich text string. The possible scenarios are outlined below, along with
their expected behaviors.
NOTE: Slices are start inclusive, end exclusive
== richTextInsert()
Target string:
0 1 2 3 4 5 6 7 8 910 // string indices
h e l l o w o r l d // string value
^-------^ // target slice {start: 2, end: 7}
Scenarios:
A: ^ // insert "test" at 0
B: ^ // insert "test" at 4
C: ^ // insert "test" at 8
A = before -> move both by num added
B = inner -> move end by num added
C = after -> noop
Results:
A: 0 1 2 3 4 5 6 7 8 910 // string indices
t e s t h e l l o w // string value
^-------^ // target slice {start: 6, end: 11}
B: 0 1 2 3 4 5 6 7 8 910 // string indices
h e l l t e s t o w // string value
^---------------^ // target slice {start: 2, end: 11}
C: 0 1 2 3 4 5 6 7 8 910 // string indices
h e l l o w o t e s // string value
^-------^ // target slice {start: 2, end: 7}
== richTextDelete()
Target string:
0 1 2 3 4 5 6 7 8 910 // string indices
h e l l o w o r l d // string value
^-------^ // target slice {start: 2, end: 7}
Scenarios:
A: ^---------------^ // remove slice {start: 0, end: 9}
B: ^-----^ // remove slice {start: 7, end: 11}
C: ^-----------^ // remove slice {start: 4, end: 11}
D: ^-^ // remove slice {start: 3, end: 5}
E: ^-----^ // remove slice {start: 1, end: 5}
F: ^-^ // remove slice {start: 0, end: 2}
A = entirely outer -> delete slice
B = entirely after -> noop
C = partially after -> move end to remove-start
D = entirely inner -> move end by num removed
E = partially before -> move start to remove-start index, move end by num removed
F = entirely before -> move both by num removed
Results:
A: 0 1 2 3 4 5 6 7 8 910 // string indices
l d // string value
// target slice (deleted)
B: 0 1 2 3 4 5 6 7 8 910 // string indices
h e l l o w // string value
^-------^ // target slice {start: 2, end: 7}
C: 0 1 2 3 4 5 6 7 8 910 // string indices
h e l l // string value
^-^ // target slice {start: 2, end: 4}
D: 0 1 2 3 4 5 6 7 8 910 // string indices
h e l w o r l d // string value
^---^ // target slice {start: 2, end: 5}
E: 0 1 2 3 4 5 6 7 8 910 // string indices
h w o r l d // string value
^-^ // target slice {start: 1, end: 3}
F: 0 1 2 3 4 5 6 7 8 910 // string indices
l l o w o r l d // string value
^-------^ // target slice {start: 0, end: 5}
*/
import cloneDeep from 'lodash.clonedeep'
import {AppBskyFeedPost} from '@atproto/api'
import {removeExcessNewlines} from './rich-text-sanitize'
export type Entity = AppBskyFeedPost.Entity
export interface RichTextOpts {
cleanNewlines?: boolean
}
export class RichText {
constructor(
public text: string,
public entities?: Entity[],
opts?: RichTextOpts,
) {
if (opts?.cleanNewlines) {
removeExcessNewlines(this).copyInto(this)
}
}
clone() {
return new RichText(this.text, cloneDeep(this.entities))
}
copyInto(target: RichText) {
target.text = this.text
target.entities = cloneDeep(this.entities)
}
insert(insertIndex: number, insertText: string) {
this.text =
this.text.slice(0, insertIndex) +
insertText +
this.text.slice(insertIndex)
if (!this.entities?.length) {
return this
}
const numCharsAdded = insertText.length
for (const ent of this.entities) {
// see comment at top of file for labels of each scenario
// scenario A (before)
if (insertIndex <= ent.index.start) {
// move both by num added
ent.index.start += numCharsAdded
ent.index.end += numCharsAdded
}
// scenario B (inner)
else if (insertIndex >= ent.index.start && insertIndex < ent.index.end) {
// move end by num added
ent.index.end += numCharsAdded
}
// scenario C (after)
// noop
}
return this
}
delete(removeStartIndex: number, removeEndIndex: number) {
this.text =
this.text.slice(0, removeStartIndex) + this.text.slice(removeEndIndex)
if (!this.entities?.length) {
return this
}
const numCharsRemoved = removeEndIndex - removeStartIndex
for (const ent of this.entities) {
// see comment at top of file for labels of each scenario
// scenario A (entirely outer)
if (
removeStartIndex <= ent.index.start &&
removeEndIndex >= ent.index.end
) {
// delete slice (will get removed in final pass)
ent.index.start = 0
ent.index.end = 0
}
// scenario B (entirely after)
else if (removeStartIndex > ent.index.end) {
// noop
}
// scenario C (partially after)
else if (
removeStartIndex > ent.index.start &&
removeStartIndex <= ent.index.end &&
removeEndIndex > ent.index.end
) {
// move end to remove start
ent.index.end = removeStartIndex
}
// scenario D (entirely inner)
else if (
removeStartIndex >= ent.index.start &&
removeEndIndex <= ent.index.end
) {
// move end by num removed
ent.index.end -= numCharsRemoved
}
// scenario E (partially before)
else if (
removeStartIndex < ent.index.start &&
removeEndIndex >= ent.index.start &&
removeEndIndex <= ent.index.end
) {
// move start to remove-start index, move end by num removed
ent.index.start = removeStartIndex
ent.index.end -= numCharsRemoved
}
// scenario F (entirely before)
else if (removeEndIndex < ent.index.start) {
// move both by num removed
ent.index.start -= numCharsRemoved
ent.index.end -= numCharsRemoved
}
}
// filter out any entities that were made irrelevant
this.entities = this.entities.filter(ent => ent.index.start < ent.index.end)
return this
}
}

View file

@@ -71,6 +71,7 @@ export const s = StyleSheet.create({
borderBottom1: {borderBottomWidth: 1},
borderLeft1: {borderLeftWidth: 1},
hidden: {display: 'none'},
dimmed: {opacity: 0.5},
// font weights
fw600: {fontWeight: '600'},