Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Pds sqlite refactor #1705

Merged
merged 143 commits into from Nov 1, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
143 commits
Select commit Hold shift + click to select a range
99862e3
wip
dholms Oct 3, 2023
f9e9096
wip
dholms Oct 3, 2023
82fd36a
rework readers & transactors
dholms Oct 3, 2023
66bca94
yay it compiles again
dholms Oct 4, 2023
8f9e3eb
wip
dholms Oct 4, 2023
9bd5ca9
crud all working
dholms Oct 5, 2023
98bfbea
tidy store interface
dholms Oct 5, 2023
2ca5010
clean up sequencing
dholms Oct 5, 2023
5262b00
get sequencer working
dholms Oct 6, 2023
ea68d14
fixing build errors
dholms Oct 6, 2023
00c2a3b
handle races on actor store transacts
dholms Oct 6, 2023
554e88b
fix file uploads tests
dholms Oct 6, 2023
cfc525b
spec out new simple pds mod routes
dholms Oct 7, 2023
db53a27
introduce new admin state endpoints
dholms Oct 9, 2023
6135d32
merge
dholms Oct 9, 2023
cae1381
wire up routes
dholms Oct 9, 2023
76f6326
Merge branch 'mod-account-state' into simplify-pds-mod
dholms Oct 9, 2023
f801465
clean up pds
dholms Oct 9, 2023
ff7365f
revoke refresh tokens
dholms Oct 9, 2023
05c65b7
Merge branch 'mod-account-state' into simplify-pds-mod
dholms Oct 9, 2023
68f502c
getUserAccountInfo
dholms Oct 9, 2023
7c76bed
pr tidy
dholms Oct 10, 2023
dcc0048
Merge branch 'mod-account-state' into simplify-pds-mod
dholms Oct 10, 2023
3da5105
merge
dholms Oct 10, 2023
1460dd2
tidy
dholms Oct 10, 2023
b2804a6
clean up tests & blobstore
dholms Oct 10, 2023
2f02c1c
fixing up more tests
dholms Oct 10, 2023
eb8d8a9
clean up rest of tests
dholms Oct 10, 2023
b1c565a
actor store in lru cache
dholms Oct 10, 2023
6978e46
fix open handles
dholms Oct 10, 2023
598f55d
Merge branch 'main' into pds-sqlite-refactor
dholms Oct 11, 2023
4224113
move proxied back to pds
dholms Oct 11, 2023
d655e27
fixing some tests
dholms Oct 11, 2023
9b2afaf
fixing up more tests
dholms Oct 11, 2023
e58ec0d
fanout takedowns to pds
dholms Oct 11, 2023
91c65f0
fanout admin reqs to pds
dholms Oct 12, 2023
a1298b4
tidy
dholms Oct 12, 2023
9b13517
more tidy & add more pds moderation tests
dholms Oct 12, 2023
9519d80
getUserAccountInfo -> getAccountInfo
dholms Oct 12, 2023
fa6ac0b
dont hydrate pds info on searchRepos
dholms Oct 12, 2023
d114ccc
fix build
dholms Oct 12, 2023
2deda23
port admin tests to bsky package
dholms Oct 12, 2023
8782f94
clean up old snaps
dholms Oct 12, 2023
0aeb12f
tests on fanout
dholms Oct 12, 2023
70f067f
tweak naming
dholms Oct 12, 2023
074d4c0
missed a rename
dholms Oct 12, 2023
68b7eb7
merge
dholms Oct 12, 2023
23f4d7f
tidy renames
dholms Oct 12, 2023
0fe2546
fix lex name
dholms Oct 12, 2023
09832ae
tidy & move snap
dholms Oct 12, 2023
8260dbb
merge
dholms Oct 12, 2023
2ede595
cleaning up from merge
dholms Oct 12, 2023
9e8d5a6
ensure ordering of replies
dholms Oct 12, 2023
8c966b3
fixing up bsky tests cuz sequencer is faster
dholms Oct 12, 2023
2688764
give sequencer its own db
dholms Oct 12, 2023
d452ab7
fix account deletion test
dholms Oct 12, 2023
84d1d2d
tidying migrations & tables
dholms Oct 12, 2023
9377c93
delete unused file
dholms Oct 12, 2023
5fb5ab5
change actor store layout to consistent hashes
dholms Oct 13, 2023
9b83d1e
fix some tests
dholms Oct 13, 2023
81b8fe4
fix up read after write
dholms Oct 13, 2023
a5c4caf
fix ordering issue in invite code test
dholms Oct 13, 2023
dfd7f68
Merge branch 'main' into mod-account-state
dholms Oct 13, 2023
f725796
merge
dholms Oct 13, 2023
ff7e96a
merge
dholms Oct 13, 2023
43853ff
small tidy
dholms Oct 13, 2023
74d5219
fix merge in auth verifier
dholms Oct 13, 2023
89b396f
fix todo in getBlob
dholms Oct 13, 2023
8be0af9
fix devenv build
dholms Oct 13, 2023
1af3650
fix build
dholms Oct 14, 2023
8c54840
merge
dholms Oct 14, 2023
81833d8
Merge branch 'simplify-pds-mod' into pds-sqlite-refactor
dholms Oct 14, 2023
2efe4fa
cleanup repeat process all
dholms Oct 14, 2023
39c27aa
Merge branch 'mod-account-state' into simplify-pds-mod
dholms Oct 14, 2023
78757f0
Merge branch 'simplify-pds-mod' into pds-sqlite-refactor
dholms Oct 14, 2023
b92907d
Merge branch 'main' into mod-account-state
dholms Oct 23, 2023
3b3db43
Merge branch 'mod-account-state' into simplify-pds-mod
dholms Oct 23, 2023
97bb4fe
merge
dholms Oct 23, 2023
7b6fd5e
skip actor search test
dholms Oct 23, 2023
243cd30
skip actor search test
dholms Oct 23, 2023
1a15819
tweak processAll
dholms Oct 23, 2023
ea42056
decrease wait to 1 sec
dholms Oct 23, 2023
8a52a2e
repo_blob -> record_blob
dholms Oct 23, 2023
7532beb
simplify backlink linkTo
dholms Oct 23, 2023
0186405
return repo_root to one row
dholms Oct 23, 2023
4a4d237
sequence before updating repo_root
dholms Oct 23, 2023
1cebfa0
invite code forUser -> forAccount
dholms Oct 23, 2023
22e43fd
ipld_block -> repo_block
dholms Oct 23, 2023
fcc9332
use lru-cache fetchMethod
dholms Oct 23, 2023
1cd4683
move did_cache to own db
dholms Oct 23, 2023
17ce995
better error handling on did cache
dholms Oct 23, 2023
abac865
drop did_handle
dholms Oct 23, 2023
696b68c
fix sequencer wait time
dholms Oct 23, 2023
05dcff2
debug
dholms Oct 23, 2023
4881b0a
debug
dholms Oct 23, 2023
a7d871e
more debug
dholms Oct 23, 2023
aca5234
check something
dholms Oct 23, 2023
b7638e0
fix bday paradox
dholms Oct 23, 2023
78a43a4
fix bday paradox
dholms Oct 23, 2023
02db21a
tidy up pds service auth
dholms Oct 24, 2023
4ab5fa2
rm skipped test
dholms Oct 24, 2023
9bd958e
retry http
dholms Oct 24, 2023
94af2ef
tidy
dholms Oct 24, 2023
6c44552
improve fanout error handling
dholms Oct 24, 2023
9c2b6e1
fix test
dholms Oct 24, 2023
a11596d
return signing key in did-web
dholms Oct 24, 2023
0015963
more tests
dholms Oct 24, 2023
178a74b
tidy service auth checks
dholms Oct 24, 2023
15f1c61
merge
dholms Oct 24, 2023
4206f6f
user_account -> account
dholms Oct 24, 2023
c042e3f
remove inviteNote
dholms Oct 24, 2023
bd3ce41
keypair per repo
dholms Oct 24, 2023
5a91d70
use an lru cache for keypairs as well
dholms Oct 24, 2023
1f7d4f7
Merge branch 'main' into mod-account-state
dholms Oct 25, 2023
07b6266
Merge branch 'mod-account-state' into simplify-pds-mod
dholms Oct 25, 2023
6112ca7
set pragmas
dholms Oct 26, 2023
17bbb19
rename pref transactor
dholms Oct 26, 2023
542617c
user pref -> account pref
dholms Oct 26, 2023
b14ddb5
Merge branch 'main' into mod-account-state
dholms Oct 26, 2023
4150815
merge
dholms Oct 26, 2023
c070c98
merge
dholms Oct 26, 2023
a18144a
merge
dholms Oct 30, 2023
bb71401
tweak scripts
dholms Oct 30, 2023
5a8cd30
tidy
dholms Oct 31, 2023
a36944c
better config for actorstore & dbs
dholms Oct 31, 2023
396c431
clean up cfg more
dholms Oct 31, 2023
b22835a
reorg actorstore fs layout
dholms Oct 31, 2023
f9f1171
handle errors on actor db create
dholms Oct 31, 2023
49a01ac
pr tidy & fix account deletion test
dholms Oct 31, 2023
0366ee8
pr feedback
dholms Oct 31, 2023
8ca484f
merge main
dholms Oct 31, 2023
3af8e15
fix bad merge
dholms Oct 31, 2023
37807a8
unskip test
dholms Oct 31, 2023
0cb274f
fix subscribe repos tests
dholms Oct 31, 2023
9b66921
tidy repo root tables
dholms Oct 31, 2023
f25cef4
tidy
dholms Oct 31, 2023
d891b30
fix tests
dholms Oct 31, 2023
2df9506
bulk deletes
dholms Oct 31, 2023
5d7e838
increase chunk size
dholms Oct 31, 2023
36a5412
tweak sequencer
dholms Nov 1, 2023
f84f5ce
deleted app migration table
dholms Nov 1, 2023
bf6d725
Merge branch 'pds-v2' into pds-sqlite-refactor
dholms Nov 1, 2023
2166add
patch up new auth test
dholms Nov 1, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
38 changes: 33 additions & 5 deletions packages/aws/src/s3.ts
Expand Up @@ -17,7 +17,7 @@ export class S3BlobStore implements BlobStore {
private client: aws.S3
private bucket: string

constructor(cfg: S3Config) {
constructor(public did: string, cfg: S3Config) {
const { bucket, ...rest } = cfg
this.bucket = bucket
this.client = new aws.S3({
Expand All @@ -26,20 +26,26 @@ export class S3BlobStore implements BlobStore {
})
}

/**
 * Build a factory that produces a did-scoped S3BlobStore from a shared
 * S3 configuration. Lets callers bind the config once and create a store
 * per actor did on demand.
 */
static creator(cfg: S3Config) {
  return (did: string) => new S3BlobStore(did, cfg)
}

// Random key for a temporary upload. Assumes randomStr(len, enc) returns
// `len` characters in the given encoding — TODO confirm against the
// project's common utils.
private genKey() {
  return randomStr(32, 'base32')
}

private getTmpPath(key: string): string {
return `tmp/${key}`
return `tmp/${this.did}/${key}`
}

private getStoredPath(cid: CID): string {
return `blocks/${cid.toString()}`
return `blocks/${this.did}/${cid.toString()}`
}

private getQuarantinedPath(cid: CID): string {
return `quarantine/${cid.toString()}`
return `quarantine/${this.did}/${cid.toString()}`
}

async putTemp(bytes: Uint8Array | stream.Readable): Promise<string> {
Expand Down Expand Up @@ -122,11 +128,24 @@ export class S3BlobStore implements BlobStore {
await this.deleteKey(this.getStoredPath(cid))
}

/**
 * Delete the permanently-stored blobs for all of the given CIDs in bulk.
 */
async deleteMany(cids: CID[]): Promise<void> {
  const storedKeys = cids.map((c) => this.getStoredPath(c))
  await this.deleteManyKeys(storedKeys)
}

/**
 * Whether a blob with the given CID exists in permanent storage.
 */
async hasStored(cid: CID): Promise<boolean> {
  const key = this.getStoredPath(cid)
  return this.hasKey(key)
}

/**
 * Whether a temporary upload with the given key exists.
 */
async hasTemp(key: string): Promise<boolean> {
  const tmpKey = this.getTmpPath(key)
  return this.hasKey(tmpKey)
}

private async hasKey(key: string) {
try {
const res = await this.client.headObject({
Bucket: this.bucket,
Key: this.getStoredPath(cid),
Key: key,
})
return res.$metadata.httpStatusCode === 200
} catch (err) {
Expand All @@ -141,6 +160,15 @@ export class S3BlobStore implements BlobStore {
})
}

/**
 * Delete many objects by key in bulk.
 *
 * S3's DeleteObjects API accepts at most 1000 keys per request, so the
 * key list is issued in chunks. An empty key list is a no-op — S3
 * rejects a DeleteObjects request whose Objects array is empty.
 */
private async deleteManyKeys(keys: string[]) {
  const BATCH_SIZE = 1000 // S3 DeleteObjects hard limit per request
  for (let i = 0; i < keys.length; i += BATCH_SIZE) {
    const batch = keys.slice(i, i + BATCH_SIZE)
    await this.client.deleteObjects({
      Bucket: this.bucket,
      Delete: {
        Objects: batch.map((k) => ({ Key: k })),
      },
    })
  }
}

private async move(keys: { from: string; to: string }) {
await this.client.copyObject({
Bucket: this.bucket,
Expand Down
3 changes: 1 addition & 2 deletions packages/bsky/tests/algos/hot-classic.test.ts
Expand Up @@ -31,7 +31,6 @@ describe('algo hot-classic', () => {
alice = sc.dids.alice
bob = sc.dids.bob
await network.processAll()
await network.bsky.processAll()
})

afterAll(async () => {
Expand Down Expand Up @@ -59,7 +58,7 @@ describe('algo hot-classic', () => {
await sc.like(sc.dids[name], two.ref)
await sc.like(sc.dids[name], three.ref)
}
await network.bsky.processAll()
await network.processAll()

const res = await agent.api.app.bsky.feed.getFeed(
{ feed: feedUri },
Expand Down
2 changes: 1 addition & 1 deletion packages/bsky/tests/auth.test.ts
Expand Up @@ -36,7 +36,7 @@ describe('auth', () => {
{ headers: { authorization: `Bearer ${jwt}` } },
)
}
const origSigningKey = network.pds.ctx.repoSigningKey
const origSigningKey = await network.pds.ctx.actorStore.keypair(issuer)
const newSigningKey = await Secp256k1Keypair.create({ exportable: true })
// confirm original signing key works
await expect(attemptWithKey(origSigningKey)).resolves.toBeDefined()
Expand Down
39 changes: 19 additions & 20 deletions packages/bsky/tests/auto-moderator/labeler.test.ts
Expand Up @@ -40,26 +40,25 @@ describe('labeler', () => {
await usersSeed(sc)
await network.processAll()
alice = sc.dids.alice
const repoSvc = pdsCtx.services.repo(pdsCtx.db)
const storeBlob = async (bytes: Uint8Array) => {
const blobRef = await repoSvc.blobs.addUntetheredBlob(
alice,
'image/jpeg',
Readable.from([bytes], { objectMode: false }),
)
const preparedBlobRef = {
cid: blobRef.ref,
mimeType: 'image/jpeg',
constraints: {},
}
await repoSvc.blobs.verifyBlobAndMakePermanent(alice, preparedBlobRef)
await repoSvc.blobs.associateBlob(
preparedBlobRef,
postUri(),
TID.nextStr(),
alice,
)
return blobRef
const storeBlob = (bytes: Uint8Array) => {
return pdsCtx.actorStore.transact(alice, async (store) => {
const blobRef = await store.repo.blob.addUntetheredBlob(
'image/jpeg',
Readable.from([bytes], { objectMode: false }),
)
const preparedBlobRef = {
cid: blobRef.ref,
mimeType: 'image/jpeg',
constraints: {},
}
await store.repo.blob.verifyBlobAndMakePermanent(preparedBlobRef)
await store.repo.blob.associateBlob(
preparedBlobRef,
postUri(),
TID.nextStr(),
)
return blobRef
})
}
const bytes1 = new Uint8Array([1, 2, 3, 4])
const bytes2 = new Uint8Array([5, 6, 7, 8])
Expand Down
6 changes: 4 additions & 2 deletions packages/bsky/tests/auto-moderator/takedowns.test.ts
Expand Up @@ -93,7 +93,8 @@ describe('takedowner', () => {
.executeTakeFirst()
expect(record?.takedownId).toEqual(modAction.id)

const recordPds = await network.pds.ctx.db.db
const actorDb = await network.pds.ctx.actorStore.db(post.ref.uri.hostname)
const recordPds = await actorDb.db
.selectFrom('record')
.where('uri', '=', post.ref.uriStr)
.select('takedownRef')
Expand Down Expand Up @@ -135,7 +136,8 @@ describe('takedowner', () => {
.executeTakeFirst()
expect(record?.takedownId).toEqual(modAction.id)

const recordPds = await network.pds.ctx.db.db
const actorDb = await network.pds.ctx.actorStore.db(alice)
const recordPds = await actorDb.db
.selectFrom('record')
.where('uri', '=', res.data.uri)
.select('takedownRef')
Expand Down
6 changes: 4 additions & 2 deletions packages/bsky/tests/blob-resolver.test.ts
Expand Up @@ -77,8 +77,10 @@ describe('blob resolver', () => {
})

it('fails on blob with bad signature check.', async () => {
await network.pds.ctx.blobstore.delete(fileCid)
await network.pds.ctx.blobstore.putPermanent(fileCid, randomBytes(100))
await network.pds.ctx.blobstore(fileDid).delete(fileCid)
await network.pds.ctx
.blobstore(fileDid)
.putPermanent(fileCid, randomBytes(100))
const tryGetBlob = client.get(`/blob/${fileDid}/${fileCid.toString()}`)
await expect(tryGetBlob).rejects.toThrow(
'maxContentLength size of -1 exceeded',
Expand Down
2 changes: 1 addition & 1 deletion packages/bsky/tests/handle-invalidation.test.ts
Expand Up @@ -103,7 +103,7 @@ describe('handle invalidation', () => {
await backdateIndexedAt(bob)
// update alices handle so that the pds will let bob take her old handle
await network.pds.ctx.db.db
.updateTable('did_handle')
.updateTable('account')
.where('did', '=', alice)
.set({ handle: 'not-alice.test' })
.execute()
Expand Down
13 changes: 11 additions & 2 deletions packages/bsky/tests/indexing.test.ts
Expand Up @@ -513,9 +513,18 @@ describe('indexing', () => {
validate: false,
}),
])
const writeCommit = await network.pds.ctx.actorStore.transact(
sc.dids.alice,
(store) => store.repo.processWrites(writes),
)
await pdsServices
.repo(pdsDb)
.processWrites({ did: sc.dids.alice, writes }, 1)
.account(pdsDb)
.updateRepoRoot(sc.dids.alice, writeCommit.cid, writeCommit.rev)
await network.pds.ctx.sequencer.sequenceCommit(
sc.dids.alice,
writeCommit,
writes,
)
// Index
const { data: commit } =
await pdsAgent.api.com.atproto.sync.getLatestCommit({
Expand Down
3 changes: 3 additions & 0 deletions packages/bsky/tests/seeds/basic.ts
Expand Up @@ -103,6 +103,8 @@ export default async (sc: SeedClient, users = true) => {
'tests/sample-img/key-landscape-small.jpg',
'image/jpeg',
)
// must ensure ordering of replies in indexing
await sc.network.processAll()
await sc.reply(
bob,
sc.posts[alice][1].ref,
Expand All @@ -117,6 +119,7 @@ export default async (sc: SeedClient, users = true) => {
sc.posts[alice][1].ref,
replies.carol[0],
)
await sc.network.processAll()
const alicesReplyToBob = await sc.reply(
alice,
sc.posts[alice][1].ref,
Expand Down
11 changes: 4 additions & 7 deletions packages/bsky/tests/subscription/repo.test.ts
@@ -1,7 +1,6 @@
import AtpAgent from '@atproto/api'
import { TestNetwork, SeedClient } from '@atproto/dev-env'
import { CommitData } from '@atproto/repo'
import { RepoService } from '@atproto/pds/src/services/repo'
import { PreparedWrite } from '@atproto/pds/src/repo'
import * as sequencer from '@atproto/pds/src/sequencer'
import { cborDecode, cborEncode } from '@atproto/common'
Expand Down Expand Up @@ -84,9 +83,8 @@ describe('sync', () => {

it('indexes actor when commit is unprocessable.', async () => {
// mock sequencing to create an unprocessable commit event
const afterWriteProcessingOriginal =
RepoService.prototype.afterWriteProcessing
RepoService.prototype.afterWriteProcessing = async function (
const sequenceCommitOrig = network.pds.ctx.sequencer.sequenceCommit
network.pds.ctx.sequencer.sequenceCommit = async function (
did: string,
commitData: CommitData,
writes: PreparedWrite[],
Expand All @@ -95,20 +93,19 @@ describe('sync', () => {
const evt = cborDecode(seqEvt.event) as sequencer.CommitEvt
evt.blocks = new Uint8Array() // bad blocks
seqEvt.event = cborEncode(evt)
await sequencer.sequenceEvt(this.db, seqEvt)
await network.pds.ctx.sequencer.sequenceEvt(seqEvt)
}
// create account and index the initial commit event
await sc.createAccount('jack', {
handle: 'jack.test',
email: 'jack@test.com',
password: 'password',
})
await network.pds.ctx.sequencerLeader?.isCaughtUp()
await network.processAll()
// confirm jack was indexed as an actor despite the bad event
const actors = await dumpTable(ctx.db.getPrimary(), 'actor', ['did'])
expect(actors.map((a) => a.handle)).toContain('jack.test')
RepoService.prototype.afterWriteProcessing = afterWriteProcessingOriginal
network.pds.ctx.sequencer.sequenceCommit = sequenceCommitOrig
})

async function updateProfile(
Expand Down
11 changes: 11 additions & 0 deletions packages/bsky/tests/views/threadgating.test.ts
Expand Up @@ -36,6 +36,7 @@ describe('views with thread gating', () => {
{ post: post.ref.uriStr, createdAt: iso(), allow: [] },
sc.getHeaders(sc.dids.carol),
)
await network.processAll()
await sc.reply(sc.dids.alice, post.ref, post.ref, 'empty rules reply')
await network.processAll()
const {
Expand Down Expand Up @@ -78,6 +79,7 @@ describe('views with thread gating', () => {
},
sc.getHeaders(sc.dids.carol),
)
await network.processAll()
await sc.reply(
sc.dids.alice,
post.ref,
Expand Down Expand Up @@ -125,6 +127,7 @@ describe('views with thread gating', () => {
},
sc.getHeaders(sc.dids.carol),
)
await network.processAll()
// carol only follows alice
await sc.reply(
sc.dids.dan,
Expand Down Expand Up @@ -213,6 +216,7 @@ describe('views with thread gating', () => {
},
sc.getHeaders(sc.dids.carol),
)
await network.processAll()
//
await sc.reply(sc.dids.bob, post.ref, post.ref, 'list rule reply disallow')
const aliceReply = await sc.reply(
Expand Down Expand Up @@ -277,6 +281,7 @@ describe('views with thread gating', () => {
},
sc.getHeaders(sc.dids.carol),
)
await network.processAll()
await sc.reply(
sc.dids.alice,
post.ref,
Expand Down Expand Up @@ -317,6 +322,7 @@ describe('views with thread gating', () => {
},
sc.getHeaders(sc.dids.carol),
)
await network.processAll()
// carol only follows alice, and the post mentions dan.
await sc.reply(sc.dids.bob, post.ref, post.ref, 'multi rule reply disallow')
const aliceReply = await sc.reply(
Expand Down Expand Up @@ -372,6 +378,7 @@ describe('views with thread gating', () => {
{ post: post.ref.uriStr, createdAt: iso() },
sc.getHeaders(sc.dids.carol),
)
await network.processAll()
const aliceReply = await sc.reply(
sc.dids.alice,
post.ref,
Expand Down Expand Up @@ -406,6 +413,7 @@ describe('views with thread gating', () => {
},
sc.getHeaders(sc.dids.carol),
)
await network.processAll()
// carol only follows alice
const orphanedReply = await sc.reply(
sc.dids.alice,
Expand Down Expand Up @@ -465,6 +473,7 @@ describe('views with thread gating', () => {
{ post: post.ref.uriStr, createdAt: iso(), allow: [] },
sc.getHeaders(sc.dids.carol),
)
await network.processAll()
const selfReply = await sc.reply(
sc.dids.carol,
post.ref,
Expand Down Expand Up @@ -498,6 +507,7 @@ describe('views with thread gating', () => {
},
sc.getHeaders(sc.dids.carol),
)
await network.processAll()
// carol only follows alice
const badReply = await sc.reply(
sc.dids.dan,
Expand Down Expand Up @@ -541,6 +551,7 @@ describe('views with thread gating', () => {
{ post: postB.ref.uriStr, createdAt: iso(), allow: [] },
sc.getHeaders(sc.dids.carol),
)
await network.processAll()
await sc.reply(sc.dids.alice, postA.ref, postA.ref, 'ungated reply')
await sc.reply(sc.dids.alice, postB.ref, postB.ref, 'ungated reply')
await network.processAll()
Expand Down
15 changes: 15 additions & 0 deletions packages/common/src/fs.ts
Expand Up @@ -13,3 +13,18 @@ export const fileExists = async (location: string): Promise<boolean> => {
throw err
}
}

/**
 * Remove a file (or, with `recursive`, a directory tree), treating a
 * missing path as a successful no-op.
 *
 * @param filepath - path to remove
 * @param recursive - remove directories and their contents (default false)
 * @throws any fs error other than the path not existing
 */
export const rmIfExists = async (
  filepath: string,
  recursive = false,
): Promise<void> => {
  // `force: true` makes fs.rm ignore a missing path, replacing the
  // manual ENOENT catch-and-return. All other errors still propagate.
  await fs.rm(filepath, { recursive, force: true })
}
7 changes: 7 additions & 0 deletions packages/crypto/src/sha.ts
Expand Up @@ -9,3 +9,10 @@ export const sha256 = async (
typeof input === 'string' ? uint8arrays.fromString(input, 'utf8') : input
return noble.sha256(bytes)
}

/**
 * Compute the sha-256 digest of a string or byte array and return it as a
 * lowercase hex-encoded string.
 */
export const sha256Hex = async (
  input: Uint8Array | string,
): Promise<string> => {
  const digest = await sha256(input)
  return uint8arrays.toString(digest, 'hex')
}