
Commit

deps: switch varint for uint8-varint (#337)
It's faster and we use it elsewhere in the stack so save ourselves
an extra dependency.
achingbrain committed Aug 16, 2023
1 parent 9e786a0 commit 166fd43
Showing 3 changed files with 11 additions and 12 deletions.
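
Before the diffs, a note on the API difference that drives every hunk below: the old varint module reports how many bytes a decode consumed through the mutable varint.decode.bytes property, whereas uint8-varint is stateless, so callers recompute the width from the decoded value with encodingLength(). A minimal standalone sketch of the new usage (the buffer is illustrative and not taken from the repo; it assumes a minimally encoded varint, where encodingLength(value) equals the bytes consumed):

import * as varint from 'uint8-varint'

// 300 encoded as an unsigned LEB128 varint: 0xac 0x02
const buf = Uint8Array.from([0xac, 0x02])

// read the value at offset 0
const value = varint.decode(buf, 0) // 300

// no decode.bytes side effect to consult; derive the width from the value
const width = varint.encodingLength(value) // 2

console.info(value, width) // 300 2
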
package.json (5 changes: 2 additions & 3 deletions)
@@ -170,12 +170,11 @@
     "@libp2p/interface": "^0.1.1",
     "dns-over-http-resolver": "^2.1.0",
     "multiformats": "^12.0.1",
-    "uint8arrays": "^4.0.2",
-    "varint": "^6.0.0"
+    "uint8-varint": "^2.0.1",
+    "uint8arrays": "^4.0.2"
   },
   "devDependencies": {
     "@types/sinon": "^10.0.14",
-    "@types/varint": "^6.0.0",
     "aegir": "^40.0.2",
     "sinon": "^15.0.0"
   },
src/codec.ts (10 changes: 5 additions & 5 deletions)
@@ -1,6 +1,6 @@
+import * as varint from 'uint8-varint'
 import { concat as uint8ArrayConcat } from 'uint8arrays/concat'
 import { toString as uint8ArrayToString } from 'uint8arrays/to-string'
-import varint from 'varint'
 import { convertToBytes, convertToString } from './convert.js'
 import { getProtocol } from './protocols-table.js'
 import type { StringTuple, Tuple, Protocol } from './index.js'
@@ -79,7 +79,7 @@ export function bytesToMultiaddrParts (bytes: Uint8Array): MultiaddrParts {
   let i = 0
   while (i < bytes.length) {
     const code = varint.decode(bytes, i)
-    const n = varint.decode.bytes ?? 0
+    const n = varint.encodingLength(code)

     const p = getProtocol(code)

@@ -165,8 +165,8 @@ function sizeForAddr (p: Protocol, addr: Uint8Array | number[]): number {
   } else if (p.size === 0) {
     return 0
   } else {
-    const size = varint.decode(addr)
-    return size + (varint.decode.bytes ?? 0)
+    const size = varint.decode(addr instanceof Uint8Array ? addr : Uint8Array.from(addr))
+    return size + varint.encodingLength(size)
   }
 }

@@ -175,7 +175,7 @@ export function bytesToTuples (buf: Uint8Array): Tuple[] {
   let i = 0
   while (i < buf.length) {
     const code = varint.decode(buf, i)
-    const n = varint.decode.bytes ?? 0
+    const n = varint.encodingLength(code)

     const p = getProtocol(code)

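Each src/codec.ts hunk above has the same shape: decode a protocol code, then advance the cursor by that code's encoded width. A standalone sketch of the loop with uint8-varint (hypothetical buffer, not a real multiaddr):

import * as varint from 'uint8-varint'

// three varints packed back to back: 4, 300 (0xac 0x02) and 6
const buf = Uint8Array.from([0x04, 0xac, 0x02, 0x06])

const codes: number[] = []
let i = 0

while (i < buf.length) {
  // read the next value at offset i
  const code = varint.decode(buf, i)
  // previously varint.decode.bytes; now derived from the decoded value
  const n = varint.encodingLength(code)

  codes.push(code)
  i += n
}

console.info(codes) // [ 4, 300, 6 ]
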
src/convert.ts (8 changes: 4 additions & 4 deletions)
@@ -10,10 +10,10 @@ import { base58btc } from 'multiformats/bases/base58'
 import { bases } from 'multiformats/basics'
 import { CID } from 'multiformats/cid'
 import * as Digest from 'multiformats/hashes/digest'
+import * as varint from 'uint8-varint'
 import { concat as uint8ArrayConcat } from 'uint8arrays/concat'
 import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
 import { toString as uint8ArrayToString } from 'uint8arrays/to-string'
-import varint from 'varint'
 import * as ip from './ip.js'
 import { getProtocol } from './protocols-table.js'
 import type { Multiaddr } from './index.js'
@@ -176,7 +176,7 @@ function str2bytes (str: string): Uint8Array {

 function bytes2str (buf: Uint8Array): string {
   const size = varint.decode(buf)
-  buf = buf.slice(varint.decode.bytes)
+  buf = buf.slice(varint.encodingLength(size))

   if (buf.length !== size) {
     throw new Error('inconsistent lengths')
@@ -206,7 +206,7 @@ function mb2bytes (mbstr: string): Uint8Array {
 }
 function bytes2mb (buf: Uint8Array): string {
   const size = varint.decode(buf)
-  const hash = buf.slice(varint.decode.bytes)
+  const hash = buf.slice(varint.encodingLength(size))

   if (hash.length !== size) {
     throw new Error('inconsistent lengths')
@@ -220,7 +220,7 @@ function bytes2mb (buf: Uint8Array): string {
  */
 function bytes2mh (buf: Uint8Array): string {
   const size = varint.decode(buf)
-  const address = buf.slice(varint.decode.bytes)
+  const address = buf.slice(varint.encodingLength(size))

   if (address.length !== size) {
     throw new Error('inconsistent lengths')
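The src/convert.ts hunks all use the same length-prefix pattern: decode a size varint, skip past it, and check that the remaining payload matches the declared size. A minimal standalone sketch under the same assumption (illustrative bytes, not a real multiaddr component):

import * as varint from 'uint8-varint'
import { toString as uint8ArrayToString } from 'uint8arrays/to-string'

// size prefix (5) followed by the ascii bytes of "hello"
const buf = Uint8Array.from([0x05, 0x68, 0x65, 0x6c, 0x6c, 0x6f])

const size = varint.decode(buf)
// skip the prefix; its width comes from encodingLength(size) rather than
// the old varint.decode.bytes side effect
const payload = buf.slice(varint.encodingLength(size))

if (payload.length !== size) {
  throw new Error('inconsistent lengths')
}

console.info(uint8ArrayToString(payload)) // 'hello'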
