Bump packed
paulmillr committed May 16, 2024
1 parent 9299313 commit f8436bc
Showing 4 changed files with 28 additions and 37 deletions.
8 changes: 4 additions & 4 deletions package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -9,7 +9,7 @@
"dependencies": {
"@noble/curves": "~1.4.0",
"@noble/hashes": "~1.4.0",
"micro-packed": "~0.5.3"
"micro-packed": "~0.6.2"
},
"devDependencies": {
"@paulmillr/jsbt": "0.1.0",
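Note (not part of the diff): npm tilde ranges only float at the patch level, so "~0.5.3" resolves to >=0.5.3 <0.6.0 and can never pick up a 0.6.x release; moving to micro-packed 0.6 therefore needs this explicit bump to "~0.6.2" (>=0.6.2 <0.7.0).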
42 changes: 20 additions & 22 deletions src/abi/decoder.ts
@@ -14,19 +14,17 @@ There is some really crazy stuff going on here with TypeScript types.
function EPad<T>(p: P.CoderType<T>) {
return P.padLeft(32, p, P.ZeroPad);
}
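// Illustration (not part of the diff): every slot in the ABI head is a 32-byte word, so
// EPad(P.bool).encode(true) yields 31 zero bytes followed by 0x01.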
// Save pointer values next to the array. ETH-only stuff.
// For some reason, the ptr value inside arrays is placed right after the ptr.
// This should work by default without 'wrappedArray'.
// TODO: Debug more later.
function wrappedArray<T>(len: P.Length, inner: P.CoderType<T>): P.CoderType<T[]> {

// Main difference from a regular array: the length is stored outside, and offsets are calculated without the length.
function ethArray<T>(inner: P.CoderType<T>): P.CoderType<T[]> {
return P.wrap({
size: typeof len === 'number' && inner.size ? len * inner.size : undefined,
size: undefined,
encodeStream: (w: P.Writer, value: T[]) => {
w.length(len, value.length);
U256BE_LEN.encodeStream(w, value.length);
w.bytes(P.array(value.length, inner).encode(value));
},
decodeStream: (r: P.Reader): T[] =>
P.array(r.length(len), inner).decodeStream(r.offsetReader(r.pos)),
P.array(U256BE_LEN.decodeStream(r), inner).decodeStream(r.offsetReader(r.pos)),
});
}
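// Illustration (not part of the diff): ABI layout of a dynamic string[] holding ["ab"].
// The array starts with a 32-byte length word (read via U256BE_LEN); element offsets are
// measured from the position right after it, hence decodeStream(r.offsetReader(r.pos)):
//   0x..0001  length = 1
//   0x..0020  offset of element 0, relative to the word above
//   0x..0002  length of "ab"
//   0x6162..  "ab", right-padded with zeros to 32 bytes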

@@ -38,19 +36,19 @@ const ethInt = (bits: number, signed = false) => {
throw new Error('ethInt: invalid numeric type');
const _bits = BigInt(bits);
const inner = P.bigint(32, false, signed);
return P.wrap({
size: inner.size,
encodeStream: (w: P.Writer, value: bigint) => {
const _value = BigInt(value);
P.checkBounds(w, _value, _bits, !!signed);
inner.encodeStream(w, BigInt(_value));
},
decodeStream: (r: P.Reader): bigint => {
const value = inner.decodeStream(r);
P.checkBounds(r, value, _bits, !!signed);
return P.validate(
P.wrap({
size: inner.size,
encodeStream: (w: P.Writer, value: bigint) => inner.encodeStream(w, value),
decodeStream: (r: P.Reader): bigint => inner.decodeStream(r),
}),
(value) => {
// TODO: validate is useful for narrowing types; need to add support in types?
if (typeof value === 'number') value = BigInt(value);
P.utils.checkBounds(value, _bits, !!signed);
return value;
},
});
}
);
};
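// Illustration (assumption, not part of the diff): the P.validate wrapper above runs the
// callback on both encode input and decode output, so out-of-range values fail in both
// directions. E.g. with const i8 = ethInt(8, true):
//   i8.encode(127n)  -> a 32-byte big-endian word ending in 0x7f
//   i8.encode(128n)  -> throws, since 128 does not fit a signed 8-bit range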

// Ugly hack, because a tuple of pointers is considered "dynamic" for no reason.
@@ -169,7 +167,7 @@ export function mapComponent<T extends BaseComponent>(c: T): P.CoderType<MapType
return out as any;
} else {
// Dynamic array
return P.pointer(PTR, wrappedArray(U256BE_LEN, inner)) as any;
return P.pointer(PTR, ethArray(inner)) as any;
}
}
if (c.type === 'tuple') {
@@ -198,7 +196,7 @@ export function mapComponent<T extends BaseComponent>(c: T): P.CoderType<MapType
return P.pointer(PTR, P.padRight(32, P.string(U256BE_LEN), P.ZeroPad)) as any;
if (c.type === 'bytes')
return P.pointer(PTR, P.padRight(32, P.bytes(U256BE_LEN), P.ZeroPad)) as any;
if (c.type === 'address') return EPad(P.hex(20, false, true)) as any;
if (c.type === 'address') return EPad(P.hex(20, { isLE: false, with0x: true })) as any;
if (c.type === 'bool') return EPad(P.bool) as any;
if ((m = /^(u?)int([0-9]+)?$/.exec(c.type)))
return ethInt(m[2] ? +m[2] : 256, m[1] !== 'u') as any;
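// Illustration (not part of the diff): 'uint256' matches as m = ['uint256', 'u', '256'],
// giving ethInt(256, false); 'int8' gives ethInt(8, true); bare 'uint' defaults to 256 bits.
// An 'address' is a 20-byte 0x-prefixed hex string left-padded to a 32-byte word by EPad.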
13 changes: 3 additions & 10 deletions src/ssz.ts
@@ -158,7 +158,7 @@ const array = <T>(len: P.Length, inner: SSZCoder<T>): P.CoderType<T[]> => {
// definitions are hardcoded. Also, pointers are very strict here.
for (let i = 0; i < offsets.length; i++) {
const pos = offsets[i];
const next = i + 1 < offsets.length ? offsets[i + 1] : r.data.length;
const next = i + 1 < offsets.length ? offsets[i + 1] : r.totalBytes;
if (next < pos) throw r.err('SSZ/array: decreasing offset');
const len = next - pos;
if (r.pos !== pos) throw r.err('SSZ/array: wrong offset');
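// Illustration (not part of the diff): a list of two variable-size byte lists [0xaa, 0xbbcc]
// serializes as a fixed part of 4-byte little-endian offsets followed by the element data:
//   08 00 00 00 | 09 00 00 00 | aa | bb cc
// Element 0 spans [8, 9), element 1 spans [9, 11); the last element is bounded by the total
// input length (r.totalBytes).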
@@ -248,7 +248,7 @@ export const container = <T extends Record<string, SSZCoder<any>>>(
// TODO: how to merge this with array?
const name = offsetFields[i];
const pos = offsets[i];
const next = i + 1 < offsets.length ? offsets[i + 1] : r.data.length;
const next = i + 1 < offsets.length ? offsets[i + 1] : r.totalBytes;
if (next < pos) throw r.err('SSZ/container: decreasing offset');
const len = next - pos;
if (r.pos !== pos) throw r.err('SSZ/container: wrong offset');
@@ -353,13 +353,6 @@ export const bitlist = (maxLen: number): SSZCoder<boolean[]> => {
},
};
};
// Breaks pointer offsets
// TODO: move to packed?
const noop = <T>(inner: P.CoderType<T>): P.CoderType<T> =>
P.wrap({
encodeStream: (w, value) => w.bytes(inner.encode(value)),
decodeStream: (r) => inner.decode(r.bytes(r.leftBytes)),
});

/**
* Union type (None is null)
@@ -379,7 +372,7 @@ export const union = (
P.tag(
P.U8,
Object.fromEntries(
types.map((t, i) => [i, t === null ? P.magicBytes(P.EMPTY) : noop(t)]) as any
types.map((t, i) => [i, t === null ? P.magicBytes(P.EMPTY) : P.prefix(null, t)]) as any
)
),
{
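// Illustration (not part of the diff; P.prefix semantics are an assumption): per the SSZ spec,
// a union is serialized as a one-byte selector followed by the selected value's serialization,
// e.g. selector 1 wrapping a uint64 value of 5:
//   01 | 05 00 00 00 00 00 00 00
// P.tag(P.U8, ...) handles the selector byte; P.prefix(null, t) hands all remaining bytes to
// `t` on decode, which the removed noop() helper previously did by hand via r.leftBytes.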
