Skip to content

Commit

Permalink
feat(spec-test): add eth_aggregate_pubkeys
Browse files Browse the repository at this point in the history
  • Loading branch information
matthewkeil committed May 8, 2023
1 parent 909ce8d commit ea748da
Show file tree
Hide file tree
Showing 11 changed files with 465 additions and 37 deletions.
2 changes: 1 addition & 1 deletion rebuild/lib/index.d.ts
Expand Up @@ -114,6 +114,6 @@ export class Signature implements Serializable {
}

export function aggregatePublicKeys(keys: PublicKeyArg[]): Promise<PublicKey>;
export function aggregatePublicKeysSync(keys: PublicKeyArg[]): PublicKey;
export function aggregatePublicKeysSync(keys: PublicKeyArg[]): PublicKey | null;
export function aggregateSignatures(signatures: SignatureArg[]): Promise<Signature>;
export function aggregateSignaturesSync(signatures: SignatureArg[]): Signature;
6 changes: 5 additions & 1 deletion rebuild/package.json
Expand Up @@ -18,8 +18,10 @@
"build:clean": "npm run clean && npm run build",
"build:debug": "node-gyp build --debug",
"lint": "eslint --color --ext .ts lib/ test/",
"download-spec-tests": "ts-node test/spec/downloadTests.ts",
"test": "yarn test:unit",
"test:unit": "mocha test/unit/**/*.test.ts"
"test:unit": "mocha test/unit/**/*.test.ts",
"test:spec": "mocha test/spec/**/*.test.ts"
},
"repository": {
"type": "git",
Expand All @@ -45,7 +47,9 @@
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-prettier": "^4.2.1",
"mocha": "^10.2.0",
"node-fetch": "2.6.6",
"prettier": "^2.8.7",
"tar": "^6.1.14",
"ts-node": "^10.9.1",
"typescript": "^5.0.4"
},
Expand Down
11 changes: 11 additions & 0 deletions rebuild/src/addon.cc
Expand Up @@ -156,6 +156,17 @@ bool Uint8ArrayArg::ValidateLength(size_t length1, size_t length2)
}
return is_valid;
};
bool Uint8ArrayArg::IsZeroBytes()
{
for (size_t i = 0; i < ByteLength(); i++)
{
if (_data[i] != 0)
{
return false;
}
}
return true;
}
/**
*
*
Expand Down
1 change: 1 addition & 0 deletions rebuild/src/addon.h
Expand Up @@ -203,6 +203,7 @@ class Uint8ArrayArg : public BlstBase
const uint8_t *Data();
size_t ByteLength();
bool ValidateLength(size_t length1, size_t length2 = 0);
bool IsZeroBytes();

protected:
std::string _error_prefix;
Expand Down
17 changes: 17 additions & 0 deletions rebuild/src/functions.cc
Expand Up @@ -16,6 +16,7 @@ namespace
public:
AggregatePublicKeysWorker(const Napi::CallbackInfo &info, size_t arg_position)
: BlstAsyncWorker(info),
_is_valid{true},
_result{},
_public_keys{_env, _info[arg_position]} {}

Expand All @@ -29,8 +30,19 @@ namespace

void Execute() override
{
if (_public_keys.Size() == 0)
{
_is_valid = false;
return;
}
for (size_t i = 0; i < _public_keys.Size(); i++)
{
bool is_valid = _public_keys[i].NativeValidate();
if (!is_valid)
{
_is_valid = false;
return;
}
try
{
_result.add(*_public_keys[i].AsJacobian());
Expand All @@ -47,6 +59,10 @@ namespace
Napi::Value GetReturnValue() override
{
Napi::EscapableHandleScope scope(_env);
if (!_is_valid)
{
return scope.Escape(_env.Null());
}
Napi::Object wrapped = _module->_public_key_ctr.New({Napi::External<void *>::New(Env(), nullptr)});
wrapped.TypeTag(&_module->_public_key_tag);
PublicKey *pk = PublicKey::Unwrap(wrapped);
Expand All @@ -58,6 +74,7 @@ namespace
blst::P1 &GetAggregate() { return _result; };

private:
bool _is_valid;
blst::P1 _result;
PublicKeyArgArray _public_keys;
};
Expand Down
100 changes: 65 additions & 35 deletions rebuild/src/public_key.cc
Expand Up @@ -149,37 +149,10 @@ Napi::Value PublicKey::Deserialize(const Napi::CallbackInfo &info)
return scope.Escape(wrapped);
}

// Lazily materializes the Jacobian (blst::P1) representation of this key.
// Throws if the key holds neither representation (never initialized).
const blst::P1 *PublicKey::AsJacobian()
{
if (!_has_jacobian && !_has_affine)
{
throw Napi::Error::New(BlstBase::_env, "PublicKey not initialized");
}
if (!_has_jacobian)
{
// convert from the cached affine point and memoize the result
_jacobian.reset(new blst::P1{_affine->to_jacobian()});
_has_jacobian = true;
}
return _jacobian.get();
}

// Lazily materializes the affine (blst::P1_Affine) representation of this key.
// Throws if the key holds neither representation (never initialized).
const blst::P1_Affine *PublicKey::AsAffine()
{
if (!_has_jacobian && !_has_affine)
{
throw Napi::Error::New(BlstBase::_env, "PublicKey not initialized");
}
if (!_has_affine)
{
// convert from the cached Jacobian point and memoize the result
_affine.reset(new blst::P1_Affine{_jacobian->to_affine()});
_has_affine = true;
}
return _affine.get();
}

PublicKey::PublicKey(const Napi::CallbackInfo &info)
: BlstBase{info.Env()},
Napi::ObjectWrap<PublicKey>{info},
_is_zero_key{false},
_has_jacobian{false},
_has_affine{false},
_jacobian{nullptr},
Expand Down Expand Up @@ -209,10 +182,18 @@ Napi::Value PublicKey::Serialize(const Napi::CallbackInfo &info)
: _module->_public_key_uncompressed_length);
if (_has_jacobian)
{
// if (_jacobian->is_inf())
// {
// return scope.Escape(env.Null());
// }
compressed ? _jacobian->compress(serialized.Data()) : _jacobian->serialize(serialized.Data());
}
else if (_has_affine)
{
// if (_affine->is_inf())
// {
// return scope.Escape(env.Null());
// }
compressed ? _affine->compress(serialized.Data()) : _affine->serialize(serialized.Data());
}
else
Expand Down Expand Up @@ -247,6 +228,47 @@ Napi::Value PublicKey::KeyValidateSync(const Napi::CallbackInfo &info)
return scope.Escape(worker.RunSync());
}

/**
 * Returns the Jacobian (blst::P1) form of this key, converting from the
 * cached affine form on first use and memoizing the result.
 *
 * @throws Napi::Error when neither representation is initialized
 */
const blst::P1 *PublicKey::AsJacobian()
{
    if (!(_has_jacobian || _has_affine))
    {
        throw Napi::Error::New(BlstBase::_env, "PublicKey not initialized");
    }
    if (_has_jacobian)
    {
        return _jacobian.get();
    }
    // derive the Jacobian point from the affine one and cache it
    _jacobian.reset(new blst::P1{_affine->to_jacobian()});
    _has_jacobian = true;
    return _jacobian.get();
}

/**
 * Returns the affine (blst::P1_Affine) form of this key, converting from
 * the cached Jacobian form on first use and memoizing the result.
 *
 * @throws Napi::Error when neither representation is initialized
 */
const blst::P1_Affine *PublicKey::AsAffine()
{
    if (!(_has_jacobian || _has_affine))
    {
        throw Napi::Error::New(BlstBase::_env, "PublicKey not initialized");
    }
    if (_has_affine)
    {
        return _affine.get();
    }
    // derive the affine point from the Jacobian one and cache it
    _affine.reset(new blst::P1_Affine{_jacobian->to_affine()});
    _has_affine = true;
    return _affine.get();
}

/**
 * Validates the key natively (no JS interaction): it must hold an
 * initialized representation that is not the point at infinity and
 * passes blst's in_group() subgroup check.
 */
bool PublicKey::NativeValidate()
{
    // || short-circuits: the affine branch is only evaluated when the
    // Jacobian branch did not already prove validity.
    return (_has_jacobian && !_jacobian->is_inf() && _jacobian->in_group()) ||
           (_has_affine && !_affine->is_inf() && _affine->in_group());
}

/**
*
*
Expand Down Expand Up @@ -279,16 +301,24 @@ PublicKeyArg::PublicKeyArg(Napi::Env env, Napi::Value raw_arg)
wrapped.TypeTag(&_module->_public_key_tag);
_ref = Napi::Persistent(wrapped);
_public_key = PublicKey::Unwrap(wrapped);
try
if (_bytes.IsZeroBytes())
{
_public_key->_jacobian.reset(new blst::P1{_bytes.Data(), _bytes.ByteLength()});
_public_key->_jacobian.reset(new blst::P1{});
_public_key->_is_zero_key = true;
}
catch (blst::BLST_ERROR &err)
else
{
std::ostringstream msg;
msg << _module->GetBlstErrorString(err) << ": Invalid PublicKey";
SetError(msg.str());
return;
try
{
_public_key->_jacobian.reset(new blst::P1{_bytes.Data(), _bytes.ByteLength()});
}
catch (blst::BLST_ERROR &err)
{
std::ostringstream msg;
msg << _module->GetBlstErrorString(err) << ": Invalid PublicKey";
SetError(msg.str());
return;
}
}
_public_key->_has_jacobian = true;
return;
Expand Down
3 changes: 3 additions & 0 deletions rebuild/src/public_key.h
Expand Up @@ -8,6 +8,7 @@
class PublicKey : public BlstBase, public Napi::ObjectWrap<PublicKey>
{
public:
bool _is_zero_key;
bool _has_jacobian;
bool _has_affine;
std::unique_ptr<blst::P1> _jacobian;
Expand All @@ -22,6 +23,7 @@ class PublicKey : public BlstBase, public Napi::ObjectWrap<PublicKey>

const blst::P1 *AsJacobian();
const blst::P1_Affine *AsAffine();
bool NativeValidate();
};

class PublicKeyArg : public BlstBase
Expand All @@ -37,6 +39,7 @@ class PublicKeyArg : public BlstBase

const blst::P1 *AsJacobian();
const blst::P1_Affine *AsAffine();
bool NativeValidate() { return _public_key->NativeValidate(); };

private:
PublicKey *_public_key;
Expand Down
68 changes: 68 additions & 0 deletions rebuild/test/spec/downloadTests.ts
@@ -0,0 +1,68 @@
import fs from "node:fs";
import path from "node:path";
import {execSync} from "node:child_process";
import tar from "tar";
import fetch from "node-fetch";
import {SPEC_TEST_LOCATION, SPEC_TEST_VERSION, SPEC_TEST_REPO_URL, SPEC_TEST_TO_DOWNLOAD} from "./specTestVersioning";

/* eslint-disable no-console */

// Resolve the desired spec-test release and where it should live on disk.
const specVersion = SPEC_TEST_VERSION;
const outputDir = SPEC_TEST_LOCATION;
const specTestsRepoUrl = SPEC_TEST_REPO_URL;

// version.txt records which release the cached tests came from; "none"
// means no prior download exists.
const versionFile = path.join(outputDir, "version.txt");
const existingVersion = fs.existsSync(versionFile) ? fs.readFileSync(versionFile, "utf8").trim() : "none";

if (existingVersion === specVersion) {
  console.log(`version ${specVersion} already downloaded`);
  process.exit(0);
} else {
  console.log(`Downloading new version: ${specVersion} existingVersion: ${existingVersion}`);
}

if (fs.existsSync(outputDir)) {
  console.log(`Cleaning existing version ${existingVersion} at ${outputDir}`);
  // fs.rmSync is portable (works on Windows) and avoids the quoting /
  // injection hazards of shelling out to `rm -rf ${outputDir}`.
  fs.rmSync(outputDir, {recursive: true, force: true});
}

fs.mkdirSync(outputDir, {recursive: true});

// One release tarball per test suite listed in SPEC_TEST_TO_DOWNLOAD.
const urls = SPEC_TEST_TO_DOWNLOAD.map((test) => `${specTestsRepoUrl}/releases/download/${specVersion}/${test}.tar.gz`);

downloadAndExtract(urls, outputDir)
  .then(() => {
    console.log("Downloads and extractions complete.");
    // Record the version only after every archive extracted successfully,
    // so a partial download is retried on the next run.
    fs.writeFileSync(versionFile, specVersion);
  })
  .catch((error) => {
    console.error(`Error downloading test files: ${error}`);
    process.exit(1);
  });

/**
 * Executes `cmd` synchronously through the system shell and returns its
 * trimmed stdout. On failure the offending command is logged and the
 * underlying error is rethrown for the caller to handle.
 */
function shell(cmd: string): string {
  let stdout: string;
  try {
    stdout = execSync(cmd, {encoding: "utf8"});
  } catch (err) {
    console.error(`Error executing shell command: ${cmd}`);
    throw err;
  }
  return stdout.trim();
}

/**
 * Downloads each release tarball to `outputDir` and extracts it in place.
 *
 * Archives are processed sequentially; the loop aborts (rejecting the
 * returned promise) on the first failed download or extraction.
 *
 * @param urls - fully-qualified tarball URLs (each must end in a file name)
 * @param outputDir - directory receiving both the .tar.gz files and their contents
 * @throws Error when a URL has no derivable file name or a download fails
 */
async function downloadAndExtract(urls: string[], outputDir: string): Promise<void> {
  for (const url of urls) {
    const fileName = url.split("/").pop();
    // `.pop()` can yield undefined/"" — fail fast instead of silently
    // writing to a file literally named "undefined".
    if (!fileName) {
      throw new Error(`Cannot derive file name from url: ${url}`);
    }
    const filePath = path.resolve(outputDir, fileName);
    const response = await fetch(url);
    if (!response.ok || !response.body) {
      throw new Error(`Failed to download ${url}`);
    }

    // node-fetch v2 exposes the body as a Node stream; writeFile consumes it.
    await fs.promises.writeFile(filePath, response.body);

    await tar.x({
      file: filePath,
      cwd: outputDir,
    });
  }
}

0 comments on commit ea748da

Please sign in to comment.