chore: Use simple "flat" CRS. (AztecProtocol#3748)
There's no need for us to have such a convoluted set of transcripts for
our CRS in a complicated format. This:
* Uses a single 6GB "flat" CRS in the ignition S3 bucket, with no header data
and in normalised big-endian form (see the byte-range sketch after this description).
* Allows us to use our normal deserialization functions for the fields,
as opposed to specific custom SRS reading code.
* Means we can now download, without complexity, any size CRS we need.
* Changes file names to e.g. `bn254_g1.dat` so they don't conflict with the old cache.
* Gets rid of the `size` file and just analyses file sizes directly.

It doesn't address:
* The new grumpkin CRS that arrived yesterday. Someone else can tackle
that one. We also need to figure out how to generate a real one. Another
trusted setup??....
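
For reference, a minimal sketch (not part of this commit) of the byte arithmetic the flat format allows. With no 28-byte transcript header and 64-byte affine points (two 32-byte big-endian field elements), the first num_points G1 points occupy exactly bytes 0 to num_points * 64 - 1, which is the range the curl/fetch calls in this diff request. The helper below is hypothetical:

#include <cstddef>
#include <string>

// Hypothetical helper: build the HTTP Range header value covering the first
// `num_points` points of the flat G1 file. Point i starts at byte offset i * 64,
// since the flat file has no header.
inline std::string flat_g1_range_header(size_t num_points)
{
    size_t last_byte = num_points * 64 - 1; // inclusive end, as HTTP ranges expect
    return "Range: bytes=0-" + std::to_string(last_byte);
}

// e.g. flat_g1_range_header(1 << 20) == "Range: bytes=0-67108863"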
charlielye committed Dec 21, 2023
1 parent 9f3d972 commit 5c6c2ca
Showing 8 changed files with 108 additions and 112 deletions.
27 changes: 20 additions & 7 deletions barretenberg/cpp/src/barretenberg/bb/file_io.hpp
@@ -1,29 +1,42 @@
#pragma once
#include <barretenberg/common/log.hpp>
#include <cstdint>
#include <fstream>
#include <ios>
#include <vector>

inline std::vector<uint8_t> read_file(const std::string& filename, size_t bytes = 0)
inline size_t get_file_size(std::string const& filename)
{
// Open the file in binary mode and move to the end.
std::ifstream file(filename, std::ios::binary | std::ios::ate);
if (!file) {
throw std::runtime_error("Unable to open file: " + filename);
return 0;
}

file.seekg(0, std::ios::end);
return (size_t)file.tellg();
}

inline std::vector<uint8_t> read_file(const std::string& filename, size_t bytes = 0)
{
// Get the file size.
std::streamsize size = bytes == 0 ? (std::streamsize)file.tellg() : (std::streamsize)bytes;
auto size = get_file_size(filename);
if (size <= 0) {
throw std::runtime_error("File is empty or there's an error reading it: " + filename);
}

auto to_read = bytes == 0 ? size : bytes;

std::ifstream file(filename, std::ios::binary);
if (!file) {
throw std::runtime_error("Unable to open file: " + filename);
}

// Create a vector with enough space for the file data.
std::vector<uint8_t> fileData((size_t)size);
std::vector<uint8_t> fileData(to_read);

// Go back to the start of the file and read all its contents.
file.seekg(0, std::ios::beg);
file.read(reinterpret_cast<char*>(fileData.data()), size);
// Read all its contents.
file.read(reinterpret_cast<char*>(fileData.data()), (std::streamsize)to_read);

return fileData;
}
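
A minimal usage sketch of the reworked helpers, assuming the flat-file layout described in the commit message (the path and point count are illustrative):

#include "barretenberg/bb/file_io.hpp"
#include <string>

// Hypothetical caller: validate a cached flat CRS file and read only the first
// `num_points` points (64 bytes each) rather than the whole file.
std::string g1_path = "/home/user/.bb-crs/bn254_g1.dat"; // illustrative path
size_t num_points = 1 << 16;
size_t file_size = get_file_size(g1_path); // returns 0 if the file can't be opened
if (file_size >= num_points * 64 && file_size % 64 == 0) {
    auto data = read_file(g1_path, num_points * 64); // read just the bytes we need
    // ... deserialize `data` into affine points, as get_bn254_crs.cpp does below ...
}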
94 changes: 31 additions & 63 deletions barretenberg/cpp/src/barretenberg/bb/get_bn254_crs.cpp
@@ -1,34 +1,17 @@
#include "get_bn254_crs.hpp"

// Gets the transcript URL from the BARRETENBERG_TRANSCRIPT_URL environment variable, if set.
// Otherwise returns the default URL.
namespace {
std::string get_bn254_transcript_url()
{
const char* ENV_VAR_NAME = "BARRETENBERG_TRANSCRIPT_URL";
const std::string DEFAULT_URL = "https://aztec-ignition.s3.amazonaws.com/MAIN%20IGNITION/monomial/transcript00.dat";

const char* env_url = std::getenv(ENV_VAR_NAME);

auto environment_variable_exists = ((env_url != nullptr) && *env_url);

return environment_variable_exists ? std::string(env_url) : DEFAULT_URL;
}
} // namespace
#include "barretenberg/bb/file_io.hpp"

std::vector<uint8_t> download_bn254_g1_data(size_t num_points)
{
size_t g1_start = 28;
size_t g1_end = g1_start + num_points * 64 - 1;
size_t g1_end = num_points * 64 - 1;

std::string url = get_bn254_transcript_url();
std::string url = "https://aztec-ignition.s3.amazonaws.com/MAIN%20IGNITION/flat/g1.dat";

std::string command =
"curl -s -H \"Range: bytes=" + std::to_string(g1_start) + "-" + std::to_string(g1_end) + "\" '" + url + "'";
std::string command = "curl -s -H \"Range: bytes=0-" + std::to_string(g1_end) + "\" '" + url + "'";

auto data = exec_pipe(command);
// Header + num_points * sizeof point.
if (data.size() < g1_end - g1_start) {
if (data.size() < g1_end) {
throw std::runtime_error("Failed to download g1 data.");
}

@@ -37,67 +20,52 @@ std::vector<uint8_t> download_bn254_g1_data(size_t num_points)

std::vector<uint8_t> download_bn254_g2_data()
{
size_t g2_start = 28 + 5040001 * 64;
size_t g2_end = g2_start + 128 - 1;

std::string url = get_bn254_transcript_url();

std::string command =
"curl -s -H \"Range: bytes=" + std::to_string(g2_start) + "-" + std::to_string(g2_end) + "\" '" + url + "'";

std::string url = "https://aztec-ignition.s3.amazonaws.com/MAIN%20IGNITION/flat/g2.dat";
std::string command = "curl -s '" + url + "'";
return exec_pipe(command);
}

std::vector<barretenberg::g1::affine_element> get_bn254_g1_data(const std::filesystem::path& path, size_t num_points)
{
std::filesystem::create_directories(path);
std::ifstream size_file(path / "size");
size_t size = 0;
if (size_file) {
size_file >> size;
size_file.close();
}
if (size >= num_points) {
vinfo("using cached crs at: ", path);
auto data = read_file(path / "g1.dat", 28 + num_points * 64);

auto g1_path = path / "bn254_g1.dat";
size_t g1_file_size = get_file_size(g1_path);

if (g1_file_size >= num_points * 64 && g1_file_size % 64 == 0) {
vinfo("using cached crs of size ", std::to_string(g1_file_size / 64), " at ", g1_path);
auto data = read_file(g1_path, g1_file_size);
auto points = std::vector<barretenberg::g1::affine_element>(num_points);
auto size_of_points_in_bytes = num_points * 64;
barretenberg::srs::IO<curve::BN254>::read_affine_elements_from_buffer(
points.data(), (char*)data.data(), size_of_points_in_bytes);
for (size_t i = 0; i < num_points; ++i) {
points[i] = from_buffer<barretenberg::g1::affine_element>(data, i * 64);
}
return points;
}

vinfo("downloading crs...");
auto data = download_bn254_g1_data(num_points);
write_file(path / "g1.dat", data);

std::ofstream new_size_file(path / "size");
if (!new_size_file) {
throw std::runtime_error("Failed to open size file for writing");
}
new_size_file << num_points;
new_size_file.close();
write_file(g1_path, data);

auto points = std::vector<barretenberg::g1::affine_element>(num_points);
barretenberg::srs::IO<curve::BN254>::read_affine_elements_from_buffer(
points.data(), (char*)data.data(), data.size());
for (size_t i = 0; i < num_points; ++i) {
points[i] = from_buffer<barretenberg::g1::affine_element>(data, i * 64);
}
return points;
}

barretenberg::g2::affine_element get_bn254_g2_data(const std::filesystem::path& path)
{
std::filesystem::create_directories(path);

try {
auto data = read_file(path / "g2.dat");
barretenberg::g2::affine_element g2_point;
barretenberg::srs::IO<curve::BN254>::read_affine_elements_from_buffer(&g2_point, (char*)data.data(), 128);
return g2_point;
} catch (std::exception&) {
auto data = download_bn254_g2_data();
write_file(path / "g2.dat", data);
barretenberg::g2::affine_element g2_point;
barretenberg::srs::IO<curve::BN254>::read_affine_elements_from_buffer(&g2_point, (char*)data.data(), 128);
return g2_point;
auto g2_path = path / "bn254_g2.dat";
size_t g2_file_size = get_file_size(g2_path);

if (g2_file_size == 128) {
auto data = read_file(g2_path);
return from_buffer<barretenberg::g2::affine_element>(data.data());
}

auto data = download_bn254_g2_data();
write_file(g2_path, data);
return from_buffer<barretenberg::g2::affine_element>(data.data());
}
5 changes: 3 additions & 2 deletions barretenberg/cpp/src/barretenberg/bb/get_grumpkin_crs.cpp
@@ -45,8 +45,9 @@ std::vector<curve::Grumpkin::AffineElement> get_grumpkin_g1_data(const std::file
size_file.close();
}
if (size >= num_points) {
vinfo("using cached crs at: ", path);
auto data = read_file(path / "grumpkin_g1.dat", 28 + num_points * 64);
auto file = path / "grumpkin_g1.dat";
vinfo("using cached crs at: ", file);
auto data = read_file(file, 28 + num_points * 64);
auto points = std::vector<curve::Grumpkin::AffineElement>(num_points);
auto size_of_points_in_bytes = num_points * 64;
barretenberg::srs::IO<curve::Grumpkin>::read_affine_elements_from_buffer(
11 changes: 9 additions & 2 deletions barretenberg/cpp/src/barretenberg/bb/main.cpp
@@ -19,7 +19,14 @@
#include <vector>

using namespace barretenberg;
std::string CRS_PATH = "./crs";

std::string getHomeDir()
{
char* home = std::getenv("HOME");
return home != nullptr ? std::string(home) : "./";
}

std::string CRS_PATH = getHomeDir() + "/.bb-crs";
bool verbose = false;

const std::filesystem::path current_path = std::filesystem::current_path();
@@ -441,7 +448,7 @@ int main(int argc, char* argv[])
std::string proof_path = get_option(args, "-p", "./proofs/proof");
std::string vk_path = get_option(args, "-k", "./target/vk");
std::string pk_path = get_option(args, "-r", "./target/pk");
CRS_PATH = get_option(args, "-c", "./crs");
CRS_PATH = get_option(args, "-c", CRS_PATH);
bool recursive = flag_present(args, "-r") || flag_present(args, "--recursive");

// Skip CRS initialization for any command which doesn't require the CRS.
@@ -138,4 +138,16 @@ template <class base_field, class Params> struct alignas(32) field2 {
}
};

template <typename B, typename base_field, typename Params> void read(B& it, field2<base_field, Params>& value)
{
using serialize::read;
read(it, value.c0);
read(it, value.c1);
}
template <typename B, typename base_field, typename Params> void write(B& buf, field2<base_field, Params> const& value)
{
using serialize::write;
write(buf, value.c0);
write(buf, value.c1);
}
} // namespace barretenberg
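
These overloads let the generic from_buffer path deserialize g2::affine_element (whose coordinates are field2 elements) directly, which is what the from_buffer<g2::affine_element> calls elsewhere in this commit rely on. A small sketch of the size arithmetic, assuming the standard 32-byte big-endian base-field encoding used by the flat CRS:

#include <cstddef>

// Assumption: each base-field element serializes to 32 bytes.
// A bn254 G2 affine point is (x, y) with x, y in Fq2, and each Fq2 is (c0, c1).
constexpr size_t fq_bytes = 32;
constexpr size_t fq2_bytes = 2 * fq_bytes;        // c0 then c1, as the read/write above encode
constexpr size_t g2_affine_bytes = 2 * fq2_bytes; // x then y
static_assert(g2_affine_bytes == 128, "matches the bn254_g2.dat size check");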
20 changes: 10 additions & 10 deletions barretenberg/cpp/src/barretenberg/srs/c_bind.cpp
@@ -9,18 +9,18 @@
using namespace barretenberg;

/**
* WARNING: The SRS is not encoded the same way as all the read/write methods encode.
* Have to use the old school io functions to parse the buffers.
* We are not passed a vector (length prefixed), but the buffer and num points independently.
* Saves having to generate the vector awkwardly on the calling side after downloading the CRS.
*/
WASM_EXPORT void srs_init_srs(uint8_t const* points_buf, uint32_t const* num_points, uint8_t const* g2_point_buf)
WASM_EXPORT void srs_init_srs(uint8_t const* points_buf, uint32_t const* num_points_buf, uint8_t const* g2_point_buf)
{
auto points = std::vector<g1::affine_element>(ntohl(*num_points));
srs::IO<curve::BN254>::read_affine_elements_from_buffer(points.data(), (char*)points_buf, points.size() * 64);

g2::affine_element g2_point;
srs::IO<curve::BN254>::read_affine_elements_from_buffer(&g2_point, (char*)g2_point_buf, 128);

barretenberg::srs::init_crs_factory(points, g2_point);
auto num_points = ntohl(*num_points_buf);
auto g1_points = std::vector<g1::affine_element>(num_points);
for (size_t i = 0; i < num_points; ++i) {
g1_points[i] = from_buffer<barretenberg::g1::affine_element>(points_buf, i * 64);
}
auto g2_point = from_buffer<g2::affine_element>(g2_point_buf);
barretenberg::srs::init_crs_factory(g1_points, g2_point);
}

/**
15 changes: 4 additions & 11 deletions barretenberg/ts/src/crs/net_crs.ts
@@ -21,12 +21,11 @@ export class NetCrs {
}

async downloadG1Data() {
const g1Start = 28;
const g1End = g1Start + this.numPoints * 64 - 1;
const g1End = this.numPoints * 64 - 1;

const response = await fetch('https://aztec-ignition.s3.amazonaws.com/MAIN%20IGNITION/monomial/transcript00.dat', {
const response = await fetch('https://aztec-ignition.s3.amazonaws.com/MAIN%20IGNITION/flat/g1.dat', {
headers: {
Range: `bytes=${g1Start}-${g1End}`,
Range: `bytes=0-${g1End}`,
},
cache: 'force-cache',
});
@@ -38,13 +37,7 @@ export class NetCrs {
* Download the G2 points data.
*/
async downloadG2Data() {
const g2Start = 28 + 5040001 * 64;
const g2End = g2Start + 128 - 1;

const response2 = await fetch('https://aztec-ignition.s3.amazonaws.com/MAIN%20IGNITION/monomial/transcript00.dat', {
headers: {
Range: `bytes=${g2Start}-${g2End}`,
},
const response2 = await fetch('https://aztec-ignition.s3.amazonaws.com/MAIN%20IGNITION/flat/g2.dat', {
cache: 'force-cache',
});

36 changes: 19 additions & 17 deletions barretenberg/ts/src/crs/node/index.ts
@@ -1,8 +1,9 @@
import { NetCrs } from '../net_crs.js';
import { GRUMPKIN_SRS_DEV_PATH, IgnitionFilesCrs } from './ignition_files_crs.js';
import { mkdirSync, readFileSync, writeFileSync } from 'fs';
import { readFile } from 'fs/promises';
import { readFile, stat } from 'fs/promises';
import createDebug from 'debug';
import { homedir } from 'os';

const debug = createDebug('bb.js:crs');

@@ -12,47 +13,48 @@ const debug = createDebug('bb.js:crs');
export class Crs {
constructor(public readonly numPoints: number, public readonly path: string) {}

static async new(numPoints: number, crsPath = './crs') {
static async new(numPoints: number, crsPath = homedir() + '/.bb-crs') {
const crs = new Crs(numPoints, crsPath);
await crs.init();
return crs;
}

async init() {
mkdirSync(this.path, { recursive: true });
const size = await readFile(this.path + '/size', 'ascii').catch(() => undefined);
if (size && +size >= this.numPoints) {
debug(`using cached crs of size: ${size}`);

const g1FileSize = await stat(this.path + '/bn254_g1.dat')
.then(stats => stats.size)
.catch(() => 0);
const g2FileSize = await stat(this.path + '/bn254_g2.dat')
.then(stats => stats.size)
.catch(() => 0);

if (g1FileSize >= this.numPoints * 64 && g1FileSize % 64 == 0 && g2FileSize == 128) {
debug(`using cached crs of size: ${g1FileSize / 64}`);
return;
}

const ignitionCrs = new IgnitionFilesCrs(this.numPoints);
const crs = ignitionCrs.pathExists() ? new IgnitionFilesCrs(this.numPoints) : new NetCrs(this.numPoints);
if (crs instanceof NetCrs) {
debug(`downloading crs of size: ${this.numPoints}`);
} else {
debug(`loading igntion file crs of size: ${this.numPoints}`);
}
debug(`downloading crs of size: ${this.numPoints}`);
const crs = new NetCrs(this.numPoints);
await crs.init();
writeFileSync(this.path + '/size', this.numPoints.toString());
writeFileSync(this.path + '/g1.dat', crs.getG1Data());
writeFileSync(this.path + '/g2.dat', crs.getG2Data());
writeFileSync(this.path + '/bn254_g1.dat', crs.getG1Data());
writeFileSync(this.path + '/bn254_g2.dat', crs.getG2Data());
}

/**
* G1 points data for prover key.
* @returns The points data.
*/
getG1Data(): Uint8Array {
return readFileSync(this.path + '/g1.dat');
return readFileSync(this.path + '/bn254_g1.dat');
}

/**
* G2 points data for verification key.
* @returns The points data.
*/
getG2Data(): Uint8Array {
return readFileSync(this.path + '/g2.dat');
return readFileSync(this.path + '/bn254_g2.dat');
}
}
