From 45e70747bb8cbf98741d1b3365ec2217f92958dc Mon Sep 17 00:00:00 2001 From: kevaundray Date: Thu, 18 Apr 2024 15:02:30 +0100 Subject: [PATCH 01/60] Update polynomial-commitments-sampling.md --- specs/_features/eip7594/polynomial-commitments-sampling.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 829e16ebaa..cc10650fca 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -106,7 +106,7 @@ def bytes_to_cell(cell_bytes: Vector[Bytes32, FIELD_ELEMENTS_PER_CELL]) -> Cell: #### `g2_lincomb` ```python -def g2_lincomb(points: Sequence[KZGCommitment], scalars: Sequence[BLSFieldElement]) -> Bytes96: +def g2_lincomb(points: Sequence[G2Point], scalars: Sequence[BLSFieldElement]) -> Bytes96: """ BLS multiscalar multiplication in G2. This function can be optimized using Pippenger's algorithm and variants. 
""" From 3b889645ffc1603bdc976b69cddd09aea58b8354 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Thu, 18 Apr 2024 17:38:04 +0100 Subject: [PATCH 02/60] number of cells in an extended blob --- .../polynomial-commitments-sampling.md | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 829e16ebaa..c719e0a629 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -84,7 +84,7 @@ Cells are the smallest unit of blob data that can come with their own KZG proofs | `FIELD_ELEMENTS_PER_EXT_BLOB` | `2 * FIELD_ELEMENTS_PER_BLOB` | Number of field elements in a Reed-Solomon extended blob | | `FIELD_ELEMENTS_PER_CELL` | `uint64(64)` | Number of field elements in a cell | | `BYTES_PER_CELL` | `FIELD_ELEMENTS_PER_CELL * BYTES_PER_FIELD_ELEMENT` | The number of bytes in a cell | -| `CELLS_PER_BLOB` | `FIELD_ELEMENTS_PER_EXT_BLOB // FIELD_ELEMENTS_PER_CELL` | The number of cells in a blob | +| `CELLS_PER_EXT_BLOB` | `FIELD_ELEMENTS_PER_EXT_BLOB // FIELD_ELEMENTS_PER_CELL` | The number of cells in an extended blob | | `RANDOM_CHALLENGE_KZG_CELL_BATCH_DOMAIN` | `b'RCKZGCBATCH__V1_'` | ## Helper functions @@ -359,7 +359,7 @@ def coset_for_cell(cell_id: CellID) -> Cell: """ Get the coset for a given ``cell_id`` """ - assert cell_id < CELLS_PER_BLOB + assert cell_id < CELLS_PER_EXT_BLOB roots_of_unity_brp = bit_reversal_permutation( compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB) ) @@ -374,8 +374,8 @@ def coset_for_cell(cell_id: CellID) -> Cell: ```python def compute_cells_and_proofs(blob: Blob) -> Tuple[ - Vector[Cell, CELLS_PER_BLOB], - Vector[KZGProof, CELLS_PER_BLOB]]: + Vector[Cell, CELLS_PER_EXT_BLOB], + Vector[KZGProof, CELLS_PER_EXT_BLOB]]: """ Compute all the cell proofs for one blob. 
This is an inefficient O(n^2) algorithm, for performant implementation the FK20 algorithm that runs in O(n log n) should be @@ -389,7 +389,7 @@ def compute_cells_and_proofs(blob: Blob) -> Tuple[ cells = [] proofs = [] - for i in range(CELLS_PER_BLOB): + for i in range(CELLS_PER_EXT_BLOB): coset = coset_for_cell(i) proof, ys = compute_kzg_proof_multi_impl(polynomial_coeff, coset) cells.append(ys) @@ -401,7 +401,7 @@ def compute_cells_and_proofs(blob: Blob) -> Tuple[ #### `compute_cells` ```python -def compute_cells(blob: Blob) -> Vector[Cell, CELLS_PER_BLOB]: +def compute_cells(blob: Blob) -> Vector[Cell, CELLS_PER_EXT_BLOB]: """ Compute the cell data for a blob (without computing the proofs). @@ -414,7 +414,7 @@ def compute_cells(blob: Blob) -> Vector[Cell, CELLS_PER_BLOB]: compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB)) extended_data_rbo = bit_reversal_permutation(extended_data) return [extended_data_rbo[i * FIELD_ELEMENTS_PER_CELL:(i + 1) * FIELD_ELEMENTS_PER_CELL] - for i in range(CELLS_PER_BLOB)] + for i in range(CELLS_PER_EXT_BLOB)] ``` ### Cell verification @@ -491,11 +491,11 @@ def construct_vanishing_polynomial(missing_cell_ids: Sequence[CellID]) -> Tuple[ corresponds to a missing field element. 
""" # Get the small domain - roots_of_unity_reduced = compute_roots_of_unity(CELLS_PER_BLOB) + roots_of_unity_reduced = compute_roots_of_unity(CELLS_PER_EXT_BLOB) # Compute polynomial that vanishes at all the missing cells (over the small domain) short_zero_poly = vanishing_polynomialcoeff([ - roots_of_unity_reduced[reverse_bits(missing_cell_id, CELLS_PER_BLOB)] + roots_of_unity_reduced[reverse_bits(missing_cell_id, CELLS_PER_EXT_BLOB)] for missing_cell_id in missing_cell_ids ]) @@ -510,7 +510,7 @@ def construct_vanishing_polynomial(missing_cell_ids: Sequence[CellID]) -> Tuple[ zero_poly_eval_brp = bit_reversal_permutation(zero_poly_eval) # Sanity check - for cell_id in range(CELLS_PER_BLOB): + for cell_id in range(CELLS_PER_EXT_BLOB): start = cell_id * FIELD_ELEMENTS_PER_CELL end = (cell_id + 1) * FIELD_ELEMENTS_PER_CELL if cell_id in missing_cell_ids: @@ -605,7 +605,7 @@ def recover_polynomial(cell_ids: Sequence[CellID], """ assert len(cell_ids) == len(cells_bytes) # Check we have enough cells to be able to perform the reconstruction - assert CELLS_PER_BLOB / 2 <= len(cell_ids) <= CELLS_PER_BLOB + assert CELLS_PER_EXT_BLOB / 2 <= len(cell_ids) <= CELLS_PER_EXT_BLOB # Check for duplicates assert len(cell_ids) == len(set(cell_ids)) @@ -615,7 +615,7 @@ def recover_polynomial(cell_ids: Sequence[CellID], # Convert from bytes to cells cells = [bytes_to_cell(cell_bytes) for cell_bytes in cells_bytes] - missing_cell_ids = [cell_id for cell_id in range(CELLS_PER_BLOB) if cell_id not in cell_ids] + missing_cell_ids = [cell_id for cell_id in range(CELLS_PER_EXT_BLOB) if cell_id not in cell_ids] zero_poly_coeff, zero_poly_eval, zero_poly_eval_brp = construct_vanishing_polynomial(missing_cell_ids) eval_shifted_extended_evaluation, eval_shifted_zero_poly, shift_inv = recover_shifted_data( From 7e85c962f2a2defa5f2d50dbd3743dc03e89f8a8 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Thu, 18 Apr 2024 17:40:51 +0100 Subject: [PATCH 03/60] change CELLS_PER_BLOB in 
das-core.md --- specs/_features/eip7594/das-core.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/_features/eip7594/das-core.md b/specs/_features/eip7594/das-core.md index 49b387558f..0e3b0809e3 100644 --- a/specs/_features/eip7594/das-core.md +++ b/specs/_features/eip7594/das-core.md @@ -147,7 +147,7 @@ def recover_matrix(cells_dict: Dict[Tuple[BlobIndex, CellID], Cell], blob_count: full_polynomial = recover_polynomial(cell_ids, cells_bytes) cells_from_full_polynomial = [ full_polynomial[i * FIELD_ELEMENTS_PER_CELL:(i + 1) * FIELD_ELEMENTS_PER_CELL] - for i in range(CELLS_PER_BLOB) + for i in range(CELLS_PER_EXT_BLOB) ] extended_matrix.extend(cells_from_full_polynomial) return ExtendedMatrix(extended_matrix) From d91314016411f0fa74d210c6c83d80be49a0af96 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Thu, 18 Apr 2024 17:52:33 +0100 Subject: [PATCH 04/60] fix comments for kzg_proof_multi --- .../eip7594/polynomial-commitments-sampling.md | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 829e16ebaa..1937ac9793 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -308,18 +308,26 @@ def compute_kzg_proof_multi_impl( polynomial_coeff: PolynomialCoeff, zs: Sequence[BLSFieldElement]) -> Tuple[KZGProof, Sequence[BLSFieldElement]]: """ - Helper function that computes multi-evaluation KZG proofs. + Compute a KZG multi-evaluation proof for a set of `k` points. 
+ + This is done by committing to the following quotient polynomial: + Q(X) = f(X) - r(X) / Z(X) + Where: + - r(X) is the degree k-1 polynomial that agrees with f(x) at all `k` points + - Z(X) is the degree `k` polynomial that evaluates to zero on all `k` points """ - # For all x_i, compute p(x_i) - p(z) + # For all points, compute the evaluation of those points. ys = [evaluate_polynomialcoeff(polynomial_coeff, z) for z in zs] + # Compute r(X) interpolation_polynomial = interpolate_polynomialcoeff(zs, ys) + # Compute f(X) - r(X) polynomial_shifted = add_polynomialcoeff(polynomial_coeff, neg_polynomialcoeff(interpolation_polynomial)) - # For all x_i, compute (x_i - z) + # Compute Z(X) denominator_poly = vanishing_polynomialcoeff(zs) - # Compute the quotient polynomial directly in evaluation form + # Compute the quotient polynomial directly in monomial form quotient_polynomial = divide_polynomialcoeff(polynomial_shifted, denominator_poly) return KZGProof(g1_lincomb(KZG_SETUP_G1_MONOMIAL[:len(quotient_polynomial)], quotient_polynomial)), ys From 4d073f10a2683ad5b41e1b0ad81b6f5a615df891 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Thu, 18 Apr 2024 18:00:01 +0100 Subject: [PATCH 05/60] remove remainder polynomial when in monomial form --- specs/_features/eip7594/polynomial-commitments-sampling.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 829e16ebaa..14548dea64 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -313,14 +313,12 @@ def compute_kzg_proof_multi_impl( # For all x_i, compute p(x_i) - p(z) ys = [evaluate_polynomialcoeff(polynomial_coeff, z) for z in zs] - interpolation_polynomial = interpolate_polynomialcoeff(zs, ys) - polynomial_shifted = add_polynomialcoeff(polynomial_coeff, 
neg_polynomialcoeff(interpolation_polynomial)) # For all x_i, compute (x_i - z) denominator_poly = vanishing_polynomialcoeff(zs) # Compute the quotient polynomial directly in evaluation form - quotient_polynomial = divide_polynomialcoeff(polynomial_shifted, denominator_poly) + quotient_polynomial = divide_polynomialcoeff(polynomial_coeff, denominator_poly) return KZGProof(g1_lincomb(KZG_SETUP_G1_MONOMIAL[:len(quotient_polynomial)], quotient_polynomial)), ys ``` From 912038bb083d0fd4519072101978a643f9a78820 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Thu, 18 Apr 2024 18:11:41 +0100 Subject: [PATCH 06/60] modify cell proofs comment --- specs/_features/eip7594/polynomial-commitments-sampling.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index c719e0a629..4fdb42d96f 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -377,7 +377,7 @@ def compute_cells_and_proofs(blob: Blob) -> Tuple[ Vector[Cell, CELLS_PER_EXT_BLOB], Vector[KZGProof, CELLS_PER_EXT_BLOB]]: """ - Compute all the cell proofs for one blob. This is an inefficient O(n^2) algorithm, + Compute all the cell proofs for an extended blob. This is an inefficient O(n^2) algorithm, for performant implementation the FK20 algorithm that runs in O(n log n) should be used instead. 
From 6c8955d4bc27b8a051112317973391d8b86c2885 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Thu, 18 Apr 2024 18:12:15 +0100 Subject: [PATCH 07/60] chore: fix comment --- specs/_features/eip7594/polynomial-commitments-sampling.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 4fdb42d96f..bbb85b9554 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -403,7 +403,7 @@ def compute_cells_and_proofs(blob: Blob) -> Tuple[ ```python def compute_cells(blob: Blob) -> Vector[Cell, CELLS_PER_EXT_BLOB]: """ - Compute the cell data for a blob (without computing the proofs). + Compute the cell data for an extended blob (without computing the proofs). Public method. """ From c9e1291860c9788dc6f696be32c3f836ee4e0cfc Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Thu, 18 Apr 2024 18:14:32 +0100 Subject: [PATCH 08/60] update tests --- .../eth2spec/test/eip7594/unittests/das/test_das.py | 12 ++++++------ .../test_polynomial_commitments.py | 6 +++--- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/das/test_das.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/das/test_das.py index 24011fcdd7..1289d61a21 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/das/test_das.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/das/test_das.py @@ -18,11 +18,11 @@ def test_compute_extended_matrix(spec): blob_count = 2 input_blobs = [get_sample_blob(spec, rng=rng) for _ in range(blob_count)] extended_matrix = spec.compute_extended_matrix(input_blobs) - assert len(extended_matrix) == spec.CELLS_PER_BLOB * blob_count + assert len(extended_matrix) == spec.CELLS_PER_EXT_BLOB * blob_count - rows = [extended_matrix[i:(i + spec.CELLS_PER_BLOB)] for i in range(0, 
len(extended_matrix), spec.CELLS_PER_BLOB)] + rows = [extended_matrix[i:(i + spec.CELLS_PER_EXT_BLOB)] for i in range(0, len(extended_matrix), spec.CELLS_PER_EXT_BLOB)] assert len(rows) == blob_count - assert len(rows[0]) == spec.CELLS_PER_BLOB + assert len(rows[0]) == spec.CELLS_PER_EXT_BLOB for blob_index, row in enumerate(rows): extended_blob = [] @@ -40,7 +40,7 @@ def test_recover_matrix(spec): rng = random.Random(5566) # Number of samples we will be recovering from - N_SAMPLES = spec.CELLS_PER_BLOB // 2 + N_SAMPLES = spec.CELLS_PER_EXT_BLOB // 2 blob_count = 2 cells_dict = {} @@ -54,9 +54,9 @@ def test_recover_matrix(spec): cell_ids = [] # First figure out just the indices of the cells for _ in range(N_SAMPLES): - cell_id = rng.randint(0, spec.CELLS_PER_BLOB - 1) + cell_id = rng.randint(0, spec.CELLS_PER_EXT_BLOB - 1) while cell_id in cell_ids: - cell_id = rng.randint(0, spec.CELLS_PER_BLOB - 1) + cell_id = rng.randint(0, spec.CELLS_PER_EXT_BLOB - 1) cell_ids.append(cell_id) cell = cells[cell_id] cells_dict[(blob_index, cell_id)] = cell diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py index d49894adb1..b6232eaeb3 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py @@ -71,7 +71,7 @@ def test_recover_polynomial(spec): rng = random.Random(5566) # Number of samples we will be recovering from - N_SAMPLES = spec.CELLS_PER_BLOB // 2 + N_SAMPLES = spec.CELLS_PER_EXT_BLOB // 2 # Get the data we will be working with blob = get_sample_blob(spec) @@ -86,9 +86,9 @@ def test_recover_polynomial(spec): cell_ids = [] # First figure out just the indices of the cells for i in range(N_SAMPLES): - j = rng.randint(0, spec.CELLS_PER_BLOB - 
1) + j = rng.randint(0, spec.CELLS_PER_EXT_BLOB - 1) while j in cell_ids: - j = rng.randint(0, spec.CELLS_PER_BLOB - 1) + j = rng.randint(0, spec.CELLS_PER_EXT_BLOB - 1) cell_ids.append(j) # Now the cells themselves known_cells_bytes = [cells_bytes[cell_id] for cell_id in cell_ids] From b06d431eb6ca157c97cb560a081ba436d8653b45 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Thu, 18 Apr 2024 18:28:10 +0100 Subject: [PATCH 09/60] fix lint --- .../pyspec/eth2spec/test/eip7594/unittests/das/test_das.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/das/test_das.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/das/test_das.py index 1289d61a21..dba8e885a0 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/das/test_das.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/das/test_das.py @@ -20,7 +20,8 @@ def test_compute_extended_matrix(spec): extended_matrix = spec.compute_extended_matrix(input_blobs) assert len(extended_matrix) == spec.CELLS_PER_EXT_BLOB * blob_count - rows = [extended_matrix[i:(i + spec.CELLS_PER_EXT_BLOB)] for i in range(0, len(extended_matrix), spec.CELLS_PER_EXT_BLOB)] + rows = [extended_matrix[i:(i + spec.CELLS_PER_EXT_BLOB)] + for i in range(0, len(extended_matrix), spec.CELLS_PER_EXT_BLOB)] assert len(rows) == blob_count assert len(rows[0]) == spec.CELLS_PER_EXT_BLOB From 1669723adfa56c327927f61c8cb3a9b942b67907 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Thu, 18 Apr 2024 20:26:46 +0100 Subject: [PATCH 10/60] Update specs/_features/eip7594/polynomial-commitments-sampling.md Co-authored-by: Justin Traglia <95511699+jtraglia@users.noreply.github.com> --- specs/_features/eip7594/polynomial-commitments-sampling.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 1937ac9793..8702b72a5a 100644 --- 
a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -317,7 +317,7 @@ def compute_kzg_proof_multi_impl( - Z(X) is the degree `k` polynomial that evaluates to zero on all `k` points """ - # For all points, compute the evaluation of those points. + # For all points, compute the evaluation of those points ys = [evaluate_polynomialcoeff(polynomial_coeff, z) for z in zs] # Compute r(X) interpolation_polynomial = interpolate_polynomialcoeff(zs, ys) From 8e3500b2b56e128fcd19483ea2c4167afd5d5d77 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Thu, 18 Apr 2024 20:26:52 +0100 Subject: [PATCH 11/60] Update specs/_features/eip7594/polynomial-commitments-sampling.md Co-authored-by: Justin Traglia <95511699+jtraglia@users.noreply.github.com> --- specs/_features/eip7594/polynomial-commitments-sampling.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 8702b72a5a..fb4b46fc86 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -313,7 +313,7 @@ def compute_kzg_proof_multi_impl( This is done by committing to the following quotient polynomial: Q(X) = f(X) - r(X) / Z(X) Where: - - r(X) is the degree k-1 polynomial that agrees with f(x) at all `k` points + - r(X) is the degree `k-1` polynomial that agrees with f(x) at all `k` points - Z(X) is the degree `k` polynomial that evaluates to zero on all `k` points """ From 33dfa88160d9073e7e5e75827dce2a9877a7ca29 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Fri, 19 Apr 2024 12:07:58 +0100 Subject: [PATCH 12/60] fix bad merge --- specs/_features/eip7594/polynomial-commitments-sampling.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md 
b/specs/_features/eip7594/polynomial-commitments-sampling.md index aa6a8e1832..a238a95fe2 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -322,8 +322,6 @@ def compute_kzg_proof_multi_impl( # For all points, compute the evaluation of those points ys = [evaluate_polynomialcoeff(polynomial_coeff, z) for z in zs] - # Compute r(X) - interpolation_polynomial = interpolate_polynomialcoeff(zs, ys) # Compute Z(X) denominator_poly = vanishing_polynomialcoeff(zs) From 32cbda9de25f8eeff0f506d45fc8af7b2ec1634a Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Fri, 19 Apr 2024 12:50:22 +0100 Subject: [PATCH 13/60] rename to recover_all_cells --- .../polynomial-commitments-sampling.md | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 60ecafe17b..f8c10084c5 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -45,7 +45,7 @@ - [`construct_vanishing_polynomial`](#construct_vanishing_polynomial) - [`recover_shifted_data`](#recover_shifted_data) - [`recover_original_data`](#recover_original_data) - - [`recover_polynomial`](#recover_polynomial) + - [`recover_all_cells`](#recover_all_cells) @@ -596,14 +596,15 @@ def recover_original_data(eval_shifted_extended_evaluation: Sequence[BLSFieldEle return reconstructed_data ``` -### `recover_polynomial` +### `recover_all_cells` ```python -def recover_polynomial(cell_ids: Sequence[CellID], - cells_bytes: Sequence[Vector[Bytes32, FIELD_ELEMENTS_PER_CELL]]) -> Polynomial: +def recover_all_cells(cell_ids: Sequence[CellID], + cells_bytes: Sequence[Vector[Bytes32, FIELD_ELEMENTS_PER_CELL]]) -> Sequence[Cell]: """ - Recover original polynomial from FIELD_ELEMENTS_PER_EXT_BLOB evaluations, half of which can 
be missing. This - algorithm uses FFTs to recover cells faster than using Lagrange implementation, as can be seen here: + Recover all of the cells in the extended blob from FIELD_ELEMENTS_PER_EXT_BLOB evaluations, + half of which can be missing. + This algorithm uses FFTs to recover cells faster than using Lagrange implementation, as can be seen here: https://ethresear.ch/t/reed-solomon-erasure-code-recovery-in-n-log-2-n-time-with-ffts/3039 A faster version thanks to Qi Zhou can be found here: @@ -646,5 +647,9 @@ def recover_polynomial(cell_ids: Sequence[CellID], end = (cell_id + 1) * FIELD_ELEMENTS_PER_CELL assert reconstructed_data[start:end] == cell - return reconstructed_data + reconstructed_data_as_cells = [ + reconstructed_data[i * FIELD_ELEMENTS_PER_CELL:(i + 1) * FIELD_ELEMENTS_PER_CELL] + for i in range(CELLS_PER_EXT_BLOB)] + + return reconstructed_data_as_cells ``` From 3ddd00e74199c3b234a7a68b059a0a948a0baa89 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Fri, 19 Apr 2024 12:50:32 +0100 Subject: [PATCH 14/60] use recover_all_cells --- specs/_features/eip7594/das-core.md | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/specs/_features/eip7594/das-core.md b/specs/_features/eip7594/das-core.md index 0e3b0809e3..6fee616477 100644 --- a/specs/_features/eip7594/das-core.md +++ b/specs/_features/eip7594/das-core.md @@ -135,7 +135,7 @@ def recover_matrix(cells_dict: Dict[Tuple[BlobIndex, CellID], Cell], blob_count: """ Return the recovered ``ExtendedMatrix``. - This helper demonstrates how to apply ``recover_polynomial``. + This helper demonstrates how to apply ``recover_all_cells``. The data structure for storing cells is implementation-dependent. 
""" extended_matrix = [] @@ -144,12 +144,8 @@ def recover_matrix(cells_dict: Dict[Tuple[BlobIndex, CellID], Cell], blob_count: cells = [cells_dict[(blob_index, cell_id)] for cell_id in cell_ids] cells_bytes = [[bls_field_to_bytes(element) for element in cell] for cell in cells] - full_polynomial = recover_polynomial(cell_ids, cells_bytes) - cells_from_full_polynomial = [ - full_polynomial[i * FIELD_ELEMENTS_PER_CELL:(i + 1) * FIELD_ELEMENTS_PER_CELL] - for i in range(CELLS_PER_EXT_BLOB) - ] - extended_matrix.extend(cells_from_full_polynomial) + all_cells_for_row = recover_all_cells(cell_ids, cells_bytes) + extended_matrix.extend(all_cells_for_row) return ExtendedMatrix(extended_matrix) ``` From 988412ec2c3f39b5c014941905bf316dd3541267 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Fri, 19 Apr 2024 12:53:47 +0100 Subject: [PATCH 15/60] modify test --- .../test_polynomial_commitments.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py index b6232eaeb3..f7cf65d2db 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py @@ -67,7 +67,7 @@ def test_verify_cell_proof_batch(spec): @with_eip7594_and_later @spec_test @single_phase -def test_recover_polynomial(spec): +def test_recover_all_cells(spec): rng = random.Random(5566) # Number of samples we will be recovering from @@ -93,15 +93,15 @@ def test_recover_polynomial(spec): # Now the cells themselves known_cells_bytes = [cells_bytes[cell_id] for cell_id in cell_ids] - # Recover the data - recovered_data = spec.recover_polynomial(cell_ids, known_cells_bytes) + # Recover all of the cells + recovered_cells = 
spec.recover_all_cells(cell_ids, known_cells_bytes) + recovered_data = [x for xs in recovered_cells for x in xs] # Check that the original data match the non-extended portion of the recovered data assert original_polynomial == recovered_data[:len(recovered_data) // 2] - # Now flatten the cells and check that they match the entirety of the recovered data - flattened_cells = [x for xs in cells for x in xs] - assert flattened_cells == recovered_data + # Check that the recovered cells match the original cells + assert cells == recovered_data @with_eip7594_and_later From 8707a4c2c1396ce8351e86ac524773dc7dc8ee85 Mon Sep 17 00:00:00 2001 From: Justin Traglia Date: Fri, 19 Apr 2024 11:18:11 -0500 Subject: [PATCH 16/60] Fix test_recover_all_cells --- .../polynomial_commitments/test_polynomial_commitments.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py index f7cf65d2db..1dd9b2d6a7 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py @@ -101,7 +101,7 @@ def test_recover_all_cells(spec): assert original_polynomial == recovered_data[:len(recovered_data) // 2] # Check that the recovered cells match the original cells - assert cells == recovered_data + assert cells == recovered_cells @with_eip7594_and_later From 4684c5748c59002b5bc0a2fd455a03017f0ce9d3 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Fri, 19 Apr 2024 19:19:17 +0100 Subject: [PATCH 17/60] add comment for verify algorithm --- .../eip7594/polynomial-commitments-sampling.md | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git 
a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 1a50b00787..5c557fe35e 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -311,7 +311,7 @@ def compute_kzg_proof_multi_impl( Compute a KZG multi-evaluation proof for a set of `k` points. This is done by committing to the following quotient polynomial: - Q(X) = f(X) - r(X) / Z(X) + Q(X) = f(X) - r(X) / Z(X) Where: - r(X) is the degree `k-1` polynomial that agrees with f(x) at all `k` points - Z(X) is the degree `k` polynomial that evaluates to zero on all `k` points @@ -340,12 +340,26 @@ def verify_kzg_proof_multi_impl(commitment: KZGCommitment, ys: Sequence[BLSFieldElement], proof: KZGProof) -> bool: """ - Helper function that verifies a KZG multiproof + Verify a KZG multi-evaluation proof for a set of `k` points. + + This is done by checking if the following equation holds: + Q(x) Z(x) = f(X) - r(X) + Where: + f(X) is the polynomial that we want to show opens at `k` points to `k` values + Q(X) is the quotient polynomial computed by the prover + r(X) is the degree `k-1` polynomial that agrees with f(x) at all `k` points + Z(X) is the polynomial that evaluates to zero on all `k` points + + The verifier receives the commitments to Q(X) and f(X), so they check the equation + holds by using the following pairing equation: + e([Q(X)]_1, [Z(X)]_2) == e([f(X)]_1 - [r(X)]_1, [1]_2) """ assert len(zs) == len(ys) + # Compute [Z(X)]_2 zero_poly = g2_lincomb(KZG_SETUP_G2_MONOMIAL[:len(zs) + 1], vanishing_polynomialcoeff(zs)) + # Compute [r(X)]_1 interpolated_poly = g1_lincomb(KZG_SETUP_G1_MONOMIAL[:len(zs)], interpolate_polynomialcoeff(zs, ys)) return (bls.pairing_check([ From 22a9c726ecef298a2eff978ea3d138695e381d5a Mon Sep 17 00:00:00 2001 From: terence Date: Fri, 19 Apr 2024 16:30:04 -0700 Subject: [PATCH 18/60] Fix `ExecutionLayerWithdrawalRequest` container 
to also imply it's for EIP7002 --- specs/electra/beacon-chain.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index ca04ae6a8c..262401f0e6 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -229,7 +229,7 @@ class PendingPartialWithdrawal(Container): ``` #### `ExecutionLayerWithdrawalRequest` -*Note*: The container is new in EIP7251. +*Note*: The container is new in EIP7251:EIP7002. ```python class ExecutionLayerWithdrawalRequest(Container): From 0e4fc984ef662f9d7c869d6e2d0f64901d2625e1 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Sat, 20 Apr 2024 02:16:19 +0100 Subject: [PATCH 19/60] Remove unused return value (#3704) --- specs/_features/eip7594/polynomial-commitments-sampling.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 1a50b00787..d8f21f0c65 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -525,7 +525,7 @@ def construct_vanishing_polynomial(missing_cell_ids: Sequence[CellID]) -> Tuple[ else: # cell_id in cell_ids assert all(a != 0 for a in zero_poly_eval_brp[start:end]) - return zero_poly_coeff, zero_poly_eval, zero_poly_eval_brp + return zero_poly_coeff, zero_poly_eval ``` ### `recover_shifted_data` @@ -624,7 +624,7 @@ def recover_all_cells(cell_ids: Sequence[CellID], cells = [bytes_to_cell(cell_bytes) for cell_bytes in cells_bytes] missing_cell_ids = [cell_id for cell_id in range(CELLS_PER_EXT_BLOB) if cell_id not in cell_ids] - zero_poly_coeff, zero_poly_eval, zero_poly_eval_brp = construct_vanishing_polynomial(missing_cell_ids) + zero_poly_coeff, zero_poly_eval = construct_vanishing_polynomial(missing_cell_ids) eval_shifted_extended_evaluation, eval_shifted_zero_poly, shift_inv = recover_shifted_data( 
cell_ids, From dca048d8df222c982307bb295f1ccf124446c853 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Mon, 22 Apr 2024 09:57:58 +0100 Subject: [PATCH 20/60] push @asn-d6 suggestions --- .../eip7594/polynomial-commitments-sampling.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 5c557fe35e..28bc68ee9e 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -311,12 +311,12 @@ def compute_kzg_proof_multi_impl( Compute a KZG multi-evaluation proof for a set of `k` points. This is done by committing to the following quotient polynomial: - Q(X) = f(X) - r(X) / Z(X) + Q(X) = f(X) - I(X) / Z(X) Where: - - r(X) is the degree `k-1` polynomial that agrees with f(x) at all `k` points + - I(X) is the degree `k-1` polynomial that agrees with f(x) at all `k` points - Z(X) is the degree `k` polynomial that evaluates to zero on all `k` points - We further note that since the degree of r(X) is less than the degree of Z(X), + We further note that since the degree of I(X) is less than the degree of Z(X), the computation can be simplified in monomial form to Q(X) = f(X) / Z(X) """ @@ -343,23 +343,23 @@ def verify_kzg_proof_multi_impl(commitment: KZGCommitment, Verify a KZG multi-evaluation proof for a set of `k` points. 
This is done by checking if the following equation holds: - Q(x) Z(x) = f(X) - r(X) + Q(x) Z(x) = f(X) - I(X) Where: f(X) is the polynomial that we want to show opens at `k` points to `k` values + f(X) is the polynomial that we want to verify opens at `k` points to `k` values Q(X) is the quotient polynomial computed by the prover - r(X) is the degree `k-1` polynomial that agrees with f(x) at all `k` points + I(X) is the degree k-1 polynomial that evaluates to `ys` at all `zs` points Z(X) is the polynomial that evaluates to zero on all `k` points The verifier receives the commitments to Q(X) and f(X), so they check the equation holds by using the following pairing equation: - e([Q(X)]_1, [Z(X)]_2) == e([f(X)]_1 - [r(X)]_1, [1]_2) + e([Q(X)]_1, [Z(X)]_2) == e([f(X)]_1 - [I(X)]_1, [1]_2) """ assert len(zs) == len(ys) # Compute [Z(X)]_2 zero_poly = g2_lincomb(KZG_SETUP_G2_MONOMIAL[:len(zs) + 1], vanishing_polynomialcoeff(zs)) - # Compute [r(X)]_1 + # Compute [I(X)]_1 interpolated_poly = g1_lincomb(KZG_SETUP_G1_MONOMIAL[:len(zs)], interpolate_polynomialcoeff(zs, ys)) return (bls.pairing_check([ From e46c4c39fb0b3d2bf58f372a845e2d06968ef39c Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Fri, 19 Apr 2024 16:44:52 +0800 Subject: [PATCH 21/60] workaround: add a new config `MAX_EXTENDED_MATRIX_SIZE` to avoid `class Container(List[c, a*b])` --- configs/mainnet.yaml | 1 + configs/minimal.yaml | 1 + pysetup/spec_builders/eip7594.py | 1 + specs/_features/eip7594/das-core.md | 3 ++- .../eth2spec/test/eip7594/unittests/test_config_invariants.py | 1 + 5 files changed, 6 insertions(+), 1 deletion(-) diff --git a/configs/mainnet.yaml b/configs/mainnet.yaml index 55a973e9a9..56f1f89030 100644 --- a/configs/mainnet.yaml +++ b/configs/mainnet.yaml @@ -157,6 +157,7 @@ WHISK_PROPOSER_SELECTION_GAP: 2 # EIP7594 NUMBER_OF_COLUMNS: 128 +MAX_EXTENDED_MATRIX_SIZE: 768 DATA_COLUMN_SIDECAR_SUBNET_COUNT: 32 MAX_REQUEST_DATA_COLUMN_SIDECARS: 16384 diff --git a/configs/minimal.yaml
b/configs/minimal.yaml index 75e75ce9c2..d55e0c398f 100644 --- a/configs/minimal.yaml +++ b/configs/minimal.yaml @@ -155,6 +155,7 @@ WHISK_PROPOSER_SELECTION_GAP: 1 # EIP7594 NUMBER_OF_COLUMNS: 128 +MAX_EXTENDED_MATRIX_SIZE: 768 DATA_COLUMN_SIDECAR_SUBNET_COUNT: 32 MAX_REQUEST_DATA_COLUMN_SIDECARS: 16384 diff --git a/pysetup/spec_builders/eip7594.py b/pysetup/spec_builders/eip7594.py index 414d73f327..9c5ce4fd91 100644 --- a/pysetup/spec_builders/eip7594.py +++ b/pysetup/spec_builders/eip7594.py @@ -19,6 +19,7 @@ def hardcoded_custom_type_dep_constants(cls, spec_object) -> str: 'FIELD_ELEMENTS_PER_CELL': spec_object.preset_vars['FIELD_ELEMENTS_PER_CELL'].value, 'FIELD_ELEMENTS_PER_EXT_BLOB': spec_object.preset_vars['FIELD_ELEMENTS_PER_EXT_BLOB'].value, 'NUMBER_OF_COLUMNS': spec_object.config_vars['NUMBER_OF_COLUMNS'].value, + 'MAX_EXTENDED_MATRIX_SIZE': spec_object.config_vars['MAX_EXTENDED_MATRIX_SIZE'].value, } @classmethod diff --git a/specs/_features/eip7594/das-core.md b/specs/_features/eip7594/das-core.md index 6fee616477..61c79cfd2b 100644 --- a/specs/_features/eip7594/das-core.md +++ b/specs/_features/eip7594/das-core.md @@ -46,7 +46,7 @@ We define the following Python custom types for type hinting and readability: | Name | SSZ equivalent | Description | | - | - | - | | `DataColumn` | `List[Cell, MAX_BLOB_COMMITMENTS_PER_BLOCK]` | The data of each column in EIP-7594 | -| `ExtendedMatrix` | `List[Cell, MAX_BLOBS_PER_BLOCK * NUMBER_OF_COLUMNS]` | The full data of one-dimensional erasure coding extended blobs (in row major format) | +| `ExtendedMatrix` | `List[Cell, MAX_EXTENDED_MATRIX_SIZE]` | The full data of one-dimensional erasure coding extended blobs (in row major format). 
| ## Configuration @@ -55,6 +55,7 @@ We define the following Python custom types for type hinting and readability: | Name | Value | Description | | - | - | - | | `NUMBER_OF_COLUMNS` | `uint64(FIELD_ELEMENTS_PER_EXT_BLOB // FIELD_ELEMENTS_PER_CELL)` (= 128) | Number of columns in the extended data matrix. | +| `MAX_EXTENDED_MATRIX_SIZE` | `uint64(MAX_BLOBS_PER_BLOCK * NUMBER_OF_COLUMNS)` (= 768) | The data size of `ExtendedMatrix`. | ### Networking diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/test_config_invariants.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/test_config_invariants.py index 999bc97b7f..e2ec28d74f 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/test_config_invariants.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/test_config_invariants.py @@ -18,6 +18,7 @@ def test_invariants(spec): assert spec.config.MAX_REQUEST_DATA_COLUMN_SIDECARS == ( spec.config.MAX_REQUEST_BLOCKS_DENEB * spec.config.NUMBER_OF_COLUMNS ) + assert spec.config.MAX_EXTENDED_MATRIX_SIZE == spec.MAX_BLOBS_PER_BLOCK * spec.config.NUMBER_OF_COLUMNS @with_eip7594_and_later From 3f2d498cee5c8bb1db0f496d5b7b1a3d0c13e7f5 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Mon, 22 Apr 2024 13:02:42 +0100 Subject: [PATCH 22/60] rename --- configs/mainnet.yaml | 2 +- configs/minimal.yaml | 2 +- pysetup/spec_builders/eip7594.py | 2 +- specs/_features/eip7594/das-core.md | 4 ++-- .../eth2spec/test/eip7594/unittests/test_config_invariants.py | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/configs/mainnet.yaml b/configs/mainnet.yaml index 56f1f89030..bbe9195135 100644 --- a/configs/mainnet.yaml +++ b/configs/mainnet.yaml @@ -157,7 +157,7 @@ WHISK_PROPOSER_SELECTION_GAP: 2 # EIP7594 NUMBER_OF_COLUMNS: 128 -MAX_EXTENDED_MATRIX_SIZE: 768 +MAX_CELLS_IN_EXTENDED_MATRIX: 768 DATA_COLUMN_SIDECAR_SUBNET_COUNT: 32 MAX_REQUEST_DATA_COLUMN_SIDECARS: 16384 diff --git a/configs/minimal.yaml b/configs/minimal.yaml index 
d55e0c398f..99e84e5fbe 100644 --- a/configs/minimal.yaml +++ b/configs/minimal.yaml @@ -155,7 +155,7 @@ WHISK_PROPOSER_SELECTION_GAP: 1 # EIP7594 NUMBER_OF_COLUMNS: 128 -MAX_EXTENDED_MATRIX_SIZE: 768 +MAX_CELLS_IN_EXTENDED_MATRIX: 768 DATA_COLUMN_SIDECAR_SUBNET_COUNT: 32 MAX_REQUEST_DATA_COLUMN_SIDECARS: 16384 diff --git a/pysetup/spec_builders/eip7594.py b/pysetup/spec_builders/eip7594.py index 9c5ce4fd91..3329378320 100644 --- a/pysetup/spec_builders/eip7594.py +++ b/pysetup/spec_builders/eip7594.py @@ -19,7 +19,7 @@ def hardcoded_custom_type_dep_constants(cls, spec_object) -> str: 'FIELD_ELEMENTS_PER_CELL': spec_object.preset_vars['FIELD_ELEMENTS_PER_CELL'].value, 'FIELD_ELEMENTS_PER_EXT_BLOB': spec_object.preset_vars['FIELD_ELEMENTS_PER_EXT_BLOB'].value, 'NUMBER_OF_COLUMNS': spec_object.config_vars['NUMBER_OF_COLUMNS'].value, - 'MAX_EXTENDED_MATRIX_SIZE': spec_object.config_vars['MAX_EXTENDED_MATRIX_SIZE'].value, + 'MAX_CELLS_IN_EXTENDED_MATRIX': spec_object.config_vars['MAX_CELLS_IN_EXTENDED_MATRIX'].value, } @classmethod diff --git a/specs/_features/eip7594/das-core.md b/specs/_features/eip7594/das-core.md index 61c79cfd2b..379f1e4f6d 100644 --- a/specs/_features/eip7594/das-core.md +++ b/specs/_features/eip7594/das-core.md @@ -46,7 +46,7 @@ We define the following Python custom types for type hinting and readability: | Name | SSZ equivalent | Description | | - | - | - | | `DataColumn` | `List[Cell, MAX_BLOB_COMMITMENTS_PER_BLOCK]` | The data of each column in EIP-7594 | -| `ExtendedMatrix` | `List[Cell, MAX_EXTENDED_MATRIX_SIZE]` | The full data of one-dimensional erasure coding extended blobs (in row major format). | +| `ExtendedMatrix` | `List[Cell, MAX_CELLS_IN_EXTENDED_MATRIX]` | The full data of one-dimensional erasure coding extended blobs (in row major format). 
| ## Configuration @@ -55,7 +55,7 @@ We define the following Python custom types for type hinting and readability: | Name | Value | Description | | - | - | - | | `NUMBER_OF_COLUMNS` | `uint64(FIELD_ELEMENTS_PER_EXT_BLOB // FIELD_ELEMENTS_PER_CELL)` (= 128) | Number of columns in the extended data matrix. | -| `MAX_EXTENDED_MATRIX_SIZE` | `uint64(MAX_BLOBS_PER_BLOCK * NUMBER_OF_COLUMNS)` (= 768) | The data size of `ExtendedMatrix`. | +| `MAX_CELLS_IN_EXTENDED_MATRIX` | `uint64(MAX_BLOBS_PER_BLOCK * NUMBER_OF_COLUMNS)` (= 768) | The data size of `ExtendedMatrix`. | ### Networking diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/test_config_invariants.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/test_config_invariants.py index e2ec28d74f..9ad21d2624 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/test_config_invariants.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/test_config_invariants.py @@ -18,7 +18,7 @@ def test_invariants(spec): assert spec.config.MAX_REQUEST_DATA_COLUMN_SIDECARS == ( spec.config.MAX_REQUEST_BLOCKS_DENEB * spec.config.NUMBER_OF_COLUMNS ) - assert spec.config.MAX_EXTENDED_MATRIX_SIZE == spec.MAX_BLOBS_PER_BLOCK * spec.config.NUMBER_OF_COLUMNS + assert spec.config.MAX_CELLS_IN_EXTENDED_MATRIX == spec.MAX_BLOBS_PER_BLOCK * spec.config.NUMBER_OF_COLUMNS @with_eip7594_and_later From 5c561722b8969c7fc5d28bd68892d724fe955055 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Mon, 22 Apr 2024 13:57:52 +0100 Subject: [PATCH 23/60] add BLS_MODULUS to places where we subtract two integers (#3709) --- specs/_features/eip7594/polynomial-commitments-sampling.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 549bfa74bd..38bf98899b 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md 
@@ -223,7 +223,7 @@ def divide_polynomialcoeff(a: PolynomialCoeff, b: PolynomialCoeff) -> Polynomial quot = div(a[apos], b[bpos]) o.insert(0, quot) for i in range(bpos, -1, -1): - a[diff + i] = (int(a[diff + i]) - int(b[i]) * int(quot)) % BLS_MODULUS + a[diff + i] = (int(a[diff + i]) - int(b[i] + BLS_MODULUS) * int(quot)) % BLS_MODULUS apos -= 1 diff -= 1 return [x % BLS_MODULUS for x in o] @@ -264,7 +264,7 @@ def interpolate_polynomialcoeff(xs: Sequence[BLSFieldElement], ys: Sequence[BLSF if j != i: weight_adjustment = bls_modular_inverse(int(xs[i]) - int(xs[j])) summand = multiply_polynomialcoeff( - summand, [(- int(weight_adjustment) * int(xs[j])) % BLS_MODULUS, weight_adjustment] + summand, [((BLS_MODULUS - int(weight_adjustment)) * int(xs[j])) % BLS_MODULUS, weight_adjustment] ) r = add_polynomialcoeff(r, summand) @@ -280,7 +280,7 @@ def vanishing_polynomialcoeff(xs: Sequence[BLSFieldElement]) -> PolynomialCoeff: """ p = [1] for x in xs: - p = multiply_polynomialcoeff(p, [-int(x), 1]) + p = multiply_polynomialcoeff(p, [-int(x) + BLS_MODULUS, 1]) return p ``` From 79d255b15a229212a61c5e7ef920a6b7c414e827 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Mon, 22 Apr 2024 14:08:44 +0100 Subject: [PATCH 24/60] use CELLS_PER_EXT_BLOB in das-core --- specs/_features/eip7594/das-core.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/_features/eip7594/das-core.md b/specs/_features/eip7594/das-core.md index 379f1e4f6d..fff60e19f4 100644 --- a/specs/_features/eip7594/das-core.md +++ b/specs/_features/eip7594/das-core.md @@ -54,7 +54,7 @@ We define the following Python custom types for type hinting and readability: | Name | Value | Description | | - | - | - | -| `NUMBER_OF_COLUMNS` | `uint64(FIELD_ELEMENTS_PER_EXT_BLOB // FIELD_ELEMENTS_PER_CELL)` (= 128) | Number of columns in the extended data matrix. | +| `NUMBER_OF_COLUMNS` | `uint64(CELLS_PER_EXT_BLOB)` (= 128) | Number of columns in the extended data matrix. 
| | `MAX_CELLS_IN_EXTENDED_MATRIX` | `uint64(MAX_BLOBS_PER_BLOCK * NUMBER_OF_COLUMNS)` (= 768) | The data size of `ExtendedMatrix`. | ### Networking From bcd0a09e68da264ed840c2aa0e603d66d8cae8f7 Mon Sep 17 00:00:00 2001 From: Justin Traglia <95511699+jtraglia@users.noreply.github.com> Date: Mon, 22 Apr 2024 09:03:57 -0500 Subject: [PATCH 25/60] Integrate new Coset/CosetEvals types (#3701) * Add CellBytes type * Use ByteVector, not Vector * Fix tests * Replace b"" with [] * Apply suggestions for bytes_to_cell * Add/integrate new CosetEvals type * Remove accidental new line * Fix recover_all_cells * Fix recover_matrix * fix CosetEvals abstraction leak * Introduce internal Coset type for `coset_for_cell()` * Use CosetEvals in compute_kzg_proof_multi_impl() * update test * satisfy linter * Fix two nits I noticed --------- Co-authored-by: Kevaundray Wedderburn Co-authored-by: George Kadianakis --- specs/_features/eip7594/das-core.md | 3 +- .../polynomial-commitments-sampling.md | 87 ++++++++++++------- .../test/eip7594/unittests/das/test_das.py | 2 +- .../test_polynomial_commitments.py | 19 ++-- 4 files changed, 65 insertions(+), 46 deletions(-) diff --git a/specs/_features/eip7594/das-core.md b/specs/_features/eip7594/das-core.md index 379f1e4f6d..9d0a839ba0 100644 --- a/specs/_features/eip7594/das-core.md +++ b/specs/_features/eip7594/das-core.md @@ -143,9 +143,8 @@ def recover_matrix(cells_dict: Dict[Tuple[BlobIndex, CellID], Cell], blob_count: for blob_index in range(blob_count): cell_ids = [cell_id for b_index, cell_id in cells_dict.keys() if b_index == blob_index] cells = [cells_dict[(blob_index, cell_id)] for cell_id in cell_ids] - cells_bytes = [[bls_field_to_bytes(element) for element in cell] for cell in cells] - all_cells_for_row = recover_all_cells(cell_ids, cells_bytes) + all_cells_for_row = recover_all_cells(cell_ids, cells) extended_matrix.extend(all_cells_for_row) return ExtendedMatrix(extended_matrix) ``` diff --git 
a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 38bf98899b..c56b1de014 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -13,7 +13,8 @@ - [Cells](#cells) - [Helper functions](#helper-functions) - [BLS12-381 helpers](#bls12-381-helpers) - - [`bytes_to_cell`](#bytes_to_cell) + - [`cell_to_coset_evals`](#cell_to_coset_evals) + - [`coset_evals_to_cell`](#coset_evals_to_cell) - [Linear combinations](#linear-combinations) - [`g2_lincomb`](#g2_lincomb) - [FFTs](#ffts) @@ -63,7 +64,9 @@ Public functions MUST accept raw bytes as input and perform the required cryptog | Name | SSZ equivalent | Description | | - | - | - | | `PolynomialCoeff` | `List[BLSFieldElement, FIELD_ELEMENTS_PER_EXT_BLOB]` | A polynomial in coefficient form | -| `Cell` | `Vector[BLSFieldElement, FIELD_ELEMENTS_PER_CELL]` | The unit of blob data that can come with their own KZG proofs | +| `Coset` | `Vector[BLSFieldElement, FIELD_ELEMENTS_PER_CELL]` | The evaluation domain of a cell | +| `CosetEvals` | `Vector[BLSFieldElement, FIELD_ELEMENTS_PER_CELL]` | The internal representation of a cell (the evaluations over its Coset) | +| `Cell` | `ByteVector[BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_CELL]` | The unit of blob data that can come with its own KZG proof | | `CellID` | `uint64` | Cell identifier | | `RowIndex` | `uint64` | Row identifier | | `ColumnIndex` | `uint64` | Column identifier | @@ -91,14 +94,33 @@ Cells are the smallest unit of blob data that can come with their own KZG proofs ### BLS12-381 helpers -#### `bytes_to_cell` +#### `cell_to_coset_evals` ```python -def bytes_to_cell(cell_bytes: Vector[Bytes32, FIELD_ELEMENTS_PER_CELL]) -> Cell: +def cell_to_coset_evals(cell: Cell) -> CosetEvals: """ - Convert untrusted bytes into a Cell. + Convert an untrusted ``Cell`` into a trusted ``CosetEvals``. 
""" - return [bytes_to_bls_field(element) for element in cell_bytes] + evals = [] + for i in range(FIELD_ELEMENTS_PER_CELL): + start = i * BYTES_PER_FIELD_ELEMENT + end = (i + 1) * BYTES_PER_FIELD_ELEMENT + value = bytes_to_bls_field(cell[start:end]) + evals.append(value) + return CosetEvals(evals) +``` + +#### `coset_evals_to_cell` + +```python +def coset_evals_to_cell(coset_evals: CosetEvals) -> Cell: + """ + Convert a trusted ``CosetEval`` into an untrusted ``Cell``. + """ + cell = [] + for i in range(FIELD_ELEMENTS_PER_CELL): + cell += bls_field_to_bytes(coset_evals[i]) + return Cell(cell) ``` ### Linear combinations @@ -306,7 +328,7 @@ Extended KZG functions for multiproofs ```python def compute_kzg_proof_multi_impl( polynomial_coeff: PolynomialCoeff, - zs: Sequence[BLSFieldElement]) -> Tuple[KZGProof, Sequence[BLSFieldElement]]: + zs: Coset) -> Tuple[KZGProof, CosetEvals]: """ Compute a KZG multi-evaluation proof for a set of `k` points. @@ -336,8 +358,8 @@ def compute_kzg_proof_multi_impl( ```python def verify_kzg_proof_multi_impl(commitment: KZGCommitment, - zs: Sequence[BLSFieldElement], - ys: Sequence[BLSFieldElement], + zs: Coset, + ys: CosetEvals, proof: KZGProof) -> bool: """ Verify a KZG multi-evaluation proof for a set of `k` points. 
@@ -376,7 +398,7 @@ def verify_kzg_proof_multi_impl(commitment: KZGCommitment, #### `coset_for_cell` ```python -def coset_for_cell(cell_id: CellID) -> Cell: +def coset_for_cell(cell_id: CellID) -> Coset: """ Get the coset for a given ``cell_id`` """ @@ -384,7 +406,7 @@ def coset_for_cell(cell_id: CellID) -> Cell: roots_of_unity_brp = bit_reversal_permutation( compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB) ) - return Cell(roots_of_unity_brp[FIELD_ELEMENTS_PER_CELL * cell_id:FIELD_ELEMENTS_PER_CELL * (cell_id + 1)]) + return Coset(roots_of_unity_brp[FIELD_ELEMENTS_PER_CELL * cell_id:FIELD_ELEMENTS_PER_CELL * (cell_id + 1)]) ``` ## Cells @@ -413,7 +435,7 @@ def compute_cells_and_proofs(blob: Blob) -> Tuple[ for i in range(CELLS_PER_EXT_BLOB): coset = coset_for_cell(i) proof, ys = compute_kzg_proof_multi_impl(polynomial_coeff, coset) - cells.append(ys) + cells.append(coset_evals_to_cell(ys)) proofs.append(proof) return cells, proofs @@ -434,8 +456,12 @@ def compute_cells(blob: Blob) -> Vector[Cell, CELLS_PER_EXT_BLOB]: extended_data = fft_field(polynomial_coeff + [0] * FIELD_ELEMENTS_PER_BLOB, compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB)) extended_data_rbo = bit_reversal_permutation(extended_data) - return [extended_data_rbo[i * FIELD_ELEMENTS_PER_CELL:(i + 1) * FIELD_ELEMENTS_PER_CELL] - for i in range(CELLS_PER_EXT_BLOB)] + cells = [] + for cell_id in range(CELLS_PER_EXT_BLOB): + start = cell_id * FIELD_ELEMENTS_PER_CELL + end = (cell_id + 1) * FIELD_ELEMENTS_PER_CELL + cells.append(coset_evals_to_cell(extended_data_rbo[start:end])) + return cells ``` ### Cell verification @@ -445,7 +471,7 @@ def compute_cells(blob: Blob) -> Vector[Cell, CELLS_PER_EXT_BLOB]: ```python def verify_cell_proof(commitment_bytes: Bytes48, cell_id: CellID, - cell_bytes: Vector[Bytes32, FIELD_ELEMENTS_PER_CELL], + cell: Cell, proof_bytes: Bytes48) -> bool: """ Check a cell proof @@ -457,7 +483,7 @@ def verify_cell_proof(commitment_bytes: Bytes48, return 
verify_kzg_proof_multi_impl( bytes_to_kzg_commitment(commitment_bytes), coset, - bytes_to_cell(cell_bytes), + cell_to_coset_evals(cell), bytes_to_kzg_proof(proof_bytes)) ``` @@ -467,7 +493,7 @@ def verify_cell_proof(commitment_bytes: Bytes48, def verify_cell_proof_batch(row_commitments_bytes: Sequence[Bytes48], row_indices: Sequence[RowIndex], column_indices: Sequence[ColumnIndex], - cells_bytes: Sequence[Vector[Bytes32, FIELD_ELEMENTS_PER_CELL]], + cells: Sequence[Cell], proofs_bytes: Sequence[Bytes48]) -> bool: """ Verify a set of cells, given their corresponding proofs and their coordinates (row_id, column_id) in the blob @@ -483,19 +509,19 @@ def verify_cell_proof_batch(row_commitments_bytes: Sequence[Bytes48], Public method. """ - assert len(cells_bytes) == len(proofs_bytes) == len(row_indices) == len(column_indices) + assert len(cells) == len(proofs_bytes) == len(row_indices) == len(column_indices) # Get commitments via row IDs commitments_bytes = [row_commitments_bytes[row_index] for row_index in row_indices] # Get objects from bytes commitments = [bytes_to_kzg_commitment(commitment_bytes) for commitment_bytes in commitments_bytes] - cells = [bytes_to_cell(cell_bytes) for cell_bytes in cells_bytes] + cosets_evals = [cell_to_coset_evals(cell) for cell in cells] proofs = [bytes_to_kzg_proof(proof_bytes) for proof_bytes in proofs_bytes] return all( - verify_kzg_proof_multi_impl(commitment, coset_for_cell(column_index), cell, proof) - for commitment, column_index, cell, proof in zip(commitments, column_indices, cells, proofs) + verify_kzg_proof_multi_impl(commitment, coset_for_cell(column_index), coset_evals, proof) + for commitment, column_index, coset_evals, proof in zip(commitments, column_indices, cosets_evals, proofs) ) ``` @@ -612,8 +638,7 @@ def recover_original_data(eval_shifted_extended_evaluation: Sequence[BLSFieldEle ### `recover_all_cells` ```python -def recover_all_cells(cell_ids: Sequence[CellID], - cells_bytes: Sequence[Vector[Bytes32, 
FIELD_ELEMENTS_PER_CELL]]) -> Sequence[Cell]: +def recover_all_cells(cell_ids: Sequence[CellID], cells: Sequence[Cell]) -> Sequence[Cell]: """ Recover all of the cells in the extended blob from FIELD_ELEMENTS_PER_EXT_BLOB evaluations, half of which can be missing. @@ -625,7 +650,7 @@ def recover_all_cells(cell_ids: Sequence[CellID], Public method. """ - assert len(cell_ids) == len(cells_bytes) + assert len(cell_ids) == len(cells) # Check we have enough cells to be able to perform the reconstruction assert CELLS_PER_EXT_BLOB / 2 <= len(cell_ids) <= CELLS_PER_EXT_BLOB # Check for duplicates @@ -634,15 +659,15 @@ def recover_all_cells(cell_ids: Sequence[CellID], # Get the extended domain roots_of_unity_extended = compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB) - # Convert from bytes to cells - cells = [bytes_to_cell(cell_bytes) for cell_bytes in cells_bytes] + # Convert cells to coset evals + cosets_evals = [cell_to_coset_evals(cell) for cell in cells] missing_cell_ids = [cell_id for cell_id in range(CELLS_PER_EXT_BLOB) if cell_id not in cell_ids] zero_poly_coeff, zero_poly_eval = construct_vanishing_polynomial(missing_cell_ids) eval_shifted_extended_evaluation, eval_shifted_zero_poly, shift_inv = recover_shifted_data( cell_ids, - cells, + cosets_evals, zero_poly_eval, zero_poly_coeff, roots_of_unity_extended, @@ -655,14 +680,14 @@ def recover_all_cells(cell_ids: Sequence[CellID], roots_of_unity_extended, ) - for cell_id, cell in zip(cell_ids, cells): + for cell_id, coset_evals in zip(cell_ids, cosets_evals): start = cell_id * FIELD_ELEMENTS_PER_CELL end = (cell_id + 1) * FIELD_ELEMENTS_PER_CELL - assert reconstructed_data[start:end] == cell + assert reconstructed_data[start:end] == coset_evals reconstructed_data_as_cells = [ - reconstructed_data[i * FIELD_ELEMENTS_PER_CELL:(i + 1) * FIELD_ELEMENTS_PER_CELL] + coset_evals_to_cell(reconstructed_data[i * FIELD_ELEMENTS_PER_CELL:(i + 1) * FIELD_ELEMENTS_PER_CELL]) for i in range(CELLS_PER_EXT_BLOB)] - + return 
reconstructed_data_as_cells ``` diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/das/test_das.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/das/test_das.py index dba8e885a0..cdbfad9ffe 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/das/test_das.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/das/test_das.py @@ -28,7 +28,7 @@ def test_compute_extended_matrix(spec): for blob_index, row in enumerate(rows): extended_blob = [] for cell in row: - extended_blob.extend(cell) + extended_blob.extend(spec.cell_to_coset_evals(cell)) blob_part = extended_blob[0:len(extended_blob) // 2] blob = b''.join([spec.bls_field_to_bytes(x) for x in blob_part]) assert blob == input_blobs[blob_index] diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py index 1dd9b2d6a7..deb83c223e 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py @@ -36,12 +36,10 @@ def test_verify_cell_proof(spec): commitment = spec.blob_to_kzg_commitment(blob) cells, proofs = spec.compute_cells_and_proofs(blob) - cells_bytes = [[spec.bls_field_to_bytes(element) for element in cell] for cell in cells] - cell_id = 0 - assert spec.verify_cell_proof(commitment, cell_id, cells_bytes[cell_id], proofs[cell_id]) + assert spec.verify_cell_proof(commitment, cell_id, cells[cell_id], proofs[cell_id]) cell_id = 1 - assert spec.verify_cell_proof(commitment, cell_id, cells_bytes[cell_id], proofs[cell_id]) + assert spec.verify_cell_proof(commitment, cell_id, cells[cell_id], proofs[cell_id]) @with_eip7594_and_later @@ -51,7 +49,6 @@ def test_verify_cell_proof_batch(spec): blob = get_sample_blob(spec) commitment = spec.blob_to_kzg_commitment(blob) 
cells, proofs = spec.compute_cells_and_proofs(blob) - cells_bytes = [[spec.bls_field_to_bytes(element) for element in cell] for cell in cells] assert len(cells) == len(proofs) @@ -59,7 +56,7 @@ def test_verify_cell_proof_batch(spec): row_commitments_bytes=[commitment], row_indices=[0, 0], column_indices=[0, 4], - cells_bytes=[cells_bytes[0], cells_bytes[4]], + cells=[cells[0], cells[4]], proofs_bytes=[proofs[0], proofs[4]], ) @@ -75,12 +72,9 @@ def test_recover_all_cells(spec): # Get the data we will be working with blob = get_sample_blob(spec) - # Get the data in evaluation form - original_polynomial = spec.blob_to_polynomial(blob) # Extend data with Reed-Solomon and split the extended data in cells cells = spec.compute_cells(blob) - cells_bytes = [[spec.bls_field_to_bytes(element) for element in cell] for cell in cells] # Compute the cells we will be recovering from cell_ids = [] @@ -91,14 +85,15 @@ def test_recover_all_cells(spec): j = rng.randint(0, spec.CELLS_PER_EXT_BLOB - 1) cell_ids.append(j) # Now the cells themselves - known_cells_bytes = [cells_bytes[cell_id] for cell_id in cell_ids] + known_cells = [cells[cell_id] for cell_id in cell_ids] # Recover all of the cells - recovered_cells = spec.recover_all_cells(cell_ids, known_cells_bytes) + recovered_cells = spec.recover_all_cells(cell_ids, known_cells) recovered_data = [x for xs in recovered_cells for x in xs] # Check that the original data match the non-extended portion of the recovered data - assert original_polynomial == recovered_data[:len(recovered_data) // 2] + blob_byte_array = [b for b in blob] + assert blob_byte_array == recovered_data[:len(recovered_data) // 2] # Check that the recovered cells match the original cells assert cells == recovered_cells From e51f7df77d776a79832dc937497cf84551f19286 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Mon, 22 Apr 2024 16:43:04 +0100 Subject: [PATCH 26/60] chore: make list copy explicit in `divide_polynomialcoeff` (#3706) --- 
specs/_features/eip7594/polynomial-commitments-sampling.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index c56b1de014..1353bafa3e 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -236,7 +236,7 @@ def divide_polynomialcoeff(a: PolynomialCoeff, b: PolynomialCoeff) -> Polynomial """ Long polynomial division for two coefficient form polynomials ``a`` and ``b`` """ - a = [x for x in a] + a = a.copy() # Make a copy since `a` is passed by reference o = [] apos = len(a) - 1 bpos = len(b) - 1 From b52182cdd6c14695fa232aa2ad99d7c7df3ee51b Mon Sep 17 00:00:00 2001 From: EchoAlice Date: Mon, 22 Apr 2024 13:31:37 -0600 Subject: [PATCH 27/60] Refactor `upgrade_to_electra()` --- specs/electra/fork.md | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/specs/electra/fork.md b/specs/electra/fork.md index 590c34beeb..ffd5f21571 100644 --- a/specs/electra/fork.md +++ b/specs/electra/fork.md @@ -149,15 +149,18 @@ def upgrade_to_electra(pre: deneb.BeaconState) -> BeaconState: deposit_receipts_start_index=UNSET_DEPOSIT_RECEIPTS_START_INDEX, # [New in Electra:EIP7251] deposit_balance_to_consume=0, - exit_balance_to_consume=get_activation_exit_churn_limit(pre), + exit_balance_to_consume=0, earliest_exit_epoch=earliest_exit_epoch, - consolidation_balance_to_consume=get_consolidation_churn_limit(pre), + consolidation_balance_to_consume=0, earliest_consolidation_epoch=compute_activation_exit_epoch(get_current_epoch(pre)), pending_balance_deposits=[], pending_partial_withdrawals=[], pending_consolidations=[], ) + post.exit_balance_to_consume = get_activation_exit_churn_limit(post) + post.consolidation_balance_to_consume = get_consolidation_churn_limit(post) + # [New in Electra:EIP7251] # add validators that are not yet active to pending 
balance deposits pre_activation = sorted([ From 73637c84b298e78d662407805ef5018b06ce93bb Mon Sep 17 00:00:00 2001 From: Justin Traglia <95511699+jtraglia@users.noreply.github.com> Date: Tue, 23 Apr 2024 04:12:26 -0500 Subject: [PATCH 28/60] EIP-7594: Add asserts for public functions (#3684) * Add length asserts for public PeerDAS functions * Fix cell asserts * Rename field variable to satisfy linter * Add asserts for row/column indices * Use CELLS_PER_EXT_BLOB * Update to work with new spec changes * Fix indentation * Add explict check for cell_id --- .../polynomial-commitments-sampling.md | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 1353bafa3e..cf3b2c7593 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -426,6 +426,8 @@ def compute_cells_and_proofs(blob: Blob) -> Tuple[ Public method. """ + assert len(blob) == BYTES_PER_BLOB + polynomial = blob_to_polynomial(blob) polynomial_coeff = polynomial_eval_to_coeff(polynomial) @@ -450,6 +452,8 @@ def compute_cells(blob: Blob) -> Vector[Cell, CELLS_PER_EXT_BLOB]: Public method. """ + assert len(blob) == BYTES_PER_BLOB + polynomial = blob_to_polynomial(blob) polynomial_coeff = polynomial_eval_to_coeff(polynomial) @@ -478,6 +482,11 @@ def verify_cell_proof(commitment_bytes: Bytes48, Public method. """ + assert len(commitment_bytes) == BYTES_PER_COMMITMENT + assert cell_id < CELLS_PER_EXT_BLOB + assert len(cell) == BYTES_PER_CELL + assert len(proof_bytes) == BYTES_PER_PROOF + coset = coset_for_cell(cell_id) return verify_kzg_proof_multi_impl( @@ -510,6 +519,16 @@ def verify_cell_proof_batch(row_commitments_bytes: Sequence[Bytes48], Public method. 
""" assert len(cells) == len(proofs_bytes) == len(row_indices) == len(column_indices) + for commitment_bytes in row_commitments_bytes: + assert len(commitment_bytes) == BYTES_PER_COMMITMENT + for row_index in row_indices: + assert row_index < len(row_commitments_bytes) + for column_index in column_indices: + assert column_index < CELLS_PER_EXT_BLOB + for cell in cells: + assert len(cell) == BYTES_PER_CELL + for proof_bytes in proofs_bytes: + assert len(proof_bytes) == BYTES_PER_PROOF # Get commitments via row IDs commitments_bytes = [row_commitments_bytes[row_index] for row_index in row_indices] @@ -655,6 +674,9 @@ def recover_all_cells(cell_ids: Sequence[CellID], cells: Sequence[Cell]) -> Sequ assert CELLS_PER_EXT_BLOB / 2 <= len(cell_ids) <= CELLS_PER_EXT_BLOB # Check for duplicates assert len(cell_ids) == len(set(cell_ids)) + # Check that each cell is the correct length + for cell in cells: + assert len(cell) == BYTES_PER_CELL # Get the extended domain roots_of_unity_extended = compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB) From 640675f62815d85f14e3400adc6076961d2a42a7 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Tue, 23 Apr 2024 12:44:50 +0100 Subject: [PATCH 29/60] add `g1_multi_exp` --- specs/deneb/polynomial-commitments.md | 7 ++++--- tests/core/pyspec/eth2spec/utils/bls.py | 17 +++++++++++++++++ 2 files changed, 21 insertions(+), 3 deletions(-) diff --git a/specs/deneb/polynomial-commitments.md b/specs/deneb/polynomial-commitments.md index 818bee6435..022d95e72f 100644 --- a/specs/deneb/polynomial-commitments.md +++ b/specs/deneb/polynomial-commitments.md @@ -277,9 +277,10 @@ def g1_lincomb(points: Sequence[KZGCommitment], scalars: Sequence[BLSFieldElemen BLS multiscalar multiplication. This function can be optimized using Pippenger's algorithm and variants. 
""" assert len(points) == len(scalars) - result = bls.Z1() - for x, a in zip(points, scalars): - result = bls.add(result, bls.multiply(bls.bytes48_to_G1(x), a)) + points_g1 = [] + for point in points: + points_g1.append(bls.bytes48_to_G1(point)) + result = bls.g1_multi_exp(points_g1,scalars) return KZGCommitment(bls.G1_to_bytes48(result)) ``` diff --git a/tests/core/pyspec/eth2spec/utils/bls.py b/tests/core/pyspec/eth2spec/utils/bls.py index 39562bf5ee..3a03623d38 100644 --- a/tests/core/pyspec/eth2spec/utils/bls.py +++ b/tests/core/pyspec/eth2spec/utils/bls.py @@ -224,6 +224,23 @@ def multiply(point, scalar): return point * scalar return py_ecc_mul(point, scalar) +def g1_multi_exp(points, integers): + """ + Performs a multi-scalar multiplication between + `points` and `scalars`. + `point` should be in G1 + """ + assert(len(points) == len(integers)) + if bls == arkworks_bls or bls == fastest_bls: + scalars = [] + for integer in integers: + int_as_bytes = integer.to_bytes(32, 'little') + scalars.append(arkworks_Scalar.from_le_bytes(int_as_bytes)) + return arkworks_G1.multiexp_unchecked(points, scalars) + result = Z1() + for point,scalar in points.zip(scalars): + result = add(result, multiply(point, scalar)) + return result def neg(point): """ From 57c14e540022a0615dcf52b25da44bccfe065a6b Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Tue, 23 Apr 2024 20:20:16 +0800 Subject: [PATCH 30/60] Add `get_custody_columns` tests --- specs/_features/eip7594/das-core.md | 4 +- .../test/eip7594/networking/__init__.py | 0 .../networking/test_get_custody_columns.py | 49 +++++++++++++++++++ tests/formats/networking/README.md | 6 +++ .../formats/networking/get_custody_columns.md | 14 ++++++ tests/generators/networking/README.md | 5 ++ tests/generators/networking/__init__.py | 0 tests/generators/networking/main.py | 14 ++++++ tests/generators/networking/requirements.txt | 2 + 9 files changed, 92 insertions(+), 2 deletions(-) create mode 100644 
tests/core/pyspec/eth2spec/test/eip7594/networking/__init__.py create mode 100644 tests/core/pyspec/eth2spec/test/eip7594/networking/test_get_custody_columns.py create mode 100644 tests/formats/networking/README.md create mode 100644 tests/formats/networking/get_custody_columns.md create mode 100644 tests/generators/networking/README.md create mode 100644 tests/generators/networking/__init__.py create mode 100644 tests/generators/networking/main.py create mode 100644 tests/generators/networking/requirements.txt diff --git a/specs/_features/eip7594/das-core.md b/specs/_features/eip7594/das-core.md index 0e3b0809e3..63e05d5708 100644 --- a/specs/_features/eip7594/das-core.md +++ b/specs/_features/eip7594/das-core.md @@ -105,11 +105,11 @@ def get_custody_columns(node_id: NodeID, custody_subnet_count: uint64) -> Sequen assert len(subnet_ids) == len(set(subnet_ids)) columns_per_subnet = NUMBER_OF_COLUMNS // DATA_COLUMN_SIDECAR_SUBNET_COUNT - return [ + return sorted([ ColumnIndex(DATA_COLUMN_SIDECAR_SUBNET_COUNT * i + subnet_id) for i in range(columns_per_subnet) for subnet_id in subnet_ids - ] + ]) ``` #### `compute_extended_matrix` diff --git a/tests/core/pyspec/eth2spec/test/eip7594/networking/__init__.py b/tests/core/pyspec/eth2spec/test/eip7594/networking/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/core/pyspec/eth2spec/test/eip7594/networking/test_get_custody_columns.py b/tests/core/pyspec/eth2spec/test/eip7594/networking/test_get_custody_columns.py new file mode 100644 index 0000000000..794f4609a3 --- /dev/null +++ b/tests/core/pyspec/eth2spec/test/eip7594/networking/test_get_custody_columns.py @@ -0,0 +1,49 @@ +import random + +from eth2spec.test.context import ( + single_phase, + spec_test, + with_eip7594_and_later, +) + + +def _run_get_custody_columns(spec, rng): + node_id = rng.randint(0, 2**32 - 1) + custody_subnet_count = rng.randint(0, spec.config.DATA_COLUMN_SIDECAR_SUBNET_COUNT) + + result = 
spec.get_custody_columns(node_id, custody_subnet_count) + yield 'node_id', 'meta', node_id + yield 'custody_subnet_count', 'meta', custody_subnet_count + + assert len(result) == len(set(result)) + assert len(result) == ( + custody_subnet_count * spec.config.NUMBER_OF_COLUMNS // spec.config.DATA_COLUMN_SIDECAR_SUBNET_COUNT + ) + assert all(i < spec.config.NUMBER_OF_COLUMNS for i in result) + python_list_result = [int(i) for i in result] + + yield 'result', 'meta', python_list_result + + +@with_eip7594_and_later +@spec_test +@single_phase +def test_get_custody_columns__1(spec): + rng = random.Random(1111) + yield from _run_get_custody_columns(spec, rng) + + +@with_eip7594_and_later +@spec_test +@single_phase +def test_get_custody_columns__2(spec): + rng = random.Random(2222) + yield from _run_get_custody_columns(spec, rng) + + +@with_eip7594_and_later +@spec_test +@single_phase +def test_get_custody_columns__3(spec): + rng = random.Random(3333) + yield from _run_get_custody_columns(spec, rng) diff --git a/tests/formats/networking/README.md b/tests/formats/networking/README.md new file mode 100644 index 0000000000..e4679c17eb --- /dev/null +++ b/tests/formats/networking/README.md @@ -0,0 +1,6 @@ +# Networking tests + +The aim of the networking tests is to set a base-line on what really needs to pass, i.e. the essentials. + +Handlers: +- [`get_custody_columns`](./get_custody_columns.md): `get_custody_columns` helper tests diff --git a/tests/formats/networking/get_custody_columns.md b/tests/formats/networking/get_custody_columns.md new file mode 100644 index 0000000000..ec1a9f96a1 --- /dev/null +++ b/tests/formats/networking/get_custody_columns.md @@ -0,0 +1,14 @@ +# `get_custody_columns` tests + +`get_custody_columns` tests provide sanity check of the correntness of `get_custody_columns` helper. + +## Test case format + +### `meta.yaml` + +```yaml +description: string -- Optional. Description of test case, purely for debugging purposes. 
+node_id: int -- argument: the NodeId input. +custody_subnet_count: int -- argument: the count of custody subnets. +result: list of int -- output: the list of resulting column indices. +``` diff --git a/tests/generators/networking/README.md b/tests/generators/networking/README.md new file mode 100644 index 0000000000..4b4fea74da --- /dev/null +++ b/tests/generators/networking/README.md @@ -0,0 +1,5 @@ +# Networking tests + +The purpose of this test-generator is to provide test-vectors for validating the correct implementation of the networking protocol. + +Test-format documentation can be found [here](../../formats/networking/README.md). diff --git a/tests/generators/networking/__init__.py b/tests/generators/networking/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/generators/networking/main.py b/tests/generators/networking/main.py new file mode 100644 index 0000000000..2681daf68b --- /dev/null +++ b/tests/generators/networking/main.py @@ -0,0 +1,14 @@ + +from eth2spec.test.helpers.constants import EIP7594 +from eth2spec.gen_helpers.gen_from_tests.gen import run_state_test_generators + + +if __name__ == "__main__": + eip7594_mods = {key: 'eth2spec.test.eip7594.networking.test_' + key for key in [ + 'get_custody_columns', + ]} + all_mods = { + EIP7594: eip7594_mods + } + + run_state_test_generators(runner_name="networking", all_mods=all_mods) diff --git a/tests/generators/networking/requirements.txt b/tests/generators/networking/requirements.txt new file mode 100644 index 0000000000..1822486863 --- /dev/null +++ b/tests/generators/networking/requirements.txt @@ -0,0 +1,2 @@ +pytest>=4.4 +../../../[generator] From 6aa950e1dd5440f6831fad679de6113e2711d7f4 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Tue, 23 Apr 2024 13:38:04 +0100 Subject: [PATCH 31/60] add g2_multi_exp and lint fixes --- tests/core/pyspec/eth2spec/utils/bls.py | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git 
a/tests/core/pyspec/eth2spec/utils/bls.py b/tests/core/pyspec/eth2spec/utils/bls.py index 3a03623d38..45d40f7bef 100644 --- a/tests/core/pyspec/eth2spec/utils/bls.py +++ b/tests/core/pyspec/eth2spec/utils/bls.py @@ -224,13 +224,13 @@ def multiply(point, scalar): return point * scalar return py_ecc_mul(point, scalar) + def g1_multi_exp(points, integers): """ Performs a multi-scalar multiplication between `points` and `scalars`. `point` should be in G1 """ - assert(len(points) == len(integers)) if bls == arkworks_bls or bls == fastest_bls: scalars = [] for integer in integers: @@ -238,10 +238,30 @@ def g1_multi_exp(points, integers): scalars.append(arkworks_Scalar.from_le_bytes(int_as_bytes)) return arkworks_G1.multiexp_unchecked(points, scalars) result = Z1() - for point,scalar in points.zip(scalars): + for point, scalar in points.zip(integers): result = add(result, multiply(point, scalar)) return result + +# TODO: Duplicated code for now +def g2_multi_exp(points, integers): + """ + Performs a multi-scalar multiplication between + `points` and `scalars`. 
+ `point` should be in G2 + """ + if bls == arkworks_bls or bls == fastest_bls: + scalars = [] + for integer in integers: + int_as_bytes = integer.to_bytes(32, 'little') + scalars.append(arkworks_Scalar.from_le_bytes(int_as_bytes)) + return arkworks_G2.multiexp_unchecked(points, scalars) + result = Z2() + for point, scalar in points.zip(integers): + result = add(result, multiply(point, scalar)) + return result + + def neg(point): """ Returns the point negation of `point` From e7e49ec71aa031e452ac735766b4326b08f5e68e Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Tue, 23 Apr 2024 13:38:14 +0100 Subject: [PATCH 32/60] lint fixes --- specs/deneb/polynomial-commitments.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/deneb/polynomial-commitments.md b/specs/deneb/polynomial-commitments.md index 022d95e72f..953bc28cda 100644 --- a/specs/deneb/polynomial-commitments.md +++ b/specs/deneb/polynomial-commitments.md @@ -280,7 +280,7 @@ def g1_lincomb(points: Sequence[KZGCommitment], scalars: Sequence[BLSFieldElemen points_g1 = [] for point in points: points_g1.append(bls.bytes48_to_G1(point)) - result = bls.g1_multi_exp(points_g1,scalars) + result = bls.g1_multi_exp(points_g1, scalars) return KZGCommitment(bls.G1_to_bytes48(result)) ``` From 5a74f40442095d8d64de4564c81da446b3a9c5ec Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Tue, 23 Apr 2024 13:38:27 +0100 Subject: [PATCH 33/60] g2_lincomb uses g2_multi_exp --- specs/_features/eip7594/polynomial-commitments-sampling.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 1353bafa3e..19c3569b8e 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -133,9 +133,10 @@ def g2_lincomb(points: Sequence[G2Point], scalars: Sequence[BLSFieldElement]) -> 
BLS multiscalar multiplication in G2. This function can be optimized using Pippenger's algorithm and variants. """ assert len(points) == len(scalars) - result = bls.Z2() - for x, a in zip(points, scalars): - result = bls.add(result, bls.multiply(bls.bytes96_to_G2(x), a)) + points_g2 = [] + for point in points: + points_g2.append(bls.bytes96_to_G2(point)) + result = bls.g2_multi_exp(points_g2, scalars) return Bytes96(bls.G2_to_bytes96(result)) ``` From 941d9a94765c111be2760e7377e2d7c33c7d4b7d Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Tue, 23 Apr 2024 13:55:23 +0100 Subject: [PATCH 34/60] trigger CI again --- tests/core/pyspec/eth2spec/utils/bls.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/core/pyspec/eth2spec/utils/bls.py b/tests/core/pyspec/eth2spec/utils/bls.py index 45d40f7bef..d9bb7f209a 100644 --- a/tests/core/pyspec/eth2spec/utils/bls.py +++ b/tests/core/pyspec/eth2spec/utils/bls.py @@ -243,7 +243,7 @@ def g1_multi_exp(points, integers): return result -# TODO: Duplicated code for now +# TODO: Duplicated code for now (we can use type-checking to avoid duplication) def g2_multi_exp(points, integers): """ Performs a multi-scalar multiplication between From d55da1bdb1574af40701d834cf303f89184b3bc0 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Tue, 23 Apr 2024 14:21:37 +0100 Subject: [PATCH 35/60] use one method for both G1 and G2 multiexp --- tests/core/pyspec/eth2spec/utils/bls.py | 30 +++++++++---------------- 1 file changed, 10 insertions(+), 20 deletions(-) diff --git a/tests/core/pyspec/eth2spec/utils/bls.py b/tests/core/pyspec/eth2spec/utils/bls.py index d9bb7f209a..f87a8eb3a3 100644 --- a/tests/core/pyspec/eth2spec/utils/bls.py +++ b/tests/core/pyspec/eth2spec/utils/bls.py @@ -225,37 +225,27 @@ def multiply(point, scalar): return py_ecc_mul(point, scalar) -def g1_multi_exp(points, integers): +def multi_exp(points, integers): """ Performs a multi-scalar multiplication between `points` and `scalars`. 
- `point` should be in G1 + `point` should be in G2 """ if bls == arkworks_bls or bls == fastest_bls: + # Convert integers into arkworks Scalars scalars = [] for integer in integers: int_as_bytes = integer.to_bytes(32, 'little') scalars.append(arkworks_Scalar.from_le_bytes(int_as_bytes)) - return arkworks_G1.multiexp_unchecked(points, scalars) - result = Z1() - for point, scalar in points.zip(integers): - result = add(result, multiply(point, scalar)) - return result + # Check if we need to perform a G1 or G2 multiexp + if isinstance(points[0], arkworks_G1): + return arkworks_G1.multiexp_unchecked(points, scalars) + elif isinstance(points[0], arkworks_G2): + return arkworks_G2.multiexp_unchecked(points, scalars) + else: + raise Exception("Invalid point type") -# TODO: Duplicated code for now (we can use type-checking to avoid duplication) -def g2_multi_exp(points, integers): - """ - Performs a multi-scalar multiplication between - `points` and `scalars`. - `point` should be in G2 - """ - if bls == arkworks_bls or bls == fastest_bls: - scalars = [] - for integer in integers: - int_as_bytes = integer.to_bytes(32, 'little') - scalars.append(arkworks_Scalar.from_le_bytes(int_as_bytes)) - return arkworks_G2.multiexp_unchecked(points, scalars) result = Z2() for point, scalar in points.zip(integers): result = add(result, multiply(point, scalar)) From 5eb19b3cef4e423ddde17357e8a6f69863cf1ad1 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Tue, 23 Apr 2024 14:21:54 +0100 Subject: [PATCH 36/60] modify g1_lincomb and g2_lincomb --- specs/_features/eip7594/polynomial-commitments-sampling.md | 4 ++-- specs/deneb/polynomial-commitments.md | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 19c3569b8e..8dceb3ebed 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ 
b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -130,13 +130,13 @@ def coset_evals_to_cell(coset_evals: CosetEvals) -> Cell: ```python def g2_lincomb(points: Sequence[G2Point], scalars: Sequence[BLSFieldElement]) -> Bytes96: """ - BLS multiscalar multiplication in G2. This function can be optimized using Pippenger's algorithm and variants. + BLS multiscalar multiplication in G2. This can be naively implemented using double-and-add. """ assert len(points) == len(scalars) points_g2 = [] for point in points: points_g2.append(bls.bytes96_to_G2(point)) - result = bls.g2_multi_exp(points_g2, scalars) + result = bls.multi_exp(points_g2, scalars) return Bytes96(bls.G2_to_bytes96(result)) ``` diff --git a/specs/deneb/polynomial-commitments.md b/specs/deneb/polynomial-commitments.md index 953bc28cda..26db0df93d 100644 --- a/specs/deneb/polynomial-commitments.md +++ b/specs/deneb/polynomial-commitments.md @@ -274,13 +274,13 @@ def div(x: BLSFieldElement, y: BLSFieldElement) -> BLSFieldElement: ```python def g1_lincomb(points: Sequence[KZGCommitment], scalars: Sequence[BLSFieldElement]) -> KZGCommitment: """ - BLS multiscalar multiplication. This function can be optimized using Pippenger's algorithm and variants. + BLS multiscalar multiplication in G1. This can be naively implemented using double-and-add. 
""" assert len(points) == len(scalars) points_g1 = [] for point in points: points_g1.append(bls.bytes48_to_G1(point)) - result = bls.g1_multi_exp(points_g1, scalars) + result = bls.multi_exp(points_g1, scalars) return KZGCommitment(bls.G1_to_bytes48(result)) ``` From 5230d8e81ee759532ffdb9e6b42ca72e58369124 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Tue, 23 Apr 2024 14:22:13 +0100 Subject: [PATCH 37/60] fix: function description --- tests/core/pyspec/eth2spec/utils/bls.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/core/pyspec/eth2spec/utils/bls.py b/tests/core/pyspec/eth2spec/utils/bls.py index f87a8eb3a3..21d9da2e99 100644 --- a/tests/core/pyspec/eth2spec/utils/bls.py +++ b/tests/core/pyspec/eth2spec/utils/bls.py @@ -229,7 +229,7 @@ def multi_exp(points, integers): """ Performs a multi-scalar multiplication between `points` and `scalars`. - `point` should be in G2 + `points` can either be in G1 or G2 """ if bls == arkworks_bls or bls == fastest_bls: # Convert integers into arkworks Scalars From b58012845fbb9f31b593e6ebc332cb6db5aa2a7a Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Tue, 23 Apr 2024 14:28:53 +0100 Subject: [PATCH 38/60] check if there are any points --- tests/core/pyspec/eth2spec/utils/bls.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/core/pyspec/eth2spec/utils/bls.py b/tests/core/pyspec/eth2spec/utils/bls.py index 21d9da2e99..c6a2797043 100644 --- a/tests/core/pyspec/eth2spec/utils/bls.py +++ b/tests/core/pyspec/eth2spec/utils/bls.py @@ -238,6 +238,8 @@ def multi_exp(points, integers): int_as_bytes = integer.to_bytes(32, 'little') scalars.append(arkworks_Scalar.from_le_bytes(int_as_bytes)) + if len(points) == 0: + return Z1() # Check if we need to perform a G1 or G2 multiexp if isinstance(points[0], arkworks_G1): return arkworks_G1.multiexp_unchecked(points, scalars) From 274d013d5c9281aea7dbb23c180afdc589037d05 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Tue, 23 
Apr 2024 14:53:06 +0100 Subject: [PATCH 39/60] mke multi_exp polymorphic for pyecc --- tests/core/pyspec/eth2spec/utils/bls.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/tests/core/pyspec/eth2spec/utils/bls.py b/tests/core/pyspec/eth2spec/utils/bls.py index c6a2797043..639bff8432 100644 --- a/tests/core/pyspec/eth2spec/utils/bls.py +++ b/tests/core/pyspec/eth2spec/utils/bls.py @@ -229,7 +229,9 @@ def multi_exp(points, integers): """ Performs a multi-scalar multiplication between `points` and `scalars`. - `points` can either be in G1 or G2 + `points` can either be in G1 or G2. + + Note: This method assumes that there is at least one point. """ if bls == arkworks_bls or bls == fastest_bls: # Convert integers into arkworks Scalars @@ -238,8 +240,6 @@ def multi_exp(points, integers): int_as_bytes = integer.to_bytes(32, 'little') scalars.append(arkworks_Scalar.from_le_bytes(int_as_bytes)) - if len(points) == 0: - return Z1() # Check if we need to perform a G1 or G2 multiexp if isinstance(points[0], arkworks_G1): return arkworks_G1.multiexp_unchecked(points, scalars) @@ -248,7 +248,14 @@ def multi_exp(points, integers): else: raise Exception("Invalid point type") - result = Z2() + result = Z1() + if isinstance(points[0], py_ecc_G1): + result = Z1() + elif isinstance(points[0], py_ecc_G2): + result = Z2() + else: + raise Exception("Invalid point type") + for point, scalar in points.zip(integers): result = add(result, multiply(point, scalar)) return result From d28a0164d207a6e02794aa0138feb1ec830abed8 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Tue, 23 Apr 2024 14:55:37 +0100 Subject: [PATCH 40/60] add is_zero check --- specs/_features/eip7594/polynomial-commitments-sampling.md | 5 +++++ specs/deneb/polynomial-commitments.md | 5 +++++ 2 files changed, 10 insertions(+) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 
8dceb3ebed..b93a694c5a 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -133,9 +133,14 @@ def g2_lincomb(points: Sequence[G2Point], scalars: Sequence[BLSFieldElement]) -> BLS multiscalar multiplication in G2. This can be naively implemented using double-and-add. """ assert len(points) == len(scalars) + + if len(points) == 0: + return bls.Z2() + points_g2 = [] for point in points: points_g2.append(bls.bytes96_to_G2(point)) + result = bls.multi_exp(points_g2, scalars) return Bytes96(bls.G2_to_bytes96(result)) ``` diff --git a/specs/deneb/polynomial-commitments.md b/specs/deneb/polynomial-commitments.md index 26db0df93d..4009aab64c 100644 --- a/specs/deneb/polynomial-commitments.md +++ b/specs/deneb/polynomial-commitments.md @@ -277,9 +277,14 @@ def g1_lincomb(points: Sequence[KZGCommitment], scalars: Sequence[BLSFieldElemen BLS multiscalar multiplication in G1. This can be naively implemented using double-and-add. """ assert len(points) == len(scalars) + + if len(points) == 0: + return bls.Z1() + points_g1 = [] for point in points: points_g1.append(bls.bytes48_to_G1(point)) + result = bls.multi_exp(points_g1, scalars) return KZGCommitment(bls.G1_to_bytes48(result)) ``` From 309979d7781985758688f7d2fce1c72f3a8264df Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Tue, 23 Apr 2024 14:58:32 +0100 Subject: [PATCH 41/60] add check for multi_exp --- tests/core/pyspec/eth2spec/utils/bls.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/core/pyspec/eth2spec/utils/bls.py b/tests/core/pyspec/eth2spec/utils/bls.py index 639bff8432..be297d20bc 100644 --- a/tests/core/pyspec/eth2spec/utils/bls.py +++ b/tests/core/pyspec/eth2spec/utils/bls.py @@ -230,9 +230,12 @@ def multi_exp(points, integers): Performs a multi-scalar multiplication between `points` and `scalars`. `points` can either be in G1 or G2. 
- - Note: This method assumes that there is at least one point. """ + # Since this method accepts either G1 or G2, we need to know + # the type of the point to return. Hence, we need at least one point. + if not points or not integers: + raise Exception("Cannot call multi_exp with zero points or zero integers") + if bls == arkworks_bls or bls == fastest_bls: # Convert integers into arkworks Scalars scalars = [] From f3302a38364e4a3e2373c446a6fa06a4a75ed5b3 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Tue, 23 Apr 2024 15:11:46 +0100 Subject: [PATCH 42/60] - return serialized identity points - result = None initially --- specs/_features/eip7594/polynomial-commitments-sampling.md | 2 +- specs/deneb/polynomial-commitments.md | 2 +- tests/core/pyspec/eth2spec/utils/bls.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index b93a694c5a..34ce077cb8 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -135,7 +135,7 @@ def g2_lincomb(points: Sequence[G2Point], scalars: Sequence[BLSFieldElement]) -> assert len(points) == len(scalars) if len(points) == 0: - return bls.Z2() + return bls.G2_to_bytes96(bls.Z2()) points_g2 = [] for point in points: diff --git a/specs/deneb/polynomial-commitments.md b/specs/deneb/polynomial-commitments.md index 4009aab64c..08048d566e 100644 --- a/specs/deneb/polynomial-commitments.md +++ b/specs/deneb/polynomial-commitments.md @@ -279,7 +279,7 @@ def g1_lincomb(points: Sequence[KZGCommitment], scalars: Sequence[BLSFieldElemen assert len(points) == len(scalars) if len(points) == 0: - return bls.Z1() + return bls.G1_to_bytes48(bls.Z1()) points_g1 = [] for point in points: diff --git a/tests/core/pyspec/eth2spec/utils/bls.py b/tests/core/pyspec/eth2spec/utils/bls.py index be297d20bc..0139791c55 100644 --- 
a/tests/core/pyspec/eth2spec/utils/bls.py +++ b/tests/core/pyspec/eth2spec/utils/bls.py @@ -251,7 +251,7 @@ def multi_exp(points, integers): else: raise Exception("Invalid point type") - result = Z1() + result = None if isinstance(points[0], py_ecc_G1): result = Z1() elif isinstance(points[0], py_ecc_G2): From e514ac632891cb1f59d35bb5baa2ff8fca0963d3 Mon Sep 17 00:00:00 2001 From: Justin Traglia Date: Tue, 23 Apr 2024 10:15:21 -0500 Subject: [PATCH 43/60] Add kzg_7594 test formats --- tests/formats/kzg_7594/README.md | 13 +++++++++ tests/formats/kzg_7594/compute_cells.md | 22 +++++++++++++++ .../kzg_7594/compute_cells_and_proofs.md | 23 +++++++++++++++ tests/formats/kzg_7594/recover_all_cells.md | 23 +++++++++++++++ tests/formats/kzg_7594/verify_cell_proof.md | 26 +++++++++++++++++ .../kzg_7594/verify_cell_proof_batch.md | 28 +++++++++++++++++++ 6 files changed, 135 insertions(+) create mode 100644 tests/formats/kzg_7594/README.md create mode 100644 tests/formats/kzg_7594/compute_cells.md create mode 100644 tests/formats/kzg_7594/compute_cells_and_proofs.md create mode 100644 tests/formats/kzg_7594/recover_all_cells.md create mode 100644 tests/formats/kzg_7594/verify_cell_proof.md create mode 100644 tests/formats/kzg_7594/verify_cell_proof_batch.md diff --git a/tests/formats/kzg_7594/README.md b/tests/formats/kzg_7594/README.md new file mode 100644 index 0000000000..d7d6eeae2c --- /dev/null +++ b/tests/formats/kzg_7594/README.md @@ -0,0 +1,13 @@ +# KZG tests for EIP-7594 + +A test type for KZG libraries. Tests all the public interfaces that a KZG library is required to implement for EIP-7594, as defined in `polynomial-commitments-sampling.md`. + +We do not recommend rolling your own crypto or using an untested KZG library. 
+ +The KZG test suite runner has the following handlers: + +- [`compute_cells`](./compute_cells.md) +- [`compute_cells_and_proofs`](./compute_cells_and_proofs.md) +- [`verify_cell_proof`](./verify_cell_proof.md) +- [`verify_cell_proof_batch`](./verify_cell_proof_batch.md) +- [`recover_all_cells`](./recover_all_cells.md) diff --git a/tests/formats/kzg_7594/compute_cells.md b/tests/formats/kzg_7594/compute_cells.md new file mode 100644 index 0000000000..aec118c20b --- /dev/null +++ b/tests/formats/kzg_7594/compute_cells.md @@ -0,0 +1,22 @@ +# Test format: Compute cells + +Compute the cells for a given `blob`. + +## Test case format + +The test data is declared in a `data.yaml` file: + +```yaml +input: + blob: Blob -- the data blob +output: List[Cell] -- the cells +``` + +- `Blob` is a 131072-byte hexadecimal string, prefixed with `0x`. +- `Cell` is a 2048-byte hexadecimal string, prefixed with `0x`. + +All byte(s) fields are encoded as strings, hexadecimal encoding, prefixed with `0x`. + +## Condition + +The `compute_cells` handler should compute the cells (chunks of an extended blob) for `blob`, and the result should match the expected `output`. If the blob is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element) it should error, i.e. the output should be `null`. diff --git a/tests/formats/kzg_7594/compute_cells_and_proofs.md b/tests/formats/kzg_7594/compute_cells_and_proofs.md new file mode 100644 index 0000000000..0262d8a673 --- /dev/null +++ b/tests/formats/kzg_7594/compute_cells_and_proofs.md @@ -0,0 +1,23 @@ +# Test format: Compute cells and proofs + +Compute the cells and cell KZG proofs for a given `blob`. + +## Test case format + +The test data is declared in a `data.yaml` file: + +```yaml +input: + blob: Blob -- the data blob +output: Tuple[List[Cell], List[KZGProof]] -- the cells and proofs +``` + +- `Blob` is a 131072-byte hexadecimal string, prefixed with `0x`. 
+- `Cell` is a 2048-byte hexadecimal string, prefixed with `0x`. +- `KZGProof` is a 48-byte hexadecimal string, prefixed with `0x`. + +All byte(s) fields are encoded as strings, hexadecimal encoding, prefixed with `0x`. + +## Condition + +The `compute_cells_and_proofs` handler should compute the cells (chunks of an extended blob) and cell KZG proofs for `blob`, and the result should match the expected `output`. If the blob is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element) it should error, i.e. the output should be `null`. diff --git a/tests/formats/kzg_7594/recover_all_cells.md b/tests/formats/kzg_7594/recover_all_cells.md new file mode 100644 index 0000000000..082769627e --- /dev/null +++ b/tests/formats/kzg_7594/recover_all_cells.md @@ -0,0 +1,23 @@ +# Test format: Recover all cells + +Recover all cells given at least 50% of the original `cells`. + +## Test case format + +The test data is declared in a `data.yaml` file: + +```yaml +input: + cell_ids: List[CellID] -- the cell identifier for each cell + cells: List[Cell] -- the partial collection of cells +output: List[Cell] -- all cells, including recovered cells +``` + +- `CellID` is an unsigned 64-bit integer. +- `Cell` is a 2048-byte hexadecimal string, prefixed with `0x`. + +All byte(s) fields are encoded as strings, hexadecimal encoding, prefixed with `0x`. + +## Condition + +The `recover_all_cells` handler should recover missing cells, and the result should match the expected `output`. If any cell is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element) or any `cell_id` is invalid (e.g. greater than the number of cells for an extended blob), it should error, i.e. the output should be `null`. 
diff --git a/tests/formats/kzg_7594/verify_cell_proof.md b/tests/formats/kzg_7594/verify_cell_proof.md new file mode 100644 index 0000000000..dc9cb93e44 --- /dev/null +++ b/tests/formats/kzg_7594/verify_cell_proof.md @@ -0,0 +1,26 @@ +# Test format: Verify cell proof + +Use the cell KZG `proof` to verify that the KZG `commitment` for a given `cell` is correct. + +## Test case format + +The test data is declared in a `data.yaml` file: + +```yaml +input: + commitment: Bytes48 -- the KZG commitment + cell_id: CellID -- the identifier for the cell + cell: Cell -- the cell + proof: Bytes48 -- the KZG proof for the cell +output: bool -- true (correct proof) or false (incorrect proof) +``` + +- `Bytes48` is a 48-byte hexadecimal string, prefixed with `0x`. +- `CellID` is an unsigned 64-bit integer. +- `Cell` is a 2048-byte hexadecimal string, prefixed with `0x`. + +All byte(s) fields are encoded as strings, hexadecimal encoding, prefixed with `0x`. + +## Condition + +The `verify_cell_proof` handler should verify that `commitment` is a correct KZG commitment to `cell` by using the cell KZG proof `proof`, and the result should match the expected `output`. If the commitment or proof is invalid (e.g. not on the curve or not in the G1 subgroup of the BLS curve), `cell` is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element), or `cell_id` is invalid (e.g. greater than the number of cells for an extended blob), it should error, i.e. the output should be `null`. diff --git a/tests/formats/kzg_7594/verify_cell_proof_batch.md b/tests/formats/kzg_7594/verify_cell_proof_batch.md new file mode 100644 index 0000000000..e4a72d2507 --- /dev/null +++ b/tests/formats/kzg_7594/verify_cell_proof_batch.md @@ -0,0 +1,28 @@ +# Test format: Verify cell proof batch + +Use the cell KZG `proofs` to verify that the KZG `row_commitments` for the given `cells` are correct. 
+ +## Test case format + +The test data is declared in a `data.yaml` file: + +```yaml +input: + row_commitments: List[Bytes48] -- the KZG commitments + row_indices: List[RowIndex] -- the commitment index for each cell + column_indices: List[ColumnIndex] -- the column index for each cell + cells: List[Cell] -- the cells + proofs: List[Bytes48] -- the KZG proof for each cell +output: bool -- true (all proofs are correct) or false (some proofs incorrect) +``` + +- `Bytes48` is a 48-byte hexadecimal string, prefixed with `0x`. +- `RowIndex` is an unsigned 64-bit integer. +- `ColumnIndex` is an unsigned 64-bit integer. +- `Cell` is a 2048-byte hexadecimal string, prefixed with `0x`. + +All byte(s) fields are encoded as strings, hexadecimal encoding, prefixed with `0x`. + +## Condition + +The `verify_cell_proof_batch` handler should verify that `row_commitments` are correct KZG commitments to `cells` by using the cell KZG proofs `proofs`, and the result should match the expected `output`. If any of the commitments or proofs are invalid (e.g. not on the curve or not in the G1 subgroup of the BLS curve), any cell is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element), or any `cell_id` is invalid (e.g. greater than the number of cells for an extended blob), it should error, i.e. the output should be `null`. 
From 736fe5300db46a9cf20e4c9b7d5028f78620bf31 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Tue, 23 Apr 2024 17:07:32 +0100 Subject: [PATCH 44/60] Empty commit From bab254b0ba33f61613f165ce160728da5bdd418a Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Wed, 24 Apr 2024 00:07:37 +0800 Subject: [PATCH 45/60] Add description of `multi_exp` --- specs/deneb/polynomial-commitments.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/specs/deneb/polynomial-commitments.md b/specs/deneb/polynomial-commitments.md index 08048d566e..a740ca7efb 100644 --- a/specs/deneb/polynomial-commitments.md +++ b/specs/deneb/polynomial-commitments.md @@ -18,6 +18,7 @@ - [`reverse_bits`](#reverse_bits) - [`bit_reversal_permutation`](#bit_reversal_permutation) - [BLS12-381 helpers](#bls12-381-helpers) + - [`multi_exp`](#multi_exp) - [`hash_to_bls_field`](#hash_to_bls_field) - [`bytes_to_bls_field`](#bytes_to_bls_field) - [`bls_field_to_bytes`](#bls_field_to_bytes) @@ -146,6 +147,18 @@ def bit_reversal_permutation(sequence: Sequence[T]) -> Sequence[T]: ### BLS12-381 helpers + +#### `multi_exp` + +This function performs a multi-scalar multiplication between `points` and `integers`. `points` can either be in G1 or G2. + +```python +def multi_exp(points: PyUnion[Sequence[G1Point], Sequence[G2Point]], + integers: Sequence[uint64]) -> PyUnion[Sequence[G1Point], Sequence[G2Point]]: + # pylint: disable=unused-argument + ... 
+``` + #### `hash_to_bls_field` ```python From bd48cefdd7749b3f7dc093666dd1de4f25c20aec Mon Sep 17 00:00:00 2001 From: kevaundray Date: Tue, 23 Apr 2024 17:10:15 +0100 Subject: [PATCH 46/60] Update tests/core/pyspec/eth2spec/utils/bls.py --- tests/core/pyspec/eth2spec/utils/bls.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/core/pyspec/eth2spec/utils/bls.py b/tests/core/pyspec/eth2spec/utils/bls.py index 0139791c55..299495322f 100644 --- a/tests/core/pyspec/eth2spec/utils/bls.py +++ b/tests/core/pyspec/eth2spec/utils/bls.py @@ -228,7 +228,7 @@ def multiply(point, scalar): def multi_exp(points, integers): """ Performs a multi-scalar multiplication between - `points` and `scalars`. + `points` and `integers`. `points` can either be in G1 or G2. """ # Since this method accepts either G1 or G2, we need to know From a526cdf446cbd84fc5d401512d4fd79ceef79e65 Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Wed, 24 Apr 2024 00:21:45 +0800 Subject: [PATCH 47/60] Fix: use TypeVar for "point" rather than bytes --- pysetup/spec_builders/deneb.py | 1 + specs/deneb/polynomial-commitments.md | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/pysetup/spec_builders/deneb.py b/pysetup/spec_builders/deneb.py index e1ad051849..dc3c175836 100644 --- a/pysetup/spec_builders/deneb.py +++ b/pysetup/spec_builders/deneb.py @@ -17,6 +17,7 @@ def imports(cls, preset_name: str): def preparations(cls): return ''' T = TypeVar('T') # For generic function +TPoint = TypeVar('TPoint') # For generic function. G1 or G2 point. ''' @classmethod diff --git a/specs/deneb/polynomial-commitments.md b/specs/deneb/polynomial-commitments.md index a740ca7efb..5f7edb455c 100644 --- a/specs/deneb/polynomial-commitments.md +++ b/specs/deneb/polynomial-commitments.md @@ -153,8 +153,8 @@ def bit_reversal_permutation(sequence: Sequence[T]) -> Sequence[T]: This function performs a multi-scalar multiplication between `points` and `integers`. 
`points` can either be in G1 or G2. ```python -def multi_exp(points: PyUnion[Sequence[G1Point], Sequence[G2Point]], - integers: Sequence[uint64]) -> PyUnion[Sequence[G1Point], Sequence[G2Point]]: +def multi_exp(points: Sequence[TPoint], + integers: Sequence[uint64]) -> Sequence[TPoint]: # pylint: disable=unused-argument ... ``` From 731caf8573186d78f5f390876bee5e864aead544 Mon Sep 17 00:00:00 2001 From: Justin Traglia <95511699+jtraglia@users.noreply.github.com> Date: Tue, 23 Apr 2024 15:01:09 -0500 Subject: [PATCH 48/60] Rename formats/kzg to formats/kzg_4844 (#3716) --- tests/formats/{kzg => kzg_4844}/README.md | 0 tests/formats/{kzg => kzg_4844}/blob_to_kzg_commitment.md | 0 tests/formats/{kzg => kzg_4844}/compute_blob_kzg_proof.md | 0 tests/formats/{kzg => kzg_4844}/compute_kzg_proof.md | 0 tests/formats/{kzg => kzg_4844}/verify_blob_kzg_proof.md | 0 tests/formats/{kzg => kzg_4844}/verify_blob_kzg_proof_batch.md | 0 tests/formats/{kzg => kzg_4844}/verify_kzg_proof.md | 0 7 files changed, 0 insertions(+), 0 deletions(-) rename tests/formats/{kzg => kzg_4844}/README.md (100%) rename tests/formats/{kzg => kzg_4844}/blob_to_kzg_commitment.md (100%) rename tests/formats/{kzg => kzg_4844}/compute_blob_kzg_proof.md (100%) rename tests/formats/{kzg => kzg_4844}/compute_kzg_proof.md (100%) rename tests/formats/{kzg => kzg_4844}/verify_blob_kzg_proof.md (100%) rename tests/formats/{kzg => kzg_4844}/verify_blob_kzg_proof_batch.md (100%) rename tests/formats/{kzg => kzg_4844}/verify_kzg_proof.md (100%) diff --git a/tests/formats/kzg/README.md b/tests/formats/kzg_4844/README.md similarity index 100% rename from tests/formats/kzg/README.md rename to tests/formats/kzg_4844/README.md diff --git a/tests/formats/kzg/blob_to_kzg_commitment.md b/tests/formats/kzg_4844/blob_to_kzg_commitment.md similarity index 100% rename from tests/formats/kzg/blob_to_kzg_commitment.md rename to tests/formats/kzg_4844/blob_to_kzg_commitment.md diff --git 
a/tests/formats/kzg/compute_blob_kzg_proof.md b/tests/formats/kzg_4844/compute_blob_kzg_proof.md similarity index 100% rename from tests/formats/kzg/compute_blob_kzg_proof.md rename to tests/formats/kzg_4844/compute_blob_kzg_proof.md diff --git a/tests/formats/kzg/compute_kzg_proof.md b/tests/formats/kzg_4844/compute_kzg_proof.md similarity index 100% rename from tests/formats/kzg/compute_kzg_proof.md rename to tests/formats/kzg_4844/compute_kzg_proof.md diff --git a/tests/formats/kzg/verify_blob_kzg_proof.md b/tests/formats/kzg_4844/verify_blob_kzg_proof.md similarity index 100% rename from tests/formats/kzg/verify_blob_kzg_proof.md rename to tests/formats/kzg_4844/verify_blob_kzg_proof.md diff --git a/tests/formats/kzg/verify_blob_kzg_proof_batch.md b/tests/formats/kzg_4844/verify_blob_kzg_proof_batch.md similarity index 100% rename from tests/formats/kzg/verify_blob_kzg_proof_batch.md rename to tests/formats/kzg_4844/verify_blob_kzg_proof_batch.md diff --git a/tests/formats/kzg/verify_kzg_proof.md b/tests/formats/kzg_4844/verify_kzg_proof.md similarity index 100% rename from tests/formats/kzg/verify_kzg_proof.md rename to tests/formats/kzg_4844/verify_kzg_proof.md From b25740052a90646d2fce7bfe9a2d506ca27cb25a Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Wed, 24 Apr 2024 18:40:56 +0800 Subject: [PATCH 49/60] Apply suggestions from code review Co-authored-by: Justin Traglia <95511699+jtraglia@users.noreply.github.com> --- tests/formats/networking/get_custody_columns.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/formats/networking/get_custody_columns.md b/tests/formats/networking/get_custody_columns.md index ec1a9f96a1..03b21f729e 100644 --- a/tests/formats/networking/get_custody_columns.md +++ b/tests/formats/networking/get_custody_columns.md @@ -1,13 +1,13 @@ # `get_custody_columns` tests -`get_custody_columns` tests provide sanity check of the correntness of `get_custody_columns` helper. 
+`get_custody_columns` tests provide sanity check of the correctness of `get_custody_columns` helper. ## Test case format ### `meta.yaml` ```yaml -description: string -- Optional. Description of test case, purely for debugging purposes. +description: string -- optional: description of test case, purely for debugging purposes. node_id: int -- argument: the NodeId input. custody_subnet_count: int -- argument: the count of custody subnets. result: list of int -- output: the list of resulting column indices. From 24899b7fbad70b26796bc521d8cb9563d1f35caa Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Wed, 24 Apr 2024 19:04:27 +0800 Subject: [PATCH 50/60] Apply suggestions from @jtraglia --- specs/_features/eip7594/das-core.md | 17 ++++++- .../networking/test_get_custody_columns.py | 46 +++++++++++++++++-- 2 files changed, 59 insertions(+), 4 deletions(-) diff --git a/specs/_features/eip7594/das-core.md b/specs/_features/eip7594/das-core.md index 63e05d5708..69fede2be0 100644 --- a/specs/_features/eip7594/das-core.md +++ b/specs/_features/eip7594/das-core.md @@ -8,6 +8,8 @@ +- [Constants](#constants) + - [Misc](#misc) - [Custom types](#custom-types) - [Configuration](#configuration) - [Data size](#data-size) @@ -39,6 +41,16 @@ +## Constants + +The following values are (non-configurable) constants used throughout the specification. 
+ +### Misc + +| Name | Value | +| - | - | +| `UINT256_MAX` | `uint256(2**256 - 1)` | + ## Custom types We define the following Python custom types for type hinting and readability: @@ -95,8 +107,11 @@ def get_custody_columns(node_id: NodeID, custody_subnet_count: uint64) -> Sequen subnet_ids = [] i = 0 while len(subnet_ids) < custody_subnet_count: + if node_id == UINT256_MAX: + node_id = 0 + subnet_id = ( - bytes_to_uint64(hash(uint_to_bytes(uint64(node_id + i)))[0:8]) + bytes_to_uint64(hash(uint_to_bytes(uint256(node_id + i)))[0:8]) % DATA_COLUMN_SIDECAR_SUBNET_COUNT ) if subnet_id not in subnet_ids: diff --git a/tests/core/pyspec/eth2spec/test/eip7594/networking/test_get_custody_columns.py b/tests/core/pyspec/eth2spec/test/eip7594/networking/test_get_custody_columns.py index 794f4609a3..cadaa90d9b 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/networking/test_get_custody_columns.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/networking/test_get_custody_columns.py @@ -7,9 +7,12 @@ ) -def _run_get_custody_columns(spec, rng): - node_id = rng.randint(0, 2**32 - 1) - custody_subnet_count = rng.randint(0, spec.config.DATA_COLUMN_SIDECAR_SUBNET_COUNT) +def _run_get_custody_columns(spec, rng, node_id=None, custody_subnet_count=None): + if node_id is None: + node_id = rng.randint(0, 2**256 - 1) + + if custody_subnet_count is None: + custody_subnet_count = rng.randint(0, spec.config.DATA_COLUMN_SIDECAR_SUBNET_COUNT) result = spec.get_custody_columns(node_id, custody_subnet_count) yield 'node_id', 'meta', node_id @@ -25,6 +28,43 @@ def _run_get_custody_columns(spec, rng): yield 'result', 'meta', python_list_result +@with_eip7594_and_later +@spec_test +@single_phase +def test_get_custody_columns__min_node_id_min_custody_subnet_count(spec): + rng = random.Random(1111) + yield from _run_get_custody_columns(spec, rng, node_id=0, custody_subnet_count=0) + + +@with_eip7594_and_later +@spec_test +@single_phase +def 
test_get_custody_columns__min_node_id_max_custody_subnet_count(spec): + rng = random.Random(1111) + yield from _run_get_custody_columns( + spec, rng, node_id=0, + custody_subnet_count=spec.config.DATA_COLUMN_SIDECAR_SUBNET_COUNT) + + +@with_eip7594_and_later +@spec_test +@single_phase +def test_get_custody_columns__max_node_id_min_custody_subnet_count(spec): + rng = random.Random(1111) + yield from _run_get_custody_columns(spec, rng, node_id=2**256 - 1, custody_subnet_count=0) + + +@with_eip7594_and_later +@spec_test +@single_phase +def test_get_custody_columns__max_node_id_max_custody_subnet_count(spec): + rng = random.Random(1111) + yield from _run_get_custody_columns( + spec, rng, node_id=2**256 - 1, + custody_subnet_count=spec.config.DATA_COLUMN_SIDECAR_SUBNET_COUNT, + ) + + @with_eip7594_and_later @spec_test @single_phase From e793fe26985dfaee973f9734b70589e63724ffd2 Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Wed, 24 Apr 2024 20:08:56 +0800 Subject: [PATCH 51/60] Add Electra fork tests --- .../eth2spec/test/electra/fork/__init__.py | 0 .../electra/fork/test_electra_fork_basic.py | 82 ++++++++++++++++++ .../electra/fork/test_electra_fork_random.py | 84 +++++++++++++++++++ .../eth2spec/test/helpers/deneb/fork.py | 2 + .../eth2spec/test/helpers/electra/__init__.py | 0 .../eth2spec/test/helpers/electra/fork.py | 65 ++++++++++++++ tests/generators/forks/main.py | 5 +- 7 files changed, 237 insertions(+), 1 deletion(-) create mode 100644 tests/core/pyspec/eth2spec/test/electra/fork/__init__.py create mode 100644 tests/core/pyspec/eth2spec/test/electra/fork/test_electra_fork_basic.py create mode 100644 tests/core/pyspec/eth2spec/test/electra/fork/test_electra_fork_random.py create mode 100644 tests/core/pyspec/eth2spec/test/helpers/electra/__init__.py create mode 100644 tests/core/pyspec/eth2spec/test/helpers/electra/fork.py diff --git a/tests/core/pyspec/eth2spec/test/electra/fork/__init__.py b/tests/core/pyspec/eth2spec/test/electra/fork/__init__.py new 
file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/core/pyspec/eth2spec/test/electra/fork/test_electra_fork_basic.py b/tests/core/pyspec/eth2spec/test/electra/fork/test_electra_fork_basic.py new file mode 100644 index 0000000000..3bd6350b34 --- /dev/null +++ b/tests/core/pyspec/eth2spec/test/electra/fork/test_electra_fork_basic.py @@ -0,0 +1,82 @@ +from eth2spec.test.context import ( + with_phases, + with_custom_state, + with_presets, + spec_test, with_state, + low_balances, misc_balances, large_validator_set, +) +from eth2spec.test.utils import with_meta_tags +from eth2spec.test.helpers.constants import ( + DENEB, ELECTRA, + MINIMAL, +) +from eth2spec.test.helpers.state import ( + next_epoch, + next_epoch_via_block, +) +from eth2spec.test.helpers.electra.fork import ( + ELECTRA_FORK_TEST_META_TAGS, + run_fork_test, +) + + +@with_phases(phases=[DENEB], other_phases=[ELECTRA]) +@spec_test +@with_state +@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS) +def test_fork_base_state(spec, phases, state): + yield from run_fork_test(phases[ELECTRA], state) + + +@with_phases(phases=[DENEB], other_phases=[ELECTRA]) +@spec_test +@with_state +@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS) +def test_fork_next_epoch(spec, phases, state): + next_epoch(spec, state) + yield from run_fork_test(phases[ELECTRA], state) + + +@with_phases(phases=[DENEB], other_phases=[ELECTRA]) +@spec_test +@with_state +@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS) +def test_fork_next_epoch_with_block(spec, phases, state): + next_epoch_via_block(spec, state) + yield from run_fork_test(phases[ELECTRA], state) + + +@with_phases(phases=[DENEB], other_phases=[ELECTRA]) +@spec_test +@with_state +@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS) +def test_fork_many_next_epoch(spec, phases, state): + for _ in range(3): + next_epoch(spec, state) + yield from run_fork_test(phases[ELECTRA], state) + + +@with_phases(phases=[DENEB], other_phases=[ELECTRA]) +@with_custom_state(balances_fn=low_balances, 
threshold_fn=lambda spec: spec.config.EJECTION_BALANCE) +@spec_test +@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS) +def test_fork_random_low_balances(spec, phases, state): + yield from run_fork_test(phases[ELECTRA], state) + + +@with_phases(phases=[DENEB], other_phases=[ELECTRA]) +@with_custom_state(balances_fn=misc_balances, threshold_fn=lambda spec: spec.config.EJECTION_BALANCE) +@spec_test +@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS) +def test_fork_random_misc_balances(spec, phases, state): + yield from run_fork_test(phases[ELECTRA], state) + + +@with_phases(phases=[DENEB], other_phases=[ELECTRA]) +@with_presets([MINIMAL], + reason="mainnet config leads to larger validator set than limit of public/private keys pre-generated") +@with_custom_state(balances_fn=large_validator_set, threshold_fn=lambda spec: spec.config.EJECTION_BALANCE) +@spec_test +@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS) +def test_fork_random_large_validator_set(spec, phases, state): + yield from run_fork_test(phases[ELECTRA], state) diff --git a/tests/core/pyspec/eth2spec/test/electra/fork/test_electra_fork_random.py b/tests/core/pyspec/eth2spec/test/electra/fork/test_electra_fork_random.py new file mode 100644 index 0000000000..07495ed453 --- /dev/null +++ b/tests/core/pyspec/eth2spec/test/electra/fork/test_electra_fork_random.py @@ -0,0 +1,84 @@ +from random import Random + +from eth2spec.test.context import ( + with_phases, + with_custom_state, + with_presets, + spec_test, with_state, + low_balances, misc_balances, large_validator_set, +) +from eth2spec.test.utils import with_meta_tags +from eth2spec.test.helpers.constants import ( + DENEB, ELECTRA, + MINIMAL, +) +from eth2spec.test.helpers.electra.fork import ( + ELECTRA_FORK_TEST_META_TAGS, + run_fork_test, +) +from eth2spec.test.helpers.random import randomize_state + + +@with_phases(phases=[DENEB], other_phases=[ELECTRA]) +@spec_test +@with_state +@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS) +def test_electra_fork_random_0(spec, 
phases, state): + randomize_state(spec, state, rng=Random(1010)) + yield from run_fork_test(phases[ELECTRA], state) + + +@with_phases(phases=[DENEB], other_phases=[ELECTRA]) +@spec_test +@with_state +@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS) +def test_electra_fork_random_1(spec, phases, state): + randomize_state(spec, state, rng=Random(2020)) + yield from run_fork_test(phases[ELECTRA], state) + + +@with_phases(phases=[DENEB], other_phases=[ELECTRA]) +@spec_test +@with_state +@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS) +def test_electra_fork_random_2(spec, phases, state): + randomize_state(spec, state, rng=Random(3030)) + yield from run_fork_test(phases[ELECTRA], state) + + +@with_phases(phases=[DENEB], other_phases=[ELECTRA]) +@spec_test +@with_state +@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS) +def test_electra_fork_random_3(spec, phases, state): + randomize_state(spec, state, rng=Random(4040)) + yield from run_fork_test(phases[ELECTRA], state) + + +@with_phases(phases=[DENEB], other_phases=[ELECTRA]) +@spec_test +@with_custom_state(balances_fn=low_balances, threshold_fn=lambda spec: spec.config.EJECTION_BALANCE) +@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS) +def test_electra_fork_random_low_balances(spec, phases, state): + randomize_state(spec, state, rng=Random(5050)) + yield from run_fork_test(phases[ELECTRA], state) + + +@with_phases(phases=[DENEB], other_phases=[ELECTRA]) +@spec_test +@with_custom_state(balances_fn=misc_balances, threshold_fn=lambda spec: spec.config.EJECTION_BALANCE) +@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS) +def test_electra_fork_random_misc_balances(spec, phases, state): + randomize_state(spec, state, rng=Random(6060)) + yield from run_fork_test(phases[ELECTRA], state) + + +@with_phases(phases=[DENEB], other_phases=[ELECTRA]) +@with_presets([MINIMAL], + reason="mainnet config leads to larger validator set than limit of public/private keys pre-generated") +@spec_test +@with_custom_state(balances_fn=large_validator_set, 
threshold_fn=lambda spec: spec.config.EJECTION_BALANCE) +@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS) +def test_electra_fork_random_large_validator_set(spec, phases, state): + randomize_state(spec, state, rng=Random(7070)) + yield from run_fork_test(phases[ELECTRA], state) diff --git a/tests/core/pyspec/eth2spec/test/helpers/deneb/fork.py b/tests/core/pyspec/eth2spec/test/helpers/deneb/fork.py index 7fe0535c10..fd2428a046 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/deneb/fork.py +++ b/tests/core/pyspec/eth2spec/test/helpers/deneb/fork.py @@ -36,6 +36,8 @@ def run_fork_test(post_spec, pre_state): 'current_sync_committee', 'next_sync_committee', # Withdrawals 'next_withdrawal_index', 'next_withdrawal_validator_index', + # Deep history valid from Capella onwards + 'historical_summaries', ] for field in stable_fields: assert getattr(pre_state, field) == getattr(post_state, field) diff --git a/tests/core/pyspec/eth2spec/test/helpers/electra/__init__.py b/tests/core/pyspec/eth2spec/test/helpers/electra/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/core/pyspec/eth2spec/test/helpers/electra/fork.py b/tests/core/pyspec/eth2spec/test/helpers/electra/fork.py new file mode 100644 index 0000000000..39a43a5233 --- /dev/null +++ b/tests/core/pyspec/eth2spec/test/helpers/electra/fork.py @@ -0,0 +1,65 @@ +from eth2spec.test.helpers.constants import ( + ELECTRA, +) + + +ELECTRA_FORK_TEST_META_TAGS = { + 'fork': ELECTRA, +} + + +def run_fork_test(post_spec, pre_state): + yield 'pre', pre_state + + post_state = post_spec.upgrade_to_electra(pre_state) + + # Stable fields + stable_fields = [ + 'genesis_time', 'genesis_validators_root', 'slot', + # History + 'latest_block_header', 'block_roots', 'state_roots', 'historical_roots', + # Eth1 + 'eth1_data', 'eth1_data_votes', 'eth1_deposit_index', + # Registry + # NOTE: 'validators', 'balances' could be changed. 
+ # Randomness + 'randao_mixes', + # Slashings + 'slashings', + # Participation + 'previous_epoch_participation', 'current_epoch_participation', + # Finality + 'justification_bits', 'previous_justified_checkpoint', 'current_justified_checkpoint', 'finalized_checkpoint', + # Inactivity + 'inactivity_scores', + # Sync + 'current_sync_committee', 'next_sync_committee', + # Withdrawals + 'next_withdrawal_index', 'next_withdrawal_validator_index', + # Deep history valid from Capella onwards + 'historical_summaries', + + ] + for field in stable_fields: + assert getattr(pre_state, field) == getattr(post_state, field) + + # Modified fields + modified_fields = ['fork', 'latest_execution_payload_header'] + for field in modified_fields: + assert getattr(pre_state, field) != getattr(post_state, field) + + assert len(pre_state.validators) == len(post_state.validators) + for pre_validator, post_validator in zip(pre_state.validators, post_state.validators): + stable_validator_fields = [ + 'pubkey', 'withdrawal_credentials', + 'slashed', + 'exit_epoch', 'withdrawable_epoch', + ] + for field in stable_validator_fields: + assert getattr(pre_validator, field) == getattr(post_validator, field) + + assert pre_state.fork.current_version == post_state.fork.previous_version + assert post_state.fork.current_version == post_spec.config.ELECTRA_FORK_VERSION + assert post_state.fork.epoch == post_spec.get_current_epoch(post_state) + + yield 'post', post_state diff --git a/tests/generators/forks/main.py b/tests/generators/forks/main.py index 7d68a31e7a..91078c8dae 100644 --- a/tests/generators/forks/main.py +++ b/tests/generators/forks/main.py @@ -1,7 +1,7 @@ from typing import Iterable from eth2spec.test.helpers.constants import ( - PHASE0, ALTAIR, BELLATRIX, CAPELLA, DENEB, + PHASE0, ALTAIR, BELLATRIX, CAPELLA, DENEB, ELECTRA, MINIMAL, MAINNET, ) from eth2spec.test.helpers.typing import SpecForkName, PresetBaseName @@ -9,6 +9,7 @@ from eth2spec.test.bellatrix.fork import 
test_bellatrix_fork_basic, test_bellatrix_fork_random from eth2spec.test.capella.fork import test_capella_fork_basic, test_capella_fork_random from eth2spec.test.deneb.fork import test_deneb_fork_basic, test_deneb_fork_random +from eth2spec.test.electra.fork import test_electra_fork_basic, test_electra_fork_random from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing from eth2spec.gen_helpers.gen_from_tests.gen import generate_from_tests @@ -42,6 +43,8 @@ def _get_fork_tests_providers(): yield create_provider(test_capella_fork_random, preset, BELLATRIX, CAPELLA) yield create_provider(test_deneb_fork_basic, preset, CAPELLA, DENEB) yield create_provider(test_deneb_fork_random, preset, CAPELLA, DENEB) + yield create_provider(test_electra_fork_basic, preset, DENEB, ELECTRA) + yield create_provider(test_electra_fork_random, preset, DENEB, ELECTRA) if __name__ == "__main__": From f5277700e3b89c4d62bd4e88a559c2b938c6b0a5 Mon Sep 17 00:00:00 2001 From: Justin Traglia <95511699+jtraglia@users.noreply.github.com> Date: Wed, 24 Apr 2024 11:40:27 -0500 Subject: [PATCH 52/60] Add initial version of kzg_7594 test generator (#3693) --- .../pyspec/eth2spec/test/utils/kzg_tests.py | 155 ++++ tests/generators/kzg_4844/main.py | 153 +--- tests/generators/kzg_7594/README.md | 3 + tests/generators/kzg_7594/main.py | 837 ++++++++++++++++++ tests/generators/kzg_7594/requirements.txt | 2 + 5 files changed, 1045 insertions(+), 105 deletions(-) create mode 100644 tests/core/pyspec/eth2spec/test/utils/kzg_tests.py create mode 100644 tests/generators/kzg_7594/README.md create mode 100644 tests/generators/kzg_7594/main.py create mode 100644 tests/generators/kzg_7594/requirements.txt diff --git a/tests/core/pyspec/eth2spec/test/utils/kzg_tests.py b/tests/core/pyspec/eth2spec/test/utils/kzg_tests.py new file mode 100644 index 0000000000..c6bbfef56a --- /dev/null +++ b/tests/core/pyspec/eth2spec/test/utils/kzg_tests.py @@ -0,0 +1,155 @@ +from hashlib import sha256 + +from eth_utils 
import ( + encode_hex, + int_to_big_endian, +) + +from eth2spec.utils import bls +from eth2spec.eip7594 import spec + + +############################################################################### +# Helper functions +############################################################################### + +def expect_exception(func, *args): + try: + func(*args) + except Exception: + pass + else: + raise Exception("should have raised exception") + + +def bls_add_one(x): + """ + Adds "one" (actually bls.G1()) to a compressed group element. + Useful to compute definitely incorrect proofs. + """ + return bls.G1_to_bytes48( + bls.add(bls.bytes48_to_G1(x), bls.G1()) + ) + + +def hash(x): + return sha256(x).digest() + + +def make_id(*args): + values_str = "_".join(str(arg) for arg in args) + return hash(bytes(values_str, "utf-8"))[:8].hex() + + +def field_element_bytes(x): + return int.to_bytes(x % spec.BLS_MODULUS, 32, spec.KZG_ENDIANNESS) + + +def field_element_bytes_unchecked(x): + return int.to_bytes(x, 32, spec.KZG_ENDIANNESS) + + +def encode_hex_list(a): + return [encode_hex(x) for x in a] + + +def int_to_hex(n: int, byte_length: int = None) -> str: + byte_value = int_to_big_endian(n) + if byte_length: + byte_value = byte_value.rjust(byte_length, b'\x00') + return encode_hex(byte_value) + + +def evaluate_blob_at(blob, z): + return field_element_bytes( + spec.evaluate_polynomial_in_evaluation_form(spec.blob_to_polynomial(blob), spec.bytes_to_bls_field(z)) + ) + + +############################################################################### +# Global variables +############################################################################### + +BLS_MODULUS_BYTES = spec.BLS_MODULUS.to_bytes(32, spec.KZG_ENDIANNESS) + +# Field Elements + +FE_VALID1 = field_element_bytes(0) +FE_VALID2 = field_element_bytes(1) +FE_VALID3 = field_element_bytes(2) +FE_VALID4 = field_element_bytes(pow(5, 1235, spec.BLS_MODULUS)) +FE_VALID5 = field_element_bytes(spec.BLS_MODULUS - 1) +FE_VALID6 
= field_element_bytes(spec.compute_roots_of_unity(spec.FIELD_ELEMENTS_PER_BLOB)[1]) +VALID_FIELD_ELEMENTS = [FE_VALID1, FE_VALID2, FE_VALID3, FE_VALID4, FE_VALID5, FE_VALID6] + +FE_INVALID_EQUAL_TO_MODULUS = field_element_bytes_unchecked(spec.BLS_MODULUS) +FE_INVALID_MODULUS_PLUS_ONE = field_element_bytes_unchecked(spec.BLS_MODULUS + 1) +FE_INVALID_UINT256_MAX = field_element_bytes_unchecked(2**256 - 1) +FE_INVALID_UINT256_MID = field_element_bytes_unchecked(2**256 - 2**128) +FE_INVALID_LENGTH_PLUS_ONE = VALID_FIELD_ELEMENTS[0] + b"\x00" +FE_INVALID_LENGTH_MINUS_ONE = VALID_FIELD_ELEMENTS[0][:-1] +INVALID_FIELD_ELEMENTS = [FE_INVALID_EQUAL_TO_MODULUS, FE_INVALID_MODULUS_PLUS_ONE, + FE_INVALID_UINT256_MAX, FE_INVALID_UINT256_MID, + FE_INVALID_LENGTH_PLUS_ONE, FE_INVALID_LENGTH_MINUS_ONE] + +# Blobs + +BLOB_ALL_ZEROS = spec.Blob() +BLOB_ALL_TWOS = spec.Blob(b''.join([field_element_bytes(2) for n in range(4096)])) +BLOB_RANDOM_VALID1 = spec.Blob(b''.join([field_element_bytes(pow(2, n + 256, spec.BLS_MODULUS)) for n in range(4096)])) +BLOB_RANDOM_VALID2 = spec.Blob(b''.join([field_element_bytes(pow(3, n + 256, spec.BLS_MODULUS)) for n in range(4096)])) +BLOB_RANDOM_VALID3 = spec.Blob(b''.join([field_element_bytes(pow(5, n + 256, spec.BLS_MODULUS)) for n in range(4096)])) +BLOB_ALL_MODULUS_MINUS_ONE = spec.Blob(b''.join([field_element_bytes(spec.BLS_MODULUS - 1) for n in range(4096)])) +BLOB_ALMOST_ZERO = spec.Blob(b''.join([field_element_bytes(1 if n == 3211 else 0) for n in range(4096)])) + +BLOB_INVALID = spec.Blob(b'\xFF' * 4096 * 32) +BLOB_INVALID_CLOSE = spec.Blob(b''.join( + [BLS_MODULUS_BYTES if n == 2111 else field_element_bytes(0) for n in range(4096)] +)) +BLOB_INVALID_LENGTH_PLUS_ONE = BLOB_RANDOM_VALID1 + b"\x00" +BLOB_INVALID_LENGTH_MINUS_ONE = BLOB_RANDOM_VALID1[:-1] + +VALID_BLOBS = [BLOB_ALL_ZEROS, BLOB_ALL_TWOS, BLOB_RANDOM_VALID1, BLOB_RANDOM_VALID2, + BLOB_RANDOM_VALID3, BLOB_ALL_MODULUS_MINUS_ONE, BLOB_ALMOST_ZERO] +INVALID_BLOBS = [BLOB_INVALID, 
BLOB_INVALID_CLOSE, BLOB_INVALID_LENGTH_PLUS_ONE, BLOB_INVALID_LENGTH_MINUS_ONE] + +# Commitments + +VALID_COMMITMENTS = [spec.blob_to_kzg_commitment(blob) for blob in VALID_BLOBS] + +# Points + +G1 = bls.G1_to_bytes48(bls.G1()) +G1_INVALID_TOO_FEW_BYTES = G1[:-1] +G1_INVALID_TOO_MANY_BYTES = G1 + b"\x00" +G1_INVALID_P1_NOT_IN_G1 = bytes.fromhex("8123456789abcdef0123456789abcdef0123456789abcdef" + + "0123456789abcdef0123456789abcdef0123456789abcdef") +G1_INVALID_P1_NOT_ON_CURVE = bytes.fromhex("8123456789abcdef0123456789abcdef0123456789abcdef" + + "0123456789abcdef0123456789abcdef0123456789abcde0") +INVALID_G1_POINTS = [G1_INVALID_TOO_FEW_BYTES, G1_INVALID_TOO_MANY_BYTES, + G1_INVALID_P1_NOT_IN_G1, G1_INVALID_P1_NOT_ON_CURVE] + +# Individual Cells + +CELL_RANDOM_VALID1 = b"".join([field_element_bytes(pow(2, n + 256, spec.BLS_MODULUS)) + for n in range(spec.FIELD_ELEMENTS_PER_CELL)]) +CELL_RANDOM_VALID2 = b"".join([field_element_bytes(pow(3, n + 256, spec.BLS_MODULUS)) + for n in range(spec.FIELD_ELEMENTS_PER_CELL)]) +CELL_RANDOM_VALID3 = b"".join([field_element_bytes(pow(5, n + 256, spec.BLS_MODULUS)) + for n in range(spec.FIELD_ELEMENTS_PER_CELL)]) + +CELL_ALL_MAX_VALUE = b"".join([field_element_bytes_unchecked(2 ** 256 - 1) + for n in range(spec.FIELD_ELEMENTS_PER_CELL)]) +CELL_ONE_INVALID_FIELD = b"".join([field_element_bytes_unchecked(spec.BLS_MODULUS) + if n == 7 else field_element_bytes(0) + for n in range(spec.FIELD_ELEMENTS_PER_CELL)]) +CELL_INVALID_TOO_FEW_BYTES = CELL_RANDOM_VALID1[:-1] +CELL_INVALID_TOO_MANY_BYTES = CELL_RANDOM_VALID2 + b"\x00" + +VALID_INDIVIDUAL_RANDOM_CELL_BYTES = [CELL_RANDOM_VALID1, CELL_RANDOM_VALID2, CELL_RANDOM_VALID3] +INVALID_INDIVIDUAL_CELL_BYTES = [CELL_ALL_MAX_VALUE, CELL_ONE_INVALID_FIELD, CELL_INVALID_TOO_FEW_BYTES, + CELL_INVALID_TOO_MANY_BYTES] + +# Cells & Proofs + +VALID_CELLS_AND_PROOFS = [] # Saved in case02_compute_cells_and_proofs diff --git a/tests/generators/kzg_4844/main.py b/tests/generators/kzg_4844/main.py 
index 165b287e96..a4d3544bdf 100644 --- a/tests/generators/kzg_4844/main.py +++ b/tests/generators/kzg_4844/main.py @@ -1,118 +1,37 @@ """ -KZG 4844 test vectors generator +KZG test vectors generator for EIP-4844 """ -from hashlib import sha256 from typing import Tuple, Iterable, Any, Callable, Dict -from eth_utils import ( - encode_hex, - int_to_big_endian, -) +from eth_utils import encode_hex -from eth2spec.utils import bls +from eth2spec.deneb import spec +from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing from eth2spec.test.helpers.constants import DENEB from eth2spec.test.helpers.typing import SpecForkName -from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing -from eth2spec.deneb import spec - - -def expect_exception(func, *args): - try: - func(*args) - except Exception: - pass - else: - raise Exception("should have raised exception") - - -def field_element_bytes(x): - return int.to_bytes(x % spec.BLS_MODULUS, 32, spec.KZG_ENDIANNESS) - - -def field_element_bytes_unchecked(x): - return int.to_bytes(x, 32, spec.KZG_ENDIANNESS) - - -def encode_hex_list(a): - return [encode_hex(x) for x in a] - - -def bls_add_one(x): - """ - Adds "one" (actually bls.G1()) to a compressed group element. - Useful to compute definitely incorrect proofs. 
- """ - return bls.G1_to_bytes48( - bls.add(bls.bytes48_to_G1(x), bls.G1()) - ) - - -def evaluate_blob_at(blob, z): - return field_element_bytes( - spec.evaluate_polynomial_in_evaluation_form(spec.blob_to_polynomial(blob), spec.bytes_to_bls_field(z)) - ) - - -BLS_MODULUS_BYTES = spec.BLS_MODULUS.to_bytes(32, spec.KZG_ENDIANNESS) - -G1 = bls.G1_to_bytes48(bls.G1()) -G1_INVALID_TOO_FEW_BYTES = G1[:-1] -G1_INVALID_TOO_MANY_BYTES = G1 + b"\x00" -G1_INVALID_P1_NOT_IN_G1 = bytes.fromhex("8123456789abcdef0123456789abcdef0123456789abcdef" + - "0123456789abcdef0123456789abcdef0123456789abcdef") -G1_INVALID_P1_NOT_ON_CURVE = bytes.fromhex("8123456789abcdef0123456789abcdef0123456789abcdef" + - "0123456789abcdef0123456789abcdef0123456789abcde0") -INVALID_G1_POINTS = [G1_INVALID_TOO_FEW_BYTES, G1_INVALID_TOO_MANY_BYTES, - G1_INVALID_P1_NOT_IN_G1, G1_INVALID_P1_NOT_ON_CURVE] - -BLOB_ALL_ZEROS = spec.Blob() -BLOB_ALL_TWOS = spec.Blob(b''.join([field_element_bytes(2) for n in range(4096)])) -BLOB_RANDOM_VALID1 = spec.Blob(b''.join([field_element_bytes(pow(2, n + 256, spec.BLS_MODULUS)) for n in range(4096)])) -BLOB_RANDOM_VALID2 = spec.Blob(b''.join([field_element_bytes(pow(3, n + 256, spec.BLS_MODULUS)) for n in range(4096)])) -BLOB_RANDOM_VALID3 = spec.Blob(b''.join([field_element_bytes(pow(5, n + 256, spec.BLS_MODULUS)) for n in range(4096)])) -BLOB_ALL_MODULUS_MINUS_ONE = spec.Blob(b''.join([field_element_bytes(spec.BLS_MODULUS - 1) for n in range(4096)])) -BLOB_ALMOST_ZERO = spec.Blob(b''.join([field_element_bytes(1 if n == 3211 else 0) for n in range(4096)])) -BLOB_INVALID = spec.Blob(b'\xFF' * 4096 * 32) -BLOB_INVALID_CLOSE = spec.Blob(b''.join( - [BLS_MODULUS_BYTES if n == 2111 else field_element_bytes(0) for n in range(4096)] -)) -BLOB_INVALID_LENGTH_PLUS_ONE = BLOB_RANDOM_VALID1 + b"\x00" -BLOB_INVALID_LENGTH_MINUS_ONE = BLOB_RANDOM_VALID1[:-1] - -VALID_BLOBS = [BLOB_ALL_ZEROS, BLOB_ALL_TWOS, BLOB_RANDOM_VALID1, BLOB_RANDOM_VALID2, - BLOB_RANDOM_VALID3, 
BLOB_ALL_MODULUS_MINUS_ONE, BLOB_ALMOST_ZERO] -INVALID_BLOBS = [BLOB_INVALID, BLOB_INVALID_CLOSE, BLOB_INVALID_LENGTH_PLUS_ONE, BLOB_INVALID_LENGTH_MINUS_ONE] - -FE_VALID1 = field_element_bytes(0) -FE_VALID2 = field_element_bytes(1) -FE_VALID3 = field_element_bytes(2) -FE_VALID4 = field_element_bytes(pow(5, 1235, spec.BLS_MODULUS)) -FE_VALID5 = field_element_bytes(spec.BLS_MODULUS - 1) -FE_VALID6 = field_element_bytes(spec.compute_roots_of_unity(spec.FIELD_ELEMENTS_PER_BLOB)[1]) -VALID_FIELD_ELEMENTS = [FE_VALID1, FE_VALID2, FE_VALID3, FE_VALID4, FE_VALID5, FE_VALID6] - -FE_INVALID_EQUAL_TO_MODULUS = field_element_bytes_unchecked(spec.BLS_MODULUS) -FE_INVALID_MODULUS_PLUS_ONE = field_element_bytes_unchecked(spec.BLS_MODULUS + 1) -FE_INVALID_UINT256_MAX = field_element_bytes_unchecked(2**256 - 1) -FE_INVALID_UINT256_MID = field_element_bytes_unchecked(2**256 - 2**128) -FE_INVALID_LENGTH_PLUS_ONE = VALID_FIELD_ELEMENTS[0] + b"\x00" -FE_INVALID_LENGTH_MINUS_ONE = VALID_FIELD_ELEMENTS[0][:-1] -INVALID_FIELD_ELEMENTS = [FE_INVALID_EQUAL_TO_MODULUS, FE_INVALID_MODULUS_PLUS_ONE, - FE_INVALID_UINT256_MAX, FE_INVALID_UINT256_MID, - FE_INVALID_LENGTH_PLUS_ONE, FE_INVALID_LENGTH_MINUS_ONE] - - -def hash(x): - return sha256(x).digest() - +from eth2spec.test.utils.kzg_tests import ( + BLOB_ALL_TWOS, + BLOB_ALL_ZEROS, + BLOB_RANDOM_VALID1, + G1, + INVALID_BLOBS, + INVALID_FIELD_ELEMENTS, + INVALID_G1_POINTS, + VALID_BLOBS, + VALID_FIELD_ELEMENTS, + bls_add_one, + encode_hex_list, + expect_exception, + field_element_bytes, + hash, +) +from eth2spec.utils import bls -def int_to_hex(n: int, byte_length: int = None) -> str: - byte_value = int_to_big_endian(n) - if byte_length: - byte_value = byte_value.rjust(byte_length, b'\x00') - return encode_hex(byte_value) +############################################################################### +# Test cases for blob_to_kzg_commitment +############################################################################### def 
case01_blob_to_kzg_commitment(): # Valid cases @@ -138,6 +57,10 @@ def case01_blob_to_kzg_commitment(): } +############################################################################### +# Test cases for compute_kzg_proof +############################################################################### + def case02_compute_kzg_proof(): # Valid cases for blob in VALID_BLOBS: @@ -179,6 +102,10 @@ def case02_compute_kzg_proof(): } +############################################################################### +# Test cases for verify_kzg_proof +############################################################################### + def case03_verify_kzg_proof(): # Valid cases for blob in VALID_BLOBS: @@ -341,6 +268,10 @@ def case03_verify_kzg_proof(): } +############################################################################### +# Test cases for compute_blob_kzg_proof +############################################################################### + def case04_compute_blob_kzg_proof(): # Valid cases for blob in VALID_BLOBS: @@ -382,6 +313,10 @@ def case04_compute_blob_kzg_proof(): } +############################################################################### +# Test cases for verify_blob_kzg_proof +############################################################################### + def case05_verify_blob_kzg_proof(): # Valid cases for blob in VALID_BLOBS: @@ -503,6 +438,10 @@ def case05_verify_blob_kzg_proof(): } +############################################################################### +# Test cases for verify_blob_kzg_proof_batch +############################################################################### + def case06_verify_blob_kzg_proof_batch(): # Valid cases proofs = [] @@ -627,6 +566,10 @@ def case06_verify_blob_kzg_proof_batch(): } +############################################################################### +# Main logic +############################################################################### + def create_provider(fork_name: SpecForkName, 
handler_name: str, test_case_fn: Callable[[], Iterable[Tuple[str, Dict[str, Any]]]]) -> gen_typing.TestProvider: diff --git a/tests/generators/kzg_7594/README.md b/tests/generators/kzg_7594/README.md new file mode 100644 index 0000000000..5336255ce0 --- /dev/null +++ b/tests/generators/kzg_7594/README.md @@ -0,0 +1,3 @@ +# KZG Test Generator for EIP-7594 + +These tests are specific to the API required for implementing PeerDAS polynomial commitment sampling. \ No newline at end of file diff --git a/tests/generators/kzg_7594/main.py b/tests/generators/kzg_7594/main.py new file mode 100644 index 0000000000..670ed29ee3 --- /dev/null +++ b/tests/generators/kzg_7594/main.py @@ -0,0 +1,837 @@ +""" +KZG test vectors generator for EIP-7594 +""" + +from typing import Tuple, Iterable, Any, Callable, Dict + +from eth_utils import encode_hex + +from eth2spec.eip7594 import spec +from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing +from eth2spec.test.helpers.constants import EIP7594 +from eth2spec.test.helpers.typing import SpecForkName +from eth2spec.test.utils.kzg_tests import ( + BLOB_RANDOM_VALID1, + BLOB_RANDOM_VALID2, + BLOB_RANDOM_VALID3, + CELL_RANDOM_VALID1, + CELL_RANDOM_VALID2, + INVALID_BLOBS, + INVALID_G1_POINTS, + INVALID_INDIVIDUAL_CELL_BYTES, + VALID_BLOBS, + VALID_CELLS_AND_PROOFS, + VALID_COMMITMENTS, + VALID_INDIVIDUAL_RANDOM_CELL_BYTES, + bls_add_one, + encode_hex_list, + expect_exception, + make_id, +) +from eth2spec.utils import bls + + +############################################################################### +# Test cases for compute_cells +############################################################################### + +def case01_compute_cells(): + # Valid cases + for blob in VALID_BLOBS: + cells = spec.compute_cells(blob) + identifier = make_id(blob) + yield f'compute_cells_case_valid_{identifier}', { + 'input': { + 'blob': encode_hex(blob), + }, + 'output': encode_hex_list(cells) + } + + # Edge case: Invalid blobs + for blob in 
INVALID_BLOBS: + expect_exception(spec.compute_cells, blob) + identifier = make_id(blob) + yield f'compute_cells_case_invalid_blob_{identifier}', { + 'input': { + 'blob': encode_hex(blob) + }, + 'output': None + } + + +############################################################################### +# Test cases for compute_cells_and_proofs +############################################################################### + +def case02_compute_cells_and_proofs(): + # Valid cases + for blob in VALID_BLOBS: + cells, proofs = spec.compute_cells_and_proofs(blob) + # Save cells & proofs here to save on time. + VALID_CELLS_AND_PROOFS.append((cells, proofs)) + identifier = make_id(blob) + yield f'compute_cells_and_proofs_case_valid_{identifier}', { + 'input': { + 'blob': encode_hex(blob), + }, + 'output': (encode_hex_list(cells), encode_hex_list(proofs)) + } + + # Edge case: Invalid blobs + for blob in INVALID_BLOBS: + expect_exception(spec.compute_cells_and_proofs, blob) + identifier = make_id(blob) + yield f'compute_cells_and_proofs_case_invalid_blob_{identifier}', { + 'input': { + 'blob': encode_hex(blob) + }, + 'output': None + } + + +############################################################################### +# Test cases for verify_cell_proof +############################################################################### + +def case03_verify_cell_proof(): + # Valid cases + for i in range(len(VALID_BLOBS)): + cells, proofs = VALID_CELLS_AND_PROOFS[i] + commitment = VALID_COMMITMENTS[i] + cell_id = (2 ** i - 1) % spec.CELLS_PER_EXT_BLOB + cell = cells[cell_id] + proof = proofs[cell_id] + assert spec.verify_cell_proof(commitment, cell_id, cell, proof) + identifier = make_id(commitment, cell_id, cell, proof) + yield f'verify_cell_proof_case_valid_{identifier}', { + 'input': { + 'commitment': encode_hex(commitment), + 'cell_id': cell_id, + 'cell': encode_hex(cell), + 'proof': encode_hex(proof), + }, + 'output': True + } + + # Incorrect commitment + for i in 
range(len(VALID_BLOBS)): + cells, proofs = VALID_CELLS_AND_PROOFS[i] + commitment = bls_add_one(VALID_COMMITMENTS[i]) + cell_id = 99 % spec.CELLS_PER_EXT_BLOB + cell = cells[cell_id] + proof = proofs[cell_id] + assert not spec.verify_cell_proof(commitment, cell_id, cell, proof) + identifier = make_id(commitment, cell_id, cell, proof) + yield f'verify_cell_proof_case_incorrect_commitment_{identifier}', { + 'input': { + 'commitment': encode_hex(commitment), + 'cell_id': cell_id, + 'cell': encode_hex(cell), + 'proof': encode_hex(proof), + }, + 'output': False + } + + # Incorrect cell + for i in range(len(VALID_INDIVIDUAL_RANDOM_CELL_BYTES)): + cell_id = 16 % spec.CELLS_PER_EXT_BLOB + commitment = VALID_COMMITMENTS[i] + cells, proofs = VALID_CELLS_AND_PROOFS[i] + cell = VALID_INDIVIDUAL_RANDOM_CELL_BYTES[i] + proof = proofs[cell_id] + assert not spec.verify_cell_proof(commitment, cell_id, cell, proof) + identifier = make_id(commitment, cell_id, cell, proof) + yield f'verify_cell_proof_case_incorrect_cell_{identifier}', { + 'input': { + 'commitment': encode_hex(commitment), + 'cell_id': cell_id, + 'cell': encode_hex(cell), + 'proof': encode_hex(proof), + }, + 'output': False + } + + # Incorrect proof + for i in range(len(VALID_BLOBS)): + cell_id = 91 % spec.CELLS_PER_EXT_BLOB + commitment = VALID_COMMITMENTS[i] + cells, proofs = VALID_CELLS_AND_PROOFS[i] + cell = cells[cell_id] + proof = bls_add_one(proofs[cell_id]) + assert not spec.verify_cell_proof(commitment, cell_id, cell, proof) + identifier = make_id(commitment, cell_id, cell, proof) + yield f'verify_cell_proof_case_incorrect_proof_{identifier}', { + 'input': { + 'commitment': encode_hex(commitment), + 'cell_id': cell_id, + 'cell': encode_hex(cell), + 'proof': encode_hex(proof), + }, + 'output': False + } + + # Edge case: Invalid commitment + for commitment in INVALID_G1_POINTS: + cells, proofs = VALID_CELLS_AND_PROOFS[0] + cell_id = 81 % spec.CELLS_PER_EXT_BLOB + cell = cells[cell_id] + proof = proofs[cell_id] + 
expect_exception(spec.verify_cell_proof, commitment, cell_id, cell, proof) + identifier = make_id(commitment, cell_id, cell, proof) + yield f'verify_cell_proof_case_invalid_commitment_{identifier}', { + 'input': { + 'commitment': encode_hex(commitment), + 'cell_id': cell_id, + 'cell': encode_hex(cell), + 'proof': encode_hex(proof), + }, + 'output': None + } + + # Edge case: Invalid cell_id + for cell_id in [spec.CELLS_PER_EXT_BLOB, spec.CELLS_PER_EXT_BLOB + 1]: + cells, proofs = VALID_CELLS_AND_PROOFS[1] + commitment = VALID_COMMITMENTS[1] + cell = cells[0] + proof = proofs[0] + expect_exception(spec.verify_cell_proof, commitment, cell_id, cell, proof) + identifier = make_id(commitment, cell_id, cell, proof) + yield f'verify_cell_proof_case_invalid_cell_id_{identifier}', { + 'input': { + 'commitment': encode_hex(commitment), + 'cell_id': cell_id, + 'cell': encode_hex(cell), + 'proof': encode_hex(proof), + }, + 'output': None + } + + # Edge case: Invalid cell + for cell in INVALID_INDIVIDUAL_CELL_BYTES: + cell_id = 32 % spec.CELLS_PER_EXT_BLOB + commitment = VALID_COMMITMENTS[2] + cells, proofs = VALID_CELLS_AND_PROOFS[2] + proof = proofs[cell_id] + expect_exception(spec.verify_cell_proof, commitment, cell_id, cell, proof) + identifier = make_id(commitment, cell_id, cell, proof) + yield f'verify_cell_proof_case_invalid_cell_{identifier}', { + 'input': { + 'commitment': encode_hex(commitment), + 'cell_id': cell_id, + 'cell': encode_hex(cell), + 'proof': encode_hex(proof), + }, + 'output': None + } + + # Edge case: Invalid proof + for proof in INVALID_G1_POINTS: + cells, _ = VALID_CELLS_AND_PROOFS[3] + commitment = VALID_COMMITMENTS[3] + cell_id = 36 % spec.CELLS_PER_EXT_BLOB + cell = cells[cell_id] + expect_exception(spec.verify_cell_proof, commitment, cell_id, cell, proof) + identifier = make_id(commitment, cell_id, cell, proof) + yield f'verify_cell_proof_case_invalid_proof_{identifier}', { + 'input': { + 'commitment': encode_hex(commitment), + 'cell_id': cell_id, 
+ 'cell': encode_hex(cell), + 'proof': encode_hex(proof), + }, + 'output': None + } + + +############################################################################### +# Test cases for verify_cell_proof_batch +############################################################################### + +def case04_verify_cell_proof_batch(): + # Valid cases + for i in range(len(VALID_BLOBS)): + cells, proofs = VALID_CELLS_AND_PROOFS[i] + row_commitments = [VALID_COMMITMENTS[i]] + row_indices = [0] * spec.CELLS_PER_EXT_BLOB + column_indices = list(range(spec.CELLS_PER_EXT_BLOB)) + assert spec.verify_cell_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) + identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) + yield f'verify_cell_proof_batch_case_valid_{identifier}', { + 'input': { + 'row_commitments': encode_hex_list(row_commitments), + 'row_indices': row_indices, + 'column_indices': column_indices, + 'cells': encode_hex_list(cells), + 'proofs': encode_hex_list(proofs), + }, + 'output': True + } + + # Valid: zero cells + cells, row_commitments, row_indices, column_indices, proofs = [], [], [], [], [] + assert spec.verify_cell_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) + identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) + yield f'verify_cell_proof_batch_case_valid_zero_cells_{identifier}', { + 'input': { + 'row_commitments': encode_hex_list(row_commitments), + 'row_indices': row_indices, + 'column_indices': column_indices, + 'cells': encode_hex_list(cells), + 'proofs': encode_hex_list(proofs), + }, + 'output': True + } + + # Valid: Verify cells from multiple blobs + cells0, proofs0 = VALID_CELLS_AND_PROOFS[0] + cells1, proofs1 = VALID_CELLS_AND_PROOFS[1] + row_commitments = VALID_COMMITMENTS[:2] + row_indices = [0, 1] + column_indices = [0, 0] + cells = [cells0[0], cells1[0]] + proofs = [proofs0[0], proofs1[0]] + assert spec.verify_cell_proof_batch(row_commitments, 
row_indices, column_indices, cells, proofs) + identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) + yield f'verify_cell_proof_batch_case_valid_multiple_blobs_{identifier}', { + 'input': { + 'row_commitments': encode_hex_list(row_commitments), + 'row_indices': row_indices, + 'column_indices': column_indices, + 'cells': encode_hex_list(cells), + 'proofs': encode_hex_list(proofs), + }, + 'output': True + } + + # Valid: Unused row commitments + cells, proofs = VALID_CELLS_AND_PROOFS[2] + cells, proofs = cells[:3], proofs[:3] + # Provide list of all commitments + row_commitments = VALID_COMMITMENTS + row_indices = [2] * len(cells) + column_indices = list(range(len(cells))) + assert spec.verify_cell_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) + identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) + yield f'verify_cell_proof_batch_case_valid_unused_row_commitments_{identifier}', { + 'input': { + 'row_commitments': encode_hex_list(row_commitments), + 'row_indices': row_indices, + 'column_indices': column_indices, + 'cells': encode_hex_list(cells), + 'proofs': encode_hex_list(proofs), + }, + 'output': True + } + + # Valid: Same cell multiple times + row_commitments = [VALID_COMMITMENTS[3]] + num_duplicates = 3 + row_indices = [0] * num_duplicates + column_indices = [0] * num_duplicates + cells = [VALID_CELLS_AND_PROOFS[3][0][0]] * num_duplicates + proofs = [VALID_CELLS_AND_PROOFS[3][1][0]] * num_duplicates + assert spec.verify_cell_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) + identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) + yield f'verify_cell_proof_batch_case_valid_same_cell_multiple_times_{identifier}', { + 'input': { + 'row_commitments': encode_hex_list(row_commitments), + 'row_indices': row_indices, + 'column_indices': column_indices, + 'cells': encode_hex_list(cells), + 'proofs': encode_hex_list(proofs), + }, + 
'output': True + } + + # Incorrect row commitment + cells, proofs = VALID_CELLS_AND_PROOFS[5] + cells, proofs = cells[:1], proofs[:1] + # Change commitment so it's wrong + row_commitments = [bls_add_one(VALID_COMMITMENTS[5])] + row_indices = [0] * len(cells) + column_indices = list(range(len(cells))) + assert not spec.verify_cell_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) + identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) + yield f'verify_cell_proof_batch_case_incorrect_row_commitment_{identifier}', { + 'input': { + 'row_commitments': encode_hex_list(row_commitments), + 'row_indices': row_indices, + 'column_indices': column_indices, + 'cells': encode_hex_list(cells), + 'proofs': encode_hex_list(proofs), + }, + 'output': False + } + + # Incorrect cell + cells, proofs = VALID_CELLS_AND_PROOFS[6] + cells, proofs = cells[:1], proofs[:1] + row_commitments = [VALID_COMMITMENTS[6]] + row_indices = [0] * len(cells) + column_indices = list(range(len(cells))) + # Change last cell so it's wrong + cells[-1] = CELL_RANDOM_VALID2 + assert not spec.verify_cell_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) + identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) + yield f'verify_cell_proof_batch_case_incorrect_cell_{identifier}', { + 'input': { + 'row_commitments': encode_hex_list(row_commitments), + 'row_indices': row_indices, + 'column_indices': column_indices, + 'cells': encode_hex_list(cells), + 'proofs': encode_hex_list(proofs), + }, + 'output': False + } + + # Incorrect proof + cells, proofs = VALID_CELLS_AND_PROOFS[0] + cells, proofs = cells[:1], proofs[:1] + row_commitments = [VALID_COMMITMENTS[0]] + row_indices = [0] * len(cells) + column_indices = list(range(len(cells))) + # Change last proof so it's wrong + proofs[-1] = bls_add_one(proofs[-1]) + assert not spec.verify_cell_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) + 
identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) + yield f'verify_cell_proof_batch_case_incorrect_proof_{identifier}', { + 'input': { + 'row_commitments': encode_hex_list(row_commitments), + 'row_indices': row_indices, + 'column_indices': column_indices, + 'cells': encode_hex_list(cells), + 'proofs': encode_hex_list(proofs), + }, + 'output': False + } + + # Edge case: Invalid row commitment + for i, commitment in enumerate(INVALID_G1_POINTS): + cells, proofs = VALID_CELLS_AND_PROOFS[i % len(INVALID_G1_POINTS)] + cells, proofs = cells[:1], proofs[:1] + # Set row_commitments to the invalid commitment + row_commitments = [commitment] + row_indices = [0] * len(cells) + column_indices = list(range(len(cells))) + expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) + yield f'verify_cell_proof_batch_case_invalid_row_commitment_{identifier}', { + 'input': { + 'row_commitments': encode_hex_list(row_commitments), + 'row_indices': row_indices, + 'column_indices': column_indices, + 'cells': encode_hex_list(cells), + 'proofs': encode_hex_list(proofs), + }, + 'output': None + } + + # Edge case: Invalid row_index + cells, proofs = VALID_CELLS_AND_PROOFS[0] + cells, proofs = cells[:1], proofs[:1] + row_commitments = [VALID_COMMITMENTS[0]] + row_indices = [0] * len(cells) + # Set first row index to an invalid value + row_indices[0] = 1 + column_indices = list(range(len(cells))) + expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) + yield f'verify_cell_proof_batch_case_invalid_row_index_{identifier}', { + 'input': { + 'row_commitments': encode_hex_list(row_commitments), + 'row_indices': row_indices, + 'column_indices': column_indices, + 'cells': encode_hex_list(cells), + 
'proofs': encode_hex_list(proofs), + }, + 'output': None + } + + # Edge case: Invalid column_index + cells, proofs = VALID_CELLS_AND_PROOFS[1] + cells, proofs = cells[:1], proofs[:1] + row_commitments = [VALID_COMMITMENTS[1]] + row_indices = [0] * len(cells) + column_indices = list(range(len(cells))) + # Set first column index to an invalid value + column_indices[0] = spec.CELLS_PER_EXT_BLOB + expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) + yield f'verify_cell_proof_batch_case_invalid_column_index_{identifier}', { + 'input': { + 'row_commitments': encode_hex_list(row_commitments), + 'row_indices': row_indices, + 'column_indices': column_indices, + 'cells': encode_hex_list(cells), + 'proofs': encode_hex_list(proofs), + }, + 'output': None + } + + # Edge case: Invalid cell + for i, cell in enumerate(INVALID_INDIVIDUAL_CELL_BYTES): + cells, proofs = VALID_CELLS_AND_PROOFS[i % len(INVALID_INDIVIDUAL_CELL_BYTES)] + cells, proofs = cells[:1], proofs[:1] + row_commitments = [VALID_COMMITMENTS[i % len(INVALID_INDIVIDUAL_CELL_BYTES)]] + row_indices = [0] * len(cells) + column_indices = list(range(len(cells))) + # Set first cell to the invalid cell + cells[0] = cell + expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) + yield f'verify_cell_proof_batch_case_invalid_cell_{identifier}', { + 'input': { + 'row_commitments': encode_hex_list(row_commitments), + 'row_indices': row_indices, + 'column_indices': column_indices, + 'cells': encode_hex_list(cells), + 'proofs': encode_hex_list(proofs), + }, + 'output': None + } + + # Edge case: Invalid proof + for i, proof in enumerate(INVALID_G1_POINTS): + cells, proofs = VALID_CELLS_AND_PROOFS[i % len(INVALID_G1_POINTS)] + cells, proofs = cells[:1], 
proofs[:1] + row_commitments = [VALID_COMMITMENTS[i % len(INVALID_G1_POINTS)]] + row_indices = [0] * len(cells) + column_indices = list(range(len(cells))) + # Set first proof to the invalid proof + proofs[0] = proof + expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) + yield f'verify_cell_proof_batch_case_invalid_proof_{identifier}', { + 'input': { + 'row_commitments': encode_hex_list(row_commitments), + 'row_indices': row_indices, + 'column_indices': column_indices, + 'cells': encode_hex_list(cells), + 'proofs': encode_hex_list(proofs), + }, + 'output': None + } + + # Edge case: Missing a row commitment + cells, proofs = VALID_CELLS_AND_PROOFS[0] + cells, proofs = cells[:1], proofs[:1] + # Do not include the row commitment + row_commitments = [] + row_indices = [0] * len(cells) + column_indices = list(range(len(cells))) + expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) + yield f'verify_cell_proof_batch_case_invalid_missing_row_commitment_{identifier}', { + 'input': { + 'row_commitments': encode_hex_list(row_commitments), + 'row_indices': row_indices, + 'column_indices': column_indices, + 'cells': encode_hex_list(cells), + 'proofs': encode_hex_list(proofs), + }, + 'output': None + } + + # Edge case: Missing a row index + cells, proofs = VALID_CELLS_AND_PROOFS[1] + cells, proofs = cells[:2], proofs[:2] + row_commitments = [VALID_COMMITMENTS[1]] + # Leave off one of the row indices + row_indices = [0] * (len(cells) - 1) + column_indices = list(range(len(cells))) + expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) + yield 
f'verify_cell_proof_batch_case_invalid_missing_row_index_{identifier}', { + 'input': { + 'row_commitments': encode_hex_list(row_commitments), + 'row_indices': row_indices, + 'column_indices': column_indices, + 'cells': encode_hex_list(cells), + 'proofs': encode_hex_list(proofs), + }, + 'output': None + } + + # Edge case: Missing a column index + cells, proofs = VALID_CELLS_AND_PROOFS[2] + cells, proofs = cells[:2], proofs[:2] + row_commitments = [VALID_COMMITMENTS[2]] + row_indices = [0] * len(cells) + # Leave off one of the column indices + column_indices = list(range(len(cells) - 1)) + expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) + yield f'verify_cell_proof_batch_case_invalid_missing_column_index_{identifier}', { + 'input': { + 'row_commitments': encode_hex_list(row_commitments), + 'row_indices': row_indices, + 'column_indices': column_indices, + 'cells': encode_hex_list(cells), + 'proofs': encode_hex_list(proofs), + }, + 'output': None + } + + # Edge case: Missing a cell + cells, proofs = VALID_CELLS_AND_PROOFS[3] + cells, proofs = cells[:2], proofs[:2] + row_commitments = [VALID_COMMITMENTS[3]] + row_indices = [0] * len(cells) + column_indices = list(range(len(cells))) + # Remove the last proof + cells = cells[:-1] + expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) + yield f'verify_cell_proof_batch_case_invalid_missing_cell_{identifier}', { + 'input': { + 'row_commitments': encode_hex_list(row_commitments), + 'row_indices': row_indices, + 'column_indices': column_indices, + 'cells': encode_hex_list(cells), + 'proofs': encode_hex_list(proofs), + }, + 'output': None + } + + # Edge case: Missing a proof + cells, proofs = VALID_CELLS_AND_PROOFS[4] + cells, proofs = cells[:2], 
proofs[:2] + row_commitments = [VALID_COMMITMENTS[4]] + row_indices = [0] * len(cells) + column_indices = list(range(len(cells))) + # Remove the last proof + proofs = proofs[:-1] + expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) + yield f'verify_cell_proof_batch_case_invalid_missing_proof_{identifier}', { + 'input': { + 'row_commitments': encode_hex_list(row_commitments), + 'row_indices': row_indices, + 'column_indices': column_indices, + 'cells': encode_hex_list(cells), + 'proofs': encode_hex_list(proofs), + }, + 'output': None + } + + +############################################################################### +# Test cases for recover_all_cells +############################################################################### + +def case05_recover_all_cells(): + # Valid: No missing cells + blob = BLOB_RANDOM_VALID1 + cells = spec.compute_cells(blob) + cell_ids = list(range(spec.CELLS_PER_EXT_BLOB)) + recovered_cells = spec.recover_all_cells(cell_ids, cells) + assert recovered_cells == cells + identifier = make_id(cell_ids, cells) + yield f'recover_all_cells_case_valid_no_missing_{identifier}', { + 'input': { + 'cell_ids': cell_ids, + 'cells': encode_hex_list(cells), + }, + 'output': encode_hex_list(recovered_cells) + } + + # Valid: Half missing cells (every other cell) + blob = BLOB_RANDOM_VALID2 + cells = spec.compute_cells(blob) + cell_ids = list(range(0, spec.CELLS_PER_EXT_BLOB, 2)) + partial_cells = [cells[cell_id] for cell_id in cell_ids] + recovered_cells = spec.recover_all_cells(cell_ids, partial_cells) + assert recovered_cells == cells + identifier = make_id(cell_ids, partial_cells) + yield f'recover_all_cells_case_valid_half_missing_every_other_cell_{identifier}', { + 'input': { + 'cell_ids': cell_ids, + 'cells': encode_hex_list(partial_cells), + }, + 'output': encode_hex_list(recovered_cells) + } + + # Valid: 
Half missing cells (first half) + blob = BLOB_RANDOM_VALID3 + cells = spec.compute_cells(blob) + cell_ids = list(range(0, spec.CELLS_PER_EXT_BLOB // 2)) + partial_cells = [cells[cell_id] for cell_id in cell_ids] + recovered_cells = spec.recover_all_cells(cell_ids, partial_cells) + assert recovered_cells == cells + identifier = make_id(cell_ids, partial_cells) + yield f'recover_all_cells_case_valid_half_missing_first_half_{identifier}', { + 'input': { + 'cell_ids': cell_ids, + 'cells': encode_hex_list(partial_cells), + }, + 'output': encode_hex_list(recovered_cells) + } + + # Valid: Half missing cells (second half) + blob = BLOB_RANDOM_VALID1 + cells = spec.compute_cells(blob) + cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2, spec.CELLS_PER_EXT_BLOB)) + partial_cells = [cells[cell_id] for cell_id in cell_ids] + recovered_cells = spec.recover_all_cells(cell_ids, partial_cells) + assert recovered_cells == cells + identifier = make_id(cell_ids, partial_cells) + yield f'recover_all_cells_case_valid_half_missing_second_half_{identifier}', { + 'input': { + 'cell_ids': cell_ids, + 'cells': encode_hex_list(partial_cells), + }, + 'output': encode_hex_list(recovered_cells) + } + + # Edge case: All cells are missing + cell_ids, partial_cells = [], [] + expect_exception(spec.recover_all_cells, cell_ids, partial_cells) + identifier = make_id(cell_ids, partial_cells) + yield f'recover_all_cells_case_invalid_all_cells_are_missing_{identifier}', { + 'input': { + 'cell_ids': cell_ids, + 'cells': encode_hex_list(partial_cells), + }, + 'output': None + } + + # Edge case: More than half missing + blob = BLOB_RANDOM_VALID2 + cells = spec.compute_cells(blob) + cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2 - 1)) + partial_cells = [cells[cell_id] for cell_id in cell_ids] + expect_exception(spec.recover_all_cells, cell_ids, partial_cells) + identifier = make_id(cell_ids, partial_cells) + yield f'recover_all_cells_case_invalid_more_than_half_missing_{identifier}', { + 'input': { + 
'cell_ids': cell_ids, + 'cells': encode_hex_list(partial_cells), + }, + 'output': None + } + + # Edge case: Invalid cell_id + blob = BLOB_RANDOM_VALID1 + cells = spec.compute_cells(blob) + cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2)) + partial_cells = [cells[cell_id] for cell_id in cell_ids] + # Replace first cell_id with an invalid value + cell_ids[0] = spec.CELLS_PER_EXT_BLOB + expect_exception(spec.recover_all_cells, cell_ids, partial_cells) + identifier = make_id(cell_ids, partial_cells) + yield f'recover_all_cells_case_invalid_cell_id_{identifier}', { + 'input': { + 'cell_ids': cell_ids, + 'cells': encode_hex_list(partial_cells), + }, + 'output': None + } + + # Edge case: Invalid cell + blob = BLOB_RANDOM_VALID2 + for cell in INVALID_INDIVIDUAL_CELL_BYTES: + cells = spec.compute_cells(blob) + cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2)) + partial_cells = [cells[cell_id] for cell_id in cell_ids] + # Replace first cell with an invalid value + partial_cells[0] = cell + expect_exception(spec.recover_all_cells, cell_ids, partial_cells) + identifier = make_id(cell_ids, partial_cells) + yield f'recover_all_cells_case_invalid_cell_{identifier}', { + 'input': { + 'cell_ids': cell_ids, + 'cells': encode_hex_list(partial_cells), + }, + 'output': None + } + + # Edge case: More cell_ids than cells + blob = BLOB_RANDOM_VALID3 + cells = spec.compute_cells(blob) + cell_ids = list(range(0, spec.CELLS_PER_EXT_BLOB, 2)) + partial_cells = [cells[cell_id] for cell_id in cell_ids] + # Add another cell_id + cell_ids.append(spec.CELLS_PER_EXT_BLOB - 1) + expect_exception(spec.recover_all_cells, cell_ids, partial_cells) + identifier = make_id(cell_ids, partial_cells) + yield f'recover_all_cells_case_invalid_more_cell_ids_than_cells_{identifier}', { + 'input': { + 'cell_ids': cell_ids, + 'cells': encode_hex_list(partial_cells), + }, + 'output': None + } + + # Edge case: More cells than cell_ids + blob = BLOB_RANDOM_VALID1 + cells = spec.compute_cells(blob) + cell_ids = 
list(range(0, spec.CELLS_PER_EXT_BLOB, 2)) + partial_cells = [cells[cell_id] for cell_id in cell_ids] + # Add another cell + partial_cells.append(CELL_RANDOM_VALID1) + expect_exception(spec.recover_all_cells, cell_ids, partial_cells) + identifier = make_id(cell_ids, partial_cells) + yield f'recover_all_cells_case_invalid_more_cells_than_cell_ids_{identifier}', { + 'input': { + 'cell_ids': cell_ids, + 'cells': encode_hex_list(partial_cells), + }, + 'output': None + } + + # Edge case: Duplicate cell_id + blob = BLOB_RANDOM_VALID2 + cells = spec.compute_cells(blob) + cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2)) + partial_cells = [cells[cell_id] for cell_id in cell_ids] + # Replace first cell_id with the second cell_id + cell_ids[0] = cell_ids[1] + expect_exception(spec.recover_all_cells, cell_ids, partial_cells) + identifier = make_id(cell_ids, partial_cells) + yield f'recover_all_cells_case_invalid_duplicate_cell_id_{identifier}', { + 'input': { + 'cell_ids': cell_ids, + 'cells': encode_hex_list(partial_cells), + }, + 'output': None + } + + +############################################################################### +# Main logic +############################################################################### + +def create_provider(fork_name: SpecForkName, + handler_name: str, + test_case_fn: Callable[[], Iterable[Tuple[str, Dict[str, Any]]]]) -> gen_typing.TestProvider: + def prepare_fn() -> None: + # Nothing to load / change in spec. Maybe in future forks. + # Put the tests into the general config category, to not require any particular configuration. 
+ return + + def cases_fn() -> Iterable[gen_typing.TestCase]: + for data in test_case_fn(): + (case_name, case_content) = data + yield gen_typing.TestCase( + fork_name=fork_name, + preset_name='general', + runner_name='kzg', + handler_name=handler_name, + suite_name='kzg-mainnet', + case_name=case_name, + case_fn=lambda: [('data', 'data', case_content)] + ) + + return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn) + + +if __name__ == "__main__": + bls.use_arkworks() + gen_runner.run_generator("kzg_7594", [ + # EIP-7594 + create_provider(EIP7594, 'compute_cells', case01_compute_cells), + create_provider(EIP7594, 'compute_cells_and_proofs', case02_compute_cells_and_proofs), + create_provider(EIP7594, 'verify_cell_proof', case03_verify_cell_proof), + create_provider(EIP7594, 'verify_cell_proof_batch', case04_verify_cell_proof_batch), + create_provider(EIP7594, 'recover_all_cells', case05_recover_all_cells), + ]) diff --git a/tests/generators/kzg_7594/requirements.txt b/tests/generators/kzg_7594/requirements.txt new file mode 100644 index 0000000000..1822486863 --- /dev/null +++ b/tests/generators/kzg_7594/requirements.txt @@ -0,0 +1,2 @@ +pytest>=4.4 +../../../[generator] From e973b8d77b9e550ef2928ad5567a91a1ada20d06 Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Thu, 25 Apr 2024 03:13:05 +0800 Subject: [PATCH 53/60] Add EIP-7594 to `TESTGEN_FORKS` --- tests/core/pyspec/eth2spec/test/helpers/constants.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/core/pyspec/eth2spec/test/helpers/constants.py b/tests/core/pyspec/eth2spec/test/helpers/constants.py index 61ae170f0a..067d2a480e 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/constants.py +++ b/tests/core/pyspec/eth2spec/test/helpers/constants.py @@ -26,7 +26,7 @@ # # The forks that are deployed on Mainnet -MAINNET_FORKS = (PHASE0, ALTAIR, BELLATRIX, CAPELLA) +MAINNET_FORKS = (PHASE0, ALTAIR, BELLATRIX, CAPELLA, DENEB) LATEST_FORK = MAINNET_FORKS[-1] # The 
forks that pytest can run with. # Note: when adding a new fork here, all tests from previous forks with decorator `with_X_and_later` @@ -40,9 +40,9 @@ EIP7594, ) # The forks that have light client specs -LIGHT_CLIENT_TESTING_FORKS = (*[item for item in MAINNET_FORKS if item != PHASE0], DENEB) +LIGHT_CLIENT_TESTING_FORKS = (*[item for item in MAINNET_FORKS if item != PHASE0],) # The forks that output to the test vectors. -TESTGEN_FORKS = (*MAINNET_FORKS, DENEB, ELECTRA, WHISK) +TESTGEN_FORKS = (*MAINNET_FORKS, ELECTRA, EIP7594, WHISK) # Forks allowed in the test runner `--fork` flag, to fail fast in case of typos ALLOWED_TEST_RUNNER_FORKS = (*ALL_PHASES, WHISK) From 7819e7f558ccaa098ce0fe190dec15ab503a8780 Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Thu, 25 Apr 2024 23:30:40 +0800 Subject: [PATCH 54/60] Fix and move `test_multiple_consolidations_*` tests to sanity block tests --- .../test_process_consolidation.py | 245 ---------------- .../test/electra/sanity/blocks/__init__.py | 1 + .../sanity/blocks/test_consolidation.py | 271 ++++++++++++++++++ 3 files changed, 272 insertions(+), 245 deletions(-) create mode 100644 tests/core/pyspec/eth2spec/test/electra/sanity/blocks/test_consolidation.py diff --git a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_consolidation.py b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_consolidation.py index 9af262f60e..b1b55645bf 100644 --- a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_consolidation.py +++ b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_consolidation.py @@ -404,251 +404,6 @@ def test_consolidation_balance_through_two_churn_epochs(spec, state): assert state.consolidation_balance_to_consume == expected_balance -@with_electra_and_later -@with_presets([MINIMAL], "need sufficient consolidation churn limit") -@with_custom_state( - balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit, - 
threshold_fn=default_activation_threshold, -) -@spec_test -@single_phase -def test_multiple_consolidations_below_churn(spec, state): - # This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn - consolidation_churn_limit = spec.get_consolidation_churn_limit(state) - # Set the consolidation balance to consume equal to churn limit - state.consolidation_balance_to_consume = consolidation_churn_limit - current_epoch = spec.get_current_epoch(state) - - yield "pre", state - # Prepare a bunch of consolidations, based on the current state - consolidations = [] - for i in range(3): - source_index = 2 * i - target_index = 2 * i + 1 - source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] - target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] - # Set source and target withdrawal credentials to the same eth1 credential - set_eth1_withdrawal_credential_with_balance(spec, state, source_index) - set_eth1_withdrawal_credential_with_balance(spec, state, target_index) - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation( - epoch=current_epoch, - source_index=source_index, - target_index=target_index, - ), - source_privkey, - target_privkey, - ) - consolidations.append(signed_consolidation) - - # Now run all the consolidations - for consolidation in consolidations: - # the function yields data, but we are just interested in running it here, ignore yields. 
- for _ in run_consolidation_processing(spec, state, consolidation): - continue - - yield "post", state - - expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch) - assert state.earliest_consolidation_epoch == expected_exit_epoch - assert ( - state.consolidation_balance_to_consume - == consolidation_churn_limit - 3 * spec.MIN_ACTIVATION_BALANCE - ) - for i in range(3): - assert state.validators[2 * i].exit_epoch == expected_exit_epoch - - -@with_electra_and_later -@with_presets([MINIMAL], "need sufficient consolidation churn limit") -@with_custom_state( - balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit, - threshold_fn=default_activation_threshold, -) -@spec_test -@single_phase -def test_multiple_consolidations_equal_churn(spec, state): - # This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn - consolidation_churn_limit = spec.get_consolidation_churn_limit(state) - # Set the consolidation balance to consume equal to churn limit - state.consolidation_balance_to_consume = consolidation_churn_limit - current_epoch = spec.get_current_epoch(state) - - yield "pre", state - # Prepare a bunch of consolidations, based on the current state - consolidations = [] - for i in range(4): - source_index = 2 * i - target_index = 2 * i + 1 - source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] - target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] - # Set source and target withdrawal credentials to the same eth1 credential - set_eth1_withdrawal_credential_with_balance(spec, state, source_index) - set_eth1_withdrawal_credential_with_balance(spec, state, target_index) - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation( - epoch=current_epoch, - source_index=source_index, - target_index=target_index, - ), - source_privkey, - target_privkey, - ) - consolidations.append(signed_consolidation) - - # Now run all the consolidations - for 
consolidation in consolidations: - # the function yields data, but we are just interested in running it here, ignore yields. - for _ in run_consolidation_processing(spec, state, consolidation): - continue - - yield "post", state - - expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch) - assert state.earliest_consolidation_epoch == expected_exit_epoch - assert state.consolidation_balance_to_consume == 0 - for i in range(4): - assert state.validators[2 * i].exit_epoch == expected_exit_epoch - - -@with_electra_and_later -@with_presets([MINIMAL], "need sufficient consolidation churn limit") -@with_custom_state( - balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit, - threshold_fn=default_activation_threshold, -) -@spec_test -@single_phase -def test_multiple_consolidations_above_churn(spec, state): - # This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn - consolidation_churn_limit = spec.get_consolidation_churn_limit(state) - # Set the consolidation balance to consume equal to churn limit - state.consolidation_balance_to_consume = consolidation_churn_limit - current_epoch = spec.get_current_epoch(state) - - # Prepare a bunch of consolidations, based on the current state - consolidations = [] - for i in range(4): - source_index = 2 * i - target_index = 2 * i + 1 - source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] - target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] - # Set source and target withdrawal credentials to the same eth1 credential - set_eth1_withdrawal_credential_with_balance(spec, state, source_index) - set_eth1_withdrawal_credential_with_balance(spec, state, target_index) - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation( - epoch=current_epoch, - source_index=source_index, - target_index=target_index, - ), - source_privkey, - target_privkey, - ) - consolidations.append(signed_consolidation) - - # Now run 
all the consolidations - for consolidation in consolidations: - # the function yields data, but we are just interested in running it here, ignore yields. - for _ in run_consolidation_processing(spec, state, consolidation): - continue - - # consolidate an additional validator - source_index = spec.get_active_validator_indices(state, current_epoch)[-2] - target_index = spec.get_active_validator_indices(state, current_epoch)[-1] - source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] - target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] - - # Set source and target withdrawal credentials to the same eth1 credential - set_eth1_withdrawal_credential_with_balance(spec, state, source_index) - set_eth1_withdrawal_credential_with_balance(spec, state, target_index) - - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation( - epoch=current_epoch, source_index=source_index, target_index=target_index - ), - source_privkey, - target_privkey, - ) - # This is the interesting part of the test: on a pre-state with full consolidation queue, - # when processing an additional consolidation, it results in an exit in a later epoch - yield from run_consolidation_processing(spec, state, signed_consolidation) - - expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch) - assert state.earliest_consolidation_epoch == expected_exit_epoch + 1 - assert ( - state.consolidation_balance_to_consume - == consolidation_churn_limit - spec.MIN_ACTIVATION_BALANCE - ) - assert state.validators[source_index].exit_epoch == expected_exit_epoch + 1 - for i in range(4): - assert state.validators[2 * i].exit_epoch == expected_exit_epoch - - -@with_electra_and_later -@with_presets([MINIMAL], "need sufficient consolidation churn limit") -@with_custom_state( - balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit, - threshold_fn=default_activation_threshold, -) -@spec_test -@single_phase -def 
test_multiple_consolidations_equal_twice_churn(spec, state): - # This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn - consolidation_churn_limit = spec.get_consolidation_churn_limit(state) - # Set the consolidation balance to consume equal to churn limit - state.consolidation_balance_to_consume = consolidation_churn_limit - current_epoch = spec.get_current_epoch(state) - - yield "pre", state - # Prepare a bunch of consolidations, based on the current state - consolidations = [] - for i in range(8): - source_index = 2 * i - target_index = 2 * i + 1 - source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] - target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] - # Set source and target withdrawal credentials to the same eth1 credential - set_eth1_withdrawal_credential_with_balance(spec, state, source_index) - set_eth1_withdrawal_credential_with_balance(spec, state, target_index) - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation( - epoch=current_epoch, - source_index=source_index, - target_index=target_index, - ), - source_privkey, - target_privkey, - ) - consolidations.append(signed_consolidation) - - # Now run all the consolidations - for consolidation in consolidations: - # the function yields data, but we are just interested in running it here, ignore yields. 
- for _ in run_consolidation_processing(spec, state, consolidation): - continue - - yield "post", state - - first_exit_epoch = spec.compute_activation_exit_epoch(current_epoch) - assert state.consolidation_balance_to_consume == 0 - assert state.earliest_consolidation_epoch == first_exit_epoch + 1 - for i in range(4): - assert state.validators[2 * i].exit_epoch == first_exit_epoch - for i in range(4, 8): - assert state.validators[2 * i].exit_epoch == first_exit_epoch + 1 - - # Failing tests @with_electra_and_later diff --git a/tests/core/pyspec/eth2spec/test/electra/sanity/blocks/__init__.py b/tests/core/pyspec/eth2spec/test/electra/sanity/blocks/__init__.py index 3c0e060f3d..46e3659cd3 100644 --- a/tests/core/pyspec/eth2spec/test/electra/sanity/blocks/__init__.py +++ b/tests/core/pyspec/eth2spec/test/electra/sanity/blocks/__init__.py @@ -1 +1,2 @@ +from .test_consolidation import * # noqa: F401 F403 from .test_deposit_transition import * # noqa: F401 F403 diff --git a/tests/core/pyspec/eth2spec/test/electra/sanity/blocks/test_consolidation.py b/tests/core/pyspec/eth2spec/test/electra/sanity/blocks/test_consolidation.py new file mode 100644 index 0000000000..2d8613b528 --- /dev/null +++ b/tests/core/pyspec/eth2spec/test/electra/sanity/blocks/test_consolidation.py @@ -0,0 +1,271 @@ + +from eth2spec.test.context import ( + with_electra_and_later, + with_presets, + spec_test, + single_phase, + with_custom_state, + scaled_churn_balances_exceed_activation_exit_churn_limit, + default_activation_threshold, +) +from eth2spec.test.helpers.block import ( + build_empty_block_for_next_slot +) +from eth2spec.test.helpers.consolidations import ( + sign_consolidation, +) +from eth2spec.test.helpers.constants import MINIMAL +from eth2spec.test.helpers.keys import pubkey_to_privkey +from eth2spec.test.helpers.state import ( + state_transition_and_sign_block, +) +from eth2spec.test.helpers.withdrawals import ( + set_eth1_withdrawal_credential_with_balance, +) + + 
+@with_electra_and_later +@with_presets([MINIMAL], "need sufficient consolidation churn limit") +@with_custom_state( + balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit, + threshold_fn=default_activation_threshold, +) +@spec_test +@single_phase +def test_multiple_consolidations_below_churn(spec, state): + # This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn + consolidation_churn_limit = spec.get_consolidation_churn_limit(state) + # Set the consolidation balance to consume equal to churn limit + state.consolidation_balance_to_consume = consolidation_churn_limit + current_epoch = spec.get_current_epoch(state) + + yield "pre", state + + # Prepare a bunch of consolidations, each of them in a block, based on the current state + blocks = [] + consolidation_count = 3 + for i in range(consolidation_count): + source_index = 2 * i + target_index = 2 * i + 1 + source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] + target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] + # Set source and target withdrawal credentials to the same eth1 credential + set_eth1_withdrawal_credential_with_balance(spec, state, source_index) + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) + signed_consolidation = sign_consolidation( + spec, + state, + spec.Consolidation( + epoch=current_epoch, + source_index=source_index, + target_index=target_index, + ), + source_privkey, + target_privkey, + ) + block = build_empty_block_for_next_slot(spec, state) + block.body.consolidations = [signed_consolidation] + signed_block = state_transition_and_sign_block(spec, state, block) + blocks.append(signed_block) + + yield "blocks", blocks + yield "post", state + + expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch) + assert state.earliest_consolidation_epoch == expected_exit_epoch + assert ( + state.consolidation_balance_to_consume + == consolidation_churn_limit - 3 * 
spec.MIN_ACTIVATION_BALANCE + ) + for i in range(consolidation_count): + assert state.validators[2 * i].exit_epoch == expected_exit_epoch + + +@with_electra_and_later +@with_presets([MINIMAL], "need sufficient consolidation churn limit") +@with_custom_state( + balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit, + threshold_fn=default_activation_threshold, +) +@spec_test +@single_phase +def test_multiple_consolidations_equal_churn(spec, state): + # This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn + consolidation_churn_limit = spec.get_consolidation_churn_limit(state) + # Set the consolidation balance to consume equal to churn limit + state.consolidation_balance_to_consume = consolidation_churn_limit + current_epoch = spec.get_current_epoch(state) + + yield "pre", state + # Prepare a bunch of consolidations, each of them in a block, based on the current state + blocks = [] + consolidation_count = 4 + for i in range(consolidation_count): + source_index = 2 * i + target_index = 2 * i + 1 + source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] + target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] + # Set source and target withdrawal credentials to the same eth1 credential + set_eth1_withdrawal_credential_with_balance(spec, state, source_index) + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) + signed_consolidation = sign_consolidation( + spec, + state, + spec.Consolidation( + epoch=current_epoch, + source_index=source_index, + target_index=target_index, + ), + source_privkey, + target_privkey, + ) + block = build_empty_block_for_next_slot(spec, state) + block.body.consolidations = [signed_consolidation] + signed_block = state_transition_and_sign_block(spec, state, block) + blocks.append(signed_block) + + yield "blocks", blocks + yield "post", state + + expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch) + assert 
state.earliest_consolidation_epoch == expected_exit_epoch + assert state.consolidation_balance_to_consume == 0 + for i in range(consolidation_count): + assert state.validators[2 * i].exit_epoch == expected_exit_epoch + + +@with_electra_and_later +@with_presets([MINIMAL], "need sufficient consolidation churn limit") +@with_custom_state( + balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit, + threshold_fn=default_activation_threshold, +) +@spec_test +@single_phase +def test_multiple_consolidations_above_churn(spec, state): + # This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn + consolidation_churn_limit = spec.get_consolidation_churn_limit(state) + # Set the consolidation balance to consume equal to churn limit + state.consolidation_balance_to_consume = consolidation_churn_limit + current_epoch = spec.get_current_epoch(state) + + # Prepare a bunch of consolidations, each of them in a block, based on the current state + blocks = [] + consolidation_count = 4 + for i in range(consolidation_count): + source_index = 2 * i + target_index = 2 * i + 1 + source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] + target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] + # Set source and target withdrawal credentials to the same eth1 credential + set_eth1_withdrawal_credential_with_balance(spec, state, source_index) + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) + signed_consolidation = sign_consolidation( + spec, + state, + spec.Consolidation( + epoch=current_epoch, + source_index=source_index, + target_index=target_index, + ), + source_privkey, + target_privkey, + ) + block = build_empty_block_for_next_slot(spec, state) + block.body.consolidations = [signed_consolidation] + signed_block = state_transition_and_sign_block(spec, state, block) + blocks.append(signed_block) + + # consolidate an additional validator + source_index = 
spec.get_active_validator_indices(state, current_epoch)[-2] + target_index = spec.get_active_validator_indices(state, current_epoch)[-1] + source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] + target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] + + # Set source and target withdrawal credentials to the same eth1 credential + set_eth1_withdrawal_credential_with_balance(spec, state, source_index) + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) + + # This is the interesting part of the test: on a pre-state with full consolidation queue, + # when processing an additional consolidation, it results in an exit in a later epoch + signed_consolidation = sign_consolidation( + spec, + state, + spec.Consolidation( + epoch=current_epoch, source_index=source_index, target_index=target_index + ), + source_privkey, + target_privkey, + ) + block = build_empty_block_for_next_slot(spec, state) + block.body.consolidations = [signed_consolidation] + signed_block = state_transition_and_sign_block(spec, state, block) + blocks.append(signed_block) + + yield "blocks", blocks + yield "post", state + + expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch) + assert state.earliest_consolidation_epoch == expected_exit_epoch + 1 + assert ( + state.consolidation_balance_to_consume + == consolidation_churn_limit - spec.MIN_ACTIVATION_BALANCE + ) + assert state.validators[source_index].exit_epoch == expected_exit_epoch + 1 + for i in range(consolidation_count): + assert state.validators[2 * i].exit_epoch == expected_exit_epoch + + +@with_electra_and_later +@with_presets([MINIMAL], "need sufficient consolidation churn limit") +@with_custom_state( + balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit, + threshold_fn=default_activation_threshold, +) +@spec_test +@single_phase +def test_multiple_consolidations_equal_twice_churn(spec, state): + # This state has 256 validators each with 32 ETH in 
MINIMAL preset, 128 ETH consolidation churn + consolidation_churn_limit = spec.get_consolidation_churn_limit(state) + # Set the consolidation balance to consume equal to churn limit + state.consolidation_balance_to_consume = consolidation_churn_limit + current_epoch = spec.get_current_epoch(state) + + yield "pre", state + # Prepare a bunch of consolidations, each of them in a block, based on the current state + blocks = [] + consolidation_count = 8 + for i in range(consolidation_count): + source_index = 2 * i + target_index = 2 * i + 1 + source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] + target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] + # Set source and target withdrawal credentials to the same eth1 credential + set_eth1_withdrawal_credential_with_balance(spec, state, source_index) + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) + signed_consolidation = sign_consolidation( + spec, + state, + spec.Consolidation( + epoch=current_epoch, + source_index=source_index, + target_index=target_index, + ), + source_privkey, + target_privkey, + ) + block = build_empty_block_for_next_slot(spec, state) + block.body.consolidations = [signed_consolidation] + signed_block = state_transition_and_sign_block(spec, state, block) + blocks.append(signed_block) + + yield "blocks", blocks + yield "post", state + + first_exit_epoch = spec.compute_activation_exit_epoch(current_epoch) + assert state.consolidation_balance_to_consume == 0 + assert state.earliest_consolidation_epoch == first_exit_epoch + 1 + for i in range(consolidation_count // 2): + assert state.validators[2 * i].exit_epoch == first_exit_epoch + for i in range(consolidation_count // 2, consolidation_count): + assert state.validators[2 * i].exit_epoch == first_exit_epoch + 1 From 6ecff3ed94c13211d16c6e830cd57623f3738fb6 Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Fri, 26 Apr 2024 00:20:49 +0800 Subject: [PATCH 55/60] Bump version to 
v1.5.0-alpha.1 --- tests/core/pyspec/eth2spec/VERSION.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/core/pyspec/eth2spec/VERSION.txt b/tests/core/pyspec/eth2spec/VERSION.txt index b3375e1fe3..dc1605fa64 100644 --- a/tests/core/pyspec/eth2spec/VERSION.txt +++ b/tests/core/pyspec/eth2spec/VERSION.txt @@ -1 +1 @@ -1.5.0-alpha.0 +1.5.0-alpha.1 From ac1ce3b3f02a5d5b03cc91b65a0a037db77ff51d Mon Sep 17 00:00:00 2001 From: Justin Traglia Date: Thu, 25 Apr 2024 17:31:22 -0500 Subject: [PATCH 56/60] Rename verify_cell_proof to verify_cell_kzg_proof --- specs/_features/eip7594/p2p-interface.md | 2 +- .../polynomial-commitments-sampling.md | 12 +- .../test_polynomial_commitments.py | 10 +- tests/formats/kzg_7594/README.md | 4 +- ...cell_proof.md => verify_cell_kzg_proof.md} | 4 +- ...atch.md => verify_cell_kzg_proof_batch.md} | 4 +- tests/generators/kzg_7594/main.py | 116 +++++++++--------- 7 files changed, 76 insertions(+), 76 deletions(-) rename tests/formats/kzg_7594/{verify_cell_proof.md => verify_cell_kzg_proof.md} (53%) rename tests/formats/kzg_7594/{verify_cell_proof_batch.md => verify_cell_kzg_proof_batch.md} (58%) diff --git a/specs/_features/eip7594/p2p-interface.md b/specs/_features/eip7594/p2p-interface.md index b282abbe49..2434d875ec 100644 --- a/specs/_features/eip7594/p2p-interface.md +++ b/specs/_features/eip7594/p2p-interface.md @@ -74,7 +74,7 @@ def verify_data_column_sidecar_kzg_proofs(sidecar: DataColumnSidecar) -> bool: row_ids = [RowIndex(i) for i in range(len(sidecar.column))] # KZG batch verifies that the cells match the corresponding commitments and proofs - return verify_cell_proof_batch( + return verify_cell_kzg_proof_batch( row_commitments=sidecar.kzg_commitments, row_indices=row_ids, # all rows column_indices=[sidecar.index], diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 5cd9c94480..091b7532ec 100644 --- 
a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -40,8 +40,8 @@ - [`compute_cells_and_proofs`](#compute_cells_and_proofs) - [`compute_cells`](#compute_cells) - [Cell verification](#cell-verification) - - [`verify_cell_proof`](#verify_cell_proof) - - [`verify_cell_proof_batch`](#verify_cell_proof_batch) + - [`verify_cell_kzg_proof`](#verify_cell_kzg_proof) + - [`verify_cell_kzg_proof_batch`](#verify_cell_kzg_proof_batch) - [Reconstruction](#reconstruction) - [`construct_vanishing_polynomial`](#construct_vanishing_polynomial) - [`recover_shifted_data`](#recover_shifted_data) @@ -476,10 +476,10 @@ def compute_cells(blob: Blob) -> Vector[Cell, CELLS_PER_EXT_BLOB]: ### Cell verification -#### `verify_cell_proof` +#### `verify_cell_kzg_proof` ```python -def verify_cell_proof(commitment_bytes: Bytes48, +def verify_cell_kzg_proof(commitment_bytes: Bytes48, cell_id: CellID, cell: Cell, proof_bytes: Bytes48) -> bool: @@ -502,10 +502,10 @@ def verify_cell_proof(commitment_bytes: Bytes48, bytes_to_kzg_proof(proof_bytes)) ``` -#### `verify_cell_proof_batch` +#### `verify_cell_kzg_proof_batch` ```python -def verify_cell_proof_batch(row_commitments_bytes: Sequence[Bytes48], +def verify_cell_kzg_proof_batch(row_commitments_bytes: Sequence[Bytes48], row_indices: Sequence[RowIndex], column_indices: Sequence[ColumnIndex], cells: Sequence[Cell], diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py index deb83c223e..4ffc3ca94a 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py @@ -31,28 +31,28 @@ def test_fft(spec): @with_eip7594_and_later @spec_test @single_phase 
-def test_verify_cell_proof(spec): +def test_verify_cell_kzg_proof(spec): blob = get_sample_blob(spec) commitment = spec.blob_to_kzg_commitment(blob) cells, proofs = spec.compute_cells_and_proofs(blob) cell_id = 0 - assert spec.verify_cell_proof(commitment, cell_id, cells[cell_id], proofs[cell_id]) + assert spec.verify_cell_kzg_proof(commitment, cell_id, cells[cell_id], proofs[cell_id]) cell_id = 1 - assert spec.verify_cell_proof(commitment, cell_id, cells[cell_id], proofs[cell_id]) + assert spec.verify_cell_kzg_proof(commitment, cell_id, cells[cell_id], proofs[cell_id]) @with_eip7594_and_later @spec_test @single_phase -def test_verify_cell_proof_batch(spec): +def test_verify_cell_kzg_proof_batch(spec): blob = get_sample_blob(spec) commitment = spec.blob_to_kzg_commitment(blob) cells, proofs = spec.compute_cells_and_proofs(blob) assert len(cells) == len(proofs) - assert spec.verify_cell_proof_batch( + assert spec.verify_cell_kzg_proof_batch( row_commitments_bytes=[commitment], row_indices=[0, 0], column_indices=[0, 4], diff --git a/tests/formats/kzg_7594/README.md b/tests/formats/kzg_7594/README.md index d7d6eeae2c..94f3015fb5 100644 --- a/tests/formats/kzg_7594/README.md +++ b/tests/formats/kzg_7594/README.md @@ -8,6 +8,6 @@ The KZG test suite runner has the following handlers: - [`compute_cells`](./compute_cells.md) - [`compute_cells_and_proofs`](./compute_cells_and_proofs.md) -- [`verify_cell_proof`](./verify_cell_proof.md) -- [`verify_cell_proof_batch`](./verify_cell_proof_batch.md) +- [`verify_cell_kzg_proof`](./verify_cell_kzg_proof.md) +- [`verify_cell_kzg_proof_batch`](./verify_cell_kzg_proof_batch.md) - [`recover_all_cells`](./recover_all_cells.md) diff --git a/tests/formats/kzg_7594/verify_cell_proof.md b/tests/formats/kzg_7594/verify_cell_kzg_proof.md similarity index 53% rename from tests/formats/kzg_7594/verify_cell_proof.md rename to tests/formats/kzg_7594/verify_cell_kzg_proof.md index dc9cb93e44..5ab3ad0739 100644 --- 
a/tests/formats/kzg_7594/verify_cell_proof.md +++ b/tests/formats/kzg_7594/verify_cell_kzg_proof.md @@ -1,4 +1,4 @@ -# Test format: Verify cell proof +# Test format: Verify cell KZG proof Use the cell KZG `proof` to verify that the KZG `commitment` for a given `cell` is correct. @@ -23,4 +23,4 @@ All byte(s) fields are encoded as strings, hexadecimal encoding, prefixed with ` ## Condition -The `verify_cell_proof` handler should verify that `commitment` is a correct KZG commitment to `cell` by using the cell KZG proof `proof`, and the result should match the expected `output`. If the commitment or proof is invalid (e.g. not on the curve or not in the G1 subgroup of the BLS curve), `cell` is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element), or `cell_id` is invalid (e.g. greater than the number of cells for an extended blob), it should error, i.e. the output should be `null`. +The `verify_cell_kzg_proof` handler should verify that `commitment` is a correct KZG commitment to `cell` by using the cell KZG proof `proof`, and the result should match the expected `output`. If the commitment or proof is invalid (e.g. not on the curve or not in the G1 subgroup of the BLS curve), `cell` is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element), or `cell_id` is invalid (e.g. greater than the number of cells for an extended blob), it should error, i.e. the output should be `null`. 
diff --git a/tests/formats/kzg_7594/verify_cell_proof_batch.md b/tests/formats/kzg_7594/verify_cell_kzg_proof_batch.md similarity index 58% rename from tests/formats/kzg_7594/verify_cell_proof_batch.md rename to tests/formats/kzg_7594/verify_cell_kzg_proof_batch.md index e4a72d2507..9761b55032 100644 --- a/tests/formats/kzg_7594/verify_cell_proof_batch.md +++ b/tests/formats/kzg_7594/verify_cell_kzg_proof_batch.md @@ -1,4 +1,4 @@ -# Test format: Verify cell proof batch +# Test format: Verify cell KZG proof batch Use the cell KZG `proofs` to verify that the KZG `row_commitments` for the given `cells` are correct. @@ -25,4 +25,4 @@ All byte(s) fields are encoded as strings, hexadecimal encoding, prefixed with ` ## Condition -The `verify_cell_proof_batch` handler should verify that `row_commitments` are correct KZG commitments to `cells` by using the cell KZG proofs `proofs`, and the result should match the expected `output`. If any of the commitments or proofs are invalid (e.g. not on the curve or not in the G1 subgroup of the BLS curve), any cell is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element), or any `cell_id` is invalid (e.g. greater than the number of cells for an extended blob), it should error, i.e. the output should be `null`. +The `verify_cell_kzg_proof_batch` handler should verify that `row_commitments` are correct KZG commitments to `cells` by using the cell KZG proofs `proofs`, and the result should match the expected `output`. If any of the commitments or proofs are invalid (e.g. not on the curve or not in the G1 subgroup of the BLS curve), any cell is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element), or any `cell_id` is invalid (e.g. greater than the number of cells for an extended blob), it should error, i.e. the output should be `null`. 
diff --git a/tests/generators/kzg_7594/main.py b/tests/generators/kzg_7594/main.py index 670ed29ee3..e8c776ea1b 100644 --- a/tests/generators/kzg_7594/main.py +++ b/tests/generators/kzg_7594/main.py @@ -90,10 +90,10 @@ def case02_compute_cells_and_proofs(): ############################################################################### -# Test cases for verify_cell_proof +# Test cases for verify_cell_kzg_proof ############################################################################### -def case03_verify_cell_proof(): +def case03_verify_cell_kzg_proof(): # Valid cases for i in range(len(VALID_BLOBS)): cells, proofs = VALID_CELLS_AND_PROOFS[i] @@ -101,9 +101,9 @@ def case03_verify_cell_proof(): cell_id = (2 ** i - 1) % spec.CELLS_PER_EXT_BLOB cell = cells[cell_id] proof = proofs[cell_id] - assert spec.verify_cell_proof(commitment, cell_id, cell, proof) + assert spec.verify_cell_kzg_proof(commitment, cell_id, cell, proof) identifier = make_id(commitment, cell_id, cell, proof) - yield f'verify_cell_proof_case_valid_{identifier}', { + yield f'verify_cell_kzg_proof_case_valid_{identifier}', { 'input': { 'commitment': encode_hex(commitment), 'cell_id': cell_id, @@ -120,9 +120,9 @@ def case03_verify_cell_proof(): cell_id = 99 % spec.CELLS_PER_EXT_BLOB cell = cells[cell_id] proof = proofs[cell_id] - assert not spec.verify_cell_proof(commitment, cell_id, cell, proof) + assert not spec.verify_cell_kzg_proof(commitment, cell_id, cell, proof) identifier = make_id(commitment, cell_id, cell, proof) - yield f'verify_cell_proof_case_incorrect_commitment_{identifier}', { + yield f'verify_cell_kzg_proof_case_incorrect_commitment_{identifier}', { 'input': { 'commitment': encode_hex(commitment), 'cell_id': cell_id, @@ -139,9 +139,9 @@ def case03_verify_cell_proof(): cells, proofs = VALID_CELLS_AND_PROOFS[i] cell = VALID_INDIVIDUAL_RANDOM_CELL_BYTES[i] proof = proofs[cell_id] - assert not spec.verify_cell_proof(commitment, cell_id, cell, proof) + assert not 
spec.verify_cell_kzg_proof(commitment, cell_id, cell, proof) identifier = make_id(commitment, cell_id, cell, proof) - yield f'verify_cell_proof_case_incorrect_cell_{identifier}', { + yield f'verify_cell_kzg_proof_case_incorrect_cell_{identifier}', { 'input': { 'commitment': encode_hex(commitment), 'cell_id': cell_id, @@ -158,9 +158,9 @@ def case03_verify_cell_proof(): cells, proofs = VALID_CELLS_AND_PROOFS[i] cell = cells[cell_id] proof = bls_add_one(proofs[cell_id]) - assert not spec.verify_cell_proof(commitment, cell_id, cell, proof) + assert not spec.verify_cell_kzg_proof(commitment, cell_id, cell, proof) identifier = make_id(commitment, cell_id, cell, proof) - yield f'verify_cell_proof_case_incorrect_proof_{identifier}', { + yield f'verify_cell_kzg_proof_case_incorrect_proof_{identifier}', { 'input': { 'commitment': encode_hex(commitment), 'cell_id': cell_id, @@ -176,9 +176,9 @@ def case03_verify_cell_proof(): cell_id = 81 % spec.CELLS_PER_EXT_BLOB cell = cells[cell_id] proof = proofs[cell_id] - expect_exception(spec.verify_cell_proof, commitment, cell_id, cell, proof) + expect_exception(spec.verify_cell_kzg_proof, commitment, cell_id, cell, proof) identifier = make_id(commitment, cell_id, cell, proof) - yield f'verify_cell_proof_case_invalid_commitment_{identifier}', { + yield f'verify_cell_kzg_proof_case_invalid_commitment_{identifier}', { 'input': { 'commitment': encode_hex(commitment), 'cell_id': cell_id, @@ -194,9 +194,9 @@ def case03_verify_cell_proof(): commitment = VALID_COMMITMENTS[1] cell = cells[0] proof = proofs[0] - expect_exception(spec.verify_cell_proof, commitment, cell_id, cell, proof) + expect_exception(spec.verify_cell_kzg_proof, commitment, cell_id, cell, proof) identifier = make_id(commitment, cell_id, cell, proof) - yield f'verify_cell_proof_case_invalid_cell_id_{identifier}', { + yield f'verify_cell_kzg_proof_case_invalid_cell_id_{identifier}', { 'input': { 'commitment': encode_hex(commitment), 'cell_id': cell_id, @@ -212,9 +212,9 @@ def 
case03_verify_cell_proof(): commitment = VALID_COMMITMENTS[2] cells, proofs = VALID_CELLS_AND_PROOFS[2] proof = proofs[cell_id] - expect_exception(spec.verify_cell_proof, commitment, cell_id, cell, proof) + expect_exception(spec.verify_cell_kzg_proof, commitment, cell_id, cell, proof) identifier = make_id(commitment, cell_id, cell, proof) - yield f'verify_cell_proof_case_invalid_cell_{identifier}', { + yield f'verify_cell_kzg_proof_case_invalid_cell_{identifier}', { 'input': { 'commitment': encode_hex(commitment), 'cell_id': cell_id, @@ -230,9 +230,9 @@ def case03_verify_cell_proof(): commitment = VALID_COMMITMENTS[3] cell_id = 36 % spec.CELLS_PER_EXT_BLOB cell = cells[cell_id] - expect_exception(spec.verify_cell_proof, commitment, cell_id, cell, proof) + expect_exception(spec.verify_cell_kzg_proof, commitment, cell_id, cell, proof) identifier = make_id(commitment, cell_id, cell, proof) - yield f'verify_cell_proof_case_invalid_proof_{identifier}', { + yield f'verify_cell_kzg_proof_case_invalid_proof_{identifier}', { 'input': { 'commitment': encode_hex(commitment), 'cell_id': cell_id, @@ -244,19 +244,19 @@ def case03_verify_cell_proof(): ############################################################################### -# Test cases for verify_cell_proof_batch +# Test cases for verify_cell_kzg_proof_batch ############################################################################### -def case04_verify_cell_proof_batch(): +def case04_verify_cell_kzg_proof_batch(): # Valid cases for i in range(len(VALID_BLOBS)): cells, proofs = VALID_CELLS_AND_PROOFS[i] row_commitments = [VALID_COMMITMENTS[i]] row_indices = [0] * spec.CELLS_PER_EXT_BLOB column_indices = list(range(spec.CELLS_PER_EXT_BLOB)) - assert spec.verify_cell_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) + assert spec.verify_cell_kzg_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) - 
yield f'verify_cell_proof_batch_case_valid_{identifier}', { + yield f'verify_cell_kzg_proof_batch_case_valid_{identifier}', { 'input': { 'row_commitments': encode_hex_list(row_commitments), 'row_indices': row_indices, @@ -269,9 +269,9 @@ def case04_verify_cell_proof_batch(): # Valid: zero cells cells, row_commitments, row_indices, column_indices, proofs = [], [], [], [], [] - assert spec.verify_cell_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) + assert spec.verify_cell_kzg_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) - yield f'verify_cell_proof_batch_case_valid_zero_cells_{identifier}', { + yield f'verify_cell_kzg_proof_batch_case_valid_zero_cells_{identifier}', { 'input': { 'row_commitments': encode_hex_list(row_commitments), 'row_indices': row_indices, @@ -290,9 +290,9 @@ def case04_verify_cell_proof_batch(): column_indices = [0, 0] cells = [cells0[0], cells1[0]] proofs = [proofs0[0], proofs1[0]] - assert spec.verify_cell_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) + assert spec.verify_cell_kzg_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) - yield f'verify_cell_proof_batch_case_valid_multiple_blobs_{identifier}', { + yield f'verify_cell_kzg_proof_batch_case_valid_multiple_blobs_{identifier}', { 'input': { 'row_commitments': encode_hex_list(row_commitments), 'row_indices': row_indices, @@ -310,9 +310,9 @@ def case04_verify_cell_proof_batch(): row_commitments = VALID_COMMITMENTS row_indices = [2] * len(cells) column_indices = list(range(len(cells))) - assert spec.verify_cell_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) + assert spec.verify_cell_kzg_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) identifier = make_id(row_commitments, 
row_indices, column_indices, cells, proofs) - yield f'verify_cell_proof_batch_case_valid_unused_row_commitments_{identifier}', { + yield f'verify_cell_kzg_proof_batch_case_valid_unused_row_commitments_{identifier}', { 'input': { 'row_commitments': encode_hex_list(row_commitments), 'row_indices': row_indices, @@ -330,9 +330,9 @@ def case04_verify_cell_proof_batch(): column_indices = [0] * num_duplicates cells = [VALID_CELLS_AND_PROOFS[3][0][0]] * num_duplicates proofs = [VALID_CELLS_AND_PROOFS[3][1][0]] * num_duplicates - assert spec.verify_cell_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) + assert spec.verify_cell_kzg_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) - yield f'verify_cell_proof_batch_case_valid_same_cell_multiple_times_{identifier}', { + yield f'verify_cell_kzg_proof_batch_case_valid_same_cell_multiple_times_{identifier}', { 'input': { 'row_commitments': encode_hex_list(row_commitments), 'row_indices': row_indices, @@ -350,9 +350,9 @@ def case04_verify_cell_proof_batch(): row_commitments = [bls_add_one(VALID_COMMITMENTS[5])] row_indices = [0] * len(cells) column_indices = list(range(len(cells))) - assert not spec.verify_cell_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) + assert not spec.verify_cell_kzg_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) - yield f'verify_cell_proof_batch_case_incorrect_row_commitment_{identifier}', { + yield f'verify_cell_kzg_proof_batch_case_incorrect_row_commitment_{identifier}', { 'input': { 'row_commitments': encode_hex_list(row_commitments), 'row_indices': row_indices, @@ -371,9 +371,9 @@ def case04_verify_cell_proof_batch(): column_indices = list(range(len(cells))) # Change last cell so it's wrong cells[-1] = CELL_RANDOM_VALID2 - assert not 
spec.verify_cell_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) + assert not spec.verify_cell_kzg_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) - yield f'verify_cell_proof_batch_case_incorrect_cell_{identifier}', { + yield f'verify_cell_kzg_proof_batch_case_incorrect_cell_{identifier}', { 'input': { 'row_commitments': encode_hex_list(row_commitments), 'row_indices': row_indices, @@ -392,9 +392,9 @@ def case04_verify_cell_proof_batch(): column_indices = list(range(len(cells))) # Change last proof so it's wrong proofs[-1] = bls_add_one(proofs[-1]) - assert not spec.verify_cell_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) + assert not spec.verify_cell_kzg_proof_batch(row_commitments, row_indices, column_indices, cells, proofs) identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) - yield f'verify_cell_proof_batch_case_incorrect_proof_{identifier}', { + yield f'verify_cell_kzg_proof_batch_case_incorrect_proof_{identifier}', { 'input': { 'row_commitments': encode_hex_list(row_commitments), 'row_indices': row_indices, @@ -413,9 +413,9 @@ def case04_verify_cell_proof_batch(): row_commitments = [commitment] row_indices = [0] * len(cells) column_indices = list(range(len(cells))) - expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) - yield f'verify_cell_proof_batch_case_invalid_row_commitment_{identifier}', { + yield f'verify_cell_kzg_proof_batch_case_invalid_row_commitment_{identifier}', { 'input': { 'row_commitments': encode_hex_list(row_commitments), 'row_indices': row_indices, @@ -434,9 +434,9 @@ def case04_verify_cell_proof_batch(): # 
Set first row index to an invalid value row_indices[0] = 1 column_indices = list(range(len(cells))) - expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) - yield f'verify_cell_proof_batch_case_invalid_row_index_{identifier}', { + yield f'verify_cell_kzg_proof_batch_case_invalid_row_index_{identifier}', { 'input': { 'row_commitments': encode_hex_list(row_commitments), 'row_indices': row_indices, @@ -455,9 +455,9 @@ def case04_verify_cell_proof_batch(): column_indices = list(range(len(cells))) # Set first column index to an invalid value column_indices[0] = spec.CELLS_PER_EXT_BLOB - expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) - yield f'verify_cell_proof_batch_case_invalid_column_index_{identifier}', { + yield f'verify_cell_kzg_proof_batch_case_invalid_column_index_{identifier}', { 'input': { 'row_commitments': encode_hex_list(row_commitments), 'row_indices': row_indices, @@ -477,9 +477,9 @@ def case04_verify_cell_proof_batch(): column_indices = list(range(len(cells))) # Set first cell to the invalid cell cells[0] = cell - expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) - yield f'verify_cell_proof_batch_case_invalid_cell_{identifier}', { + yield f'verify_cell_kzg_proof_batch_case_invalid_cell_{identifier}', { 
'input': { 'row_commitments': encode_hex_list(row_commitments), 'row_indices': row_indices, @@ -499,9 +499,9 @@ def case04_verify_cell_proof_batch(): column_indices = list(range(len(cells))) # Set first proof to the invalid proof proofs[0] = proof - expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) - yield f'verify_cell_proof_batch_case_invalid_proof_{identifier}', { + yield f'verify_cell_kzg_proof_batch_case_invalid_proof_{identifier}', { 'input': { 'row_commitments': encode_hex_list(row_commitments), 'row_indices': row_indices, @@ -519,9 +519,9 @@ def case04_verify_cell_proof_batch(): row_commitments = [] row_indices = [0] * len(cells) column_indices = list(range(len(cells))) - expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) - yield f'verify_cell_proof_batch_case_invalid_missing_row_commitment_{identifier}', { + yield f'verify_cell_kzg_proof_batch_case_invalid_missing_row_commitment_{identifier}', { 'input': { 'row_commitments': encode_hex_list(row_commitments), 'row_indices': row_indices, @@ -539,9 +539,9 @@ def case04_verify_cell_proof_batch(): # Leave off one of the row indices row_indices = [0] * (len(cells) - 1) column_indices = list(range(len(cells))) - expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) - 
yield f'verify_cell_proof_batch_case_invalid_missing_row_index_{identifier}', { + yield f'verify_cell_kzg_proof_batch_case_invalid_missing_row_index_{identifier}', { 'input': { 'row_commitments': encode_hex_list(row_commitments), 'row_indices': row_indices, @@ -559,9 +559,9 @@ def case04_verify_cell_proof_batch(): row_indices = [0] * len(cells) # Leave off one of the column indices column_indices = list(range(len(cells) - 1)) - expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) - yield f'verify_cell_proof_batch_case_invalid_missing_column_index_{identifier}', { + yield f'verify_cell_kzg_proof_batch_case_invalid_missing_column_index_{identifier}', { 'input': { 'row_commitments': encode_hex_list(row_commitments), 'row_indices': row_indices, @@ -580,9 +580,9 @@ def case04_verify_cell_proof_batch(): column_indices = list(range(len(cells))) # Remove the last proof cells = cells[:-1] - expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) - yield f'verify_cell_proof_batch_case_invalid_missing_cell_{identifier}', { + yield f'verify_cell_kzg_proof_batch_case_invalid_missing_cell_{identifier}', { 'input': { 'row_commitments': encode_hex_list(row_commitments), 'row_indices': row_indices, @@ -601,9 +601,9 @@ def case04_verify_cell_proof_batch(): column_indices = list(range(len(cells))) # Remove the last proof proofs = proofs[:-1] - expect_exception(spec.verify_cell_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) + 
expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs) identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs) - yield f'verify_cell_proof_batch_case_invalid_missing_proof_{identifier}', { + yield f'verify_cell_kzg_proof_batch_case_invalid_missing_proof_{identifier}', { 'input': { 'row_commitments': encode_hex_list(row_commitments), 'row_indices': row_indices, @@ -831,7 +831,7 @@ def cases_fn() -> Iterable[gen_typing.TestCase]: # EIP-7594 create_provider(EIP7594, 'compute_cells', case01_compute_cells), create_provider(EIP7594, 'compute_cells_and_proofs', case02_compute_cells_and_proofs), - create_provider(EIP7594, 'verify_cell_proof', case03_verify_cell_proof), - create_provider(EIP7594, 'verify_cell_proof_batch', case04_verify_cell_proof_batch), + create_provider(EIP7594, 'verify_cell_kzg_proof', case03_verify_cell_kzg_proof), + create_provider(EIP7594, 'verify_cell_kzg_proof_batch', case04_verify_cell_kzg_proof_batch), create_provider(EIP7594, 'recover_all_cells', case05_recover_all_cells), ]) From 4f6652120803546954e673939f29e444ac6e62f4 Mon Sep 17 00:00:00 2001 From: Justin Traglia Date: Thu, 25 Apr 2024 17:34:10 -0500 Subject: [PATCH 57/60] Fix parameter indentations --- .../eip7594/polynomial-commitments-sampling.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 091b7532ec..3a70663cea 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -480,9 +480,9 @@ def compute_cells(blob: Blob) -> Vector[Cell, CELLS_PER_EXT_BLOB]: ```python def verify_cell_kzg_proof(commitment_bytes: Bytes48, - cell_id: CellID, - cell: Cell, - proof_bytes: Bytes48) -> bool: + cell_id: CellID, + cell: Cell, + proof_bytes: Bytes48) -> bool: """ Check a cell proof @@ 
-506,10 +506,10 @@ def verify_cell_kzg_proof(commitment_bytes: Bytes48, ```python def verify_cell_kzg_proof_batch(row_commitments_bytes: Sequence[Bytes48], - row_indices: Sequence[RowIndex], - column_indices: Sequence[ColumnIndex], - cells: Sequence[Cell], - proofs_bytes: Sequence[Bytes48]) -> bool: + row_indices: Sequence[RowIndex], + column_indices: Sequence[ColumnIndex], + cells: Sequence[Cell], + proofs_bytes: Sequence[Bytes48]) -> bool: """ Verify a set of cells, given their corresponding proofs and their coordinates (row_id, column_id) in the blob matrix. The list of all commitments is also provided in row_commitments_bytes. From b4188829b32139916127827c64ba17c923e66c3c Mon Sep 17 00:00:00 2001 From: Justin Traglia Date: Thu, 25 Apr 2024 21:57:22 -0500 Subject: [PATCH 58/60] Rename compute_cells_and_proofs to compute_cells_and_kzg_proofs --- specs/_features/eip7594/das-core.md | 2 +- .../eip7594/polynomial-commitments-sampling.md | 6 +++--- .../test_polynomial_commitments.py | 4 ++-- tests/core/pyspec/eth2spec/test/utils/kzg_tests.py | 2 +- tests/formats/kzg_7594/README.md | 2 +- ...d_proofs.md => compute_cells_and_kzg_proofs.md} | 4 ++-- tests/generators/kzg_7594/main.py | 14 +++++++------- 7 files changed, 17 insertions(+), 17 deletions(-) rename tests/formats/kzg_7594/{compute_cells_and_proofs.md => compute_cells_and_kzg_proofs.md} (58%) diff --git a/specs/_features/eip7594/das-core.md b/specs/_features/eip7594/das-core.md index d754379fcf..efd5d6c212 100644 --- a/specs/_features/eip7594/das-core.md +++ b/specs/_features/eip7594/das-core.md @@ -175,7 +175,7 @@ def get_data_column_sidecars(signed_block: SignedBeaconBlock, block.body, get_generalized_index(BeaconBlockBody, 'blob_kzg_commitments'), ) - cells_and_proofs = [compute_cells_and_proofs(blob) for blob in blobs] + cells_and_proofs = [compute_cells_and_kzg_proofs(blob) for blob in blobs] blob_count = len(blobs) cells = [cells_and_proofs[i][0] for i in range(blob_count)] proofs = 
[cells_and_proofs[i][1] for i in range(blob_count)] diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 3a70663cea..6d30d6ed6c 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -37,7 +37,7 @@ - [`coset_for_cell`](#coset_for_cell) - [Cells](#cells-1) - [Cell computation](#cell-computation) - - [`compute_cells_and_proofs`](#compute_cells_and_proofs) + - [`compute_cells_and_kzg_proofs`](#compute_cells_and_kzg_proofs) - [`compute_cells`](#compute_cells) - [Cell verification](#cell-verification) - [`verify_cell_kzg_proof`](#verify_cell_kzg_proof) @@ -419,10 +419,10 @@ def coset_for_cell(cell_id: CellID) -> Coset: ### Cell computation -#### `compute_cells_and_proofs` +#### `compute_cells_and_kzg_proofs` ```python -def compute_cells_and_proofs(blob: Blob) -> Tuple[ +def compute_cells_and_kzg_proofs(blob: Blob) -> Tuple[ Vector[Cell, CELLS_PER_EXT_BLOB], Vector[KZGProof, CELLS_PER_EXT_BLOB]]: """ diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py index 4ffc3ca94a..c247e0532f 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py @@ -34,7 +34,7 @@ def test_fft(spec): def test_verify_cell_kzg_proof(spec): blob = get_sample_blob(spec) commitment = spec.blob_to_kzg_commitment(blob) - cells, proofs = spec.compute_cells_and_proofs(blob) + cells, proofs = spec.compute_cells_and_kzg_proofs(blob) cell_id = 0 assert spec.verify_cell_kzg_proof(commitment, cell_id, cells[cell_id], proofs[cell_id]) @@ -48,7 +48,7 @@ def test_verify_cell_kzg_proof(spec): def 
test_verify_cell_kzg_proof_batch(spec): blob = get_sample_blob(spec) commitment = spec.blob_to_kzg_commitment(blob) - cells, proofs = spec.compute_cells_and_proofs(blob) + cells, proofs = spec.compute_cells_and_kzg_proofs(blob) assert len(cells) == len(proofs) diff --git a/tests/core/pyspec/eth2spec/test/utils/kzg_tests.py b/tests/core/pyspec/eth2spec/test/utils/kzg_tests.py index c6bbfef56a..5f1e11b49a 100644 --- a/tests/core/pyspec/eth2spec/test/utils/kzg_tests.py +++ b/tests/core/pyspec/eth2spec/test/utils/kzg_tests.py @@ -152,4 +152,4 @@ def evaluate_blob_at(blob, z): # Cells & Proofs -VALID_CELLS_AND_PROOFS = [] # Saved in case02_compute_cells_and_proofs +VALID_CELLS_AND_PROOFS = [] # Saved in case02_compute_cells_and_kzg_proofs diff --git a/tests/formats/kzg_7594/README.md b/tests/formats/kzg_7594/README.md index 94f3015fb5..dbd95dd3dc 100644 --- a/tests/formats/kzg_7594/README.md +++ b/tests/formats/kzg_7594/README.md @@ -7,7 +7,7 @@ We do not recommend rolling your own crypto or using an untested KZG library. 
The KZG test suite runner has the following handlers: - [`compute_cells`](./compute_cells.md) -- [`compute_cells_and_proofs`](./compute_cells_and_proofs.md) +- [`compute_cells_and_kzg_proofs`](./compute_cells_and_kzg_proofs.md) - [`verify_cell_kzg_proof`](./verify_cell_kzg_proof.md) - [`verify_cell_kzg_proof_batch`](./verify_cell_kzg_proof_batch.md) - [`recover_all_cells`](./recover_all_cells.md) diff --git a/tests/formats/kzg_7594/compute_cells_and_proofs.md b/tests/formats/kzg_7594/compute_cells_and_kzg_proofs.md similarity index 58% rename from tests/formats/kzg_7594/compute_cells_and_proofs.md rename to tests/formats/kzg_7594/compute_cells_and_kzg_proofs.md index 0262d8a673..1ee1be649a 100644 --- a/tests/formats/kzg_7594/compute_cells_and_proofs.md +++ b/tests/formats/kzg_7594/compute_cells_and_kzg_proofs.md @@ -1,4 +1,4 @@ -# Test format: Compute cells and proofs +# Test format: Compute cells and KZG proofs Compute the cells and cell KZG proofs for a given `blob`. @@ -20,4 +20,4 @@ All byte(s) fields are encoded as strings, hexadecimal encoding, prefixed with ` ## Condition -The `compute_cells_and_proofs` handler should compute the cells (chunks of an extended blob) and cell KZG proofs for `blob`, and the result should match the expected `output`. If the blob is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element) it should error, i.e. the output should be `null`. +The `compute_cells_and_kzg_proofs` handler should compute the cells (chunks of an extended blob) and cell KZG proofs for `blob`, and the result should match the expected `output`. If the blob is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element) it should error, i.e. the output should be `null`. 
diff --git a/tests/generators/kzg_7594/main.py b/tests/generators/kzg_7594/main.py index e8c776ea1b..9afea6efe1 100644 --- a/tests/generators/kzg_7594/main.py +++ b/tests/generators/kzg_7594/main.py @@ -60,17 +60,17 @@ def case01_compute_cells(): ############################################################################### -# Test cases for compute_cells_and_proofs +# Test cases for compute_cells_and_kzg_proofs ############################################################################### -def case02_compute_cells_and_proofs(): +def case02_compute_cells_and_kzg_proofs(): # Valid cases for blob in VALID_BLOBS: - cells, proofs = spec.compute_cells_and_proofs(blob) + cells, proofs = spec.compute_cells_and_kzg_proofs(blob) # Save cells & proofs here to save on time. VALID_CELLS_AND_PROOFS.append((cells, proofs)) identifier = make_id(blob) - yield f'compute_cells_and_proofs_case_valid_{identifier}', { + yield f'compute_cells_and_kzg_proofs_case_valid_{identifier}', { 'input': { 'blob': encode_hex(blob), }, @@ -79,9 +79,9 @@ def case02_compute_cells_and_proofs(): # Edge case: Invalid blobs for blob in INVALID_BLOBS: - expect_exception(spec.compute_cells_and_proofs, blob) + expect_exception(spec.compute_cells_and_kzg_proofs, blob) identifier = make_id(blob) - yield f'compute_cells_and_proofs_case_invalid_blob_{identifier}', { + yield f'compute_cells_and_kzg_proofs_case_invalid_blob_{identifier}', { 'input': { 'blob': encode_hex(blob) }, @@ -830,7 +830,7 @@ def cases_fn() -> Iterable[gen_typing.TestCase]: gen_runner.run_generator("kzg_7594", [ # EIP-7594 create_provider(EIP7594, 'compute_cells', case01_compute_cells), - create_provider(EIP7594, 'compute_cells_and_proofs', case02_compute_cells_and_proofs), + create_provider(EIP7594, 'compute_cells_and_kzg_proofs', case02_compute_cells_and_kzg_proofs), create_provider(EIP7594, 'verify_cell_kzg_proof', case03_verify_cell_kzg_proof), create_provider(EIP7594, 'verify_cell_kzg_proof_batch', 
case04_verify_cell_kzg_proof_batch), create_provider(EIP7594, 'recover_all_cells', case05_recover_all_cells), From ef9af0d85e867ff78c2b13b0ac54f0dc3d979a8c Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Fri, 26 Apr 2024 20:51:11 +0800 Subject: [PATCH 59/60] Fix consolidation `test_invalid_*_signature` tests --- .../test_process_consolidation.py | 76 ++++++++++++++++++- 1 file changed, 74 insertions(+), 2 deletions(-) diff --git a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_consolidation.py b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_consolidation.py index b1b55645bf..95bd812e38 100644 --- a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_consolidation.py +++ b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_consolidation.py @@ -611,9 +611,43 @@ def test_invalid_different_credentials(spec, state): @with_electra_and_later -@spec_state_test +@with_presets([MINIMAL], "need sufficient consolidation churn limit") +@with_custom_state( + balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit, + threshold_fn=default_activation_threshold, +) +@spec_test +@single_phase @always_bls def test_invalid_source_signature(spec, state): + # This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn + current_epoch = spec.get_current_epoch(state) + source_index = spec.get_active_validator_indices(state, current_epoch)[0] + target_index = spec.get_active_validator_indices(state, current_epoch)[1] + source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] + target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] + + # Set source and target withdrawal credentials to the same eth1 credential + set_eth1_withdrawal_credential_with_balance(spec, state, source_index) + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) + + signed_consolidation = sign_consolidation( + spec, + state, + 
spec.Consolidation( + epoch=current_epoch, source_index=source_index, target_index=target_index + ), + source_privkey, + target_privkey, + ) + + # Set earliest consolidation epoch to the expected exit epoch + expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch) + state.earliest_consolidation_epoch = expected_exit_epoch + consolidation_churn_limit = spec.get_consolidation_churn_limit(state) + # Set the consolidation balance to consume equal to churn limit + state.consolidation_balance_to_consume = consolidation_churn_limit + current_epoch = spec.get_current_epoch(state) source_privkey = pubkey_to_privkey[state.validators[0].pubkey] target_privkey = pubkey_to_privkey[state.validators[1].pubkey] @@ -627,17 +661,53 @@ def test_invalid_source_signature(spec, state): source_privkey, target_privkey, ) + # Change the pubkey of the source validator, invalidating its signature state.validators[0].pubkey = state.validators[1].pubkey + yield from run_consolidation_processing( spec, state, signed_consolidation, valid=False ) @with_electra_and_later -@spec_state_test +@with_presets([MINIMAL], "need sufficient consolidation churn limit") +@with_custom_state( + balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit, + threshold_fn=default_activation_threshold, +) +@spec_test +@single_phase @always_bls def test_invalid_target_signature(spec, state): + # This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn + current_epoch = spec.get_current_epoch(state) + source_index = spec.get_active_validator_indices(state, current_epoch)[0] + target_index = spec.get_active_validator_indices(state, current_epoch)[1] + source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] + target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] + + # Set source and target withdrawal credentials to the same eth1 credential + set_eth1_withdrawal_credential_with_balance(spec, state, source_index) + 
set_eth1_withdrawal_credential_with_balance(spec, state, target_index) + + signed_consolidation = sign_consolidation( + spec, + state, + spec.Consolidation( + epoch=current_epoch, source_index=source_index, target_index=target_index + ), + source_privkey, + target_privkey, + ) + + # Set earliest consolidation epoch to the expected exit epoch + expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch) + state.earliest_consolidation_epoch = expected_exit_epoch + consolidation_churn_limit = spec.get_consolidation_churn_limit(state) + # Set the consolidation balance to consume equal to churn limit + state.consolidation_balance_to_consume = consolidation_churn_limit + current_epoch = spec.get_current_epoch(state) source_privkey = pubkey_to_privkey[state.validators[0].pubkey] target_privkey = pubkey_to_privkey[state.validators[1].pubkey] @@ -651,8 +721,10 @@ def test_invalid_target_signature(spec, state): source_privkey, target_privkey, ) + # Change the pubkey of the target validator, invalidating its signature state.validators[1].pubkey = state.validators[2].pubkey + yield from run_consolidation_processing( spec, state, signed_consolidation, valid=False ) From a4e04ebf64f95b681e607dd84b0c1f58b93588eb Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Sat, 27 Apr 2024 12:11:29 +0800 Subject: [PATCH 60/60] Misc fix --- configs/mainnet.yaml | 9 ++++----- configs/minimal.yaml | 6 +++--- specs/_features/eip7594/fork.md | 2 +- .../test_process_execution_payload.py | 12 ++++++++---- .../test_process_effective_balance_updates.py | 2 +- tests/core/pyspec/eth2spec/utils/bls.py | 8 +++++--- 6 files changed, 22 insertions(+), 17 deletions(-) diff --git a/configs/mainnet.yaml b/configs/mainnet.yaml index bbe9195135..cda09fca72 100644 --- a/configs/mainnet.yaml +++ b/configs/mainnet.yaml @@ -52,14 +52,13 @@ DENEB_FORK_VERSION: 0x04000000 DENEB_FORK_EPOCH: 269568 # March 13, 2024, 01:55:35pm UTC # Electra ELECTRA_FORK_VERSION: 0x05000000 -ELECTRA_FORK_EPOCH: 
18446744073709551615 +ELECTRA_FORK_EPOCH: 18446744073709551615 # temporary stub +# EIP7594 +EIP7594_FORK_VERSION: 0x06000000 # temporary stub +EIP7594_FORK_EPOCH: 18446744073709551615 # WHISK WHISK_FORK_VERSION: 0x08000000 # temporary stub WHISK_FORK_EPOCH: 18446744073709551615 -# EIP7594 -EIP7594_FORK_VERSION: 0x06000001 -EIP7594_FORK_EPOCH: 18446744073709551615 - # Time parameters # --------------------------------------------------------------- diff --git a/configs/minimal.yaml b/configs/minimal.yaml index 99e84e5fbe..e7a92a811e 100644 --- a/configs/minimal.yaml +++ b/configs/minimal.yaml @@ -52,12 +52,12 @@ DENEB_FORK_EPOCH: 18446744073709551615 # Electra ELECTRA_FORK_VERSION: 0x05000001 ELECTRA_FORK_EPOCH: 18446744073709551615 -# WHISK -WHISK_FORK_VERSION: 0x08000001 -WHISK_FORK_EPOCH: 18446744073709551615 # EIP7594 EIP7594_FORK_VERSION: 0x06000001 EIP7594_FORK_EPOCH: 18446744073709551615 +# WHISK +WHISK_FORK_VERSION: 0x08000001 +WHISK_FORK_EPOCH: 18446744073709551615 # Time parameters # --------------------------------------------------------------- diff --git a/specs/_features/eip7594/fork.md b/specs/_features/eip7594/fork.md index d8e8f208cc..790ab0287d 100644 --- a/specs/_features/eip7594/fork.md +++ b/specs/_features/eip7594/fork.md @@ -28,7 +28,7 @@ Warning: this configuration is not definitive. 
| Name | Value | | - | - | -| `EIP7594_FORK_VERSION` | `Version('0x05000000')` | +| `EIP7594_FORK_VERSION` | `Version('0x06000000')` | | `EIP7594_FORK_EPOCH` | `Epoch(18446744073709551615)` **TBD** | ## Helper functions diff --git a/tests/core/pyspec/eth2spec/test/bellatrix/block_processing/test_process_execution_payload.py b/tests/core/pyspec/eth2spec/test/bellatrix/block_processing/test_process_execution_payload.py index 0f3a0b0b8f..8eb6b4eab3 100644 --- a/tests/core/pyspec/eth2spec/test/bellatrix/block_processing/test_process_execution_payload.py +++ b/tests/core/pyspec/eth2spec/test/bellatrix/block_processing/test_process_execution_payload.py @@ -338,26 +338,30 @@ def run_randomized_non_validated_execution_fields_test(spec, state, execution_va @with_bellatrix_and_later @spec_state_test def test_randomized_non_validated_execution_fields_first_payload__execution_valid(spec, state): + rng = Random(1111) state = build_state_with_incomplete_transition(spec, state) - yield from run_randomized_non_validated_execution_fields_test(spec, state) + yield from run_randomized_non_validated_execution_fields_test(spec, state, rng=rng) @with_bellatrix_and_later @spec_state_test def test_randomized_non_validated_execution_fields_regular_payload__execution_valid(spec, state): + rng = Random(2222) state = build_state_with_complete_transition(spec, state) - yield from run_randomized_non_validated_execution_fields_test(spec, state) + yield from run_randomized_non_validated_execution_fields_test(spec, state, rng=rng) @with_bellatrix_and_later @spec_state_test def test_invalid_randomized_non_validated_execution_fields_first_payload__execution_invalid(spec, state): + rng = Random(3333) state = build_state_with_incomplete_transition(spec, state) - yield from run_randomized_non_validated_execution_fields_test(spec, state, execution_valid=False) + yield from run_randomized_non_validated_execution_fields_test(spec, state, execution_valid=False, rng=rng) @with_bellatrix_and_later 
@spec_state_test def test_invalid_randomized_non_validated_execution_fields_regular_payload__execution_invalid(spec, state): + rng = Random(4444) state = build_state_with_complete_transition(spec, state) - yield from run_randomized_non_validated_execution_fields_test(spec, state, execution_valid=False) + yield from run_randomized_non_validated_execution_fields_test(spec, state, execution_valid=False, rng=rng) diff --git a/tests/core/pyspec/eth2spec/test/electra/epoch_processing/test_process_effective_balance_updates.py b/tests/core/pyspec/eth2spec/test/electra/epoch_processing/test_process_effective_balance_updates.py index ddff613407..34e31b2748 100644 --- a/tests/core/pyspec/eth2spec/test/electra/epoch_processing/test_process_effective_balance_updates.py +++ b/tests/core/pyspec/eth2spec/test/electra/epoch_processing/test_process_effective_balance_updates.py @@ -7,6 +7,6 @@ @with_electra_and_later @spec_state_test def test_effective_balance_hysteresis_with_compounding_credentials(spec, state): - run_test_effective_balance_hysteresis( + yield from run_test_effective_balance_hysteresis( spec, state, with_compounding_credentials=True ) diff --git a/tests/core/pyspec/eth2spec/utils/bls.py b/tests/core/pyspec/eth2spec/utils/bls.py index 299495322f..666de68094 100644 --- a/tests/core/pyspec/eth2spec/utils/bls.py +++ b/tests/core/pyspec/eth2spec/utils/bls.py @@ -11,6 +11,8 @@ pairing as py_ecc_pairing, final_exponentiate as py_ecc_final_exponentiate, FQ12 as py_ecc_GT, + FQ, + FQ2, ) from py_ecc.bls.g2_primitives import ( # noqa: F401 curve_order as BLS_MODULUS, @@ -252,14 +254,14 @@ def multi_exp(points, integers): raise Exception("Invalid point type") result = None - if isinstance(points[0], py_ecc_G1): + if isinstance(points[0][0], FQ): result = Z1() - elif isinstance(points[0], py_ecc_G2): + elif isinstance(points[0][0], FQ2): result = Z2() else: raise Exception("Invalid point type") - for point, scalar in points.zip(integers): + for point, scalar in zip(points, 
integers): result = add(result, multiply(point, scalar)) return result