Migrating to latest crypto APIs
hwwhww committed Jan 15, 2024
1 parent 7f2a291 commit f1ac670
Showing 4 changed files with 21 additions and 24 deletions.
5 changes: 2 additions & 3 deletions pysetup/helpers.py
@@ -203,8 +203,7 @@ def dependency_order_class_objects(objects: Dict[str, str], custom_types: Dict[s
             for item in [dep, key] + key_list[key_list.index(dep)+1:]:
                 objects[item] = objects.pop(item)
 
-
-def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str], custom_types) -> Dict[str, str]:
+def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str]) -> Dict[str, str]:
     """
     Takes in old spec and new spec ssz objects, combines them,
     and returns the newer versions of the objects in dependency order.
@@ -226,7 +225,7 @@ def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject:
     config_vars = combine_dicts(spec0.config_vars, spec1.config_vars)
     ssz_dep_constants = combine_dicts(spec0.ssz_dep_constants, spec1.ssz_dep_constants)
     func_dep_presets = combine_dicts(spec0.func_dep_presets, spec1.func_dep_presets)
-    ssz_objects = combine_ssz_objects(spec0.ssz_objects, spec1.ssz_objects, custom_types)
+    ssz_objects = combine_ssz_objects(spec0.ssz_objects, spec1.ssz_objects)
    dataclasses = combine_dicts(spec0.dataclasses, spec1.dataclasses)
     return SpecObject(
         functions=functions,
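The dropped `custom_types` parameter was accepted but never used inside `combine_ssz_objects`, so the call site in `combine_spec_objects` loses it as well. As a rough illustration of the combine semantics described in the docstring (a minimal sketch, not the repository's implementation, which also reorders the result by dependency), the merge reduces to a dict update in which the newer fork's definitions win:

```python
from typing import Dict


def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str]) -> Dict[str, str]:
    # Start from the old spec's objects, then let the newer fork override.
    combined = dict(old_objects)
    combined.update(new_objects)
    return combined


# Hypothetical example: the newer fork redefines one container, keeps the rest.
old = {"Fork": "class Fork(Container): ...", "Checkpoint": "class Checkpoint(Container): ..."}
new = {"Checkpoint": "class Checkpoint(Container): ...  # updated"}
assert combine_ssz_objects(old, new)["Checkpoint"].endswith("# updated")
assert "Fork" in combine_ssz_objects(old, new)
```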
17 changes: 16 additions & 1 deletion pysetup/md_doc_paths.py
@@ -35,6 +35,11 @@
     BELLATRIX: "sync/optimistic.md"
 }
 
+DEFAULT_ORDER = (
+    "beacon-chain",
+    "polynomial-commitments",
+)
+
 
 def is_post_fork(a, b) -> bool:
     """
@@ -62,15 +67,25 @@ def get_fork_directory(fork):
     raise FileNotFoundError(f"No directory found for fork: {fork}")
 
 
+def sort_key(s):
+    for index, key in enumerate(DEFAULT_ORDER):
+        if key in s:
+            return (index, s)
+    return (len(DEFAULT_ORDER), s)
+
+
 def get_md_doc_paths(spec_fork: str) -> str:
     md_doc_paths = ""
 
     for fork in ALL_FORKS:
         if is_post_fork(spec_fork, fork):
             # Append all files in fork directory recursively
-            for root, dirs, files in os.walk(get_fork_directory(fork)):
+            for root, _, files in os.walk(get_fork_directory(fork)):
+                filepaths = []
                 for filename in files:
                     filepath = os.path.join(root, filename)
+                    filepaths.append(filepath)
+                for filepath in sorted(filepaths, key=sort_key):
                     if filepath.endswith('.md') and filepath not in IGNORE_SPEC_FILES:
                         md_doc_paths += filepath + "\n"
             # Append extra files if any
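`sort_key` returns a tuple whose first element is the position of the first matching `DEFAULT_ORDER` substring (or `len(DEFAULT_ORDER)` for everything else), so paths containing `beacon-chain` sort first, then `polynomial-commitments`, then the rest in plain lexicographic order. A standalone check with hypothetical file names, using the same `sort_key` as above:

```python
DEFAULT_ORDER = (
    "beacon-chain",
    "polynomial-commitments",
)


def sort_key(s):
    for index, key in enumerate(DEFAULT_ORDER):
        if key in s:
            return (index, s)
    return (len(DEFAULT_ORDER), s)


# os.walk yields files in arbitrary order; sorting makes the build deterministic.
files = [
    "specs/_features/eip7594/das-core.md",
    "specs/_features/eip7594/polynomial-commitments-sampling.md",
    "specs/_features/eip7594/beacon-chain.md",
]
assert sorted(files, key=sort_key) == [
    "specs/_features/eip7594/beacon-chain.md",
    "specs/_features/eip7594/polynomial-commitments-sampling.md",
    "specs/_features/eip7594/das-core.md",
]
```

Since the matched markdown files are concatenated into the generated pyspec, pinning `beacon-chain` and `polynomial-commitments` to the front presumably keeps their definitions ahead of documents that reference them.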
1 change: 0 additions & 1 deletion pysetup/spec_builders/eip7594.py
@@ -18,7 +18,6 @@ def hardcoded_custom_type_dep_constants(cls, spec_object) -> str:
         return {
             'FIELD_ELEMENTS_PER_CELL': spec_object.preset_vars['FIELD_ELEMENTS_PER_CELL'].value,
             'NUMBER_OF_COLUMNS': spec_object.preset_vars['NUMBER_OF_COLUMNS'].value,
-            'FIELD_ELEMENTS_PER_CELL': spec_object.preset_vars['FIELD_ELEMENTS_PER_CELL'].value,
         }
 
     @classmethod
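The deleted line was a duplicated dict key: in a Python dict literal a repeated key is legal and the last occurrence silently wins, so the entry was redundant (both occurrences mapped to the same value) rather than a behavior change. A two-line demonstration with stand-in values:

```python
# A repeated key in a dict literal keeps only the last occurrence.
d = {'FIELD_ELEMENTS_PER_CELL': 64, 'NUMBER_OF_COLUMNS': 128, 'FIELD_ELEMENTS_PER_CELL': 64}
assert d == {'FIELD_ELEMENTS_PER_CELL': 64, 'NUMBER_OF_COLUMNS': 128}
```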
22 changes: 3 additions & 19 deletions specs/_features/eip7594/das-core.md
@@ -16,7 +16,6 @@
   - [`get_custody_lines`](#get_custody_lines)
   - [`compute_extended_data`](#compute_extended_data)
   - [`compute_extended_matrix`](#compute_extended_matrix)
-  - [`compute_samples_and_proofs`](#compute_samples_and_proofs)
   - [`get_data_column_sidecars`](#get_data_column_sidecars)
 - [Custody](#custody)
   - [Custody requirement](#custody-requirement)
@@ -43,9 +42,8 @@ We define the following Python custom types for type hinting and readability:
 
 | Name | SSZ equivalent | Description |
 | - | - | - |
-| `DataCell` | `Vector[BLSFieldElement, FIELD_ELEMENTS_PER_CELL]` | The data unit of a cell in the extended data matrix |
-| `DataColumn` | `List[DataCell, MAX_BLOBS_PER_BLOCK]` | The data of each column in EIP7594 |
-| `ExtendedMatrix` | `List[DataCell, MAX_BLOBS_PER_BLOCK * NUMBER_OF_COLUMNS]` | The full data with blobs and one-dimensional erasure coding extension |
+| `DataColumn` | `List[Cell, MAX_BLOBS_PER_BLOCK]` | The data of each column in EIP7594 |
+| `ExtendedMatrix` | `List[Cell, MAX_BLOBS_PER_BLOCK * NUMBER_OF_COLUMNS]` | The full data with blobs and one-dimensional erasure coding extension |
 | `FlatExtendedMatrix` | `List[BLSFieldElement, MAX_BLOBS_PER_BLOCK * FIELD_ELEMENTS_PER_BLOB * NUMBER_OF_COLUMNS]` | The flattened format of `ExtendedMatrix` |
 | `LineIndex` | `uint64` | The index of the rows or columns in `FlatExtendedMatrix` matrix |
 
@@ -55,7 +53,6 @@
 
 | Name | Value | Description |
 | - | - | - |
-| `FIELD_ELEMENTS_PER_CELL` | `uint64(2**6)` (= 64) | Elements per `DataCell` |
 | `NUMBER_OF_COLUMNS` | `uint64((FIELD_ELEMENTS_PER_BLOB * 2) // FIELD_ELEMENTS_PER_CELL)` (= 128) | Number of columns in the extended data matrix. |
 
 ### Custody setting
@@ -95,19 +92,6 @@ def compute_extended_matrix(blobs: Sequence[Blob]) -> FlatExtendedMatrix:
     return FlatExtendedMatrix(matrix)
 ```
 
-#### `compute_samples_and_proofs`
-
-```python
-def compute_samples_and_proofs(blob: Blob) -> Tuple[
-        Vector[DataCell, NUMBER_OF_COLUMNS],
-        Vector[KZGProof, NUMBER_OF_COLUMNS]]:
-    """
-    Defined in polynomial-commitments-sampling.md
-    """
-    # pylint: disable=unused-argument
-    ...
-```
-
 #### `get_data_column_sidecars`
 
 ```python
@@ -119,7 +103,7 @@ def get_data_column_sidecars(signed_block: SignedBeaconBlock,
         block.body,
         get_generalized_index(BeaconBlockBody, 'blob_kzg_commitments'),
     )
-    cells_and_proofs = [compute_samples_and_proofs(blob) for blob in blobs]
+    cells_and_proofs = [compute_cells_and_proofs(blob) for blob in blobs]
     blob_count = len(blobs)
     cells = [cells_and_proofs[i][0] for i in range(blob_count)]
     proofs = [cells_and_proofs[i][1] for i in range(blob_count)]
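With the local stub gone, `get_data_column_sidecars` now calls `compute_cells_and_proofs`, matching the helper defined in `polynomial-commitments-sampling.md` (per the removed stub's docstring) under the new crypto API names. A minimal sketch of the list-comprehension pattern in the visible hunk, with a toy stand-in for the real helper (assumed shapes only; the real function returns `NUMBER_OF_COLUMNS` cells and proofs per blob):

```python
# Toy stand-in for compute_cells_and_proofs: per blob, one cell and one
# proof per column (4 columns here instead of the spec's 128).
def compute_cells_and_proofs(blob):
    cells = [f"cell({blob},{col})" for col in range(4)]
    proofs = [f"proof({blob},{col})" for col in range(4)]
    return cells, proofs


blobs = ["blob0", "blob1"]
cells_and_proofs = [compute_cells_and_proofs(blob) for blob in blobs]
blob_count = len(blobs)
cells = [cells_and_proofs[i][0] for i in range(blob_count)]    # cells[row][column]
proofs = [cells_and_proofs[i][1] for i in range(blob_count)]   # proofs[row][column]

# A column sidecar then gathers one cell per blob for a fixed column index.
column_0_cells = [cells[row][0] for row in range(blob_count)]
assert column_0_cells == ["cell(blob0,0)", "cell(blob1,0)"]
```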
