
backport v0.6.1 into dev #1038
Merged · 8 commits · May 3, 2019
2 changes: 0 additions & 2 deletions scripts/phase0/build_spec.py
@@ -48,10 +48,8 @@ def compute_committee(indices: List[ValidatorIndex], seed: Bytes32, index: int,
param_hash = (hash_tree_root(indices), seed, index, count)

if param_hash in committee_cache:
print("Cache hit, param_hash: ", param_hash)
return committee_cache[param_hash]
else:
print("Cache miss, param_hash: ", param_hash)
ret = _compute_committee(indices, seed, index, count)
committee_cache[param_hash] = ret
return ret
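The hunk above only removes the debug prints; the memoization around `_compute_committee` stays. As a reading aid, here is a minimal, self-contained sketch of the same caching pattern; `hash_list` and `slow_shuffle` are illustrative stand-ins, not functions from this repository.

```python
# Minimal sketch of the caching pattern kept above: key the cache on a hash of the
# (potentially large) index list plus the scalar parameters, so repeated committee
# lookups with identical inputs are computed only once.
import hashlib

committee_cache = {}

def hash_list(indices):
    # stand-in for hash_tree_root(indices)
    return hashlib.sha256(b"".join(i.to_bytes(8, "little") for i in indices)).digest()

def slow_shuffle(indices, seed, index, count):
    # stand-in for the real _compute_committee
    return sorted(indices)[index::count]

def compute_committee(indices, seed, index, count):
    key = (hash_list(indices), seed, index, count)
    if key in committee_cache:
        return committee_cache[key]
    result = slow_shuffle(indices, seed, index, count)
    committee_cache[key] = result
    return result

first = compute_committee([3, 1, 2], seed=b"\x00" * 32, index=0, count=2)
second = compute_committee([3, 1, 2], seed=b"\x00" * 32, index=0, count=2)  # cache hit
assert first == second
```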
10 changes: 6 additions & 4 deletions specs/core/0_beacon-chain.md
@@ -799,8 +799,8 @@ def get_randao_mix(state: BeaconState,
epoch: Epoch) -> Bytes32:
"""
Return the randao mix at a recent ``epoch``.
``epoch`` expected to be between (current_epoch - LATEST_RANDAO_MIXES_LENGTH, current_epoch].
"""
assert get_current_epoch(state) - LATEST_RANDAO_MIXES_LENGTH < epoch <= get_current_epoch(state)
return state.latest_randao_mixes[epoch % LATEST_RANDAO_MIXES_LENGTH]
```
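For readers skimming the diff, the new assertion pins `epoch` to the half-open window `(current_epoch - LATEST_RANDAO_MIXES_LENGTH, current_epoch]` before the modular lookup into the circular buffer. A toy version of the same check, with a deliberately small, illustrative buffer size:

```python
# Toy illustration of the windowed lookup above; 8 is an illustrative buffer size,
# not the spec constant.
LATEST_RANDAO_MIXES_LENGTH = 8

def get_randao_mix(latest_randao_mixes, current_epoch, epoch):
    # epoch must lie in (current_epoch - LATEST_RANDAO_MIXES_LENGTH, current_epoch]
    assert current_epoch - LATEST_RANDAO_MIXES_LENGTH < epoch <= current_epoch
    return latest_randao_mixes[epoch % LATEST_RANDAO_MIXES_LENGTH]

mixes = [bytes([i]) * 32 for i in range(LATEST_RANDAO_MIXES_LENGTH)]
print(get_randao_mix(mixes, current_epoch=10, epoch=5))   # within the window: ok
# get_randao_mix(mixes, current_epoch=10, epoch=2)        # too old: assertion fails
```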

@@ -811,8 +811,9 @@ def get_active_index_root(state: BeaconState,
epoch: Epoch) -> Bytes32:
"""
Return the index root at a recent ``epoch``.
``epoch`` expected to be between
(current_epoch - LATEST_ACTIVE_INDEX_ROOTS_LENGTH + ACTIVATION_EXIT_DELAY, current_epoch + ACTIVATION_EXIT_DELAY].
"""
assert get_current_epoch(state) - LATEST_ACTIVE_INDEX_ROOTS_LENGTH + ACTIVATION_EXIT_DELAY < epoch <= get_current_epoch(state) + ACTIVATION_EXIT_DELAY
return state.latest_active_index_roots[epoch % LATEST_ACTIVE_INDEX_ROOTS_LENGTH]
```
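The index-root window is the same idea shifted forward by `ACTIVATION_EXIT_DELAY`, so roots up to `ACTIVATION_EXIT_DELAY` epochs ahead of the current epoch are also addressable. A quick check of the shifted bounds, with small, illustrative constants:

```python
# Small, illustrative constants; not the spec values.
LATEST_ACTIVE_INDEX_ROOTS_LENGTH = 8
ACTIVATION_EXIT_DELAY = 4

def in_index_root_window(current_epoch, epoch):
    lower = current_epoch - LATEST_ACTIVE_INDEX_ROOTS_LENGTH + ACTIVATION_EXIT_DELAY
    upper = current_epoch + ACTIVATION_EXIT_DELAY
    return lower < epoch <= upper

print(in_index_root_window(current_epoch=10, epoch=14))  # True: up to 4 epochs ahead
print(in_index_root_window(current_epoch=10, epoch=5))   # False: beyond the buffer
```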

@@ -825,7 +826,7 @@ def generate_seed(state: BeaconState,
Generate a seed for the given ``epoch``.
"""
return hash(
get_randao_mix(state, epoch - MIN_SEED_LOOKAHEAD) +
get_randao_mix(state, epoch + LATEST_RANDAO_MIXES_LENGTH - MIN_SEED_LOOKAHEAD) +
get_active_index_root(state, epoch) +
int_to_bytes32(epoch)
)
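The `generate_seed` change adds `LATEST_RANDAO_MIXES_LENGTH` before subtracting `MIN_SEED_LOOKAHEAD`. Since `get_randao_mix` indexes modulo `LATEST_RANDAO_MIXES_LENGTH`, this selects the same slot; it only keeps the argument non-negative and inside the asserted window for the earliest epochs, where the spec's unsigned epoch arithmetic would otherwise underflow. A small check that the shift is slot-neutral (constants are illustrative):

```python
# The added LATEST_RANDAO_MIXES_LENGTH term is a no-op modulo the buffer length;
# Python's signed integers hide the underflow that unsigned epochs would hit.
LATEST_RANDAO_MIXES_LENGTH = 16   # illustrative, not the spec value
MIN_SEED_LOOKAHEAD = 1            # illustrative, not the spec value

for epoch in (0, 1, 5, 12345):
    shifted = (epoch + LATEST_RANDAO_MIXES_LENGTH - MIN_SEED_LOOKAHEAD) % LATEST_RANDAO_MIXES_LENGTH
    plain = (epoch - MIN_SEED_LOOKAHEAD) % LATEST_RANDAO_MIXES_LENGTH
    assert shifted == plain       # same slot either way
```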
@@ -1426,7 +1427,8 @@ def get_attestation_deltas(state: BeaconState) -> Tuple[List[Gwei], List[Gwei]]:
# Proposer and inclusion delay micro-rewards
for index in get_unslashed_attesting_indices(state, matching_source_attestations):
attestation = min([
a for a in attestations if index in get_attesting_indices(state, a.data, a.aggregation_bitfield)
a for a in matching_source_attestations
if index in get_attesting_indices(state, a.data, a.aggregation_bitfield)
], key=lambda a: a.inclusion_delay)
rewards[attestation.proposer_index] += get_base_reward(state, index) // PROPOSER_REWARD_QUOTIENT
rewards[index] += get_base_reward(state, index) * MIN_ATTESTATION_INCLUSION_DELAY // attestation.inclusion_delay
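The fix above narrows the `min(...)` to `matching_source_attestations`, so the inclusion-delay micro-reward is based on the earliest matching attestation that includes each validator. A toy illustration of that selection; `Attestation` is a hand-rolled stand-in for the spec's `PendingAttestation`:

```python
# Pick, per attester, the matching-source attestation with the smallest inclusion
# delay, and credit its proposer.
from collections import namedtuple

Attestation = namedtuple("Attestation", ["attesters", "inclusion_delay", "proposer_index"])

matching_source_attestations = [
    Attestation(attesters={1, 2}, inclusion_delay=3, proposer_index=7),
    Attestation(attesters={2, 3}, inclusion_delay=1, proposer_index=9),
]

for index in (1, 2, 3):
    earliest = min(
        (a for a in matching_source_attestations if index in a.attesters),
        key=lambda a: a.inclusion_delay,
    )
    print(index, earliest.inclusion_delay, earliest.proposer_index)
# 1 -> delay 3, proposer 7; 2 and 3 -> delay 1, proposer 9
```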
55 changes: 31 additions & 24 deletions test_libs/pyspec/eth2spec/utils/minimal_ssz.py
@@ -3,7 +3,7 @@
from .hash_function import hash

BYTES_PER_CHUNK = 32
BYTES_PER_LENGTH_PREFIX = 4
BYTES_PER_LENGTH_OFFSET = 4
ZERO_CHUNK = b'\x00' * BYTES_PER_CHUNK


@@ -111,19 +111,34 @@ def coerce_to_bytes(x):
raise Exception("Expecting bytes")


def encode_bytes(value):
serialized_bytes = coerce_to_bytes(value)
assert len(serialized_bytes) < 2 ** (8 * BYTES_PER_LENGTH_PREFIX)
serialized_length = len(serialized_bytes).to_bytes(BYTES_PER_LENGTH_PREFIX, 'little')
return serialized_length + serialized_bytes
def encode_series(values, types):
# Recursively serialize
parts = [(is_constant_sized(types[i]), serialize_value(values[i], types[i])) for i in range(len(values))]

# Compute and check lengths
fixed_lengths = [len(serialized) if constant_size else BYTES_PER_LENGTH_OFFSET
for (constant_size, serialized) in parts]
variable_lengths = [len(serialized) if not constant_size else 0
for (constant_size, serialized) in parts]

def encode_variable_size_container(values, types):
return encode_bytes(encode_fixed_size_container(values, types))
# Check if integer is not out of bounds (Python)
assert sum(fixed_lengths + variable_lengths) < 2 ** (BYTES_PER_LENGTH_OFFSET * 8)

# Interleave offsets of variable-size parts with fixed-size parts.
# Avoid quadratic complexity in calculation of offsets.
offset = sum(fixed_lengths)
variable_parts = []
fixed_parts = []
for (constant_size, serialized) in parts:
if constant_size:
fixed_parts.append(serialized)
else:
fixed_parts.append(offset.to_bytes(BYTES_PER_LENGTH_OFFSET, 'little'))
variable_parts.append(serialized)
offset += len(serialized)

def encode_fixed_size_container(values, types):
return b''.join([serialize_value(v, typ) for (v, typ) in zip(values, types)])
# Return the concatenation of the fixed-size parts (offsets interleaved) with the variable-size parts
return b"".join(fixed_parts + variable_parts)
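The net effect of replacing `encode_bytes` with `encode_series` is that variable-size data is no longer length-prefixed in place; instead a 4-byte offset is written in the fixed region and the payload is appended after it. A sketch of the contrast for a one-element list of `bytes`; both functions below are illustrative reconstructions, not the pyspec helpers:

```python
BYTES_PER_LENGTH_OFFSET = 4

def old_list_of_one_bytes(payload: bytes) -> bytes:
    # old scheme: the element carried a 4-byte length prefix (encode_bytes), and the
    # surrounding variable-size container was itself length-prefixed again
    element = len(payload).to_bytes(BYTES_PER_LENGTH_OFFSET, "little") + payload
    return len(element).to_bytes(BYTES_PER_LENGTH_OFFSET, "little") + element

def new_list_of_one_bytes(payload: bytes) -> bytes:
    # new scheme: the fixed region holds a single offset; the raw payload follows
    return BYTES_PER_LENGTH_OFFSET.to_bytes(BYTES_PER_LENGTH_OFFSET, "little") + payload

print(old_list_of_one_bytes(b"hi").hex())  # 06000000 02000000 6869
print(new_list_of_one_bytes(b"hi").hex())  # 04000000 6869
```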


def serialize_value(value, typ=None):
@@ -142,18 +157,13 @@ def serialize_value(value, typ=None):
elif isinstance(typ, list) and len(typ) == 2:
# (regardless of element type, sanity-check if the length reported in the vector type matches the value length)
assert len(value) == typ[1]
# If value is fixed-size (i.e. element type is fixed-size):
if is_constant_sized(typ):
return encode_fixed_size_container(value, [typ[0]] * len(value))
# If value is variable-size (i.e. element type is variable-size)
else:
return encode_variable_size_container(value, [typ[0]] * len(value))
# "bytes" (variable size)
elif isinstance(typ, str) and typ == 'bytes':
return encode_bytes(value)
return encode_series(value, [typ[0]] * len(value))
# List
elif isinstance(typ, list) and len(typ) == 1:
return encode_variable_size_container(value, [typ[0]] * len(value))
return encode_series(value, [typ[0]] * len(value))
# "bytes" (variable size)
elif isinstance(typ, str) and typ == 'bytes':
return coerce_to_bytes(value)
# "bytesN" (fixed size)
elif isinstance(typ, str) and len(typ) > 5 and typ[:5] == 'bytes':
assert len(value) == int(typ[5:]), (value, int(typ[5:]))
@@ -162,10 +172,7 @@
elif hasattr(typ, 'fields'):
values = [getattr(value, field) for field in typ.fields.keys()]
types = list(typ.fields.values())
if is_constant_sized(typ):
return encode_fixed_size_container(values, types)
else:
return encode_variable_size_container(values, types)
return encode_series(values, types)
else:
print(value, typ)
raise Exception("Type not recognized")
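Containers now take the same `encode_series` path as lists and vectors. As a reading aid, here is the hand-worked layout for a hypothetical container with field types `(uint64, bytes, uint64)` and values `(5, b"hello", 7)`; the bytes are assembled by hand, not produced by `serialize_value`:

```python
# Fixed-size fields are stored in place; the variable-size field contributes a
# 4-byte offset to the fixed region and its payload goes at the end.
fixed_region = (
    (5).to_bytes(8, "little") +       # uint64 field, stored in place
    (20).to_bytes(4, "little") +      # offset of b"hello": 8 + 4 + 8 = 20
    (7).to_bytes(8, "little")         # uint64 field, stored in place
)
variable_region = b"hello"            # variable-size payload, appended last
encoded = fixed_region + variable_region
print(len(fixed_region), encoded.hex())  # 20, then the 25-byte encoding in hex
```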