1 change: 1 addition & 0 deletions pyproject.toml
@@ -40,6 +40,7 @@ lint = [
"codespell==2.4.1",
"mdformat-gfm-alerts==1.0.2",
"mdformat-gfm==0.4.1",
"mdformat-ruff==0.1.3",
"mdformat-toc==0.3.0",
"mdformat==0.7.22",
"mypy==1.16.0",
153 changes: 107 additions & 46 deletions specs/_deprecated/custody_game/beacon-chain.md

Large diffs are not rendered by default.

7 changes: 3 additions & 4 deletions specs/_deprecated/custody_game/validator.md
@@ -92,10 +92,9 @@ for which the custody bit is one. The custody bit is computed using the custody
secret:

```python
-def get_custody_secret(state: BeaconState,
-                       validator_index: ValidatorIndex,
-                       privkey: int,
-                       epoch: Epoch=None) -> BLSSignature:
+def get_custody_secret(
+    state: BeaconState, validator_index: ValidatorIndex, privkey: int, epoch: Epoch = None
+) -> BLSSignature:
if epoch is None:
epoch = get_current_epoch(state)
period = get_custody_period_for_validator(validator_index, epoch)
28 changes: 17 additions & 11 deletions specs/_deprecated/das/das-core.md
@@ -60,7 +60,7 @@ def reverse_bit_order(n: int, order: int):
Reverse the bit order of an integer n
"""
assert is_power_of_two(order)
-    return int(('{:0' + str(order.bit_length() - 1) + 'b}').format(n)[::-1], 2)
+    return int(("{:0" + str(order.bit_length() - 1) + "b}").format(n)[::-1], 2)
```
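Aside: a self-contained check of what `reverse_bit_order` computes, with the `is_power_of_two` helper restated so the snippet runs standalone:

```python
def is_power_of_two(x: int) -> bool:
    return x > 0 and x & (x - 1) == 0

def reverse_bit_order(n: int, order: int) -> int:
    assert is_power_of_two(order)
    return int(("{:0" + str(order.bit_length() - 1) + "b}").format(n)[::-1], 2)

# order=8 means 3-bit indices: 3 = 0b011 reverses to 0b110 = 6.
assert reverse_bit_order(3, 8) == 6
assert [reverse_bit_order(i, 8) for i in range(8)] == [0, 4, 2, 6, 1, 5, 3, 7]
```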

#### `reverse_bit_order_list`
@@ -86,7 +86,7 @@ def das_fft_extension(data: Sequence[Point]) -> Sequence[Point]:
such that the second output half of the IFFT is all zeroes.
"""
poly = inverse_fft(data)
-    return fft(poly + [0]*len(poly))[1::2]
+    return fft(poly + [0] * len(poly))[1::2]
```
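The identity behind `das_fft_extension` holds over any field, so it can be sanity-checked with numpy's complex FFT. The spec's `fft`/`ifft` are finite-field variants; this toy only illustrates the algebra, not the real arithmetic:

```python
import numpy as np

data = np.array([1.0, 2.0, 3.0, 4.0])
poly = np.fft.ifft(data)  # coefficient form of the degree-<4 interpolant

# Evaluate the zero-padded polynomial on the doubled domain.
extended = np.fft.fft(np.concatenate([poly, np.zeros(4)]))
assert np.allclose(extended[0::2], data)  # even points reproduce the original data
extension = extended[1::2]                # what das_fft_extension returns

# Round trip: the IFFT of the full extended evaluations has an all-zero second
# half, which is exactly the property the docstring promises.
assert np.allclose(np.fft.ifft(extended)[4:], 0.0)
```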

### Data recovery
@@ -119,11 +119,13 @@ def extend_data(data: Sequence[Point]) -> Sequence[Point]:

```python
def unextend_data(extended_data: Sequence[Point]) -> Sequence[Point]:
-    return extended_data[:len(extended_data)//2]
+    return extended_data[: len(extended_data) // 2]
```

```python
-def check_multi_kzg_proof(commitment: BLSCommitment, proof: BLSCommitment, x: Point, ys: Sequence[Point]) -> bool:
+def check_multi_kzg_proof(
+    commitment: BLSCommitment, proof: BLSCommitment, x: Point, ys: Sequence[Point]
+) -> bool:
"""
Run a KZG multi-proof check to verify that for the subgroup starting at x,
the proof indeed complements the ys to match the commitment.
@@ -137,12 +139,12 @@ def construct_proofs(extended_data_as_poly: Sequence[Point]) -> Sequence[BLSComm
Constructs proofs for samples of extended data (in polynomial form, 2nd half being zeroes).
Use the FK20 multi-proof approach to construct proofs for a chunk length of POINTS_PER_SAMPLE.
"""
-    ... # Omitted for now, refer to KZG implementation resources.
+    ...  # Omitted for now, refer to KZG implementation resources.
```

```python
def commit_to_data(data_as_poly: Sequence[Point]) -> BLSCommitment:
"""Commit to a polynomial by """
"""Commit to a polynomial by"""
```

```python
@@ -151,7 +153,7 @@ def sample_data(slot: Slot, shard: Shard, extended_data: Sequence[Point]) -> Seq
assert sample_count <= MAX_SAMPLES_PER_BLOCK
# get polynomial form of full extended data, second half will be all zeroes.
poly = ifft(reverse_bit_order_list(extended_data))
-    assert all(v == 0 for v in poly[len(poly)//2:])
+    assert all(v == 0 for v in poly[len(poly) // 2 :])
proofs = construct_proofs(poly)
return [
DASSample(
@@ -163,15 +165,17 @@
proof=proofs[reverse_bit_order(i, sample_count)],
# note: we leave the sample data as-is so it matches the original nicely.
# The proof applies to `ys = reverse_bit_order_list(sample.data)`
-            data=extended_data[i*POINTS_PER_SAMPLE:(i+1)*POINTS_PER_SAMPLE]
-        ) for i in range(sample_count)
+            data=extended_data[i * POINTS_PER_SAMPLE : (i + 1) * POINTS_PER_SAMPLE],
+        )
+        for i in range(sample_count)
]
```

```python
def verify_sample(sample: DASSample, sample_count: uint64, commitment: BLSCommitment):
domain_pos = reverse_bit_order(sample.index, sample_count)
-    sample_root_of_unity = ROOT_OF_UNITY**MAX_SAMPLES_PER_BLOCK # change point-level to sample-level domain
+    # Change point-level to sample-level domain
+    sample_root_of_unity = ROOT_OF_UNITY**MAX_SAMPLES_PER_BLOCK
x = sample_root_of_unity**domain_pos
ys = reverse_bit_order_list(sample.data)
assert check_multi_kzg_proof(commitment, sample.proof, x, ys)
@@ -180,6 +184,8 @@
```python
def reconstruct_extended_data(samples: Sequence[Optional[DASSample]]) -> Sequence[Point]:
# Instead of recovering with a point-by-point approach, recover the samples by recovering missing subgroups.
-    subgroups = [None if sample is None else reverse_bit_order_list(sample.data) for sample in samples]
+    subgroups = [
+        None if sample is None else reverse_bit_order_list(sample.data) for sample in samples
+    ]
return recover_data(subgroups)
```
13 changes: 10 additions & 3 deletions specs/_deprecated/das/fork-choice.md
@@ -25,11 +25,18 @@ and a length.
def get_new_dependencies(state: BeaconState) -> Set[DataCommitment]:
return set(
# Already confirmed during this epoch
-        [c.commitment for c in state.current_epoch_pending_headers if c.confirmed] +
+        [c.commitment for c in state.current_epoch_pending_headers if c.confirmed]
+        +
# Already confirmed during previous epoch
-        [c.commitment for c in state.previous_epoch_pending_headers if c.confirmed] +
+        [c.commitment for c in state.previous_epoch_pending_headers if c.confirmed]
+        +
# Confirmed in the epoch before the previous
-        [c for c in shard for shard in state.grandparent_epoch_confirmed_commitments if c != DataCommitment()]
+        [
+            c
+            for c in shard
+            for shard in state.grandparent_epoch_confirmed_commitments
+            if c != DataCommitment()
+        ]
)
```
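A side note on this pre-existing snippet, which the formatter preserved verbatim: the last comprehension reads `for c in shard` before `shard` is bound, which raises a `NameError` under Python's left-to-right comprehension scoping. A runnable toy of the presumed intent, with stand-in data (`EMPTY` substitutes for `DataCommitment()`):

```python
# Python evaluates comprehension `for` clauses left to right, outermost first,
# so the shard loop must come before the per-commitment loop.
grandparent_epoch_confirmed_commitments = [["a", ""], ["b", ""]]
EMPTY = ""  # stand-in for DataCommitment()

confirmed = [
    c
    for shard in grandparent_epoch_confirmed_commitments
    for c in shard
    if c != EMPTY
]
assert confirmed == ["a", "b"]
```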

96 changes: 70 additions & 26 deletions specs/_deprecated/sharding/beacon-chain.md
@@ -122,13 +122,15 @@ class BuilderBlockBid(Container):

execution_payload_root: Root

-    sharded_data_commitment_root: Root # Root of the sharded data (only data, not beacon/builder block commitments)
+    sharded_data_commitment_root: (
+        Root  # Root of the sharded data (only data, not beacon/builder block commitments)
+    )

-    sharded_data_commitment_count: uint64 # Count of sharded data commitments
+    sharded_data_commitment_count: uint64  # Count of sharded data commitments

-    bid: Gwei # Block builder bid paid to proposer
+    bid: Gwei  # Block builder bid paid to proposer

-    validator_index: ValidatorIndex # Validator index for this bid
+    validator_index: ValidatorIndex  # Validator index for this bid

# Block builders use an Eth1 address -- need signature as
# block bid and data gas base fees will be charged to this address
@@ -142,7 +144,7 @@
```python
class BuilderBlockBidWithRecipientAddress(Container):
builder_block_bid: Union[None, BuilderBlockBid]
-    recipient_address: ExecutionAddress # Address to receive the block builder bid
+    recipient_address: ExecutionAddress  # Address to receive the block builder bid
```

#### `ShardedCommitmentsContainer`
@@ -284,18 +286,31 @@ def verify_builder_block_bid(state: BeaconState, block: BeaconBlock) -> None:
if is_builder_block_slot(block.slot):
# Get last builder block bid
assert state.blocks_since_builder_block[-1].body.payload_data.selector == 0
-        builder_block_bid = state.blocks_since_builder_block[-1].body.payload_data.value.builder_block_bid
+        builder_block_bid = state.blocks_since_builder_block[
+            -1
+        ].body.payload_data.value.builder_block_bid
assert builder_block_bid.slot + 1 == block.slot

-        assert block.body.payload_data.selector == 1 # Verify that builder block does not contain bid
+        assert (
+            block.body.payload_data.selector == 1
+        )  # Verify that builder block does not contain bid

builder_block_data = block.body.payload_data.value

-        assert builder_block_bid.execution_payload_root == hash_tree_root(builder_block_data.execution_payload)
+        assert builder_block_bid.execution_payload_root == hash_tree_root(
+            builder_block_data.execution_payload
+        )

-        assert builder_block_bid.sharded_data_commitment_count == builder_block_data.included_sharded_data_commitments
+        assert (
+            builder_block_bid.sharded_data_commitment_count
+            == builder_block_data.included_sharded_data_commitments
+        )

-        assert builder_block_bid.sharded_data_commitment_root == hash_tree_root(builder_block_data.sharded_commitments[-builder_block_bid.included_sharded_data_commitments:])
+        assert builder_block_bid.sharded_data_commitment_root == hash_tree_root(
+            builder_block_data.sharded_commitments[
+                -builder_block_bid.included_sharded_data_commitments :
+            ]
+        )

assert builder_block_bid.validator_index == block.proposer_index

@@ -324,47 +339,70 @@ def process_sharded_data(state: BeaconState, block: BeaconBlock) -> None:
sharded_commitments_container = block.body.payload_data.value.sharded_commitments_container

# Verify not too many commitments
-    assert len(sharded_commitments_container.sharded_commitments) // 2 <= get_active_shard_count(state, get_current_epoch(state))
+    assert len(
+        sharded_commitments_container.sharded_commitments
+    ) // 2 <= get_active_shard_count(state, get_current_epoch(state))

# Verify the degree proof
r = hash_to_bls_field(sharded_commitments_container.sharded_commitments, 0)
r_powers = compute_powers(r, len(sharded_commitments_container.sharded_commitments))
-    combined_commitment = elliptic_curve_lincomb(sharded_commitments_container.sharded_commitments, r_powers)
+    combined_commitment = elliptic_curve_lincomb(
+        sharded_commitments_container.sharded_commitments, r_powers
+    )

payload_field_elements_per_blob = SAMPLES_PER_BLOB * FIELD_ELEMENTS_PER_SAMPLE // 2

-    verify_degree_proof(combined_commitment, payload_field_elements_per_blob, sharded_commitments_container.degree_proof)
+    verify_degree_proof(
+        combined_commitment,
+        payload_field_elements_per_blob,
+        sharded_commitments_container.degree_proof,
+    )

# Verify that the 2*N commitments lie on a degree < N polynomial
low_degree_check(sharded_commitments_container.sharded_commitments)

# Verify that blocks since the last builder block have been included
-    blocks_chunked = [bytes_to_field_elements(ssz_serialize(block)) for block in state.blocks_since_builder_block]
+    blocks_chunked = [
+        bytes_to_field_elements(ssz_serialize(block))
+        for block in state.blocks_since_builder_block
+    ]
block_vectors = []

for block_chunked in blocks_chunked:
for i in range(0, len(block_chunked), payload_field_elements_per_blob):
-            block_vectors.append(block_chunked[i:i + payload_field_elements_per_blob])
+            block_vectors.append(block_chunked[i : i + payload_field_elements_per_blob])

number_of_blobs = len(block_vectors)
-    r = hash_to_bls_field(sharded_commitments_container.sharded_commitments[:number_of_blobs], 0)
-    x = hash_to_bls_field(sharded_commitments_container.sharded_commitments[:number_of_blobs], 1)
+    r = hash_to_bls_field(
+        sharded_commitments_container.sharded_commitments[:number_of_blobs], 0
+    )
+    x = hash_to_bls_field(
+        sharded_commitments_container.sharded_commitments[:number_of_blobs], 1
+    )

r_powers = compute_powers(r, number_of_blobs)
combined_vector = vector_lincomb(block_vectors, r_powers)
-    combined_commitment = elliptic_curve_lincomb(sharded_commitments_container.sharded_commitments[:number_of_blobs], r_powers)
+    combined_commitment = elliptic_curve_lincomb(
+        sharded_commitments_container.sharded_commitments[:number_of_blobs], r_powers
+    )
y = evaluate_polynomial_in_evaluation_form(combined_vector, x)

-    verify_kzg_proof(combined_commitment, x, y, sharded_commitments_container.block_verification_kzg_proof)
+    verify_kzg_proof(
+        combined_commitment, x, y, sharded_commitments_container.block_verification_kzg_proof
+    )

# Verify that number of sharded data commitments is correctly indicated
-    assert 2 * (number_of_blobs + included_sharded_data_commitments) == len(sharded_commitments_container.sharded_commitments)
+    assert 2 * (number_of_blobs + included_sharded_data_commitments) == len(
+        sharded_commitments_container.sharded_commitments
+    )
```
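The random-linear-combination checks above lean on helpers whose bodies are out of view in this diff. A hedged sketch of the two scalar ones, assuming field elements are integers modulo the BLS12-381 scalar field order (`elliptic_curve_lincomb` is the same fold over curve points and is omitted):

```python
# Sketch only; not taken verbatim from this spec.
BLS_MODULUS = 52435875175126190479447740508185965837690552500527637822603658699938581184513

def compute_powers(x: int, n: int) -> list[int]:
    """Return [x^0, x^1, ..., x^(n-1)] mod BLS_MODULUS."""
    powers, current = [], 1
    for _ in range(n):
        powers.append(current)
        current = current * x % BLS_MODULUS
    return powers

def vector_lincomb(vectors: list[list[int]], scalars: list[int]) -> list[int]:
    """Elementwise sum of scalars[i] * vectors[i], reduced mod BLS_MODULUS."""
    result = [0] * len(vectors[0])
    for vector, scalar in zip(vectors, scalars):
        result = [(acc + scalar * elem) % BLS_MODULUS for acc, elem in zip(result, vector)]
    return result

assert compute_powers(2, 4) == [1, 2, 4, 8]
```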

#### Execution payload

```python
-def process_execution_payload(state: BeaconState, block: BeaconBlock, execution_engine: ExecutionEngine) -> None:
+def process_execution_payload(
+    state: BeaconState, block: BeaconBlock, execution_engine: ExecutionEngine
+) -> None:
if is_builder_block_slot(block.slot):
assert block.body.payload_data.selector == 1
payload = block.body.payload_data.value.execution_payload
@@ -378,21 +416,27 @@ def process_execution_payload(state: BeaconState, block: BeaconBlock, execution_

# Get sharded data commitments
sharded_commitments_container = block.body.sharded_commitments_container
-    sharded_data_commitments = sharded_commitments_container.sharded_commitments[-sharded_commitments_container.included_sharded_data_commitments:]
+    sharded_data_commitments = sharded_commitments_container.sharded_commitments[
+        -sharded_commitments_container.included_sharded_data_commitments :
+    ]

# Get all unprocessed builder block bids
unprocessed_builder_block_bid_with_recipient_addresses = []
for block in state.blocks_since_builder_block[1:]:
-        unprocessed_builder_block_bid_with_recipient_addresses.append(block.body.builder_block_bid_with_recipient_address.value)
+        unprocessed_builder_block_bid_with_recipient_addresses.append(
+            block.body.builder_block_bid_with_recipient_address.value
+        )

# Verify the execution payload is valid
# The execution engine gets two extra payloads: One for the sharded data commitments (these are needed to verify type 3 transactions)
# and one for all so far unprocessed builder block bids:
# * The execution engine needs to transfer the balance from the bidder to the proposer.
# * The execution engine needs to deduct data gas fees from the bidder balances
-    assert execution_engine.execute_payload(payload,
-                                            sharded_data_commitments,
-                                            unprocessed_builder_block_bid_with_recipient_addresses)
+    assert execution_engine.execute_payload(
+        payload,
+        sharded_data_commitments,
+        unprocessed_builder_block_bid_with_recipient_addresses,
+    )

# Cache execution payload header
state.latest_execution_payload_header = ExecutionPayloadHeader(