Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore(network): unite Data with SignedBlockHeader #2041

Merged
merged 1 commit into from
May 26, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 0 additions & 3 deletions crates/papyrus_network/src/converters/mod.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,5 @@
pub mod protobuf_conversion;

#[cfg(test)]
mod test;

use std::collections::HashMap;

use futures::channel::mpsc::{Receiver, Sender};
Expand Down
63 changes: 16 additions & 47 deletions crates/papyrus_network/src/converters/protobuf_conversion/header.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
#[cfg(test)]
#[path = "header_test.rs"]
mod header_test;

use starknet_api::block::{
BlockHash,
BlockHeader,
Expand All @@ -16,10 +20,9 @@ use starknet_api::core::{
use starknet_api::crypto::Signature;

use super::common::{enum_int_to_l1_data_availability_mode, l1_data_availability_mode_to_enum_int};
use super::{ProtobufConversionError, ProtobufResponseToDataError};
use crate::db_executor::Data;
use super::ProtobufConversionError;
use crate::protobuf_messages::protobuf::{self};
use crate::{DataType, InternalQuery, Query, SignedBlockHeader};
use crate::{InternalQuery, Query, SignedBlockHeader};

impl TryFrom<protobuf::BlockHeadersResponse> for Option<SignedBlockHeader> {
type Error = ProtobufConversionError;
Expand Down Expand Up @@ -264,55 +267,21 @@ impl From<starknet_api::block::BlockSignature> for protobuf::ConsensusSignature
}
}

impl TryFrom<Data> for protobuf::BlockHeadersResponse {
type Error = ProtobufResponseToDataError;

fn try_from(data: Data) -> Result<Self, Self::Error> {
impl From<Option<SignedBlockHeader>> for protobuf::BlockHeadersResponse {
fn from(data: Option<SignedBlockHeader>) -> Self {
match data {
Data::BlockHeaderAndSignature { header, signatures } => {
Ok(protobuf::BlockHeadersResponse {
Some(SignedBlockHeader { block_header, signatures }) => {
protobuf::BlockHeadersResponse {
header_message: Some(protobuf::block_headers_response::HeaderMessage::Header(
(header, signatures).into(),
(block_header, signatures).into(),
)),
})
}
}
Data::Fin(data_type) => match data_type {
DataType::SignedBlockHeader => Ok(protobuf::BlockHeadersResponse {
header_message: Some(protobuf::block_headers_response::HeaderMessage::Fin(
protobuf::Fin {},
)),
}),
_ => Err(ProtobufResponseToDataError::UnsupportedDataType {
data_type: data_type.to_string(),
type_description: "BlockHeadersResponse".to_string(),
}),
None => protobuf::BlockHeadersResponse {
header_message: Some(protobuf::block_headers_response::HeaderMessage::Fin(
protobuf::Fin {},
)),
},
Data::StateDiff { .. } => Err(ProtobufResponseToDataError::UnsupportedDataType {
data_type: "StateDiff".to_string(),
type_description: "BlockHeadersResponse".to_string(),
}),
}
}
}

impl TryFrom<protobuf::BlockHeadersResponse> for Data {
type Error = ProtobufConversionError;

fn try_from(value: protobuf::BlockHeadersResponse) -> Result<Self, Self::Error> {
match value.header_message {
Some(protobuf::block_headers_response::HeaderMessage::Header(header)) => {
let signed_block_header = SignedBlockHeader::try_from(header)?;
Ok(Data::BlockHeaderAndSignature {
header: signed_block_header.block_header,
signatures: signed_block_header.signatures,
})
}
Some(protobuf::block_headers_response::HeaderMessage::Fin(_)) => {
Ok(Data::Fin(DataType::SignedBlockHeader))
}
None => Err(ProtobufConversionError::MissingField {
field_description: "BlockHeadersResponse::header_message",
}),
}
}
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
use starknet_api::block::BlockHeader;

use crate::protobuf_messages::protobuf;
use crate::SignedBlockHeader;

#[test]
fn block_header_to_protobuf_and_back() {
    // Round-trip check: a SignedBlockHeader converted into a protobuf
    // BlockHeadersResponse and back must compare equal to the original.
    let data = SignedBlockHeader {
        // TODO(shahak): Remove state_diff_length from here once we correctly deduce if it should
        // be None or Some.
        block_header: BlockHeader { state_diff_length: Some(0), ..Default::default() },
        signatures: vec![],
    };
    // `Some(header)` maps to the `Header` variant of the protobuf response.
    let proto_data = protobuf::BlockHeadersResponse::from(Some(data.clone()));

    let res_data = Option::<SignedBlockHeader>::try_from(proto_data).unwrap().unwrap();
    assert_eq!(res_data, data);
}

#[test]
fn fin_to_protobuf_and_back() {
    // A `None` header encodes as the Fin variant of the protobuf response;
    // decoding it must yield `None` again.
    let round_tripped =
        Option::<SignedBlockHeader>::try_from(protobuf::BlockHeadersResponse::from(None)).unwrap();
    assert!(round_tripped.is_none());
}
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,3 @@ pub enum ProtobufConversionError {
#[error("Type `{type_description}` should be {num_expected} bytes but it got {value:?}.")]
BytesDataLengthMismatch { type_description: &'static str, num_expected: usize, value: Vec<u8> },
}

#[derive(thiserror::Error, Debug)]
pub enum ProtobufResponseToDataError {
#[error("Type `{type_description}` got unsupported data type {data_type}")]
UnsupportedDataType { data_type: String, type_description: String },
}
26 changes: 0 additions & 26 deletions crates/papyrus_network/src/converters/test.rs

This file was deleted.

22 changes: 12 additions & 10 deletions crates/papyrus_network/src/db_executor/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,13 +14,13 @@ use papyrus_storage::header::HeaderStorageReader;
use papyrus_storage::state::StateStorageReader;
use papyrus_storage::{db, StorageReader, StorageTxn};
use prost::Message;
use starknet_api::block::{BlockHeader, BlockNumber, BlockSignature};
use starknet_api::block::BlockNumber;
use starknet_api::state::ThinStateDiff;
use tokio::task::JoinHandle;

use crate::converters::protobuf_conversion::state_diff::StateDiffsResponseVec;
use crate::protobuf_messages::protobuf;
use crate::{BlockHashOrNumber, DataType, InternalQuery};
use crate::{BlockHashOrNumber, DataType, InternalQuery, SignedBlockHeader};

#[cfg(test)]
mod test;
Expand All @@ -36,8 +36,7 @@ pub struct DataEncodingError;

#[cfg_attr(test, derive(Debug, Clone, PartialEq, Eq))]
pub enum Data {
// TODO(shahak): Consider uniting with SignedBlockHeader.
BlockHeaderAndSignature { header: BlockHeader, signatures: Vec<BlockSignature> },
BlockHeaderAndSignature(SignedBlockHeader),
StateDiff { state_diff: ThinStateDiff },
Fin(DataType),
}
Expand All @@ -59,13 +58,13 @@ impl Data {
B: BufMut,
{
match self {
Data::BlockHeaderAndSignature { .. } => self
.try_into()
.map(|data: protobuf::BlockHeadersResponse| match encode_with_length_prefix_flag {
Data::BlockHeaderAndSignature(signed_block_header) => {
let data: protobuf::BlockHeadersResponse = Some(signed_block_header).into();
match encode_with_length_prefix_flag {
true => data.encode_length_delimited(buf).map_err(|_| DataEncodingError),
false => data.encode(buf).map_err(|_| DataEncodingError),
})
.map_err(|_| DataEncodingError)?,
}
}
Data::StateDiff { state_diff } => {
let state_diffs_response_vec = Into::<StateDiffsResponseVec>::into(state_diff);
let res = state_diffs_response_vec
Expand Down Expand Up @@ -348,7 +347,10 @@ impl FetchBlockDataFromDb for DataType {
storage_error: err,
})?
.ok_or(DBExecutorError::SignatureNotFound { block_number, query_id })?;
Ok(Data::BlockHeaderAndSignature { header, signatures: vec![signature] })
Ok(Data::BlockHeaderAndSignature(SignedBlockHeader {
block_header: header,
signatures: vec![signature],
}))
}
DataType::StateDiff => {
let state_diff = txn
Expand Down
6 changes: 3 additions & 3 deletions crates/papyrus_network/src/db_executor/test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ use crate::db_executor::{
MockFetchBlockDataFromDb,
QueryId,
};
use crate::{BlockHashOrNumber, DataType, Direction, InternalQuery};
use crate::{BlockHashOrNumber, DataType, Direction, InternalQuery, SignedBlockHeader};
const BUFFER_SIZE: usize = 10;

#[tokio::test]
Expand Down Expand Up @@ -84,7 +84,7 @@ async fn header_db_executor_can_register_and_run_a_query() {
}
}
match data {
Data::BlockHeaderAndSignature { header: BlockHeader { block_number: BlockNumber(block_number), .. }, ..} => {
Data::BlockHeaderAndSignature(SignedBlockHeader{ block_header: BlockHeader { block_number: BlockNumber(block_number), .. }, ..}) => {
assert_eq!(block_number, &(i as u64));
}
Data::StateDiff{state_diff: ThinStateDiff { .. }} => {
Expand Down Expand Up @@ -135,7 +135,7 @@ async fn header_db_executor_start_block_given_by_hash() {
res = receiver.collect::<Vec<_>>() => {
assert_eq!(res.len(), NUM_OF_BLOCKS as usize);
for (i, data) in res.iter().enumerate() {
assert_matches!(data, BlockHeaderAndSignature { header: BlockHeader { block_number: BlockNumber(block_number), .. }, ..} if block_number == &(i as u64));
assert_matches!(data, BlockHeaderAndSignature(SignedBlockHeader{block_header: BlockHeader { block_number: BlockNumber(block_number), .. }, ..}) if block_number == &(i as u64));
}
}
}
Expand Down
2 changes: 1 addition & 1 deletion crates/papyrus_network/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ pub enum Direction {
}

#[derive(Debug)]
#[cfg_attr(test, derive(Clone))]
#[cfg_attr(test, derive(Clone, PartialEq, Eq))]
pub struct SignedBlockHeader {
pub block_header: BlockHeader,
pub signatures: Vec<BlockSignature>,
Expand Down
48 changes: 26 additions & 22 deletions crates/papyrus_network/src/network_manager/test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ use crate::{
Direction,
InternalQuery,
Query,
SignedBlockHeader,
};

const TIMEOUT: Duration = Duration::from_secs(1);
Expand Down Expand Up @@ -108,21 +109,15 @@ impl MockSwarm {
for block_number in (start_block_number..block_max_number)
.step_by(query.step.try_into().expect("step too large to convert to usize"))
{
let signed_header = Data::BlockHeaderAndSignature {
header: BlockHeader {
let signed_header = SignedBlockHeader {
block_header: BlockHeader {
block_number: BlockNumber(block_number),
..Default::default()
},
signatures: vec![],
};
let mut data_bytes = vec![];
protobuf::BlockHeadersResponse::try_from(signed_header)
.expect(
"Data::BlockHeaderAndSignature should be convertable to \
protobuf::BlockHeadersResponse",
)
.encode(&mut data_bytes)
.expect("failed to convert data to bytes");
let data_bytes =
protobuf::BlockHeadersResponse::from(Some(signed_header)).encode_to_vec();
self.pending_events.push(Event::Behaviour(mixed_behaviour::Event::ExternalEvent(
mixed_behaviour::ExternalEvent::StreamedBytes(GenericEvent::ReceivedData {
data: data_bytes,
Expand All @@ -144,11 +139,17 @@ impl SwarmTrait for MockSwarm {
first",
);
// TODO(shahak): Add tests for state diff.
let data = protobuf::BlockHeadersResponse::decode_length_delimited(&data[..])
.unwrap()
.try_into()
.unwrap();
let is_fin = matches!(data, Data::Fin(DataType::SignedBlockHeader));
let (data, is_fin) =
match protobuf::BlockHeadersResponse::decode_length_delimited(&data[..])
.unwrap()
.try_into()
.unwrap()
{
Some(signed_block_header) => {
(Data::BlockHeaderAndSignature(signed_block_header), false)
}
None => (Data::Fin(DataType::SignedBlockHeader), true),
};
data_sender.unbounded_send(data).unwrap();
if is_fin {
data_sender.close_channel();
Expand Down Expand Up @@ -233,10 +234,12 @@ impl DBExecutorTrait for MockDBExecutor {
for header in headers.iter().cloned() {
// Using poll_fn because Sender::poll_ready is not a future
if let Ok(()) = poll_fn(|cx| sender.poll_ready(cx)).await {
if let Err(e) = sender.start_send(Data::BlockHeaderAndSignature {
header,
signatures: vec![],
}) {
if let Err(e) =
sender.start_send(Data::BlockHeaderAndSignature(SignedBlockHeader {
block_header: header,
signatures: vec![],
}))
{
return Err(DBExecutorError::SendError { query_id, send_error: e });
};
}
Expand Down Expand Up @@ -356,9 +359,10 @@ async fn process_incoming_query() {
let mut expected_data = headers
.into_iter()
.map(|header| {
Data::BlockHeaderAndSignature {
header, signatures: vec![]
}})
Data::BlockHeaderAndSignature(SignedBlockHeader {
block_header: header, signatures: vec![]
})
})
.collect::<Vec<_>>();
expected_data.push(Data::Fin(DataType::SignedBlockHeader));
assert_eq!(inbound_session_data, expected_data);
Expand Down
Loading