Update Proof and ProofOp #206

Merged: 5 commits, Apr 8, 2020
1 change: 1 addition & 0 deletions CHANGES.md
@@ -4,6 +4,7 @@ Dependencies
- Update to bytes `0.5` and amino_rs `0.5`.
- Tokens for amino_rs are now fully non-conflicting with prost. Allowing both to be used together
- Made RPC type values optional for full compatibility with tendermint-go@v0.32: `abci_info`, `abci_query` [#120]
- JSON ID is JSON specification compatible and accepts int, string or null - [#88]

## [0.11.0] (2019-12-11)

Empty file removed: tendermint-lite/Cargo.toml
Empty file removed: tendermint-lite/src/main.rs
96 changes: 53 additions & 43 deletions tendermint/src/abci/proof.rs
@@ -1,51 +1,61 @@
//! ABCI Merkle proofs

use crate::error::Error;
use serde::{de::Error as _, Deserialize, Deserializer, Serialize, Serializer};
use std::{
fmt::{self, Display},
str::FromStr,
};
use subtle_encoding::{Encoding, Hex};

/// ABCI Merkle proofs
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct Proof(Vec<u8>);

impl AsRef<[u8]> for Proof {
fn as_ref(&self) -> &[u8] {
self.0.as_ref()
}
}

impl Display for Proof {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"{}",
&Hex::upper_case().encode_to_string(&self.0).unwrap()
)
}
use crate::serializers;
use serde::{Deserialize, Serialize};

/// Proof is a Merkle proof defined by a list of ProofOps
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)]
pub struct Proof {
/// The list of ProofOps
pub ops: Vec<ProofOp>,
}

impl FromStr for Proof {
type Err = Error;

fn from_str(s: &str) -> Result<Self, Error> {
let bytes = Hex::upper_case().decode(s)?;
Ok(Proof(bytes))
}
}

impl<'de> Deserialize<'de> for Proof {
fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
let hex = String::deserialize(deserializer)?;
Ok(Self::from_str(&hex).map_err(|e| D::Error::custom(format!("{}", e)))?)
}
/// ProofOp defines an operation used for calculating the Merkle root.
/// The data could be in an arbitrary format, providing the necessary data,
/// for example a neighbouring node hash.
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)]
pub struct ProofOp {
/// Type of the ProofOp
#[serde(alias = "type")]
pub field_type: String,
/// Key of the ProofOp
#[serde(
default,
serialize_with = "serializers::serialize_base64",
deserialize_with = "serializers::parse_base64"
)]
pub key: Vec<u8>,
/// Actual data
#[serde(
default,
serialize_with = "serializers::serialize_base64",
deserialize_with = "serializers::parse_base64"
)]
pub data: Vec<u8>,
}

impl Serialize for Proof {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
self.to_string().serialize(serializer)
#[cfg(test)]
mod test {
use super::Proof;
use crate::test::test_serialization_roundtrip;

#[test]
fn serialization_roundtrip() {
let payload = r#"
{
"ops": [
{
"type": "iavl:v",
"key": "Y29uc2Vuc3VzU3RhdGUvaWJjb25lY2xpZW50LzIy",
"data": "8QEK7gEKKAgIEAwYHCIgG9RAkJgHlxNjmyzOW6bUAidhiRSja0x6+GXCVENPG1oKKAgGEAUYFyIgwRns+dJvjf1Zk2BaFrXz8inPbvYHB7xx2HCy9ima5f8KKAgEEAMYFyogOr8EGajEV6fG5fzJ2fAAvVMgRLhdMJTzCPlogl9rxlIKKAgCEAIYFyIgcjzX/a+2bFbnNldpawQqZ+kYhIwz5r4wCUzuu1IFW04aRAoeY29uc2Vuc3VzU3RhdGUvaWJjb25lY2xpZW50LzIyEiAZ1uuG60K4NHJZZMuS9QX6o4eEhica5jIHYwflRiYkDBgX"
},
{
"type": "multistore",
"key": "aWJj",
"data": "CvEECjAKBGJhbmsSKAomCIjYAxIg2MEyyonbZButYnvSRkf2bPQg+nqA+Am1MeDxG6F4p1UKLwoDYWNjEigKJgiI2AMSIN2YHczeuXNvyetrSFQpkCcJzfB6PXVCw0i/XShMgPnIChEKB3VwZ3JhZGUSBgoECIjYAwovCgNnb3YSKAomCIjYAxIgYM0TfBli7KxhY4nWgDSDPykhUJwtKFql9RU5l86WinQKLwoDaWJjEigKJgiI2AMSIFp6aJASeInQKF8y824zjmgcFORN6M+ECbgFfJkobKs8CjAKBG1haW4SKAomCIjYAxIgsZzwmLQ7PH1UeZ/vCUSqlQmfgt3CGfoMgJLkUqKCv0EKMwoHc3Rha2luZxIoCiYIiNgDEiCiBZoBLyDGj5euy3n33ik+SpqYK9eB5xbI+iY8ycYVbwo0CghzbGFzaGluZxIoCiYIiNgDEiAJz3gEYuIhdensHU3b5qH5ons2quepd6EaRgCHXab6PQoyCgZzdXBwbHkSKAomCIjYAxIglWLA5/THPTiTxAlaLHOBYFIzEJTmKPznItUwAc8zD+AKEgoIZXZpZGVuY2USBgoECIjYAwowCgRtaW50EigKJgiI2AMSIMS8dZ1j8F6JVVv+hB1rHBZC+gIFJxHan2hM8qDC64n/CjIKBnBhcmFtcxIoCiYIiNgDEiB8VIzExUHX+SvHZFz/P9NM9THnw/gTDDLVReuZX8htLgo4CgxkaXN0cmlidXRpb24SKAomCIjYAxIg3u/Nd4L+8LT8OXJCh14o8PHIJ/GLQwsmE7KYIl1GdSYKEgoIdHJhbnNmZXISBgoECIjYAw=="
}
]
}"#;
test_serialization_roundtrip::<Proof>(payload);
}
}
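
For readers skimming the new types, the sketch below mirrors the ProofOp shape above in a self-contained form: the JSON "type" field maps onto `field_type` via the serde alias, while "key" and "data" arrive as base64 strings and are decoded into raw bytes. The crate's own serializers::serialize_base64 / serializers::parse_base64 are not shown in this diff, so the helper bodies here are assumptions built on the external base64 (pre-0.21 free functions) and serde_json crates, not the crate's actual implementation.

use serde::{de, Deserialize, Deserializer, Serialize, Serializer};

// Stand-in for serializers::serialize_base64 (assumed implementation).
fn serialize_base64<S: Serializer>(bytes: &[u8], serializer: S) -> Result<S::Ok, S::Error> {
    serializer.serialize_str(&base64::encode(bytes))
}

// Stand-in for serializers::parse_base64 (assumed implementation).
fn parse_base64<'de, D: Deserializer<'de>>(deserializer: D) -> Result<Vec<u8>, D::Error> {
    let s = String::deserialize(deserializer)?;
    base64::decode(&s).map_err(de::Error::custom)
}

// Same field layout as the new ProofOp above.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct ProofOpSketch {
    #[serde(alias = "type")]
    field_type: String,
    #[serde(default, serialize_with = "serialize_base64", deserialize_with = "parse_base64")]
    key: Vec<u8>,
    #[serde(default, serialize_with = "serialize_base64", deserialize_with = "parse_base64")]
    data: Vec<u8>,
}

fn main() {
    let json = r#"{ "type": "iavl:v", "key": "aWJj", "data": "aGVsbG8=" }"#;
    let op: ProofOpSketch = serde_json::from_str(json).unwrap();
    assert_eq!(op.field_type, "iavl:v");
    assert_eq!(op.key, b"ibc".to_vec()); // "aWJj" is base64 for "ibc"
    assert_eq!(op.data, b"hello".to_vec()); // "aGVsbG8=" is base64 for "hello"
}

The serialization_roundtrip test above then exercises the same machinery on a full Proof whose ops match the abci_query.json fixture further down in this diff.
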
17 changes: 16 additions & 1 deletion tendermint/src/block/header.rs
@@ -45,7 +45,10 @@ pub struct Header {
pub consensus_hash: Hash,

/// State after txs from the previous block
#[serde(deserialize_with = "serializers::parse_hex")]
#[serde(
serialize_with = "serializers::serialize_hex",
deserialize_with = "serializers::parse_hex"
)]
pub app_hash: Vec<u8>,

/// Root hash of all results from the txs from the previous block
@@ -80,3 +83,15 @@ pub struct Version {
)]
pub app: u64,
}

#[cfg(test)]
mod tests {
use super::Header;
use crate::test::test_serialization_roundtrip;

#[test]
fn serialization_roundtrip() {
let json_data = include_str!("../../tests/support/serialization/block/header.json");
test_serialization_roundtrip::<Header>(json_data);
}
}
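
The new serialize_with = "serializers::serialize_hex" attribute is what allows the Header roundtrip test above to re-serialize app_hash; previously only the parsing direction was wired up. The serializers module itself is not part of this diff, so the following is only a sketch, under the assumption that a matching pair can be built on the same subtle_encoding hex API the old proof.rs relied on above; the real module's signatures may differ.

use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
use subtle_encoding::{Encoding, Hex};

// Stand-in for serializers::serialize_hex (assumed implementation).
fn serialize_hex<S: Serializer>(bytes: &[u8], serializer: S) -> Result<S::Ok, S::Error> {
    let hex = Hex::upper_case()
        .encode_to_string(bytes)
        .map_err(serde::ser::Error::custom)?;
    serializer.serialize_str(&hex)
}

// Stand-in for serializers::parse_hex (assumed implementation).
fn parse_hex<'de, D: Deserializer<'de>>(deserializer: D) -> Result<Vec<u8>, D::Error> {
    let s = String::deserialize(deserializer)?;
    Hex::upper_case().decode(&s).map_err(de::Error::custom)
}

#[derive(Debug, Serialize, Deserialize)]
struct AppHashOnly {
    #[serde(serialize_with = "serialize_hex", deserialize_with = "parse_hex")]
    app_hash: Vec<u8>,
}

fn main() {
    let state: AppHashOnly = serde_json::from_str(r#"{ "app_hash": "0A5826D2" }"#).unwrap();
    assert_eq!(serde_json::to_string(&state).unwrap(), r#"{"app_hash":"0A5826D2"}"#);
}
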
3 changes: 3 additions & 0 deletions tendermint/src/lib.rs
@@ -50,6 +50,9 @@ pub mod validator;
mod version;
pub mod vote;

#[cfg(test)]
mod test;

pub use crate::genesis::Genesis;
pub use crate::{
block::Block,
29 changes: 15 additions & 14 deletions tendermint/src/lite/types.rs
@@ -6,7 +6,7 @@ use std::time::SystemTime;

use crate::serializers;
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use serde::{de::DeserializeOwned, Deserialize, Serialize};

use crate::lite::error::{Error, Kind};
use crate::Hash;
@@ -17,7 +17,7 @@ pub type Height = u64;
/// the height, the time, the hash of the validator set
/// that should sign this header, and the hash of the validator
/// set that should sign the next header.
pub trait Header: Clone {
pub trait Header: Clone + Debug + Serialize + DeserializeOwned {
/// The header's notion of (bft-)time.
/// We assume it can be converted to SystemTime.
type Time: Into<SystemTime>;
@@ -33,7 +33,7 @@ pub trait Header: Clone {

/// ValidatorSet is the full validator set.
/// It exposes its hash and its total power.
pub trait ValidatorSet: Clone {
pub trait ValidatorSet: Clone + Debug + Serialize + DeserializeOwned {
/// Hash of the validator set.
fn hash(&self) -> Hash;

@@ -44,7 +44,7 @@
/// Commit is used to prove a Header can be trusted.
/// Verifying the Commit requires access to an associated ValidatorSet
/// to determine what voting power signed the commit.
pub trait Commit: Clone {
pub trait Commit: Clone + Debug + Serialize + DeserializeOwned {
type ValidatorSet: ValidatorSet;

/// Hash of the header this commit is for.
@@ -77,7 +77,7 @@
/// TrustThreshold defines how much of the total voting power of a known
/// and trusted validator set is sufficient for a commit to be
/// accepted going forward.
pub trait TrustThreshold: Copy + Clone + Debug {
pub trait TrustThreshold: Copy + Clone + Debug + Serialize + DeserializeOwned {
fn is_enough_power(&self, signed_voting_power: u64, total_voting_power: u64) -> bool;
}

@@ -148,7 +148,12 @@
/// TrustedState contains a state trusted by a lite client,
/// including the last header (at height h-1) and the validator set
/// (at height h) to use to verify the next header.
///
/// **Note:** The `#[serde(bound = ...)]` attribute is required to
/// derive `Deserialize` for this struct as Serde is not able to infer
/// the proper bound when associated types are involved.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(bound(deserialize = "C::ValidatorSet: Deserialize<'de>"))]
pub struct TrustedState<C, H>
where
H: Header,
@@ -183,12 +188,8 @@
}

/// SignedHeader bundles a [`Header`] and a [`Commit`] for convenience.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] // NOTE: Copy/Clone/Debug for convenience in testing ...
pub struct SignedHeader<C, H>
where
C: Commit,
H: Header,
{
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SignedHeader<C, H> {
commit: C,
header: H,
}
@@ -222,7 +223,7 @@ pub(super) mod mocks {

use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq, Serialize)]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MockHeader {
height: u64,
time: SystemTime,
@@ -272,7 +273,7 @@ pub(super) mod mocks {
}

// vals are just ints, each has power 1
#[derive(Clone, Debug, PartialEq, Serialize)]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MockValSet {
// NOTE: use HashSet instead?
vals: Vec<usize>,
@@ -294,7 +295,7 @@
}

// commit is a list of vals that signed.
#[derive(Clone, Debug, PartialEq, Serialize)]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MockCommit {
hash: Hash,
vals: Vec<usize>,
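
A side note on the #[serde(bound(deserialize = ...))] attribute introduced on TrustedState above: serde's derive infers bounds on the type parameters themselves (here it would demand C: Deserialize<'de>) rather than on the associated type the field actually stores, so the proper bound has to be spelled out. Below is a minimal, self-contained sketch of that pattern using simplified, hypothetical names rather than the crate's real definitions; it assumes serde and serde_json as dependencies.

use serde::{de::DeserializeOwned, Deserialize, Serialize};

// Drastically simplified stand-in for the lite client's Commit trait.
trait Commit {
    type ValidatorSet: Serialize + DeserializeOwned;
}

// The explicit bound replaces serde's inferred `C: Deserialize<'de>`,
// which a plain marker type like MockCommit below could never satisfy.
#[derive(Serialize, Deserialize)]
#[serde(bound(deserialize = "C::ValidatorSet: Deserialize<'de>"))]
struct TrustedStateSketch<C: Commit> {
    validators: C::ValidatorSet,
}

// MockCommit itself implements neither Serialize nor Deserialize.
struct MockCommit;

impl Commit for MockCommit {
    type ValidatorSet = Vec<u64>;
}

fn main() {
    let state: TrustedStateSketch<MockCommit> =
        serde_json::from_str(r#"{ "validators": [1, 2, 3] }"#).unwrap();
    assert_eq!(state.validators, vec![1, 2, 3]);
}
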
20 changes: 10 additions & 10 deletions tendermint/src/lite/verifier.rs
@@ -582,8 +582,8 @@ mod tests {
}

// valid to skip, but invalid commit. 1 validator.
#[tokio::test]
async fn test_verify_single_skip_1_val_verify() {
#[test]
fn test_verify_single_skip_1_val_verify() {
let vac = ValsAndCommit::new(vec![0], vec![0]);
let ts = &init_trusted_state(vac, vec![0], 1);

@@ -606,8 +606,8 @@

// valid commit and data, starting with 1 validator.
// test if we can skip to it.
#[tokio::test]
async fn test_verify_single_skip_1_val_skip() {
#[test]
fn test_verify_single_skip_1_val_skip() {
let mut vac = ValsAndCommit::new(vec![0], vec![0]);
let ts = &init_trusted_state(vac.clone(), vec![0], 1);
//*****
@@ -644,8 +644,8 @@

// valid commit and data, starting with 2 validators.
// test if we can skip to it.
#[tokio::test]
async fn test_verify_single_skip_2_val_skip() {
#[test]
fn test_verify_single_skip_2_val_skip() {
let mut vac = ValsAndCommit::new(vec![0, 1], vec![0, 1]);
let ts = &init_trusted_state(vac.clone(), vec![0, 1], 1);

@@ -684,8 +684,8 @@

// valid commit and data, starting with 3 validators.
// test if we can skip to it.
#[tokio::test]
async fn test_verify_single_skip_3_val_skip() {
#[test]
fn test_verify_single_skip_3_val_skip() {
let mut vac = ValsAndCommit::new(vec![0, 1, 2], vec![0, 1, 2]);
let ts = &init_trusted_state(vac.clone(), vec![0, 1, 2], 1);

@@ -736,8 +736,8 @@
assert_single_err(ts, vac, err.into());
}

#[tokio::test]
async fn test_verify_single_skip_4_val_skip() {
#[test]
fn test_verify_single_skip_4_val_skip() {
let vac = ValsAndCommit::new(vec![0, 1, 2, 3], vec![0, 1, 2, 3]);
let ts = &init_trusted_state(vac.clone(), vec![0, 1, 2, 3], 1);

27 changes: 27 additions & 0 deletions tendermint/src/test.rs
@@ -0,0 +1,27 @@
use serde::{de::DeserializeOwned, Serialize};
use std::fmt::Debug;

/// Test that a struct `T` can be:
///
/// - parsed out of the provided JSON data
/// - serialized back to JSON
/// - parsed back from the serialized JSON of the previous step
/// - that the two parsed structs are equal according to their `PartialEq` impl
pub fn test_serialization_roundtrip<T>(json_data: &str)
where
T: Debug + PartialEq + Serialize + DeserializeOwned,
{
let parsed0 = serde_json::from_str::<T>(json_data);
assert!(parsed0.is_ok());
let parsed0 = parsed0.unwrap();

let serialized = serde_json::to_string(&parsed0);
assert!(serialized.is_ok());
let serialized = serialized.unwrap();

let parsed1 = serde_json::from_str::<T>(&serialized);
assert!(parsed1.is_ok());
let parsed1 = parsed1.unwrap();

assert_eq!(parsed0, parsed1);
}
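
Note that the helper compares the two parsed values rather than the re-serialized string against the original input, so whitespace and field ordering in the JSON fixtures do not affect the assertion. A usage sketch with a hypothetical Sample type (the real call sites are the proof.rs and block/header.rs tests above):

use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct Sample {
    name: String,
    height: u64,
}

#[test]
fn sample_roundtrip() {
    crate::test::test_serialization_roundtrip::<Sample>(r#"{ "name": "abc", "height": 42 }"#);
}
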
15 changes: 14 additions & 1 deletion tendermint/tests/support/rpc/abci_query.json
@@ -5,7 +5,20 @@
"response": {
"log": "exists",
"height": "1",
"proof": "010114FED0DAD959F36091AD761C922ABA3CBF1D8349990101020103011406AA2262E2F448242DF2C2607C3CDC705313EE3B0001149D16177BC71E445476174622EA559715C293740C",
"proof": {
"ops": [
{
"type": "iavl:v",
"key": "Y29uc2Vuc3VzU3RhdGUvaWJjb25lY2xpZW50LzIy",
"data": "8QEK7gEKKAgIEAwYHCIgG9RAkJgHlxNjmyzOW6bUAidhiRSja0x6+GXCVENPG1oKKAgGEAUYFyIgwRns+dJvjf1Zk2BaFrXz8inPbvYHB7xx2HCy9ima5f8KKAgEEAMYFyogOr8EGajEV6fG5fzJ2fAAvVMgRLhdMJTzCPlogl9rxlIKKAgCEAIYFyIgcjzX/a+2bFbnNldpawQqZ+kYhIwz5r4wCUzuu1IFW04aRAoeY29uc2Vuc3VzU3RhdGUvaWJjb25lY2xpZW50LzIyEiAZ1uuG60K4NHJZZMuS9QX6o4eEhica5jIHYwflRiYkDBgX"
},
{
"type": "multistore",
"key": "aWJj",
"data": "CvEECjAKBGJhbmsSKAomCIjYAxIg2MEyyonbZButYnvSRkf2bPQg+nqA+Am1MeDxG6F4p1UKLwoDYWNjEigKJgiI2AMSIN2YHczeuXNvyetrSFQpkCcJzfB6PXVCw0i/XShMgPnIChEKB3VwZ3JhZGUSBgoECIjYAwovCgNnb3YSKAomCIjYAxIgYM0TfBli7KxhY4nWgDSDPykhUJwtKFql9RU5l86WinQKLwoDaWJjEigKJgiI2AMSIFp6aJASeInQKF8y824zjmgcFORN6M+ECbgFfJkobKs8CjAKBG1haW4SKAomCIjYAxIgsZzwmLQ7PH1UeZ/vCUSqlQmfgt3CGfoMgJLkUqKCv0EKMwoHc3Rha2luZxIoCiYIiNgDEiCiBZoBLyDGj5euy3n33ik+SpqYK9eB5xbI+iY8ycYVbwo0CghzbGFzaGluZxIoCiYIiNgDEiAJz3gEYuIhdensHU3b5qH5ons2quepd6EaRgCHXab6PQoyCgZzdXBwbHkSKAomCIjYAxIglWLA5/THPTiTxAlaLHOBYFIzEJTmKPznItUwAc8zD+AKEgoIZXZpZGVuY2USBgoECIjYAwowCgRtaW50EigKJgiI2AMSIMS8dZ1j8F6JVVv+hB1rHBZC+gIFJxHan2hM8qDC64n/CjIKBnBhcmFtcxIoCiYIiNgDEiB8VIzExUHX+SvHZFz/P9NM9THnw/gTDDLVReuZX8htLgo4CgxkaXN0cmlidXRpb24SKAomCIjYAxIg3u/Nd4L+8LT8OXJCh14o8PHIJ/GLQwsmE7KYIl1GdSYKEgoIdHJhbnNmZXISBgoECIjYAw=="
}
]
},
Member Author: Surprised the integration tests didn't kick in here.

Contributor: They are all ignored. But something else is wrong there (testing with a null key): the rpc abci_query test passes on the parent branch even though the tests/support/rpc/abci_query.json file still has the old proof format. It will fail without your changes and the new proof format in the .json file.

Member Author: The integration tests should be spun up in CI and specifically run the ignored ones.

"value": "61626364",
"key": "61626364",
"index": "-1",
27 changes: 27 additions & 0 deletions tendermint/tests/support/serialization/block/header.json
@@ -0,0 +1,27 @@
{
"version": {
"block": "10",
"app": "0"
},
"chain_id": "cosmoshub-1",
"height": "15",
"time": "2019-03-13T23:09:34.503701042Z",
"num_txs": "2",
"total_txs": "4",
"last_block_id": {
"hash": "42C70F10EF1835CED7248114514B4EF3D06F0D7FD24F6486E3315DEE310D305C",
"parts": {
"total": "1",
"hash": "F51D1B8E6ED859CE23F6B0539E0101653ED4025B13DAA3E76FCC779D5FD96ABE"
}
},
"last_commit_hash": "C499C138BCABA4D40D68A1446F6E5DE1965E07DF17EEACE1A69C1C9B1B8AC5AB",
"data_hash": "AC6A27A91A6EF9057D1A33F7944DD9EDD5FC1A3CA49E04EF0801A17FF01B4412",
"validators_hash": "EB25B1ACF639219180EB77AFC67E75A51A7CA0D666123E514B6882EC38868652",
"next_validators_hash": "EB25B1ACF639219180EB77AFC67E75A51A7CA0D666123E514B6882EC38868652",
"consensus_hash": "29C5629148426FB74676BE07F40F2ED79674A67F5833E4C9CCBF759C9372E99C",
"app_hash": "0A5826D21A3B0B341C843A0E4946AC787EC9B42A7DC1BEAA344C03C43943B179",
"last_results_hash": "",
"evidence_hash": "",
"proposer_address": "CC05882978FC5FDD6A7721687E14C0299AE004B8"
}