5 random fixes (paritytech#2) (paritytech#623)
* Tabs instead of spaces.

* Remove double spaces.

* Add spaces between an identifier and the following { (a brief illustration follows the changed-files summary below).

* Update to nightly 2018-08-27

* Align wat code properly
pepyakin authored and gguoss committed Sep 3, 2018
1 parent af1443a commit 4170e54
Showing 27 changed files with 103 additions and 103 deletions.
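The brace-spacing fix from the commit message above is mechanical: a single space is inserted between an identifier and the opening {. A hedged illustration follows; the CallResult struct here is a stand-in for this sketch, not the type from the diff below.

// Illustrative sketch only: a stand-in struct to show the spacing convention.
struct CallResult {
    return_data: Vec<u8>,
    changes: u32,
}

fn main() {
    let return_data = vec![1u8, 2, 3];
    let changes = 0u32;
    // Before the fix such a literal was written `CallResult{ ... }`;
    // the commit inserts a space between the identifier and the brace.
    let result = CallResult { return_data, changes };
    assert_eq!(result.return_data.len(), 3);
    assert_eq!(result.changes, 0);
}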
2 changes: 1 addition & 1 deletion demo/cli/src/error.rs
@@ -25,5 +25,5 @@ error_chain! {
}
links {
Client(client::error::Error, client::error::ErrorKind) #[doc="Client error"];
}
}
}
2 changes: 1 addition & 1 deletion demo/cli/src/lib.rs
@@ -95,7 +95,7 @@ impl extrinsic_pool::ChainApi for Pool {
unimplemented!()
}

fn ready(&self) -> Self::Ready { }
fn ready(&self) -> Self::Ready { }

fn is_ready(&self, _at: &BlockId, _ready: &mut Self::Ready, _xt: &VerifiedFor<Self>) -> Readiness {
unimplemented!()
16 changes: 8 additions & 8 deletions subkey/src/main.rs
@@ -137,7 +137,7 @@ mod tests {
#[test]
fn test_score_1_char_100() {
let score = calculate_score("j", "5jolkadotwHY5k9GpdTgpqs9xjuNvtv8EcwCFpEeyEf3KHim");
assert!(score == 100, format!("Wrong score, we found {}", score));
assert!(score == 100, format!("Wrong score, we found {}", score));
}

#[test]
@@ -159,17 +159,17 @@

#[cfg(feature = "bench")]
#[bench]
fn bench_paranoiac(b: &mut Bencher) {
b.iter(|| {
fn bench_paranoiac(b: &mut Bencher) {
b.iter(|| {
generate_key("polka", 3, true)
});
}
}

#[cfg(feature = "bench")]
#[bench]
fn bench_not_paranoiac(b: &mut Bencher) {
b.iter(|| {
#[bench]
fn bench_not_paranoiac(b: &mut Bencher) {
b.iter(|| {
generate_key("polka", 3, false)
});
}
}
}
2 changes: 1 addition & 1 deletion substrate/cli/src/error.rs
@@ -26,7 +26,7 @@ error_chain! {
}
links {
Client(client::error::Error, client::error::ErrorKind) #[doc="Client error"];
}
}
errors {
/// Input error.
Input(m: String) {
2 changes: 1 addition & 1 deletion substrate/client/db/src/lib.rs
@@ -455,7 +455,7 @@ impl<Block> client::backend::Backend<Block, KeccakHasher, RlpCodec> for Backend<
}

self.blockchain.header(block).and_then(|maybe_hdr| maybe_hdr.map(|hdr| {
let root: H256 = H256::from_slice(hdr.state_root().as_ref());
let root: H256 = H256::from_slice(hdr.state_root().as_ref());
DbState::with_storage(self.storage.clone(), root)
}).ok_or_else(|| client::error::ErrorKind::UnknownBlock(format!("{:?}", block)).into()))
}
2 changes: 1 addition & 1 deletion substrate/client/src/call_executor.rs
@@ -136,7 +136,7 @@ where
call_data,
native_when_possible(),
)?;
-Ok(CallResult{ return_data, changes })
+Ok(CallResult { return_data, changes })
}

fn runtime_version(&self, id: &BlockId<Block>) -> error::Result<RuntimeVersion> {
6 changes: 3 additions & 3 deletions substrate/client/src/light/backend.rs
@@ -132,7 +132,7 @@ impl<S, F, Block, H, C> BlockImportOperation<Block, H, C> for ImportOperation<Bl
where
Block: BlockT,
F: Fetcher<Block>,
S: BlockchainStorage<Block>,
S: BlockchainStorage<Block>,
H: Hasher,
C: NodeCodec<H>,
{
@@ -175,8 +175,8 @@ impl<Block, S, F, H, C> StateBackend<H, C> for OnDemandState<Block, S, F>
Block: BlockT,
S: BlockchainStorage<Block>,
F: Fetcher<Block>,
H: Hasher,
C: NodeCodec<H>,
H: Hasher,
C: NodeCodec<H>,
{
type Error = ClientError;
type Transaction = ();
2 changes: 1 addition & 1 deletion substrate/client/src/light/mod.rs
@@ -69,7 +69,7 @@ pub fn new_fetch_checker<E, H, C>(
) -> LightDataChecker<E, H, C>
where
E: CodeExecutor<H>,
H: Hasher,
H: Hasher,
C: NodeCodec<H>,
{
LightDataChecker::new(executor)
2 changes: 1 addition & 1 deletion substrate/executor/wasm/src/lib.rs
@@ -1,5 +1,5 @@
#![no_std]
-#![feature(panic_implementation)]
+#![feature(panic_handler)]
#![cfg_attr(feature = "strict", deny(warnings))]

#![feature(alloc)]
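The panic_implementation changes in this commit track the attribute rename that landed with the 2018-08-27 nightly. Below is a minimal sketch of what a no_std crate needs after the rename, assuming that nightly toolchain; it is illustrative only, and the project's actual handler is in substrate/runtime-io/without_std.rs further down.

// Minimal no_std sketch for nightly 2018-08-27 (illustrative, not the project's code).
#![no_std]
#![feature(panic_handler)]

use core::panic::PanicInfo;

// This attribute was previously spelled `#[panic_implementation]`;
// only the name changed, the handler's signature stays the same.
#[panic_handler]
fn panic(_info: &PanicInfo) -> ! {
    loop {}
}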
2 changes: 1 addition & 1 deletion substrate/extrinsic-pool/src/pool.rs
@@ -248,7 +248,7 @@ impl<B: ChainApi> Pool<B> {
}

/// Imports one unverified extrinsic to the pool
pub fn submit_one(&self, at: &BlockId<B::Block>, xt: ExtrinsicFor<B>) -> Result<Arc<VerifiedFor<B>>, B::Error> {
pub fn submit_one(&self, at: &BlockId<B::Block>, xt: ExtrinsicFor<B>) -> Result<Arc<VerifiedFor<B>>, B::Error> {
Ok(self.submit_at(at, ::std::iter::once(xt))?.pop().expect("One extrinsic passed; one result returned; qed"))
}

2 changes: 1 addition & 1 deletion substrate/network-libp2p/src/network_state.rs
@@ -677,7 +677,7 @@ impl NetworkState {
}

/// Disables a peer for `PEER_DISABLE_DURATION`. This adds the peer to the
/// list of disabled peers, and drops any existing connections if
/// list of disabled peers, and drops any existing connections if
/// necessary (ie. drops the sender that was stored in the `UniqueConnec`
/// of `custom_proto`).
pub fn ban_peer(&self, who: NodeIndex, reason: &str) {
12 changes: 6 additions & 6 deletions substrate/network-libp2p/src/service.rs
@@ -864,7 +864,7 @@ fn handle_custom_connection(
/// nodes and only accept incoming connections.
fn start_kademlia_discovery<T, To, St, C>(shared: Arc<Shared>, transport: T,
swarm_controller: SwarmController<St>) -> impl Future<Item = (), Error = IoError>
where T: MuxedTransport<Output = TransportOutput<To>> + Clone + 'static,
where T: MuxedTransport<Output = TransportOutput<To>> + Clone + 'static,
T::MultiaddrFuture: 'static,
To: AsyncRead + AsyncWrite + 'static,
St: MuxedTransport<Output = FinalUpgrade<C>> + Clone + 'static,
@@ -931,7 +931,7 @@ fn perform_kademlia_query<T, To, St, C>(
transport: T,
swarm_controller: SwarmController<St>
) -> impl Future<Item = (), Error = IoError>
where T: MuxedTransport<Output = TransportOutput<To>> + Clone + 'static,
where T: MuxedTransport<Output = TransportOutput<To>> + Clone + 'static,
T::MultiaddrFuture: 'static,
To: AsyncRead + AsyncWrite + 'static,
St: MuxedTransport<Output = FinalUpgrade<C>> + Clone + 'static,
@@ -980,7 +980,7 @@ fn connect_to_nodes<T, To, St, C>(
base_transport: T,
swarm_controller: &SwarmController<St>
)
where T: MuxedTransport<Output = TransportOutput<To>> + Clone + 'static,
where T: MuxedTransport<Output = TransportOutput<To>> + Clone + 'static,
T::MultiaddrFuture: 'static,
To: AsyncRead + AsyncWrite + 'static,
St: MuxedTransport<Output = FinalUpgrade<C>> + Clone + 'static,
@@ -1025,7 +1025,7 @@ fn connect_with_query_peer_id<T, To, St, C>(
addr: Multiaddr,
swarm_controller: &SwarmController<St>
)
where T: MuxedTransport<Output = TransportOutput<To>> + Clone + 'static,
where T: MuxedTransport<Output = TransportOutput<To>> + Clone + 'static,
T::MultiaddrFuture: 'static,
To: AsyncRead + AsyncWrite + 'static,
St: MuxedTransport<Output = FinalUpgrade<C>> + Clone + 'static,
@@ -1087,7 +1087,7 @@ fn open_peer_custom_proto<T, To, St, C>(
expected_peer_id: PeerstorePeerId,
swarm_controller: &SwarmController<St>
)
where T: MuxedTransport<Output = TransportOutput<To>> + Clone + 'static,
where T: MuxedTransport<Output = TransportOutput<To>> + Clone + 'static,
T::MultiaddrFuture: 'static,
To: AsyncRead + AsyncWrite + 'static,
St: MuxedTransport<Output = FinalUpgrade<C>> + Clone + 'static,
@@ -1177,7 +1177,7 @@ fn obtain_kad_connection<T, To, St, C>(shared: Arc<Shared>,
fn obtain_kad_connection<T, To, St, C>(shared: Arc<Shared>,
who: PeerstorePeerId, transport: T, swarm_controller: SwarmController<St>)
-> impl Future<Item = KadConnecController, Error = IoError>
where T: MuxedTransport<Output = TransportOutput<To>> + Clone + 'static,
where T: MuxedTransport<Output = TransportOutput<To>> + Clone + 'static,
T::MultiaddrFuture: 'static,
To: AsyncRead + AsyncWrite + 'static,
St: MuxedTransport<Output = FinalUpgrade<C>> + Clone + 'static,
8 changes: 4 additions & 4 deletions substrate/network/src/blocks.rs
@@ -106,7 +106,7 @@ impl<B: BlockT> BlockCollection<B> {
loop {
let next = downloading_iter.next();
break match &(prev, next) {
&(Some((start, &BlockRangeState::Downloading { ref len, downloading })), _) if downloading < MAX_PARALLEL_DOWNLOADS =>
&(Some((start, &BlockRangeState::Downloading { ref len, downloading })), _) if downloading < MAX_PARALLEL_DOWNLOADS =>
(*start .. *start + *len, downloading),
&(Some((start, r)), Some((next_start, _))) if *start + r.len() < *next_start =>
(*start + r.len() .. cmp::min(*next_start, *start + r.len() + count), 0), // gap
@@ -130,7 +130,7 @@
}
range.end = cmp::min(peer_best + As::sa(1), range.end);
self.peer_requests.insert(who, range.start);
-self.blocks.insert(range.start, BlockRangeState::Downloading{ len: range.end - range.start, downloading: downloading + 1 });
+self.blocks.insert(range.start, BlockRangeState::Downloading { len: range.end - range.start, downloading: downloading + 1 });
if range.end <= range.start {
panic!("Empty range {:?}, count={}, peer_best={}, common={}, blocks={:?}", range, count, peer_best, common, self.blocks);
}
@@ -171,7 +171,7 @@ impl<B: BlockT> BlockCollection<B> {
*downloading = *downloading - 1;
false
},
Some(&mut BlockRangeState::Downloading { .. }) => {
Some(&mut BlockRangeState::Downloading { .. }) => {
true
},
_ => {
@@ -217,7 +217,7 @@ mod test {
fn create_clear() {
let mut bc = BlockCollection::new();
assert!(is_empty(&bc));
bc.insert(1, generate_blocks(100), 0);
bc.insert(1, generate_blocks(100), 0);
assert!(!is_empty(&bc));
bc.clear();
assert!(is_empty(&bc));
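The Downloading { len, downloading } state in the hunks above counts how many peers are fetching the same range, capped by MAX_PARALLEL_DOWNLOADS. A toy sketch of that counter pattern follows; the types are simplified and hypothetical, and the cap value is assumed for illustration, not taken from the real BlockCollection.

// Toy model of the range-state check above (hypothetical types, u64 block numbers).
enum RangeState {
    Downloading { len: u64, downloading: u32 },
    Complete(u64),
}

// Assumed value for illustration; the real constant lives in substrate/network/src/blocks.rs.
const MAX_PARALLEL_DOWNLOADS: u32 = 1;

fn can_schedule(state: &RangeState) -> bool {
    match state {
        // A range may be handed to another peer only while under the parallelism cap.
        RangeState::Downloading { downloading, .. } => *downloading < MAX_PARALLEL_DOWNLOADS,
        RangeState::Complete(_) => false,
    }
}

fn main() {
    let busy = RangeState::Downloading { len: 128, downloading: 1 };
    let done = RangeState::Complete(128);
    assert!(!can_schedule(&busy));
    assert!(!can_schedule(&done));
}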
4 changes: 2 additions & 2 deletions substrate/network/src/protocol.rs
@@ -191,7 +191,7 @@ impl<B: BlockT, S: Specialization<B>, H: ExHashT> Protocol<B, S, H> {
on_demand: Option<Arc<OnDemandService<B>>>,
transaction_pool: Arc<TransactionPool<H, B>>,
specialization: S,
) -> error::Result<Self> {
) -> error::Result<Self> {
let info = chain.info()?;
let sync = ChainSync::new(config.roles, &info, import_queue);
let protocol = Protocol {
@@ -321,7 +321,7 @@ impl<B: BlockT, S: Specialization<B>, H: ExHashT> Protocol<B, S, H> {
let get_body = request.fields.contains(message::BlockAttributes::BODY);
let get_justification = request.fields.contains(message::BlockAttributes::JUSTIFICATION);
while let Some(header) = self.context_data.chain.header(&id).unwrap_or(None) {
-if blocks.len() >= max{
+if blocks.len() >= max {
break;
}
let number = header.number().clone();
2 changes: 1 addition & 1 deletion substrate/primitives/src/bytes.rs
@@ -108,7 +108,7 @@ pub fn deserialize_check_len<'de, D>(deserializer: D, len: ExpectedLen) -> Resul
}

fn visit_str<E: de::Error>(self, v: &str) -> Result<Self::Value, E> {
if v.len() < 2 || &v[0..2] != "0x" {
if v.len() < 2 || &v[0..2] != "0x" {
return Err(E::custom("prefix is missing"))
}

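The guard above rejects inputs shorter than two bytes before comparing the prefix, so the two-byte slice cannot go out of bounds. A standalone sketch of the same check follows, with a hypothetical helper name, comparing bytes rather than slicing the &str.

// Hypothetical helper mirroring the "0x" prefix check in visit_str above.
fn has_0x_prefix(v: &str) -> bool {
    let bytes = v.as_bytes();
    bytes.len() >= 2 && &bytes[..2] == b"0x"
}

fn main() {
    assert!(has_0x_prefix("0x1234"));
    assert!(!has_0x_prefix("1234"));
    assert!(!has_0x_prefix("0"));
}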
84 changes: 42 additions & 42 deletions substrate/primitives/src/rlp_codec.rs
@@ -77,53 +77,53 @@ impl NodeCodec<KeccakHasher> for RlpCodec {
fn is_empty_node(data: &[u8]) -> bool {
Rlp::new(data).is_empty()
}
fn empty_node() -> ElasticArray1024<u8> {
let mut stream = RlpStream::new();
stream.append_empty_data();
stream.drain()
}
fn empty_node() -> ElasticArray1024<u8> {
let mut stream = RlpStream::new();
stream.append_empty_data();
stream.drain()
}

fn leaf_node(partial: &[u8], value: &[u8]) -> ElasticArray1024<u8> {
let mut stream = RlpStream::new_list(2);
stream.append(&partial);
stream.append(&value);
fn leaf_node(partial: &[u8], value: &[u8]) -> ElasticArray1024<u8> {
let mut stream = RlpStream::new_list(2);
stream.append(&partial);
stream.append(&value);
stream.drain()
}
}

fn ext_node(partial: &[u8], child_ref: ChildReference<<KeccakHasher as Hasher>::Out>) -> ElasticArray1024<u8> {
let mut stream = RlpStream::new_list(2);
stream.append(&partial);
match child_ref {
ChildReference::Hash(h) => stream.append(&h),
ChildReference::Inline(inline_data, len) => {
let bytes = &AsRef::<[u8]>::as_ref(&inline_data)[..len];
stream.append_raw(bytes, 1)
},
};
stream.drain()
let mut stream = RlpStream::new_list(2);
stream.append(&partial);
match child_ref {
ChildReference::Hash(h) => stream.append(&h),
ChildReference::Inline(inline_data, len) => {
let bytes = &AsRef::<[u8]>::as_ref(&inline_data)[..len];
stream.append_raw(bytes, 1)
},
};
stream.drain()
}

fn branch_node<I>(children: I, value: Option<ElasticArray128<u8>>) -> ElasticArray1024<u8>
where I: IntoIterator<Item=Option<ChildReference<<KeccakHasher as Hasher>::Out>>>
{
let mut stream = RlpStream::new_list(17);
for child_ref in children {
match child_ref {
Some(c) => match c {
ChildReference::Hash(h) => stream.append(&h),
ChildReference::Inline(inline_data, len) => {
let bytes = &AsRef::<[u8]>::as_ref(&inline_data)[..len];
stream.append_raw(bytes, 1)
},
},
None => stream.append_empty_data()
};
}
if let Some(value) = value {
stream.append(&&*value);
} else {
stream.append_empty_data();
}
stream.drain()
}
}
{
let mut stream = RlpStream::new_list(17);
for child_ref in children {
match child_ref {
Some(c) => match c {
ChildReference::Hash(h) => stream.append(&h),
ChildReference::Inline(inline_data, len) => {
let bytes = &AsRef::<[u8]>::as_ref(&inline_data)[..len];
stream.append_raw(bytes, 1)
},
},
None => stream.append_empty_data()
};
}
if let Some(value) = value {
stream.append(&&*value);
} else {
stream.append_empty_data();
}
stream.drain()
}
}
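The RlpStream::new_list(17) call above reflects the branch-node shape: sixteen child slots, one per hex nibble, plus an optional value slot. Below is a rough sketch of that layout using plain byte vectors instead of the rlp crate; the function name and types are hypothetical, for illustration only.

// Illustrative layout of a trie branch node: 16 nibble-indexed children plus an optional value.
fn branch_layout(children: Vec<Option<Vec<u8>>>, value: Option<Vec<u8>>) -> Vec<Vec<u8>> {
    assert_eq!(children.len(), 16, "a branch node has one slot per hex nibble");
    let mut items = Vec::with_capacity(17);
    for child in children {
        // Absent children become empty items, like append_empty_data above.
        items.push(child.unwrap_or_default());
    }
    // The 17th slot carries the value, or an empty item when there is none.
    items.push(value.unwrap_or_default());
    items
}

fn main() {
    let mut children: Vec<Option<Vec<u8>>> = vec![None; 16];
    children[0] = Some(vec![0xaa]);
    let encoded = branch_layout(children, Some(vec![0xbb]));
    assert_eq!(encoded.len(), 17);
}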
2 changes: 1 addition & 1 deletion substrate/rpc/src/author/tests.rs
@@ -56,7 +56,7 @@ impl ChainApi for TestApi {
fn verify_transaction(&self, _at: &BlockId<Block>, uxt: &ExtrinsicFor<Self>) -> Result<Self::VEx, Self::Error> {
Ok(Verified {
sender: uxt.transfer.from[31] as u64,
hash: uxt.transfer.nonce,
hash: uxt.transfer.nonce,
})
}

2 changes: 1 addition & 1 deletion substrate/runtime-io/src/lib.rs
@@ -18,7 +18,7 @@

#![cfg_attr(not(feature = "std"), no_std)]
#![cfg_attr(not(feature = "std"), feature(lang_items))]
-#![cfg_attr(not(feature = "std"), feature(panic_implementation))]
+#![cfg_attr(not(feature = "std"), feature(panic_handler))]
#![cfg_attr(not(feature = "std"), feature(alloc_error_handler))]
#![cfg_attr(not(feature = "std"), feature(core_intrinsics))]
#![cfg_attr(not(feature = "std"), feature(alloc))]
2 changes: 1 addition & 1 deletion substrate/runtime-io/without_std.rs
@@ -27,7 +27,7 @@ use core::intrinsics;
use rstd::vec::Vec;
pub use rstd::{mem, slice};

-#[panic_implementation]
+#[panic_handler]
#[no_mangle]
pub fn panic(info: &::core::panic::PanicInfo) -> ! {
unsafe {
2 changes: 1 addition & 1 deletion substrate/runtime-sandbox/src/lib.rs
@@ -37,7 +37,7 @@

#![warn(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
-#![cfg_attr(not(feature = "std"), feature(panic_implementation))]
+#![cfg_attr(not(feature = "std"), feature(panic_handler))]
#![cfg_attr(not(feature = "std"), feature(core_intrinsics))]
#![cfg_attr(not(feature = "std"), feature(alloc))]

2 changes: 1 addition & 1 deletion substrate/runtime-std/src/lib.rs
@@ -18,7 +18,7 @@
//! or core/alloc to be used with any code that depends on the runtime.

#![cfg_attr(not(feature = "std"), no_std)]
-#![cfg_attr(not(feature = "std"), feature(panic_implementation))]
+#![cfg_attr(not(feature = "std"), feature(panic_handler))]
#![cfg_attr(not(feature = "std"), feature(core_intrinsics))]
#![cfg_attr(not(feature = "std"), feature(alloc))]
