lightning: move no_std conditional into main lib

Move conditional compilation for no_std builds into main lib.rs

GeneFerneau committed Mar 17, 2021
1 parent b441165 commit dd9f430
Showing 30 changed files with 76 additions and 208 deletions.
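Before the per-file diffs, here is the shape of the change as a standalone sketch: the crate root gates its imports of shared items on the no_std feature once, and every other file pulls them back in through crate:: paths. This is only an illustration under assumed names (the example module and bump helper are invented, and a hashbrown dependency plus a no_std cargo feature are assumed); the real gating and item list are in the lightning/src/lib.rs hunk further down.

// Crate root (lib.rs): decide once, behind the `no_std` feature, where the
// shared items come from.
#[cfg(feature = "no_std")]
use core::cmp;
#[cfg(not(feature = "no_std"))]
use std::cmp;

#[cfg(feature = "no_std")]
use hashbrown::{HashMap, hash_map};
#[cfg(not(feature = "no_std"))]
use std::collections::{HashMap, hash_map};

pub mod example {
    // Private items of an ancestor module are visible to its descendants, so a
    // single crate-relative import replaces each per-file pair of cfg'd imports.
    use crate::{HashMap, cmp, hash_map};

    /// Increment the counter for `key`, saturating at `cap`.
    pub fn bump(counts: &mut HashMap<u32, u32>, key: u32, cap: u32) -> u32 {
        let slot = match counts.entry(key) {
            hash_map::Entry::Occupied(e) => e.into_mut(),
            hash_map::Entry::Vacant(e) => e.insert(0),
        };
        *slot = cmp::min(*slot + 1, cap);
        *slot
    }
}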
6 changes: 1 addition & 5 deletions lightning/src/chain/chainmonitor.rs
@@ -42,12 +42,8 @@ use util::logger::Logger;
use util::events;
use util::events::Event;

-#[cfg(not(feature = "no_std"))]
-use std::collections::{HashMap, hash_map};
-#[cfg(feature = "no_std")]
-use hashbrown::{HashMap, hash_map};
+use crate::{HashMap, hash_map, ops::Deref};
 use std::sync::RwLock;
-use std::ops::Deref;

/// An implementation of [`chain::Watch`] for monitoring channels.
///
15 changes: 4 additions & 11 deletions lightning/src/chain/channelmonitor.rs
@@ -51,14 +51,7 @@ use util::ser::{Readable, ReadableArgs, MaybeReadable, Writer, Writeable, U48};
use util::byte_utils;
use util::events::Event;

-#[cfg(not(feature = "no_std"))]
-use std::collections::{HashMap, HashSet, hash_map};
-#[cfg(feature = "no_std")]
-use hashbrown::{HashMap, HashSet, hash_map};
-#[cfg(not(feature = "no_std"))]
-use std::{cmp, mem, ops::Deref};
-#[cfg(feature = "no_std")]
-use core::{cmp, mem, ops::Deref};
+use crate::{HashMap, HashSet, cmp, hash_map, mem, ops::Deref};
use std::io::Error;
use std::sync::Mutex;

@@ -92,7 +85,7 @@ pub struct ChannelMonitorUpdate {
/// then we allow the `ChannelManager` to send a `ChannelMonitorUpdate` with this update ID,
/// with the update providing said payment preimage. No other update types are allowed after
/// force-close.
-pub const CLOSED_CHANNEL_UPDATE_ID: u64 = std::u64::MAX;
+pub const CLOSED_CHANNEL_UPDATE_ID: u64 = crate::u64::MAX;

impl Writeable for ChannelMonitorUpdate {
fn write<W: Writer>(&self, w: &mut W) -> Result<(), ::std::io::Error> {
@@ -1826,7 +1819,7 @@ impl<Signer: Sign> ChannelMonitorImpl<Signer> {

for &(ref htlc, _, _) in holder_tx.htlc_outputs.iter() {
if let Some(transaction_output_index) = htlc.transaction_output_index {
-claim_requests.push(ClaimRequest { absolute_timelock: ::std::u32::MAX, aggregable: false, outpoint: BitcoinOutPoint { txid: holder_tx.txid, vout: transaction_output_index as u32 },
+claim_requests.push(ClaimRequest { absolute_timelock: crate::u32::MAX, aggregable: false, outpoint: BitcoinOutPoint { txid: holder_tx.txid, vout: transaction_output_index as u32 },
witness_data: InputMaterial::HolderHTLC {
preimage: if !htlc.offered {
if let Some(preimage) = self.payment_preimages.get(&htlc.payment_hash) {
@@ -2374,7 +2367,7 @@ impl<Signer: Sign> ChannelMonitorImpl<Signer> {
fn is_paying_spendable_output<L: Deref>(&mut self, tx: &Transaction, height: u32, logger: &L) where L::Target: Logger {
let mut spendable_output = None;
for (i, outp) in tx.output.iter().enumerate() { // There is max one spendable output for any channel tx, including ones generated by us
-if i > ::std::u16::MAX as usize {
+if i > crate::u16::MAX as usize {
// While it is possible that an output exists on chain which is greater than the
// 2^16th output in a given transaction, this is only possible if the output is not
// in a lightning transaction and was instead placed there by some third party who
13 changes: 3 additions & 10 deletions lightning/src/chain/keysinterface.rs
@@ -36,14 +36,7 @@ use ln::chan_utils;
use ln::chan_utils::{HTLCOutputInCommitment, make_funding_redeemscript, ChannelPublicKeys, HolderCommitmentTransaction, ChannelTransactionParameters, CommitmentTransaction};
use ln::msgs::UnsignedChannelAnnouncement;

-#[cfg(not(feature = "no_std"))]
-use std::collections::HashSet;
-#[cfg(feature = "no_std")]
-use hashbrown::HashSet;
-#[cfg(not(feature = "no_std"))]
-use std::sync::atomic::{AtomicUsize, Ordering};
-#[cfg(feature = "no_std")]
-use core::sync::atomic::{AtomicUsize, Ordering};
+use crate::{HashSet, atomic::{AtomicUsize, Ordering}};
use std::io::Error;
use ln::msgs::{DecodeError, MAX_VALUE_MSAT};

@@ -839,7 +832,7 @@ impl KeysManager {
/// onchain output detection for which a corresponding delayed_payment_key must be derived.
pub fn derive_channel_keys(&self, channel_value_satoshis: u64, params: &[u8; 32]) -> InMemorySigner {
let chan_id = byte_utils::slice_to_be64(&params[0..8]);
-assert!(chan_id <= std::u32::MAX as u64); // Otherwise the params field wasn't created by us
+assert!(chan_id <= crate::u32::MAX as u64); // Otherwise the params field wasn't created by us
let mut unique_start = Sha256::engine();
unique_start.input(params);
unique_start.input(&self.seed);
@@ -1021,7 +1014,7 @@ impl KeysInterface for KeysManager {

fn get_channel_signer(&self, _inbound: bool, channel_value_satoshis: u64) -> Self::Signer {
let child_ix = self.channel_child_index.fetch_add(1, Ordering::AcqRel);
-assert!(child_ix <= std::u32::MAX as usize);
+assert!(child_ix <= crate::u32::MAX as usize);
let mut id = [0; 32];
id[0..8].copy_from_slice(&byte_utils::be64_to_array(child_ix as u64));
id[8..16].copy_from_slice(&byte_utils::be64_to_array(self.starting_time_nanos as u64));
4 changes: 2 additions & 2 deletions lightning/src/chain/mod.rs
@@ -137,7 +137,7 @@ pub trait Filter: Send + Sync {
fn register_output(&self, outpoint: &OutPoint, script_pubkey: &Script);
}

-impl<T: Listen> Listen for std::ops::Deref<Target = T> {
+impl<T: Listen> Listen for crate::ops::Deref<Target = T> {
fn block_connected(&self, block: &Block, height: u32) {
(**self).block_connected(block, height);
}
@@ -147,7 +147,7 @@ impl<T: Listen> Listen for std::ops::Deref<Target = T> {
}
}

-impl<T: std::ops::Deref, U: std::ops::Deref> Listen for (T, U)
+impl<T: crate::ops::Deref, U: crate::ops::Deref> Listen for (T, U)
where
T::Target: Listen,
U::Target: Listen,
12 changes: 12 additions & 0 deletions lightning/src/lib.rs
@@ -38,6 +38,18 @@ extern crate bitcoin;
#[cfg(feature = "no_std")] extern crate core;
#[cfg(feature = "no_std")] extern crate hashbrown;

+#[cfg(feature = "no_std")]
+use core::{cell, cmp, default, fmt, hash, iter, marker, mem, ops, sync::atomic, time, u16, u32, u64, usize};
+#[cfg(not(feature = "no_std"))]
+use std::{cell, cmp, default, fmt, hash, iter, marker, mem, ops, sync::atomic, time, u16, u32, u64, usize};
+
+#[cfg(feature = "no_std")]
+use hashbrown::{HashMap, HashSet, hash_map};
+#[cfg(feature = "no_std")]
+use alloc::collections::{BinaryHeap, BTreeMap, BTreeSet, LinkedList, btree_map};
+#[cfg(not(feature = "no_std"))]
+use std::collections::{BinaryHeap, BTreeMap, BTreeSet, HashMap, HashSet, LinkedList, btree_map, hash_map};

#[macro_use]
pub mod util;
pub mod chain;
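One detail of the lib.rs hunk above worth noting: the root also imports the integer modules (u16, u32, u64, usize), which is what lets the many std::u32::MAX-style constants in the files below become crate::u32::MAX and resolve against either std or core. A rough sketch of that mechanism, with an invented limits module as the consumer:

// Crate root: import the numeric modules (not the primitive types) from the
// matching facade, so crate::u16::MAX and friends work on std and no_std builds.
#[cfg(feature = "no_std")]
use core::{u16, u32};
#[cfg(not(feature = "no_std"))]
use std::{u16, u32};

pub mod limits {
    // Resolves to std::u16::MAX or core::u16::MAX depending on the feature.
    pub const MAX_MSG_LEN: usize = crate::u16::MAX as usize;

    /// True when `value` also fits in a u32.
    pub fn fits_in_u32(value: u64) -> bool {
        value <= crate::u32::MAX as u64
    }
}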
5 changes: 1 addition & 4 deletions lightning/src/ln/chan_utils.rs
@@ -31,10 +31,7 @@ use bitcoin::secp256k1::{Secp256k1, Signature, Message};
use bitcoin::secp256k1::Error as SecpError;
use bitcoin::secp256k1;

-#[cfg(not(feature = "no_std"))]
-use std::{cmp, ops::Deref};
-#[cfg(feature = "no_std")]
-use core::{cmp, ops::Deref};
+use crate::{cmp, ops::Deref};
use ln::chan_utils;
use util::transaction_utils::sort_outputs;
use ln::channel::INITIAL_COMMITMENT_NUMBER;
17 changes: 7 additions & 10 deletions lightning/src/ln/channel.rs
@@ -38,10 +38,7 @@ use util::logger::Logger;
use util::errors::APIError;
use util::config::{UserConfig,ChannelConfig};

-#[cfg(not(feature = "no_std"))]
-use std::{cmp, mem, fmt, ops::Deref};
-#[cfg(feature = "no_std")]
-use core::{cmp, mem, fmt, ops::Deref};
+use crate::{cmp, mem, fmt, ops::Deref};
#[cfg(any(test, feature = "fuzztarget"))]
use std::sync::Mutex;
use bitcoin::hashes::hex::ToHex;
@@ -1192,7 +1189,7 @@ impl<Signer: Sign> Channel<Signer> {
// on-chain ChannelsMonitors during block rescan. Ideally we'd figure out a way to drop
// these, but for now we just have to treat them as normal.

-let mut pending_idx = std::usize::MAX;
+let mut pending_idx = crate::usize::MAX;
for (idx, htlc) in self.pending_inbound_htlcs.iter().enumerate() {
if htlc.htlc_id == htlc_id_arg {
assert_eq!(htlc.payment_hash, payment_hash_calc);
@@ -1215,7 +1212,7 @@ impl<Signer: Sign> Channel<Signer> {
break;
}
}
-if pending_idx == std::usize::MAX {
+if pending_idx == crate::usize::MAX {
return Err(ChannelError::Ignore("Unable to find a pending HTLC which matched the given HTLC ID".to_owned()));
}

@@ -1314,7 +1311,7 @@ impl<Signer: Sign> Channel<Signer> {
// on-chain ChannelsMonitors during block rescan. Ideally we'd figure out a way to drop
// these, but for now we just have to treat them as normal.

-let mut pending_idx = std::usize::MAX;
+let mut pending_idx = crate::usize::MAX;
for (idx, htlc) in self.pending_inbound_htlcs.iter().enumerate() {
if htlc.htlc_id == htlc_id_arg {
match htlc.state {
@@ -1331,7 +1328,7 @@ impl<Signer: Sign> Channel<Signer> {
pending_idx = idx;
}
}
-if pending_idx == std::usize::MAX {
+if pending_idx == crate::usize::MAX {
return Err(ChannelError::Ignore("Unable to find a pending HTLC which matched the given HTLC ID".to_owned()));
}

@@ -4284,8 +4281,8 @@ impl<Signer: Sign> Writeable for Channel<Signer> {

let mut key_data = VecWriter(Vec::new());
self.holder_signer.write(&mut key_data)?;
-assert!(key_data.0.len() < std::usize::MAX);
-assert!(key_data.0.len() < std::u32::MAX as usize);
+assert!(key_data.0.len() < crate::usize::MAX);
+assert!(key_data.0.len() < crate::u32::MAX as usize);
(key_data.0.len() as u32).write(writer)?;
writer.write_all(&key_data.0[..])?;

29 changes: 7 additions & 22 deletions lightning/src/ln/channelmanager.rs
@@ -55,28 +55,17 @@ use util::chacha20::{ChaCha20, ChaChaReader};
use util::logger::Logger;
use util::errors::APIError;

-#[cfg(not(feature = "no_std"))]
-use std::{
+use crate::{
+HashMap,
+HashSet,
 cmp,
+hash_map,
 mem,
 marker::{Sync, Send},
 ops::Deref,
-sync::atomic::{AtomicUsize, Ordering},
+atomic::{AtomicUsize, Ordering},
 time::Duration,
 };
-#[cfg(feature = "no_std")]
-use core::{
-cmp,
-mem,
-marker::{Sync, Send},
-ops::Deref,
-sync::atomic::{AtomicUsize, Ordering},
-time::Duration,
-};
-#[cfg(not(feature = "no_std"))]
-use std::collections::{HashMap, hash_map, HashSet};
-#[cfg(feature = "no_std")]
-use hashbrown::{HashMap, HashSet, hash_map};
use std::io::{Cursor, Read};
use std::sync::{Arc, Condvar, Mutex, MutexGuard, RwLock, RwLockReadGuard};
#[cfg(any(test, feature = "allow_wallclock_use"))]
@@ -1594,7 +1583,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
// be absurd. We ensure this by checking that at least 500 (our stated public contract on when
// broadcast_node_announcement panics) of the maximum-length addresses would fit in a 64KB
// message...
-const HALF_MESSAGE_IS_ADDRS: u32 = ::std::u16::MAX as u32 / (NetAddress::MAX_LEN as u32 + 1) / 2;
+const HALF_MESSAGE_IS_ADDRS: u32 = crate::u16::MAX as u32 / (NetAddress::MAX_LEN as u32 + 1) / 2;
#[deny(const_err)]
#[allow(dead_code)]
// ...by failing to compile if the number of addresses that would be half of a message is
@@ -4322,12 +4311,8 @@ impl<'a, Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
mod tests {
use ln::channelmanager::PersistenceNotifier;
use std::sync::Arc;
-#[cfg(not(feature = "no_std"))]
-use std::sync::atomic::{AtomicBool, Ordering};
-#[cfg(feature = "no_std")]
-use core::sync::atomic::{AtomicBool, Ordering};
+use crate::{atomic::{AtomicBool, Ordering}, time::Duration};
 use std::thread;
-use std::time::Duration;

#[test]
fn test_wait_timeout() {
5 changes: 1 addition & 4 deletions lightning/src/ln/features.rs
@@ -25,10 +25,7 @@
//! [`Features`]: struct.Features.html
//! [`Context`]: sealed/trait.Context.html

-#[cfg(not(feature = "no_std"))]
-use std::{cmp, fmt, marker::PhantomData};
-#[cfg(feature = "no_std")]
-use core::{cmp, fmt, marker::PhantomData};
+use crate::{cmp, fmt, marker::PhantomData};

use ln::msgs::DecodeError;
use util::ser::{Readable, Writeable, Writer};
9 changes: 1 addition & 8 deletions lightning/src/ln/functional_test_utils.rs
@@ -40,14 +40,7 @@ use bitcoin::secp256k1::key::PublicKey;

use std::rc::Rc;
use std::sync::Mutex;
-#[cfg(not(feature = "no_std"))]
-use std::{cell::RefCell, mem};
-#[cfg(feature = "no_std")]
-use core::{cell::RefCell, mem};
-#[cfg(not(feature = "no_std"))]
-use std::collections::HashMap;
-#[cfg(feature = "no_std")]
-use hashbrown::HashMap;
+use crate::{HashMap, cell::RefCell, mem};

pub const CHAN_CONFIRM_DEPTH: u32 = 100;

12 changes: 1 addition & 11 deletions lightning/src/ln/functional_tests.rs
@@ -47,18 +47,8 @@ use bitcoin::secp256k1::key::{PublicKey,SecretKey};

use regex;

-#[cfg(not(feature = "no_std"))]
-use std::collections::{BTreeSet, HashMap, HashSet};
-#[cfg(feature = "no_std")]
-use hashbrown::{HashMap, HashSet};
-#[cfg(feature = "no_std")]
-use alloc::collections::BTreeSet;
-use std::default::Default;
 use std::sync::Mutex;
-#[cfg(not(feature = "no_std"))]
-use std::sync::atomic::Ordering;
-#[cfg(feature = "no_std")]
-use core::sync::atomic::Ordering;
+use crate::{BTreeSet, HashMap, HashSet, atomic::Ordering, default::Default};

use ln::functional_test_utils::*;
use ln::chan_utils::CommitmentTransaction;
5 changes: 1 addition & 4 deletions lightning/src/ln/msgs.rs
@@ -32,10 +32,7 @@ use bitcoin::hash_types::{Txid, BlockHash};

use ln::features::{ChannelFeatures, InitFeatures, NodeFeatures};

-#[cfg(not(feature = "no_std"))]
-use std::{cmp, fmt::{self, Debug}};
-#[cfg(feature = "no_std")]
-use core::{cmp, fmt::{self, Debug}};
+use crate::{cmp, fmt::{self, Debug}};
use std::io::Read;

use util::events::MessageSendEventsProvider;
15 changes: 4 additions & 11 deletions lightning/src/ln/onchaintx.rs
@@ -32,14 +32,7 @@ use util::logger::Logger;
use util::ser::{Readable, ReadableArgs, Writer, Writeable, VecWriter};
use util::byte_utils;

-#[cfg(not(feature = "no_std"))]
-use std::collections::{HashMap, hash_map};
-#[cfg(feature = "no_std")]
-use hashbrown::{HashMap, hash_map};
-#[cfg(not(feature = "no_std"))]
-use std::{cmp, mem::replace, ops::Deref};
-#[cfg(feature = "no_std")]
-use core::{cmp, mem::replace, ops::Deref};
+use crate::{HashMap, hash_map, cmp, mem::replace, ops::Deref};

const MAX_ALLOC_SIZE: usize = 64*1024;

@@ -303,8 +296,8 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {

let mut key_data = VecWriter(Vec::new());
self.signer.write(&mut key_data)?;
-assert!(key_data.0.len() < std::usize::MAX);
-assert!(key_data.0.len() < std::u32::MAX as usize);
+assert!(key_data.0.len() < crate::usize::MAX);
+assert!(key_data.0.len() < crate::u32::MAX as usize);
(key_data.0.len() as u32).write(writer)?;
writer.write_all(&key_data.0[..])?;

@@ -696,7 +689,7 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {
log_trace!(logger, "Updating claims view at height {} with {} matched transactions and {} claim requests", height, txn_matched.len(), claimable_outpoints.len());
let mut new_claims = Vec::new();
let mut aggregated_claim = HashMap::new();
-let mut aggregated_soonest = ::std::u32::MAX;
+let mut aggregated_soonest = crate::u32::MAX;

// Try to aggregate outputs if their timelock expiration isn't imminent (absolute_timelock
// <= CLTV_SHARED_CLAIM_BUFFER) and they don't require an immediate nLockTime (aggregable).
6 changes: 1 addition & 5 deletions lightning/src/ln/onion_route_tests.rs
@@ -32,11 +32,7 @@ use bitcoin::hashes::Hash;
use bitcoin::secp256k1::Secp256k1;
use bitcoin::secp256k1::key::SecretKey;

-use std::default::Default;
-#[cfg(not(feature = "no_std"))]
-use std::sync::atomic::Ordering;
-#[cfg(feature = "no_std")]
-use core::sync::atomic::Ordering;
+use crate::{atomic::Ordering, default::Default};
use std::io;

use ln::functional_test_utils::*;
2 changes: 1 addition & 1 deletion lightning/src/ln/onion_utils.rs
@@ -27,7 +27,7 @@ use bitcoin::secp256k1::ecdh::SharedSecret;
use bitcoin::secp256k1;

use std::io::Cursor;
-use std::ops::Deref;
+use crate::ops::Deref;

pub(super) struct OnionKeys {
#[cfg(test)]
4 changes: 2 additions & 2 deletions lightning/src/ln/peer_channel_encryptor.rs
@@ -25,7 +25,7 @@ use bitcoin::hashes::hex::ToHex;
/// Maximum Lightning message data length according to
/// [BOLT-8](https://github.com/lightningnetwork/lightning-rfc/blob/v1.0/08-transport.md#lightning-message-specification)
/// and [BOLT-1](https://github.com/lightningnetwork/lightning-rfc/blob/master/01-messaging.md#lightning-message-format):
-pub const LN_MAX_MSG_LEN: usize = ::std::u16::MAX as usize; // Must be equal to 65535
+pub const LN_MAX_MSG_LEN: usize = crate::u16::MAX as usize; // Must be equal to 65535

// Sha256("Noise_XK_secp256k1_ChaChaPoly_SHA256")
const NOISE_CK: [u8; 32] = [0x26, 0x40, 0xf5, 0x2e, 0xeb, 0xcd, 0x9e, 0x88, 0x29, 0x58, 0x95, 0x1c, 0x79, 0x42, 0x50, 0xee, 0xdb, 0x28, 0x00, 0x2c, 0x05, 0xd7, 0xdc, 0x2e, 0xa0, 0xf1, 0x95, 0x40, 0x60, 0x42, 0xca, 0xf1];
@@ -715,7 +715,7 @@ mod tests {
#[test]
fn max_msg_len_limit_value() {
assert_eq!(LN_MAX_MSG_LEN, 65535);
-assert_eq!(LN_MAX_MSG_LEN, ::std::u16::MAX as usize);
+assert_eq!(LN_MAX_MSG_LEN, crate::u16::MAX as usize);
}

#[test]