Unidirectional arcs pt. 2 #1283

Merged — 29 commits, Mar 24, 2022

Commits (29) — changes from all commits shown below
7df8947
Make DhtArc unidirectional -- defined by left edge rather than center
maackle Mar 11, 2022
f44107a
CHANGELOG
maackle Mar 11, 2022
83aadf0
Clippy
maackle Mar 11, 2022
26d2428
Completely remove DhtArc in favor of ArcInterval
maackle Mar 11, 2022
f3bb754
Move ArcInterval to dht_arc module, DhtLocation to dht_location module
maackle Mar 11, 2022
2536988
Rename ArcInterval -> DhtArc
maackle Mar 11, 2022
a60dadd
Fixes
maackle Mar 11, 2022
cda0496
Lots of clippy
maackle Mar 11, 2022
bdccd1d
Fix DhtArcBucket
maackle Mar 11, 2022
cd75c0b
Fix tests based on unidirectional arcs
maackle Mar 11, 2022
32e8ddb
fmt
maackle Mar 11, 2022
fbc826e
Revert cutting edge clip
maackle Mar 11, 2022
206bbd2
Fix length calc error
maackle Mar 12, 2022
1d2de41
Fix ascii tests
maackle Mar 14, 2022
bfa9fd2
Fix gap/redundancy tests
maackle Mar 14, 2022
5886748
Clippy
maackle Mar 14, 2022
5b76ccd
Undo reorganization for cleaner diff (REVERT ME)
maackle Mar 14, 2022
434069e
Revert "Undo reorganization for cleaner diff (REVERT ME)"
maackle Mar 14, 2022
18c36b8
Inline powers of 2
maackle Mar 15, 2022
04c906d
Split DhtArc into DhtArc and DhtArcRange
maackle Mar 16, 2022
9b93cda
Remove location from DhtArcRange::Empty|Full
maackle Mar 16, 2022
977a83d
Fix wasm test
maackle Mar 16, 2022
cb08bdb
Clippy
maackle Mar 16, 2022
a52f1f1
Fix ascii tests
maackle Mar 16, 2022
8b34d67
Remove deref_mut on DhtArc, which allowed for inconsistency
maackle Mar 22, 2022
caf64bb
fmt
maackle Mar 22, 2022
c90aa7c
Add docs for DhtArc/Range
maackle Mar 23, 2022
285ca29
Merge pull request #1288 from holochain/unidirectional-arcs-4
maackle Mar 23, 2022
58ba2c7
Merge remote-tracking branch 'origin/develop' into unidirectional-arcs-2
maackle Mar 23, 2022
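Taken together, these commits replace the old centre-based arc with a left-edge ("unidirectional") representation: `ArcInterval` is renamed to `DhtArc`, and that type is then split into `DhtArc` (an arc anchored at an agent's start location) and `DhtArcRange` (a pure interval). The following is a rough conceptual sketch of the split, based only on names and calls visible in the diff below, not the actual type definitions in `kitsune_p2p_types::dht_arc`:

```rust
// Conceptual sketch only -- the real types live in kitsune_p2p_types::dht_arc.

/// A pure interval over the u32 DHT keyspace, with no owner attached.
/// Constructed in the diff via DhtArcRange::from_bounds(start, end).
enum DhtArcRangeSketch {
    Empty,
    Full,
    Bounded(u32, u32),
}

/// An agent's storage arc: a range anchored at the agent's own location,
/// which is now the arc's *left edge* (start) rather than its centre.
/// Constructed in the diff via DhtArc::from_start_and_half_len(start, half_len)
/// or DhtArc::full(start), and queried via start_loc() / inner().
struct DhtArcSketch {
    start_loc: u32,
    range: DhtArcRangeSketch,
}
```

Everything below is essentially the mechanical fallout of that split: call sites move from `ArcInterval::*` to `DhtArcRange::*`, and arc constructors and accessors switch from centre-based to start-based.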
4 changes: 2 additions & 2 deletions crates/fixt/src/serialized_bytes.rs
@@ -69,8 +69,8 @@ fixturator!(
     let mut index = get_fixt_index!();
     // iteratively select a thing to serialize
     let thing_to_serialize = THINGS_TO_SERIALIZE
-        .to_vec()
-        .into_iter()
+        .iter()
+        .copied()
         .cycle()
         .nth(index)
         .unwrap();
2 changes: 1 addition & 1 deletion crates/hdk/src/chain.rs
@@ -3,7 +3,7 @@ use crate::prelude::*;
 /// Query the _headers_ of a remote agent's chain.
 ///
 /// The agent activity is only the headers of their source chain.
-/// The agent activity is held by the neighbourhood centered on the agent's public key, rather than a content hash like the rest of the DHT.
+/// The agent activity is held by the neighbourhood of the agent's public key, rather than a content hash like the rest of the DHT.
 ///
 /// The agent activity can be filtered with [ `ChainQueryFilter` ] like a local chain query.
 pub fn get_agent_activity(
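Since the doc comment above is the main description of this API touched by the PR, here is a minimal usage sketch. The full signature of `get_agent_activity` is cut off in this hunk; the sketch assumes the usual HDK form `(AgentPubKey, ChainQueryFilter, ActivityRequest) -> ExternResult<AgentActivity>`, which is an assumption, not something shown in this diff:

```rust
use hdk::prelude::*;

// Hypothetical zome function: fetch only the header history of another agent.
// The three-argument signature is assumed from the HDK, not from this diff.
#[hdk_extern]
fn headers_of(agent: AgentPubKey) -> ExternResult<AgentActivity> {
    // The same ChainQueryFilter used for local chain queries applies here.
    get_agent_activity(agent, ChainQueryFilter::new(), ActivityRequest::Full)
}
```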
2 changes: 1 addition & 1 deletion crates/hdk/src/lib.rs
@@ -413,7 +413,7 @@ pub use paste;
 /// Agent activity for any other agent on the network can be fetched.
 /// The agent activity is _only the headers_ of the remote agent's source chain.
 /// Agent activity allows efficient building of the history of an agent.
-/// Agent activity is retrieved from a dedicated neighbourhood centered around the agent.
+/// Agent activity is retrieved from a dedicated neighbourhood near the agent.
 /// The agent's neighbourhood also maintains a passive security net that guards against attempted chain forks and/or rollbacks.
 /// The same query DSL for local chain queries is used to filter remote agent activity headers.
 pub mod chain;
17 changes: 5 additions & 12 deletions crates/holochain/src/conductor/p2p_agent_store.rs
@@ -123,22 +123,15 @@ pub async fn get_single_agent_info(
 #[cfg(any(test, feature = "test_utils"))]
 pub async fn exchange_peer_info(envs: Vec<DbWrite<DbKindP2pAgents>>) {
     for (i, a) in envs.iter().enumerate() {
+        let infos_a = all_agent_infos(a.clone().into()).await.unwrap();
         for (j, b) in envs.iter().enumerate() {
             if i == j {
                 continue;
             }
-            inject_agent_infos(
-                a.clone(),
-                all_agent_infos(b.clone().into()).await.unwrap().iter(),
-            )
-            .await
-            .unwrap();
-            inject_agent_infos(
-                b.clone(),
-                all_agent_infos(a.clone().into()).await.unwrap().iter(),
-            )
-            .await
-            .unwrap();
+            let infos_b = all_agent_infos(b.clone().into()).await.unwrap();
+
+            inject_agent_infos(a.clone(), infos_b.iter()).await.unwrap();
+            inject_agent_infos(b.clone(), infos_a.iter()).await.unwrap();
         }
     }
 }
8 changes: 4 additions & 4 deletions crates/holochain/src/conductor/space.rs
@@ -5,7 +5,7 @@ use std::{collections::HashMap, sync::Arc};
 
 use holo_hash::{DhtOpHash, DnaHash};
 use holochain_conductor_api::conductor::DatabaseRootPath;
-use holochain_p2p::dht_arc::{ArcInterval, DhtArcSet};
+use holochain_p2p::dht_arc::{DhtArcRange, DhtArcSet};
 use holochain_sqlite::{
     conn::{DbSyncLevel, DbSyncStrategy},
     db::{
@@ -225,7 +225,7 @@ impl Spaces {
             .unwrap_or("AND DhtOp.when_integrated IS NOT NULL\n");
 
         let intervals = dht_arc_set.intervals();
-        let sql = if let Some(ArcInterval::Full) = intervals.first() {
+        let sql = if let Some(DhtArcRange::Full) = intervals.first() {
             format!(
                 "{} {} {}",
                 holochain_sqlite::sql::sql_cell::FETCH_OP_HASHES_P1,
@@ -235,9 +235,9 @@
         } else {
             let sql_ranges = intervals
                 .into_iter()
-                .filter(|i| matches!(i, &ArcInterval::Bounded(_, _)))
+                .filter(|i| matches!(i, &DhtArcRange::Bounded(_, _)))
                 .map(|interval| match interval {
-                    ArcInterval::Bounded(start_loc, end_loc) => {
+                    DhtArcRange::Bounded(start_loc, end_loc) => {
                         if start_loc <= end_loc {
                             format!(
                                 "AND storage_center_loc >= {} AND storage_center_loc <= {}",
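The hunk above only shows the non-wrapping branch (`start_loc <= end_loc`) of the bounded case; the wrapping branch is truncated in this view. A sketch of the full mapping from a bounded `DhtArcRange` to a SQL condition, where the wrapping clause is an assumption rather than something visible in this diff:

```rust
// Sketch of the bounded-range-to-SQL mapping used above. The wrapping branch
// (start_loc > end_loc) is an assumption; that part of the hunk is cut off.
fn bounded_range_to_sql(start_loc: u32, end_loc: u32) -> String {
    if start_loc <= end_loc {
        // Contiguous span of the keyspace.
        format!(
            "AND storage_center_loc >= {} AND storage_center_loc <= {}",
            start_loc, end_loc
        )
    } else {
        // Span wraps past u32::MAX back to 0, so match either tail.
        format!(
            "AND (storage_center_loc >= {} OR storage_center_loc <= {})",
            start_loc, end_loc
        )
    }
}
```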
2 changes: 1 addition & 1 deletion crates/holochain/src/test_utils.rs
@@ -238,7 +238,7 @@ where
             respond.r(Ok(async move {
                 Ok(PeerViewBeta::new(
                     Default::default(),
-                    DhtArc::full(0),
+                    DhtArc::full(0.into()),
                     1.0,
                     1,
                 ))
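The only change in this file is that `DhtArc::full` now takes a location value rather than a bare integer, hence the added `.into()`. A minimal sketch of the call, assuming `DhtLocation: From<u32>` (which the `0.into()` above implies):

```rust
use holochain_p2p::dht_arc::{DhtArc, DhtLocation};

// Sketch: a full storage arc anchored at location 0. This spells out the
// conversion that `0.into()` performs in the hunk above.
fn full_arc_at_zero() -> DhtArc {
    DhtArc::full(DhtLocation::from(0u32))
}
```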
14 changes: 7 additions & 7 deletions crates/holochain/src/test_utils/network_simulation.rs
@@ -9,7 +9,7 @@ use ::fixt::prelude::*;
 use hdk::prelude::*;
 use holo_hash::{DhtOpHash, DnaHash};
 use holochain_conductor_api::conductor::ConductorConfig;
-use holochain_p2p::dht_arc::{ArcInterval, DhtArc, DhtLocation};
+use holochain_p2p::dht_arc::{DhtArc, DhtArcRange, DhtLocation};
 use holochain_p2p::{AgentPubKeyExt, DhtOpHashExt, DnaHashExt};
 use holochain_sqlite::db::{p2p_put_single, AsP2pStateTxExt};
 use holochain_state::prelude::from_blob;
@@ -141,9 +141,9 @@ impl MockNetworkData {
     pub fn hashes_authority_for(&self, agent: &AgentPubKey) -> Vec<Arc<DhtOpHash>> {
         let arc = self.agent_to_arc[agent].interval();
         match arc {
-            ArcInterval::Empty => Vec::with_capacity(0),
-            ArcInterval::Full => self.ops_by_loc.values().flatten().cloned().collect(),
-            ArcInterval::Bounded(start, end) => {
+            DhtArcRange::Empty => Vec::with_capacity(0),
+            DhtArcRange::Full => self.ops_by_loc.values().flatten().cloned().collect(),
+            DhtArcRange::Bounded(start, end) => {
                 if start <= end {
                     self.ops_by_loc
                         .range(start..=end)
@@ -253,8 +253,8 @@ fn cache_data(in_memory: bool, data: &MockNetworkData, is_cached: bool) -> Conne
         [&data.uuid],
     )
     .unwrap();
-    for op in data.ops.values().cloned() {
-        holochain_state::test_utils::mutations_helpers::insert_valid_integrated_op(&mut txn, &op)
+    for op in data.ops.values() {
+        holochain_state::test_utils::mutations_helpers::insert_valid_integrated_op(&mut txn, op)
             .unwrap();
     }
     for (author, ops) in &data.authored {
@@ -323,7 +323,7 @@ async fn create_test_data(
     let num_storage_buckets = (1.0 / coverage).round() as u32;
     let bucket_size = u32::MAX / num_storage_buckets;
     let buckets = (0..num_storage_buckets)
-        .map(|i| ArcInterval::new(i * bucket_size, i * bucket_size + bucket_size))
+        .map(|i| DhtArcRange::from_bounds(i * bucket_size, i * bucket_size + bucket_size))
         .collect::<Vec<_>>();
     let mut bucket_counts = vec![0; buckets.len()];
     let mut entries = Vec::with_capacity(buckets.len() * approx_num_ops_held);
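`hashes_authority_for` above now matches on `DhtArcRange` variants, but only the non-wrapping branch of `Bounded` is visible in this view. A self-contained sketch of the same lookup over a `BTreeMap`, where the wrapping branch is an assumption:

```rust
use std::collections::BTreeMap;

// Sketch of serving ops for a Bounded range, mirroring hashes_authority_for.
// The wrapping branch (start > end) is an assumption; it is cut off above.
fn ops_in_bounded_range<V: Clone>(
    ops_by_loc: &BTreeMap<u32, Vec<V>>,
    start: u32,
    end: u32,
) -> Vec<V> {
    if start <= end {
        // Contiguous span: one range query.
        ops_by_loc.range(start..=end).flat_map(|(_, v)| v.clone()).collect()
    } else {
        // Wrapping span: take both tails of the keyspace.
        ops_by_loc
            .range(start..=u32::MAX)
            .chain(ops_by_loc.range(0..=end))
            .flat_map(|(_, v)| v.clone())
            .collect()
    }
}
```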
2 changes: 1 addition & 1 deletion crates/holochain/tests/dht_arc/mod.rs
@@ -17,7 +17,7 @@ async fn get_peers(num: usize, half_lens: &[u32], keystore: MetaLairClient) -> V
     let agents = SweetAgents::get(keystore, num).await;
     for agent in agents {
         let agent = holochain_p2p::agent_holo_to_kit(agent);
-        let arc = DhtArc::new(agent.get_loc(), *half_lens.next().unwrap());
+        let arc = DhtArc::from_start_and_half_len(agent.get_loc(), *half_lens.next().unwrap());
         out.push(arc);
     }
     out
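Worth noting for this hunk: the arguments are numerically the same as before, but their meaning changed. A short sketch of the shift, with the old constructor kept only in a comment for comparison (it no longer exists after this PR):

```rust
use holochain_p2p::dht_arc::{DhtArc, DhtLocation};

// Sketch: building a peer's storage arc from its location and a half-length.
fn arc_for(agent_loc: DhtLocation, half_len: u32) -> DhtArc {
    // Pre-PR, DhtArc::new(agent_loc, half_len) treated agent_loc as the
    // arc's *centre*. Post-PR the location is the arc's *left edge*:
    DhtArc::from_start_and_half_len(agent_loc, half_len)
}
```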
20 changes: 10 additions & 10 deletions crates/holochain/tests/sharded_gossip/mod.rs
@@ -337,15 +337,15 @@ async fn mock_network_sharded_gossip() {
                     last_intervals = Some(intervals);
                     let arc = data.agent_to_arc[&agent];
                     let agent_info = data.agent_to_info[&agent].clone();
-                    let interval = arc.interval();
+                    let interval = arc;
 
                     // If we have info for alice check the overlap.
                     if let Some(alice) = &alice {
-                        let a = alice.storage_arc.interval();
+                        let a = alice.storage_arc;
                         let b = interval.clone();
                         debug!("{}\n{}", a.to_ascii(10), b.to_ascii(10));
-                        let a: DhtArcSet = a.into();
-                        let b: DhtArcSet = b.into();
+                        let a: DhtArcSet = a.inner().into();
+                        let b: DhtArcSet = b.inner().into();
                         if !a.overlap(&b) {
                             num_missed_gossips += 1;
                         }
@@ -363,7 +363,7 @@
                         module: module.clone(),
                         gossip: GossipProtocol::Sharded(
                             ShardedGossipWire::accept(
-                                vec![interval],
+                                vec![interval.into()],
                                 vec![agent_info],
                             ),
                         ),
@@ -475,8 +475,8 @@
                 .as_ref()
                 .map(|alice| {
                     let arc = data.agent_to_arc[&agent];
-                    let a = alice.storage_arc.interval();
-                    let b = arc.interval();
+                    let a = alice.storage_arc;
+                    let b = arc;
                     let num_should_hold = this_agent_hashes
                         .iter()
                         .filter(|hash| {
@@ -837,7 +837,7 @@ async fn mock_network_sharding() {
                         if arc.contains(basis_loc) {
                             0
                         } else {
-                            (arc.center_loc().as_u32() as i64 - basis_loc_i).abs()
+                            (arc.start_loc().as_u32() as i64 - basis_loc_i).abs()
                         },
                         a,
                     )
@@ -873,15 +873,15 @@ async fn mock_network_sharding() {
                     last_intervals = Some(intervals);
                     let arc = data.agent_to_arc[&agent];
                     let agent_info = data.agent_to_info[&agent].clone();
-                    let interval = arc.interval();
+                    let interval = arc;
 
                     // Accept the initiate.
                     let msg = HolochainP2pMockMsg::Gossip {
                         dna: dna.clone(),
                         module: module.clone(),
                         gossip: GossipProtocol::Sharded(
                             ShardedGossipWire::accept(
-                                vec![interval],
+                                vec![interval.into()],
                                 vec![agent_info],
                             ),
                         ),
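In these hunks the overlap check now goes through `DhtArcSet`, converting each `DhtArc` via `.inner()` (its underlying range). A minimal sketch of that pattern, using only the conversions that appear above:

```rust
use kitsune_p2p_types::dht_arc::{DhtArc, DhtArcSet};

// Sketch: do two storage arcs overlap? Mirrors the conversions in the hunk
// above: DhtArc -> inner range -> DhtArcSet, then DhtArcSet::overlap.
fn arcs_overlap(a: DhtArc, b: DhtArc) -> bool {
    let a: DhtArcSet = a.inner().into();
    let b: DhtArcSet = b.inner().into();
    a.overlap(&b)
}
```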
10 changes: 5 additions & 5 deletions crates/holochain_p2p/src/test.rs
@@ -238,7 +238,7 @@ mod tests {
                     QueryPeerDensity { respond, .. } => {
                         let view = kitsune_p2p_types::dht_arc::PeerViewBeta::new(
                             PeerStratBeta::default(),
-                            dht_arc::DhtArc::full(0),
+                            dht_arc::DhtArc::full(0.into()),
                             1.0,
                             2,
                         );
@@ -305,7 +305,7 @@ mod tests {
                     QueryPeerDensity { respond, .. } => {
                         let view = kitsune_p2p_types::dht_arc::PeerViewBeta::new(
                             PeerStratBeta::default(),
-                            dht_arc::DhtArc::full(0),
+                            dht_arc::DhtArc::full(0.into()),
                             1.0,
                             2,
                         );
@@ -363,7 +363,7 @@ mod tests {
                     QueryPeerDensity { respond, .. } => {
                         let view = kitsune_p2p_types::dht_arc::PeerViewBeta::new(
                             PeerStratBeta::default(),
-                            dht_arc::DhtArc::full(0),
+                            dht_arc::DhtArc::full(0.into()),
                             1.0,
                             2,
                         );
@@ -456,7 +456,7 @@ mod tests {
                     QueryPeerDensity { respond, .. } => {
                         let view = kitsune_p2p_types::dht_arc::PeerViewBeta::new(
                             PeerStratBeta::default(),
-                            dht_arc::DhtArc::full(0),
+                            dht_arc::DhtArc::full(0.into()),
                             1.0,
                             2,
                         );
@@ -548,7 +548,7 @@ mod tests {
                     QueryPeerDensity { respond, .. } => {
                         let view = kitsune_p2p_types::dht_arc::PeerViewBeta::new(
                             PeerStratBeta::default(),
-                            dht_arc::DhtArc::full(0),
+                            dht_arc::DhtArc::full(0.into()),
                             1.0,
                             2,
                         );
13 changes: 7 additions & 6 deletions crates/holochain_sqlite/src/db/p2p_agent_store.rs
@@ -3,7 +3,8 @@
 use crate::prelude::*;
 use crate::sql::*;
 use kitsune_p2p::agent_store::AgentInfoSigned;
-use kitsune_p2p::dht_arc::{ArcInterval, DhtArcSet};
+use kitsune_p2p::dht_arc::DhtArcRange;
+use kitsune_p2p::dht_arc::DhtArcSet;
 use kitsune_p2p::KitsuneAgent;
 use rusqlite::*;
 use std::sync::Arc;
@@ -244,7 +245,7 @@ impl AsP2pStateTxExt for Transaction<'_> {
             },
         )? {
             let info = r?;
-            let interval = info.storage_arc.interval();
+            let interval = DhtArcRange::from(info.storage_arc);
             if arcset.overlap(&interval.into()) {
                 out.push(info);
             }
@@ -285,7 +286,7 @@ impl AsP2pStateTxExt for Transaction<'_> {
 
         for interval in dht_arc_set.intervals() {
             match interval {
-                ArcInterval::Full => {
+                DhtArcRange::Full => {
                     out.push(stmt.query_row(
                         named_params! {
                             ":now": now,
@@ -295,7 +296,7 @@
                         |r| r.get(0),
                     )?);
                 }
-                ArcInterval::Bounded(start, end) => {
+                DhtArcRange::Bounded(start, end) => {
                     out.push(stmt.query_row(
                         named_params! {
                             ":now": now,
@@ -353,11 +354,11 @@ impl P2pRecord {
         let expires_at_ms = signed.expires_at_ms;
         let arc = signed.storage_arc;
 
-        let storage_center_loc = arc.center_loc().into();
+        let storage_center_loc = arc.start_loc().into();
 
         let is_active = !signed.url_list.is_empty();
 
-        let (storage_start_loc, storage_end_loc) = arc.primitive_range_detached();
+        let (storage_start_loc, storage_end_loc) = arc.to_primitive_bounds_detached();
 
         Ok(Self {
             agent,
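For `P2pRecord`, the persisted location is now the arc's start rather than its centre, and the range bounds come from `to_primitive_bounds_detached`. A small sketch of extracting those columns, using only accessors visible elsewhere in this PR (the exact return types are assumptions):

```rust
use kitsune_p2p::dht_arc::DhtArc;

// Sketch: the location/bounds columns persisted for an agent's storage arc.
// start_loc().as_u32() and to_primitive_bounds_detached() both appear in this
// PR; treating the detached bounds as plain u32s is an assumption.
fn arc_columns(arc: DhtArc) -> (u32, u32, u32) {
    // The "center" column now records the arc's start (left edge) location.
    let storage_center_loc = arc.start_loc().as_u32();
    // Primitive bounds used for SQL range comparisons.
    let (storage_start_loc, storage_end_loc) = arc.to_primitive_bounds_detached();
    (storage_center_loc, storage_start_loc, storage_end_loc)
}
```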
14 changes: 7 additions & 7 deletions crates/holochain_sqlite/src/db/p2p_agent_store/p2p_test.rs
@@ -1,6 +1,6 @@
 use crate::prelude::*;
 use kitsune_p2p::agent_store::AgentInfoSigned;
-use kitsune_p2p::dht_arc::{ArcInterval, DhtArc, DhtArcSet};
+use kitsune_p2p::dht_arc::{DhtArcRange, DhtArcSet};
 use kitsune_p2p::{KitsuneAgent, KitsuneSignature, KitsuneSpace};
 use rand::Rng;
 use std::sync::Arc;
@@ -100,8 +100,8 @@ async fn test_p2p_agent_store_extrapolated_coverage() {
     let res = con
         .p2p_extrapolated_coverage(DhtArcSet::from(
             &[
-                ArcInterval::from_bounds((1.into(), (u32::MAX / 2 - 1).into())),
-                ArcInterval::from_bounds(((u32::MAX / 2 + 1).into(), (u32::MAX - 1).into())),
+                DhtArcRange::from_bounds(1u32, u32::MAX / 2 - 1),
+                DhtArcRange::from_bounds(u32::MAX / 2 + 1, u32::MAX - 1),
             ][..],
         ))
         .unwrap();
@@ -158,7 +158,7 @@ async fn test_p2p_agent_store_gossip_query_sanity() {
         .p2p_gossip_query_agents(
             u64::MIN,
             u64::MAX,
-            DhtArc::new(0, u32::MAX).interval().into(),
+            DhtArcRange::from_bounds(0, u32::MAX).into(),
         )
         .unwrap();
     assert_eq!(all.len(), num_nonzero);
@@ -168,14 +168,14 @@
         .p2p_gossip_query_agents(
             u64::MIN,
             u64::MIN,
-            DhtArc::new(0, u32::MAX).interval().into(),
+            DhtArcRange::from_bounds(0, u32::MAX).into(),
         )
         .unwrap();
     assert_eq!(all.len(), 0);
 
     // check that gossip query over zero arc returns zero results
     let all = con
-        .p2p_gossip_query_agents(u64::MIN, u64::MAX, DhtArc::new(0, 0).interval().into())
+        .p2p_gossip_query_agents(u64::MIN, u64::MAX, DhtArcRange::Empty.into())
         .unwrap();
     assert_eq!(all.len(), 0);
 
@@ -185,7 +185,7 @@ async fn test_p2p_agent_store_gossip_query_sanity() {
         .p2p_gossip_query_agents(
             u64::MIN,
             u64::MAX,
-            DhtArc::new(0, u32::MAX / 4).interval().into(),
+            DhtArcRange::from_bounds(0, u32::MAX as u64 / 4).into(),
         )
         .unwrap();
     // NOTE - not sure this is right with <= num_nonzero... but it breaks
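These tests now build query arcs with `DhtArcRange::from_bounds` and pass `DhtArcRange::Empty` for the zero-arc case. A small standalone sketch of the arc set from the extrapolated-coverage test above, using the same constructors:

```rust
use kitsune_p2p::dht_arc::{DhtArcRange, DhtArcSet};

// Sketch: an arc set covering two near-halves of the keyspace, exactly as
// constructed in the coverage test above (DhtArcSet::from over a slice).
fn two_halves() -> DhtArcSet {
    DhtArcSet::from(
        &[
            DhtArcRange::from_bounds(1u32, u32::MAX / 2 - 1),
            DhtArcRange::from_bounds(u32::MAX / 2 + 1, u32::MAX - 1),
        ][..],
    )
}
```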
@@ -1,6 +1,6 @@
 SELECT
   SUM(
-    -- first, sum up the 0.0 - 1.0 coverage of everyone centered in our arc
+    -- first, sum up the 0.0 - 1.0 coverage of everyone contained in our arc
     CASE
       -- if start is before end
       WHEN (storage_start_loc <= storage_end_loc) THEN IFNULL(
@@ -36,7 +36,7 @@ FROM
         :basis <= storage_end_loc
         OR :basis >= storage_start_loc
       ) THEN 0
-      /* if it is in the center, uncovered area */
+      /* if it is in the middle, uncovered area */
       ELSE min(
         :basis - storage_end_loc,
         storage_start_loc - :basis
3 changes: 1 addition & 2 deletions crates/holochain_types/src/app.rs
@@ -424,8 +424,7 @@ impl InstalledAppCommon {
     pub fn cloned_cells(&self) -> impl Iterator<Item = &CellId> {
         self.role_assignments
             .iter()
-            .map(|(_, role)| &role.clones)
-            .flatten()
+            .flat_map(|(_, role)| &role.clones)
     }
 
     /// Iterator of all cells, both provisioned and cloned