Remove backward_maps
Turns out they are not really needed:
During reverse filtering (push), only the very first commit of the
sequence to be pushed can possibly be in that table. All others have
to be calculated by traversing the new commits, and the results of
that were never kept in the first place.
For the very first commit the parent can be found by searching the
target branch in forward topological order, which will almost always
be a very short search as long as the new commits are not based on
an ancient version of the target.
christian-schilling committed Dec 18, 2020
1 parent f9a5269 commit 6cd2bdb
Showing 3 changed files with 76 additions and 58 deletions.
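
The reverse lookup described in the commit message above boils down to walking the target branch with the forward maps that are still kept. A minimal sketch of the idea, with an assumed standalone signature; the actual implementation is the new find_original helper added to src/scratch.rs in the diff below:

fn reverse_lookup(
    repo: &git2::Repository,
    spec: &str,
    target_tip: git2::Oid, // tip of the branch the push targets
    filtered: git2::Oid,   // commit id produced by the forward filter
) -> super::JoshResult<git2::Oid> {
    // Walk the target branch in topological order and return the first
    // original commit whose forward-filtered result matches `filtered`.
    let mut walk = repo.revwalk()?;
    walk.set_sorting(git2::Sort::TOPOLOGICAL)?;
    walk.push(target_tip)?;
    for original in walk {
        let original = original?;
        if Some(filtered)
            == super::filter_cache::lookup_forward(&repo, spec, original)
        {
            return Ok(original);
        }
    }
    // Nothing on the target branch filters to `filtered`.
    Ok(git2::Oid::zero())
}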
59 changes: 14 additions & 45 deletions src/filter_cache.rs
@@ -1,7 +1,7 @@
use std::collections::HashMap;
use std::sync::{Arc, RwLock};

const FORMAT_VERSION: u64 = 5;
const FORMAT_VERSION: u64 = 6;

#[derive(Eq, PartialEq, PartialOrd, Hash, Clone, Copy)]
pub struct JoshOid(git2::Oid);
@@ -11,18 +11,12 @@ pub type OidMap = HashMap<JoshOid, JoshOid>;
lazy_static! {
static ref FORWARD_MAPS: Arc<RwLock<FilterCache>> =
Arc::new(RwLock::new(FilterCache::new("forward".to_owned())));
static ref BACKWARD_MAPS: Arc<RwLock<FilterCache>> =
Arc::new(RwLock::new(FilterCache::new("backward".to_owned())));
}

fn forward() -> Arc<RwLock<FilterCache>> {
FORWARD_MAPS.clone()
}

pub fn backward() -> Arc<RwLock<FilterCache>> {
BACKWARD_MAPS.clone()
}

#[derive(serde::Serialize, serde::Deserialize)]
pub struct FilterCache {
maps: HashMap<String, OidMap>,
@@ -84,6 +78,9 @@ pub fn lookup_forward(
spec: &str,
oid: git2::Oid,
) -> Option<git2::Oid> {
if spec == ":nop" {
return Some(oid);
}
if let Ok(f) = forward().read() {
if f.has(&repo, &spec, oid) {
return Some(f.get(&spec, oid));
@@ -199,15 +196,9 @@ fn try_load(path: &std::path::Path) -> FilterCache {

pub fn load(path: &std::path::Path) {
*(forward().write().unwrap()) = try_load(&path.join("josh_forward_maps"));
*(backward().write().unwrap()) = try_load(&path.join("josh_backward_maps"));
}

pub fn persist(path: &std::path::Path) {
persist_file(
&*super::filter_cache::backward().read().unwrap(),
&path.join("josh_backward_maps"),
)
.ok();
persist_file(
&*super::filter_cache::forward().read().unwrap(),
&path.join("josh_forward_maps"),
@@ -227,32 +218,6 @@ fn persist_file(
return Ok(());
}

fn try_merge_both(fm: &FilterCache, bm: &FilterCache) {
rs_tracing::trace_scoped!("merge");
tracing::span!(tracing::Level::TRACE, "write_lock backward_maps").in_scope(
|| {
backward()
.try_write()
.map(|mut bm_locked| {
tracing::span!(
tracing::Level::TRACE,
"write_lock forward_maps"
)
.in_scope(|| {
forward()
.try_write()
.map(|mut fm_locked| {
bm_locked.merge(&bm);
fm_locked.merge(&fm);
})
.ok();
});
})
.ok();
},
);
}

pub fn new_downstream(u: &Arc<RwLock<FilterCache>>) -> FilterCache {
return FilterCache {
maps: HashMap::new(),
@@ -264,22 +229,17 @@ pub fn new_downstream(u: &Arc<RwLock<FilterCache>>) -> FilterCache {

pub struct Transaction {
fm: FilterCache,
bm: FilterCache,
}

impl Transaction {
pub fn new() -> Transaction {
Transaction {
fm: new_downstream(&super::filter_cache::forward()),
bm: new_downstream(&super::filter_cache::backward()),
}
}

pub fn insert(&mut self, spec: &str, from: git2::Oid, to: git2::Oid) {
self.fm.set(spec, from, to);
if to != git2::Oid::zero() {
self.bm.set(spec, to, from);
}
}

pub fn has(
@@ -298,6 +258,15 @@ impl Transaction {

impl Drop for Transaction {
fn drop(&mut self) {
try_merge_both(&self.fm, &self.bm);
rs_tracing::trace_scoped!("merge");
let s =
tracing::span!(tracing::Level::TRACE, "write_lock forward_maps");
let _e = s.enter();
forward()
.try_write()
.map(|mut fm_locked| {
fm_locked.merge(&self.fm);
})
.ok();
}
}
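
With the backward map gone, a Transaction only accumulates forward mappings and merges them into the global forward cache when it is dropped. A hypothetical usage sketch, assuming the signatures shown in the diff above (the helper name is illustrative, not part of the commit):

fn record_mapping(spec: &str, original: git2::Oid, filtered: git2::Oid) {
    let mut t = super::filter_cache::Transaction::new();
    // Record that `original` filters to `filtered` under `spec`.
    t.insert(spec, original, filtered);
    // `t` goes out of scope here; Drop::drop merges its forward map
    // into the global FORWARD_MAPS under a try_write lock.
}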
73 changes: 62 additions & 11 deletions src/scratch.rs
@@ -45,6 +45,41 @@ pub fn rewrite(
)?);
}

fn find_original(
repo: &git2::Repository,
bm: &mut std::collections::HashMap<git2::Oid, git2::Oid>,
spec: &str,
contained_in: git2::Oid,
filtered: git2::Oid,
) -> super::JoshResult<git2::Oid> {
if contained_in == git2::Oid::zero() {
return Ok(git2::Oid::zero());
}
if let Some(original) = bm.get(&filtered) {
return Ok(*original);
}
if let Some(oid) =
super::filter_cache::lookup_forward(&repo, &spec, contained_in)
{
bm.insert(contained_in, oid);
}
let mut walk = repo.revwalk()?;
walk.set_sorting(git2::Sort::TOPOLOGICAL)?;
walk.push(contained_in)?;

for original in walk {
let original = original?;
if Some(filtered)
== super::filter_cache::lookup_forward(&repo, spec, original)
{
bm.insert(filtered, original);
return Ok(original);
}
}

return Ok(git2::Oid::zero());
}

#[tracing::instrument(skip(repo))]
pub fn unapply_filter(
repo: &git2::Repository,
@@ -53,7 +88,6 @@ pub fn unapply_filter(
old: git2::Oid,
new: git2::Oid,
) -> super::JoshResult<UnapplyFilter> {
let backward_maps = super::filter_cache::backward();
let walk = {
let mut walk = repo.revwalk()?;
walk.set_sorting(git2::Sort::REVERSE | git2::Sort::TOPOLOGICAL)?;
@@ -66,8 +100,9 @@ walk
walk
};

let mut bm = filter_cache::new_downstream(&backward_maps);
let mut ret = bm.get(&filterobj.spec(), new);
let mut bm = std::collections::HashMap::new();
let mut ret =
find_original(&repo, &mut bm, &filterobj.spec(), unfiltered_old, new)?;
for rev in walk {
let rev = rev?;

@@ -76,7 +111,7 @@

let module_commit = repo.find_commit(rev)?;

if bm.has(&repo, &filterobj.spec(), module_commit.id()) {
if bm.contains_key(&module_commit.id()) {
continue;
}

@@ -85,19 +120,35 @@
let original_parents: std::result::Result<Vec<_>, _> =
filtered_parent_ids
.iter()
.map(|x| {
if *x == old {
unfiltered_old
.map(|x| -> super::JoshResult<_> {
find_original(
&repo,
&mut bm,
&filterobj.spec(),
unfiltered_old,
*x,
)
})
.filter(|x| {
if let Ok(i) = x {
*i != git2::Oid::zero()
} else {
bm.get(&filterobj.spec(), *x)
true
}
})
.map(|x| repo.find_commit(x))
.map(|x| -> super::JoshResult<_> { Ok(repo.find_commit(x?)?) })
.collect();

tracing::info!(
"parents: {:?} -> {:?}",
original_parents,
filtered_parent_ids
);

let original_parents = original_parents?;

let original_parents_refs: Vec<&_> = original_parents.iter().collect();
let original_parents_refs: Vec<&git2::Commit> =
original_parents.iter().collect();

let tree = module_commit.tree()?;

@@ -158,7 +209,7 @@

ret =
rewrite(&repo, &module_commit, &original_parents_refs, &new_tree)?;
bm.set(&filterobj.spec(), module_commit.id(), ret);
bm.insert(module_commit.id(), ret);
}

tracing::trace!("done {:?}", ret);
2 changes: 0 additions & 2 deletions tests/proxy/unrelated_leak.t
@@ -74,8 +74,6 @@
* initial
$ git log --graph --pretty=%s origin/from_filtered
* add file4
* add file1
* initial

$ . ${TESTDIR}/destroy_test_env.sh
remote/scratch/refs