Use measureme in self-profiler
Related to #58372
Related to #58967
wesleywiser committed Apr 13, 2019
1 parent 99da733 commit 56e434d
Showing 16 changed files with 198 additions and 464 deletions.
12 changes: 12 additions & 0 deletions Cargo.lock
@@ -1474,6 +1474,16 @@ dependencies = [
"toml-query 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
name = "measureme"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
"memmap 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
name = "memchr"
version = "2.1.1"
@@ -2326,6 +2336,7 @@ dependencies = [
"jobserver 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"measureme 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"polonius-engine 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -4106,6 +4117,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08"
"checksum mdbook 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "90b5a8d7e341ceee5db3882a06078d42661ddcfa2b3687319cc5da76ec4e782f"
"checksum mdbook 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0ba0d44cb4089c741b9a91f3e5218298a40699c2f3a070a85014eed290c60819"
"checksum measureme 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "36bb2b263a6795d352035024d6b30ce465bb79a5e5280d74c3b5f8464c657bcc"
"checksum memchr 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0a3eb002f0535929f1199681417029ebea04aadc0c7a4224b46be99c7f5d6a16"
"checksum memmap 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e2ffa2c986de11a9df78620c01eeaaf27d94d3ff02bf81bfcca953102dd0c6ff"
"checksum memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0f9dc261e2b62d7a622bf416ea3c5245cdd5d9a7fcc428c0d06804dfce1775b3"
1 change: 1 addition & 0 deletions src/librustc/Cargo.toml
@@ -36,6 +36,7 @@ byteorder = { version = "1.1", features = ["i128"]}
chalk-engine = { version = "0.9.0", default-features=false }
rustc_fs_util = { path = "../librustc_fs_util" }
smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
measureme = "0.2.1"

# Note that these dependencies are a lie, they're just here to get linkage to
# work.
26 changes: 17 additions & 9 deletions src/librustc/session/mod.rs
@@ -46,8 +46,6 @@ use std::path::PathBuf;
use std::time::Duration;
use std::sync::{Arc, mpsc};

use parking_lot::Mutex as PlMutex;

mod code_stats;
pub mod config;
pub mod filesearch;
@@ -130,7 +128,7 @@ pub struct Session {
pub profile_channel: Lock<Option<mpsc::Sender<ProfileQueriesMsg>>>,

/// Used by -Z self-profile
pub self_profiling: Option<Arc<PlMutex<SelfProfiler>>>,
pub self_profiling: Option<Arc<SelfProfiler>>,

/// Some measurements that are being gathered during compilation.
pub perf_stats: PerfStats,
@@ -838,19 +836,17 @@ impl Session {

#[inline(never)]
#[cold]
fn profiler_active<F: FnOnce(&mut SelfProfiler) -> ()>(&self, f: F) {
fn profiler_active<F: FnOnce(&SelfProfiler) -> ()>(&self, f: F) {
match &self.self_profiling {
None => bug!("profiler_active() called but there was no profiler active"),
Some(profiler) => {
let mut p = profiler.lock();

f(&mut p);
f(&profiler);
}
}
}

#[inline(always)]
pub fn profiler<F: FnOnce(&mut SelfProfiler) -> ()>(&self, f: F) {
pub fn profiler<F: FnOnce(&SelfProfiler) -> ()>(&self, f: F) {
if unlikely!(self.self_profiling.is_some()) {
self.profiler_active(f)
}
@@ -1138,7 +1134,19 @@ fn build_session_(
driver_lint_caps: FxHashMap<lint::LintId, lint::Level>,
) -> Session {
let self_profiler =
if sopts.debugging_opts.self_profile { Some(Arc::new(PlMutex::new(SelfProfiler::new()))) }
if sopts.debugging_opts.self_profile {
let profiler = SelfProfiler::new();
match profiler {
Ok(profiler) => {
crate::ty::query::QueryName::register_with_profiler(&profiler);
Some(Arc::new(profiler))
},
Err(e) => {
early_warn(sopts.error_format, &format!("failed to create profiler: {}", e));
None
}
}
}
else { None };

let host_triple = TargetTriple::from_triple(config::host_triple());
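The session/mod.rs hunks above drop the `parking_lot` mutex around the profiler: `Session` now holds an `Option<Arc<SelfProfiler>>`, the `profiler`/`profiler_active` helpers take closures over `&SelfProfiler` rather than `&mut SelfProfiler`, and `build_session_` warns and disables profiling if `SelfProfiler::new()` fails. A minimal, self-contained sketch of that shape follows; the `SelfProfiler` stub, its methods, and the query name string are stand-ins for illustration, not rustc's real items.

```rust
use std::sync::Arc;

// Stand-in for the measureme-backed profiler; the real one records trace events.
struct SelfProfiler;

impl SelfProfiler {
    fn new() -> Result<SelfProfiler, String> {
        // The real constructor can fail, e.g. if the trace files cannot be created.
        Ok(SelfProfiler)
    }

    fn record_query_hit(&self, query: &str) {
        // Takes &self, not &mut self, so callers no longer need an outer mutex.
        println!("query cache hit: {}", query);
    }
}

struct Session {
    self_profiling: Option<Arc<SelfProfiler>>,
}

impl Session {
    // Hot path: a cheap Option check; the closure runs only under -Z self-profile.
    #[inline(always)]
    fn profiler<F: FnOnce(&SelfProfiler)>(&self, f: F) {
        if let Some(profiler) = &self.self_profiling {
            f(profiler);
        }
    }
}

fn main() {
    // Mirrors build_session_: warn and fall back to None if creation fails.
    let self_profiling = match SelfProfiler::new() {
        Ok(profiler) => Some(Arc::new(profiler)),
        Err(e) => {
            eprintln!("warning: failed to create profiler: {}", e);
            None
        }
    };

    let sess = Session { self_profiling };
    sess.profiler(|p| p.record_query_hit("typeck_tables_of"));
}
```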
4 changes: 2 additions & 2 deletions src/librustc/ty/query/config.rs
@@ -3,7 +3,7 @@ use crate::dep_graph::DepNode;
use crate::hir::def_id::{CrateNum, DefId};
use crate::ty::TyCtxt;
use crate::ty::query::queries;
use crate::ty::query::Query;
use crate::ty::query::{Query, QueryName};
use crate::ty::query::QueryCache;
use crate::ty::query::plumbing::CycleError;
use crate::util::profiling::ProfileCategory;
@@ -18,7 +18,7 @@ use crate::ich::StableHashingContext;
// Query configuration and description traits.

pub trait QueryConfig<'tcx> {
const NAME: &'static str;
const NAME: QueryName;
const CATEGORY: ProfileCategory;

type Key: Eq + Hash + Clone + Debug;
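The config.rs hunk swaps the `NAME` associated const from a `&'static str` to the new `QueryName` enum, so profiling events can carry a small `Copy` value instead of a string. A small illustrative sketch of that trait shape, with hypothetical query names and a hypothetical query type:

```rust
// Hypothetical subset of query names; rustc generates the real enum via a macro.
#[allow(nonstandard_style)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum QueryName {
    type_of,
    typeck_tables_of,
}

trait QueryConfig {
    // An enum const instead of a &'static str.
    const NAME: QueryName;
}

// Hypothetical query type implementing the trait.
struct TypeOfQuery;

impl QueryConfig for TypeOfQuery {
    const NAME: QueryName = QueryName::type_of;
}

fn main() {
    // The const is available without instantiating the query type.
    println!("{:?}", <TypeOfQuery as QueryConfig>::NAME);
}
```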
1 change: 0 additions & 1 deletion src/librustc/ty/query/mod.rs
@@ -41,7 +41,6 @@ use crate::ty::subst::SubstsRef;
use crate::util::nodemap::{DefIdSet, DefIdMap, ItemLocalSet};
use crate::util::common::{ErrorReported};
use crate::util::profiling::ProfileCategory::*;
use crate::session::Session;

use rustc_data_structures::svh::Svh;
use rustc_data_structures::bit_set::BitSet;
67 changes: 40 additions & 27 deletions src/librustc/ty/query/plumbing.rs
@@ -114,7 +114,7 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> {
let mut lock = cache.borrow_mut();
if let Some(value) = lock.results.get(key) {
profq_msg!(tcx, ProfileQueriesMsg::CacheHit);
tcx.sess.profiler(|p| p.record_query_hit(Q::NAME, Q::CATEGORY));
tcx.sess.profiler(|p| p.record_query_hit(Q::NAME));
let result = (value.value.clone(), value.index);
#[cfg(debug_assertions)]
{
@@ -130,7 +130,7 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> {
//in another thread has completed. Record how long we wait in the
//self-profiler
#[cfg(parallel_compiler)]
tcx.sess.profiler(|p| p.query_blocked_start(Q::NAME, Q::CATEGORY));
tcx.sess.profiler(|p| p.query_blocked_start(Q::NAME));

job.clone()
},
@@ -172,7 +172,7 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> {
#[cfg(parallel_compiler)]
{
let result = job.r#await(tcx, span);
tcx.sess.profiler(|p| p.query_blocked_end(Q::NAME, Q::CATEGORY));
tcx.sess.profiler(|p| p.query_blocked_end(Q::NAME));

if let Err(cycle) = result {
return TryGetJob::Cycle(Q::handle_cycle_error(tcx, cycle));
@@ -358,14 +358,14 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
key: Q::Key)
-> Q::Value {
debug!("ty::query::get_query<{}>(key={:?}, span={:?})",
Q::NAME,
Q::NAME.as_str(),
key,
span);

profq_msg!(self,
ProfileQueriesMsg::QueryBegin(
span.data(),
profq_query_msg!(Q::NAME, self, key),
profq_query_msg!(Q::NAME.as_str(), self, key),
)
);

@@ -389,7 +389,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {

if dep_node.kind.is_anon() {
profq_msg!(self, ProfileQueriesMsg::ProviderBegin);
self.sess.profiler(|p| p.start_query(Q::NAME, Q::CATEGORY));
self.sess.profiler(|p| p.start_query(Q::NAME));

let ((result, dep_node_index), diagnostics) = with_diagnostics(|diagnostics| {
self.start_query(job.job.clone(), diagnostics, |tcx| {
Expand All @@ -399,7 +399,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
})
});

self.sess.profiler(|p| p.end_query(Q::NAME, Q::CATEGORY));
self.sess.profiler(|p| p.end_query(Q::NAME));
profq_msg!(self, ProfileQueriesMsg::ProviderEnd);

self.dep_graph.read_index(dep_node_index);
@@ -474,22 +474,22 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {

let result = if let Some(result) = result {
profq_msg!(self, ProfileQueriesMsg::CacheHit);
self.sess.profiler(|p| p.record_query_hit(Q::NAME, Q::CATEGORY));
self.sess.profiler(|p| p.record_query_hit(Q::NAME));

result
} else {
// We could not load a result from the on-disk cache, so
// recompute.

self.sess.profiler(|p| p.start_query(Q::NAME, Q::CATEGORY));
self.sess.profiler(|p| p.start_query(Q::NAME));

// The dep-graph for this computation is already in
// place
let result = self.dep_graph.with_ignore(|| {
Q::compute(self, key)
});

self.sess.profiler(|p| p.end_query(Q::NAME, Q::CATEGORY));
self.sess.profiler(|p| p.end_query(Q::NAME));
result
};

@@ -552,7 +552,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
key, dep_node);

profq_msg!(self, ProfileQueriesMsg::ProviderBegin);
self.sess.profiler(|p| p.start_query(Q::NAME, Q::CATEGORY));
self.sess.profiler(|p| p.start_query(Q::NAME));

let ((result, dep_node_index), diagnostics) = with_diagnostics(|diagnostics| {
self.start_query(job.job.clone(), diagnostics, |tcx| {
@@ -572,7 +572,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
})
});

self.sess.profiler(|p| p.end_query(Q::NAME, Q::CATEGORY));
self.sess.profiler(|p| p.end_query(Q::NAME));
profq_msg!(self, ProfileQueriesMsg::ProviderEnd);

if unlikely!(self.sess.opts.debugging_opts.query_dep_graph) {
@@ -619,7 +619,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
let _ = self.get_query::<Q>(DUMMY_SP, key);
} else {
profq_msg!(self, ProfileQueriesMsg::CacheHit);
self.sess.profiler(|p| p.record_query_hit(Q::NAME, Q::CATEGORY));
self.sess.profiler(|p| p.record_query_hit(Q::NAME));
}
}

@@ -632,7 +632,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
) {
profq_msg!(
self,
ProfileQueriesMsg::QueryBegin(span.data(), profq_query_msg!(Q::NAME, self, key))
ProfileQueriesMsg::QueryBegin(span.data(),
profq_query_msg!(Q::NAME.as_str(), self, key))
);

// We may be concurrently trying both execute and force a query
@@ -725,18 +726,6 @@ macro_rules! define_queries_inner {
}
}

pub fn record_computed_queries(&self, sess: &Session) {
sess.profiler(|p| {
$(
p.record_computed_queries(
<queries::$name<'_> as QueryConfig<'_>>::NAME,
<queries::$name<'_> as QueryConfig<'_>>::CATEGORY,
self.$name.lock().results.len()
);
)*
});
}

#[cfg(parallel_compiler)]
pub fn collect_active_jobs(&self) -> Vec<Lrc<QueryJob<$tcx>>> {
let mut jobs = Vec::new();
@@ -854,6 +843,24 @@ macro_rules! define_queries_inner {
}
}

#[allow(nonstandard_style)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum QueryName {
$($name),*
}

impl QueryName {
pub fn register_with_profiler(profiler: &crate::util::profiling::SelfProfiler) {
$(profiler.register_query_name(QueryName::$name);)*
}

pub fn as_str(&self) -> &'static str {
match self {
$(QueryName::$name => stringify!($name),)*
}
}
}

#[allow(nonstandard_style)]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum Query<$tcx> {
@@ -894,6 +901,12 @@ macro_rules! define_queries_inner {
$(Query::$name(key) => key.default_span(tcx),)*
}
}

pub fn query_name(&self) -> QueryName {
match self {
$(Query::$name(_) => QueryName::$name,)*
}
}
}

impl<'a, $tcx> HashStable<StableHashingContext<'a>> for Query<$tcx> {
@@ -930,7 +943,7 @@ macro_rules! define_queries_inner {
type Key = $K;
type Value = $V;

const NAME: &'static str = stringify!($name);
const NAME: QueryName = QueryName::$name;
const CATEGORY: ProfileCategory = $category;
}

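The plumbing.rs changes have `define_queries_inner!` generate a `QueryName` enum with one variant per query, plus `as_str()` and `register_with_profiler()`, and the profiling hooks (`start_query`, `end_query`, `record_query_hit`, `query_blocked_start/end`) now take that enum instead of a name-and-category pair. Below is a simplified, self-contained sketch of that macro pattern; the profiler stub and the query names are illustrative only, not rustc's real list.

```rust
// Stand-in for the measureme-backed profiler.
struct SelfProfiler;

impl SelfProfiler {
    fn register_query_name(&self, name: QueryName) {
        // The real profiler interns the name once, up front, so later events only
        // need to carry the small enum value.
        println!("registering query name: {}", name.as_str());
    }
}

// Simplified version of the define_queries_inner! expansion shown above.
macro_rules! define_query_names {
    ($($name:ident),*) => {
        #[allow(nonstandard_style)]
        #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
        pub enum QueryName {
            $($name),*
        }

        impl QueryName {
            pub fn register_with_profiler(profiler: &SelfProfiler) {
                // Visit every variant once at profiler startup.
                $(profiler.register_query_name(QueryName::$name);)*
            }

            pub fn as_str(&self) -> &'static str {
                match self {
                    $(QueryName::$name => stringify!($name),)*
                }
            }
        }
    };
}

// Hypothetical query names used only for this illustration.
define_query_names!(type_of, typeck_tables_of, optimized_mir);

fn main() {
    let profiler = SelfProfiler;
    QueryName::register_with_profiler(&profiler);
    println!("{}", QueryName::optimized_mir.as_str()); // prints "optimized_mir"
}
```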
