From bc682132ef1f4fd65f23e34209d05f99c9c014c7 Mon Sep 17 00:00:00 2001 From: James Barford-Evans Date: Mon, 10 Mar 2025 15:22:25 +0000 Subject: [PATCH 1/7] Add `compiler_target` column, defaulting to `x86_64-unknown-linux-gnu` --- collector/src/bin/collector.rs | 9 ++++++ .../src/compile/benchmark/compiler_target.rs | 8 +++++ collector/src/compile/benchmark/mod.rs | 30 ++++++++++++------- collector/src/compile/execute/bencher.rs | 5 +++- collector/src/compile/execute/mod.rs | 8 ++++- database/schema.md | 7 +++-- database/src/bin/import-sqlite.rs | 3 +- database/src/lib.rs | 12 ++++---- database/src/pool.rs | 1 + database/src/pool/postgres.rs | 22 +++++++++----- database/src/pool/sqlite.rs | 28 ++++++++++++++--- database/src/selector.rs | 13 +++++--- 12 files changed, 109 insertions(+), 37 deletions(-) create mode 100644 collector/src/compile/benchmark/compiler_target.rs diff --git a/collector/src/bin/collector.rs b/collector/src/bin/collector.rs index e9aae52c2..be17c98d9 100644 --- a/collector/src/bin/collector.rs +++ b/collector/src/bin/collector.rs @@ -35,6 +35,7 @@ use collector::artifact_stats::{ use collector::codegen::{codegen_diff, CodegenType}; use collector::compile::benchmark::category::Category; use collector::compile::benchmark::codegen_backend::CodegenBackend; +use collector::compile::benchmark::compiler_target::CompilerTarget; use collector::compile::benchmark::profile::Profile; use collector::compile::benchmark::scenario::Scenario; use collector::compile::benchmark::{ @@ -99,6 +100,7 @@ struct CompileBenchmarkConfig { iterations: Option, is_self_profile: bool, bench_rustc: bool, + compiler_targets: Vec, } struct RuntimeBenchmarkConfig { @@ -200,6 +202,7 @@ fn profile_compile( scenarios: &[Scenario], backends: &[CodegenBackend], errors: &mut BenchmarkErrors, + compiler_targets: &[CompilerTarget] ) { eprintln!("Profiling {} with {:?}", toolchain.id, profiler); if let Profiler::SelfProfile = profiler { @@ -220,6 +223,7 @@ fn profile_compile( backends, toolchain, Some(1), + compiler_targets, )); eprintln!("Finished benchmark {benchmark_id}"); @@ -910,6 +914,7 @@ fn main_result() -> anyhow::Result { iterations: Some(iterations), is_self_profile: self_profile.self_profile, bench_rustc: bench_rustc.bench_rustc, + compiler_targets: vec![CompilerTarget::default()], }; run_benchmarks(&mut rt, conn, shared, Some(config), None)?; @@ -1024,6 +1029,7 @@ fn main_result() -> anyhow::Result { iterations: runs.map(|v| v as usize), is_self_profile: self_profile.self_profile, bench_rustc: bench_rustc.bench_rustc, + compiler_targets: vec![CompilerTarget::default()], }; let runtime_suite = rt.block_on(load_runtime_benchmarks( conn.as_mut(), @@ -1136,6 +1142,7 @@ fn main_result() -> anyhow::Result { scenarios, backends, &mut errors, + &[CompilerTarget::default()], ); Ok(id) }; @@ -1734,6 +1741,7 @@ fn bench_published_artifact( iterations: Some(3), is_self_profile: false, bench_rustc: false, + compiler_targets: vec![CompilerTarget::default()], }), Some(RuntimeBenchmarkConfig::new( runtime_suite, @@ -1834,6 +1842,7 @@ fn bench_compile( &config.backends, &shared.toolchain, config.iterations, + &config.compiler_targets, ))) .with_context(|| anyhow::anyhow!("Cannot compile {}", benchmark.name)) }, diff --git a/collector/src/compile/benchmark/compiler_target.rs b/collector/src/compile/benchmark/compiler_target.rs new file mode 100644 index 000000000..13e7c1979 --- /dev/null +++ b/collector/src/compile/benchmark/compiler_target.rs @@ -0,0 +1,8 @@ +#[derive(Clone, Debug, Eq, Hash, PartialEq, 
serde::Deserialize)] +pub struct CompilerTarget(pub String); + +impl Default for CompilerTarget { + fn default() -> Self { + return Self("x86_64-unknown-linux-gnu".to_string()) + } +} diff --git a/collector/src/compile/benchmark/mod.rs b/collector/src/compile/benchmark/mod.rs index 1f31ef70d..a4cbc8084 100644 --- a/collector/src/compile/benchmark/mod.rs +++ b/collector/src/compile/benchmark/mod.rs @@ -3,6 +3,7 @@ use crate::compile::benchmark::codegen_backend::CodegenBackend; use crate::compile::benchmark::patch::Patch; use crate::compile::benchmark::profile::Profile; use crate::compile::benchmark::scenario::Scenario; +use crate::compile::benchmark::compiler_target::CompilerTarget; use crate::compile::execute::{CargoProcess, Processor}; use crate::toolchain::Toolchain; use crate::utils::wait_for_future; @@ -20,6 +21,7 @@ pub mod codegen_backend; pub(crate) mod patch; pub mod profile; pub mod scenario; +pub mod compiler_target; fn default_runs() -> usize { 3 @@ -180,6 +182,7 @@ impl Benchmark { cwd: &'a Path, profile: Profile, backend: CodegenBackend, + compiler_target: &'a CompilerTarget, ) -> CargoProcess<'a> { let mut cargo_args = self .config @@ -220,6 +223,7 @@ impl Benchmark { .collect(), touch_file: self.config.touch_file.clone(), jobserver: None, + compiler_target, } } @@ -232,6 +236,7 @@ impl Benchmark { backends: &[CodegenBackend], toolchain: &Toolchain, iterations: Option, + compiler_targets: &[CompilerTarget] ) -> anyhow::Result<()> { if self.config.disabled { eprintln!("Skipping {}: disabled", self.name); @@ -263,10 +268,12 @@ impl Benchmark { } eprintln!("Preparing {}", self.name); - let mut target_dirs: Vec<((CodegenBackend, Profile), TempDir)> = vec![]; + let mut target_dirs: Vec<((CodegenBackend, Profile, &CompilerTarget), TempDir)> = vec![]; for backend in backends { for profile in &profiles { - target_dirs.push(((*backend, *profile), self.make_temp_dir(&self.path)?)); + for compiler_target in compiler_targets { + target_dirs.push(((*backend, *profile, &compiler_target), self.make_temp_dir(&self.path)?)); + } } } @@ -304,12 +311,12 @@ impl Benchmark { ) .context("jobserver::new")?; let mut threads = Vec::with_capacity(target_dirs.len()); - for ((backend, profile), prep_dir) in &target_dirs { + for ((backend, profile, compiler_target), prep_dir) in &target_dirs { let server = server.clone(); let thread = s.spawn::<_, anyhow::Result<()>>(move || { wait_for_future(async move { let server = server.clone(); - self.mk_cargo_process(toolchain, prep_dir.path(), *profile, *backend) + self.mk_cargo_process(toolchain, prep_dir.path(), *profile, *backend, compiler_target) .jobserver(server) .run_rustc(false) .await?; @@ -343,12 +350,13 @@ impl Benchmark { let mut timing_dirs: Vec> = vec![]; let benchmark_start = std::time::Instant::now(); - for ((backend, profile), prep_dir) in &target_dirs { + for ((backend, profile, compiler_target), prep_dir) in &target_dirs { let backend = *backend; let profile = *profile; + let compiler_target = *compiler_target; eprintln!( - "Running {}: {:?} + {:?} + {:?}", - self.name, profile, scenarios, backend + "Running {}: {:?} + {:?} + {:?} + {:?}", + self.name, profile, scenarios, backend, compiler_target, ); // We want at least two runs for all benchmarks (since we run @@ -370,7 +378,7 @@ impl Benchmark { // A full non-incremental build. 
if scenarios.contains(&Scenario::Full) { - self.mk_cargo_process(toolchain, cwd, profile, backend) + self.mk_cargo_process(toolchain, cwd, profile, backend, &compiler_target) .processor(processor, Scenario::Full, "Full", None) .run_rustc(true) .await?; @@ -381,7 +389,7 @@ impl Benchmark { // An incremental from scratch (slowest incremental case). // This is required for any subsequent incremental builds. if scenarios.iter().any(|s| s.is_incr()) { - self.mk_cargo_process(toolchain, cwd, profile, backend) + self.mk_cargo_process(toolchain, cwd, profile, backend, &compiler_target) .incremental(true) .processor(processor, Scenario::IncrFull, "IncrFull", None) .run_rustc(true) @@ -390,7 +398,7 @@ impl Benchmark { // An incremental build with no changes (fastest incremental case). if scenarios.contains(&Scenario::IncrUnchanged) { - self.mk_cargo_process(toolchain, cwd, profile, backend) + self.mk_cargo_process(toolchain, cwd, profile, backend, &compiler_target) .incremental(true) .processor(processor, Scenario::IncrUnchanged, "IncrUnchanged", None) .run_rustc(true) @@ -405,7 +413,7 @@ impl Benchmark { // An incremental build with some changes (realistic // incremental case). let scenario_str = format!("IncrPatched{}", i); - self.mk_cargo_process(toolchain, cwd, profile, backend) + self.mk_cargo_process(toolchain, cwd, profile, backend, &compiler_target) .incremental(true) .processor( processor, diff --git a/collector/src/compile/execute/bencher.rs b/collector/src/compile/execute/bencher.rs index d76759846..25d1a462c 100644 --- a/collector/src/compile/execute/bencher.rs +++ b/collector/src/compile/execute/bencher.rs @@ -1,6 +1,7 @@ use crate::compile::benchmark::codegen_backend::CodegenBackend; use crate::compile::benchmark::profile::Profile; use crate::compile::benchmark::scenario::Scenario; +use crate::compile::benchmark::compiler_target::CompilerTarget; use crate::compile::benchmark::BenchmarkName; use crate::compile::execute; use crate::compile::execute::{ @@ -91,6 +92,7 @@ impl<'a> BenchProcessor<'a> { scenario: database::Scenario, profile: database::Profile, backend: CodegenBackend, + compiler_target: &CompilerTarget, stats: Stats, ) { let backend = match backend { @@ -109,6 +111,7 @@ impl<'a> BenchProcessor<'a> { backend, stat, value, + &compiler_target.0, )); } @@ -199,7 +202,7 @@ impl Processor for BenchProcessor<'_> { res.0.stats.retain(|key, _| key.starts_with("size:")); } - self.insert_stats(collection, scenario, profile, data.backend, res.0) + self.insert_stats(collection, scenario, profile, data.backend, data.compiler_target, res.0) .await; Ok(Retry::No) diff --git a/collector/src/compile/execute/mod.rs b/collector/src/compile/execute/mod.rs index c43fb02d6..0e884b5b8 100644 --- a/collector/src/compile/execute/mod.rs +++ b/collector/src/compile/execute/mod.rs @@ -4,6 +4,7 @@ use crate::compile::benchmark::codegen_backend::CodegenBackend; use crate::compile::benchmark::patch::Patch; use crate::compile::benchmark::profile::Profile; use crate::compile::benchmark::scenario::Scenario; +use crate::compile::benchmark::compiler_target::CompilerTarget; use crate::compile::benchmark::BenchmarkName; use crate::toolchain::Toolchain; use crate::utils::fs::EnsureImmutableFile; @@ -22,6 +23,7 @@ use std::process::{self, Command}; use std::str; use std::sync::LazyLock; + pub mod bencher; mod etw_parser; pub mod profiler; @@ -129,6 +131,7 @@ pub struct CargoProcess<'a> { pub rustc_args: Vec, pub touch_file: Option, pub jobserver: Option, + pub compiler_target: &'a CompilerTarget, } /// Returns an 
optional list of Performance CPU cores, if the system has P and E cores. /// This list *should* be in a format suitable for the `taskset` command. @@ -273,12 +276,13 @@ impl<'a> CargoProcess<'a> { // really. pub async fn run_rustc(&mut self, needs_final: bool) -> anyhow::Result<()> { log::info!( - "run_rustc with incremental={}, profile={:?}, scenario={:?}, patch={:?}, backend={:?}, phase={}", + "run_rustc with incremental={}, profile={:?}, scenario={:?}, patch={:?}, backend={:?}, compiler_target={:?}, phase={}", self.incremental, self.profile, self.processor_etc.as_ref().map(|v| v.1), self.processor_etc.as_ref().and_then(|v| v.3), self.backend, + self.compiler_target, if needs_final { "benchmark" } else { "dependencies" } ); @@ -420,6 +424,7 @@ impl<'a> CargoProcess<'a> { scenario_str, patch, backend: self.backend, + compiler_target: self.compiler_target, }; match processor.process_output(&data, output).await { Ok(Retry::No) => return Ok(()), @@ -484,6 +489,7 @@ pub struct ProcessOutputData<'a> { scenario_str: &'a str, patch: Option<&'a Patch>, backend: CodegenBackend, + compiler_target: &'a CompilerTarget, } /// Trait used by `Benchmark::measure()` to provide different kinds of diff --git a/database/schema.md b/database/schema.md index a51df4b60..7395ffe7b 100644 --- a/database/schema.md +++ b/database/schema.md @@ -35,6 +35,7 @@ Here is the diagram for compile-time benchmarks: │ scenario │ │ cid │ │ │ backend │ │ value ├───┘ │ metric │ └──────────┘ + │ compiler_tar..│ └───────────────┘ ``` @@ -151,9 +152,9 @@ many times in the `pstat` table. ``` sqlite> select * from pstat_series limit 1; -id crate profile scenario backend metric ----------- ---------- ---------- ---------- ------- ------------ -1 helloworld check full llvm task-clock:u +id crate profile scenario backend metric compiler_target +---------- ---------- ---------- ---------- ------- ------------ ------------ +1 helloworld check full llvm task-clock:u x86_64-linux-unknown-gnu ``` ### pstat diff --git a/database/src/bin/import-sqlite.rs b/database/src/bin/import-sqlite.rs index cc060ab8d..58bd73d08 100644 --- a/database/src/bin/import-sqlite.rs +++ b/database/src/bin/import-sqlite.rs @@ -45,7 +45,7 @@ async fn main() { let sqlite_aid = sqlite_conn.artifact_id(&aid).await; let postgres_aid = postgres_conn.artifact_id(&aid).await; - for (&(benchmark, profile, scenario, backend, metric), id) in + for (&(benchmark, profile, scenario, backend, metric, ref compiler_target), id) in sqlite_idx.compile_statistic_descriptions() { if benchmarks.insert(benchmark) { @@ -76,6 +76,7 @@ async fn main() { backend, metric.as_str(), stat, + &compiler_target, ) .await; } diff --git a/database/src/lib.rs b/database/src/lib.rs index 21e3c2fbe..ba3f97a14 100644 --- a/database/src/lib.rs +++ b/database/src/lib.rs @@ -472,7 +472,7 @@ pub struct Index { artifacts: Indexed>, /// Id lookup of compile stat description ids /// For legacy reasons called `pstat_series` in the database, and so the name is kept here. 
- pstat_series: Indexed<(Benchmark, Profile, Scenario, CodegenBackend, Metric)>, + pstat_series: Indexed<(Benchmark, Profile, Scenario, CodegenBackend, Metric, String)>, /// Id lookup of runtime stat description ids runtime_pstat_series: Indexed<(Benchmark, Metric)>, } @@ -586,7 +586,7 @@ mod index_serde { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub enum DbLabel { StatisticDescription { benchmark: Benchmark, @@ -594,6 +594,7 @@ pub enum DbLabel { scenario: Scenario, backend: CodegenBackend, metric: Metric, + compiler_target: String, }, } @@ -612,9 +613,10 @@ impl Lookup for DbLabel { scenario, backend, metric, + compiler_target, } => index .pstat_series - .get(&(*benchmark, *profile, *scenario, *backend, *metric)), + .get(&(*benchmark, *profile, *scenario, *backend, *metric, compiler_target.to_string())), } } } @@ -664,7 +666,7 @@ impl Index { self.pstat_series .map .keys() - .map(|(_, _, _, _, metric)| metric) + .map(|(_, _, _, _, metric, _)| metric) .collect::>() .into_iter() .map(|s| s.to_string()) @@ -690,7 +692,7 @@ impl Index { &self, ) -> impl Iterator< Item = ( - &(Benchmark, Profile, Scenario, CodegenBackend, Metric), + &(Benchmark, Profile, Scenario, CodegenBackend, Metric, String), StatisticalDescriptionId, ), > + '_ { diff --git a/database/src/pool.rs b/database/src/pool.rs index 61439c927..6be83c035 100644 --- a/database/src/pool.rs +++ b/database/src/pool.rs @@ -47,6 +47,7 @@ pub trait Connection: Send + Sync { backend: CodegenBackend, metric: &str, value: f64, + compiler_target: &str, ); async fn record_runtime_statistic( &self, diff --git a/database/src/pool/postgres.rs b/database/src/pool/postgres.rs index 29355eb82..e9dda5f24 100644 --- a/database/src/pool/postgres.rs +++ b/database/src/pool/postgres.rs @@ -278,7 +278,12 @@ static MIGRATIONS: &[&str] = &[ alter table pstat_series drop constraint pstat_series_crate_profile_cache_statistic_key; alter table pstat_series add constraint test_case UNIQUE(crate, profile, scenario, backend, metric); "#, - r#"alter table pull_request_build add column backends text;"#, + // A pstat series shows 1 compiler_target + r#" + alter table pstat_series add compiler_target text not null default 'x86_64-unknown-linux-gnu'; + alter table pstat_series drop constraint test_case; + alter table pstat_series add constraint test_case UNIQUE(crate, profile, scenario, backend, compiler_target, metric); + "#, ]; #[async_trait::async_trait] @@ -466,8 +471,8 @@ impl PostgresConnection { .await .unwrap(), get_error: conn.prepare("select benchmark, error from error where aid = $1").await.unwrap(), - insert_pstat_series: conn.prepare("insert into pstat_series (crate, profile, scenario, backend, metric) VALUES ($1, $2, $3, $4, $5) ON CONFLICT DO NOTHING RETURNING id").await.unwrap(), - select_pstat_series: conn.prepare("select id from pstat_series where crate = $1 and profile = $2 and scenario = $3 and backend = $4 and metric = $5").await.unwrap(), + insert_pstat_series: conn.prepare("insert into pstat_series (crate, profile, scenario, backend, metric, compiler_target) VALUES ($1, $2, $3, $4, $5, $6) ON CONFLICT DO NOTHING RETURNING id").await.unwrap(), + select_pstat_series: conn.prepare("select id from pstat_series where crate = $1 and profile = $2 and scenario = $3 and backend = $4 and metric = $5 and compiler_target = $6").await.unwrap(), collection_id: conn.prepare("insert into collection (perf_commit) VALUES ($1) returning id").await.unwrap(), record_duration: conn.prepare(" insert into 
artifact_collection_duration ( @@ -617,7 +622,7 @@ where pstat_series: self .conn() .query( - "select id, crate, profile, scenario, backend, metric from pstat_series;", + "select id, crate, profile, scenario, backend, metric, compiler_target, from pstat_series;", &[], ) .await @@ -632,6 +637,7 @@ where row.get::<_, String>(3).as_str().parse().unwrap(), CodegenBackend::from_str(row.get::<_, String>(4).as_str()).unwrap(), row.get::<_, String>(5).as_str().into(), + row.get::<_, String>(6).as_str().parse().unwrap(), ), ) }) @@ -831,15 +837,17 @@ where backend: CodegenBackend, metric: &str, stat: f64, + compiler_target: &str, ) { let profile = profile.to_string(); let scenario = scenario.to_string(); let backend = backend.to_string(); + let compiler_target = compiler_target.to_string(); let sid = self .conn() .query_opt( &self.statements().select_pstat_series, - &[&benchmark, &profile, &scenario, &backend, &metric], + &[&benchmark, &profile, &scenario, &backend, &metric, &compiler_target], ) .await .unwrap(); @@ -849,14 +857,14 @@ where self.conn() .query_opt( &self.statements().insert_pstat_series, - &[&benchmark, &profile, &scenario, &backend, &metric], + &[&benchmark, &profile, &scenario, &backend, &metric, &compiler_target], ) .await .unwrap(); self.conn() .query_one( &self.statements().select_pstat_series, - &[&benchmark, &profile, &scenario, &backend, &metric], + &[&benchmark, &profile, &scenario, &backend, &metric, &compiler_target], ) .await .unwrap() diff --git a/database/src/pool/sqlite.rs b/database/src/pool/sqlite.rs index 9ed540020..fb32cbd0a 100644 --- a/database/src/pool/sqlite.rs +++ b/database/src/pool/sqlite.rs @@ -385,7 +385,23 @@ static MIGRATIONS: &[Migration] = &[ alter table pstat_series_new rename to pstat_series; "#, ), - Migration::new("alter table pull_request_build add column backends text"), + // Add compiler_target as a unique constraint, defaulting to 'x86_64-unknown-linux-gnu' + Migration::without_foreign_key_constraints( + r#" + create table pstat_series_with_compiler_target( + id integer primary key not null, + crate text not null references benchmark(name) on delete cascade on update cascade, + profile text not null, + scenario text not null, + backend text not null, + metric text not null, + compiler_target text not null default 'x86_64-unknown-linux-gnu', + UNIQUE(crate, profile, scenario, backend, compiler_target, metric) + ); + insert into pstat_series_with_compiler_target select id, crate, profile, scenario, backend, metric, 'x86_64-unknown-linux-gnu' from pstat_series; + drop table pstat_series; + alter table pstat_series_with_compiler_target rename to pstat_series; + "#), ]; #[async_trait::async_trait] @@ -501,7 +517,7 @@ impl Connection for SqliteConnection { .collect(); let pstat_series = self .raw() - .prepare("select id, crate, profile, scenario, backend, metric from pstat_series;") + .prepare("select id, crate, profile, scenario, backend, metric, compiler_target from pstat_series;") .unwrap() .query_map(params![], |row| { Ok(( @@ -512,6 +528,7 @@ impl Connection for SqliteConnection { row.get::<_, String>(3)?.as_str().parse().unwrap(), CodegenBackend::from_str(row.get::<_, String>(4)?.as_str()).unwrap(), row.get::<_, String>(5)?.as_str().into(), + row.get::<_, String>(6)?.as_str().into(), ), )) }) @@ -656,23 +673,26 @@ impl Connection for SqliteConnection { backend: CodegenBackend, metric: &str, value: f64, + compiler_target: &str, ) { let profile = profile.to_string(); let scenario = scenario.to_string(); let backend = backend.to_string(); - 
self.raw_ref().execute("insert or ignore into pstat_series (crate, profile, scenario, backend, metric) VALUES (?, ?, ?, ?, ?)", params![ + self.raw_ref().execute("insert or ignore into pstat_series (crate, profile, scenario, backend, metric, compiler_target) VALUES (?, ?, ?, ?, ?, ?)", params![ &benchmark, &profile, &scenario, &backend, &metric, + compiler_target, ]).unwrap(); - let sid: i32 = self.raw_ref().query_row("select id from pstat_series where crate = ? and profile = ? and scenario = ? and backend = ? and metric = ?", params![ + let sid: i32 = self.raw_ref().query_row("select id from pstat_series where crate = ? and profile = ? and scenario = ? and backend = ? and metric = ? and compiler_target = ?", params![ &benchmark, &profile, &scenario, &backend, &metric, + compiler_target, ], |r| r.get(0)).unwrap(); self.raw_ref() .execute( diff --git a/database/src/selector.rs b/database/src/selector.rs index 6069989ef..eb8a2ec11 100644 --- a/database/src/selector.rs +++ b/database/src/selector.rs @@ -28,8 +28,7 @@ use std::{ }; use crate::{ - interpolate::Interpolate, metric::Metric, ArtifactId, ArtifactIdIter, Benchmark, - CodegenBackend, Connection, Index, Lookup, Profile, Scenario, + interpolate::Interpolate, metric::Metric, ArtifactId, ArtifactIdIter, Benchmark, CodegenBackend, Connection, Index, Lookup, Profile, Scenario }; #[derive(Debug)] @@ -193,6 +192,7 @@ pub struct CompileBenchmarkQuery { profile: Selector, backend: Selector, metric: Selector, + compiler_target: Selector, } impl CompileBenchmarkQuery { @@ -223,6 +223,7 @@ impl CompileBenchmarkQuery { scenario: Selector::All, backend: Selector::All, metric: Selector::One(metric.as_str().into()), + compiler_target: Selector::All, } } } @@ -235,6 +236,7 @@ impl Default for CompileBenchmarkQuery { profile: Selector::All, backend: Selector::All, metric: Selector::All, + compiler_target: Selector::All, } } } @@ -250,20 +252,22 @@ impl BenchmarkQuery for CompileBenchmarkQuery { ) -> Result>, String> { let mut statistic_descriptions: Vec<_> = index .compile_statistic_descriptions() - .filter(|(&(b, p, s, backend, m), _)| { + .filter(|(&(b, p, s, backend, m, ref compiler_target), _)| { self.benchmark.matches(b) && self.profile.matches(p) && self.scenario.matches(s) && self.backend.matches(backend) && self.metric.matches(m) + && self.compiler_target.matches(compiler_target.to_string()) }) - .map(|(&(benchmark, profile, scenario, backend, metric), sid)| { + .map(|(&(benchmark, profile, scenario, backend, metric, ref compiler_target), sid)| { ( CompileTestCase { benchmark, profile, scenario, backend, + compiler_target: compiler_target.to_string(), }, metric, sid, @@ -318,6 +322,7 @@ pub struct CompileTestCase { pub profile: Profile, pub scenario: Scenario, pub backend: CodegenBackend, + pub compiler_target: String, } impl TestCase for CompileTestCase {} From 7cb728f10ecd6cc841ec403d0c6d823cc090be89 Mon Sep 17 00:00:00 2001 From: James Barford-Evans Date: Wed, 12 Mar 2025 14:01:33 +0000 Subject: [PATCH 2/7] feedback - renaming and enum creation --- collector/src/bin/collector.rs | 18 ++++---- .../src/compile/benchmark/compiler_target.rs | 8 ---- collector/src/compile/benchmark/mod.rs | 34 +++++++-------- collector/src/compile/benchmark/target.rs | 10 +++++ collector/src/compile/execute/bencher.rs | 12 ++++-- collector/src/compile/execute/mod.rs | 12 +++--- database/schema.md | 2 +- database/src/bin/import-sqlite.rs | 4 +- database/src/lib.rs | 41 ++++++++++++++++--- database/src/pool.rs | 4 +- database/src/pool/postgres.rs | 26 ++++++------ 
database/src/pool/sqlite.rs | 29 ++++++------- database/src/selector.rs | 16 ++++---- 13 files changed, 127 insertions(+), 89 deletions(-) delete mode 100644 collector/src/compile/benchmark/compiler_target.rs create mode 100644 collector/src/compile/benchmark/target.rs diff --git a/collector/src/bin/collector.rs b/collector/src/bin/collector.rs index be17c98d9..dba4c1770 100644 --- a/collector/src/bin/collector.rs +++ b/collector/src/bin/collector.rs @@ -35,7 +35,7 @@ use collector::artifact_stats::{ use collector::codegen::{codegen_diff, CodegenType}; use collector::compile::benchmark::category::Category; use collector::compile::benchmark::codegen_backend::CodegenBackend; -use collector::compile::benchmark::compiler_target::CompilerTarget; +use collector::compile::benchmark::target::Target; use collector::compile::benchmark::profile::Profile; use collector::compile::benchmark::scenario::Scenario; use collector::compile::benchmark::{ @@ -100,7 +100,7 @@ struct CompileBenchmarkConfig { iterations: Option, is_self_profile: bool, bench_rustc: bool, - compiler_targets: Vec, + targets: Vec, } struct RuntimeBenchmarkConfig { @@ -202,7 +202,7 @@ fn profile_compile( scenarios: &[Scenario], backends: &[CodegenBackend], errors: &mut BenchmarkErrors, - compiler_targets: &[CompilerTarget] + targets: &[Target] ) { eprintln!("Profiling {} with {:?}", toolchain.id, profiler); if let Profiler::SelfProfile = profiler { @@ -223,7 +223,7 @@ fn profile_compile( backends, toolchain, Some(1), - compiler_targets, + targets, )); eprintln!("Finished benchmark {benchmark_id}"); @@ -914,7 +914,7 @@ fn main_result() -> anyhow::Result { iterations: Some(iterations), is_self_profile: self_profile.self_profile, bench_rustc: bench_rustc.bench_rustc, - compiler_targets: vec![CompilerTarget::default()], + targets: vec![Target::default()], }; run_benchmarks(&mut rt, conn, shared, Some(config), None)?; @@ -1029,7 +1029,7 @@ fn main_result() -> anyhow::Result { iterations: runs.map(|v| v as usize), is_self_profile: self_profile.self_profile, bench_rustc: bench_rustc.bench_rustc, - compiler_targets: vec![CompilerTarget::default()], + targets: vec![Target::default()], }; let runtime_suite = rt.block_on(load_runtime_benchmarks( conn.as_mut(), @@ -1142,7 +1142,7 @@ fn main_result() -> anyhow::Result { scenarios, backends, &mut errors, - &[CompilerTarget::default()], + &[Target::default()], ); Ok(id) }; @@ -1741,7 +1741,7 @@ fn bench_published_artifact( iterations: Some(3), is_self_profile: false, bench_rustc: false, - compiler_targets: vec![CompilerTarget::default()], + targets: vec![Target::default()], }), Some(RuntimeBenchmarkConfig::new( runtime_suite, @@ -1842,7 +1842,7 @@ fn bench_compile( &config.backends, &shared.toolchain, config.iterations, - &config.compiler_targets, + &config.targets, ))) .with_context(|| anyhow::anyhow!("Cannot compile {}", benchmark.name)) }, diff --git a/collector/src/compile/benchmark/compiler_target.rs b/collector/src/compile/benchmark/compiler_target.rs deleted file mode 100644 index 13e7c1979..000000000 --- a/collector/src/compile/benchmark/compiler_target.rs +++ /dev/null @@ -1,8 +0,0 @@ -#[derive(Clone, Debug, Eq, Hash, PartialEq, serde::Deserialize)] -pub struct CompilerTarget(pub String); - -impl Default for CompilerTarget { - fn default() -> Self { - return Self("x86_64-unknown-linux-gnu".to_string()) - } -} diff --git a/collector/src/compile/benchmark/mod.rs b/collector/src/compile/benchmark/mod.rs index a4cbc8084..cbd0c9f47 100644 --- a/collector/src/compile/benchmark/mod.rs +++ 
b/collector/src/compile/benchmark/mod.rs @@ -3,7 +3,7 @@ use crate::compile::benchmark::codegen_backend::CodegenBackend; use crate::compile::benchmark::patch::Patch; use crate::compile::benchmark::profile::Profile; use crate::compile::benchmark::scenario::Scenario; -use crate::compile::benchmark::compiler_target::CompilerTarget; +use crate::compile::benchmark::target::Target; use crate::compile::execute::{CargoProcess, Processor}; use crate::toolchain::Toolchain; use crate::utils::wait_for_future; @@ -21,7 +21,7 @@ pub mod codegen_backend; pub(crate) mod patch; pub mod profile; pub mod scenario; -pub mod compiler_target; +pub mod target; fn default_runs() -> usize { 3 @@ -182,7 +182,7 @@ impl Benchmark { cwd: &'a Path, profile: Profile, backend: CodegenBackend, - compiler_target: &'a CompilerTarget, + target: Target, ) -> CargoProcess<'a> { let mut cargo_args = self .config @@ -223,7 +223,7 @@ impl Benchmark { .collect(), touch_file: self.config.touch_file.clone(), jobserver: None, - compiler_target, + target, } } @@ -236,7 +236,7 @@ impl Benchmark { backends: &[CodegenBackend], toolchain: &Toolchain, iterations: Option, - compiler_targets: &[CompilerTarget] + targets: &[Target] ) -> anyhow::Result<()> { if self.config.disabled { eprintln!("Skipping {}: disabled", self.name); @@ -268,11 +268,11 @@ impl Benchmark { } eprintln!("Preparing {}", self.name); - let mut target_dirs: Vec<((CodegenBackend, Profile, &CompilerTarget), TempDir)> = vec![]; + let mut target_dirs: Vec<((CodegenBackend, Profile, Target), TempDir)> = vec![]; for backend in backends { for profile in &profiles { - for compiler_target in compiler_targets { - target_dirs.push(((*backend, *profile, &compiler_target), self.make_temp_dir(&self.path)?)); + for target in targets { + target_dirs.push(((*backend, *profile, *target), self.make_temp_dir(&self.path)?)); } } } @@ -311,12 +311,12 @@ impl Benchmark { ) .context("jobserver::new")?; let mut threads = Vec::with_capacity(target_dirs.len()); - for ((backend, profile, compiler_target), prep_dir) in &target_dirs { + for ((backend, profile, target), prep_dir) in &target_dirs { let server = server.clone(); let thread = s.spawn::<_, anyhow::Result<()>>(move || { wait_for_future(async move { let server = server.clone(); - self.mk_cargo_process(toolchain, prep_dir.path(), *profile, *backend, compiler_target) + self.mk_cargo_process(toolchain, prep_dir.path(), *profile, *backend, *target) .jobserver(server) .run_rustc(false) .await?; @@ -350,13 +350,13 @@ impl Benchmark { let mut timing_dirs: Vec> = vec![]; let benchmark_start = std::time::Instant::now(); - for ((backend, profile, compiler_target), prep_dir) in &target_dirs { + for ((backend, profile, target), prep_dir) in &target_dirs { let backend = *backend; let profile = *profile; - let compiler_target = *compiler_target; + let target = *target; eprintln!( "Running {}: {:?} + {:?} + {:?} + {:?}", - self.name, profile, scenarios, backend, compiler_target, + self.name, profile, scenarios, backend, target, ); // We want at least two runs for all benchmarks (since we run @@ -378,7 +378,7 @@ impl Benchmark { // A full non-incremental build. if scenarios.contains(&Scenario::Full) { - self.mk_cargo_process(toolchain, cwd, profile, backend, &compiler_target) + self.mk_cargo_process(toolchain, cwd, profile, backend, target) .processor(processor, Scenario::Full, "Full", None) .run_rustc(true) .await?; @@ -389,7 +389,7 @@ impl Benchmark { // An incremental from scratch (slowest incremental case). 
// This is required for any subsequent incremental builds. if scenarios.iter().any(|s| s.is_incr()) { - self.mk_cargo_process(toolchain, cwd, profile, backend, &compiler_target) + self.mk_cargo_process(toolchain, cwd, profile, backend, target) .incremental(true) .processor(processor, Scenario::IncrFull, "IncrFull", None) .run_rustc(true) @@ -398,7 +398,7 @@ impl Benchmark { // An incremental build with no changes (fastest incremental case). if scenarios.contains(&Scenario::IncrUnchanged) { - self.mk_cargo_process(toolchain, cwd, profile, backend, &compiler_target) + self.mk_cargo_process(toolchain, cwd, profile, backend, target) .incremental(true) .processor(processor, Scenario::IncrUnchanged, "IncrUnchanged", None) .run_rustc(true) @@ -413,7 +413,7 @@ impl Benchmark { // An incremental build with some changes (realistic // incremental case). let scenario_str = format!("IncrPatched{}", i); - self.mk_cargo_process(toolchain, cwd, profile, backend, &compiler_target) + self.mk_cargo_process(toolchain, cwd, profile, backend, target) .incremental(true) .processor( processor, diff --git a/collector/src/compile/benchmark/target.rs b/collector/src/compile/benchmark/target.rs new file mode 100644 index 000000000..36f7984d1 --- /dev/null +++ b/collector/src/compile/benchmark/target.rs @@ -0,0 +1,10 @@ +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, serde::Deserialize)] +pub enum Target { + X86_64UnknownLinuxGnu +} + + impl Default for Target { + fn default() -> Self { + Self::X86_64UnknownLinuxGnu + } +} diff --git a/collector/src/compile/execute/bencher.rs b/collector/src/compile/execute/bencher.rs index 25d1a462c..030fa0d23 100644 --- a/collector/src/compile/execute/bencher.rs +++ b/collector/src/compile/execute/bencher.rs @@ -1,7 +1,7 @@ use crate::compile::benchmark::codegen_backend::CodegenBackend; use crate::compile::benchmark::profile::Profile; use crate::compile::benchmark::scenario::Scenario; -use crate::compile::benchmark::compiler_target::CompilerTarget; +use crate::compile::benchmark::target::Target; use crate::compile::benchmark::BenchmarkName; use crate::compile::execute; use crate::compile::execute::{ @@ -92,7 +92,7 @@ impl<'a> BenchProcessor<'a> { scenario: database::Scenario, profile: database::Profile, backend: CodegenBackend, - compiler_target: &CompilerTarget, + target: Target, stats: Stats, ) { let backend = match backend { @@ -100,6 +100,10 @@ impl<'a> BenchProcessor<'a> { CodegenBackend::Cranelift => database::CodegenBackend::Cranelift, }; + let target = match target { + Target::X86_64UnknownLinuxGnu => database::Target::X86_64UnknownLinuxGnu + }; + let mut buf = FuturesUnordered::new(); for (stat, value) in stats.iter() { buf.push(self.conn.record_statistic( @@ -111,7 +115,7 @@ impl<'a> BenchProcessor<'a> { backend, stat, value, - &compiler_target.0, + target, )); } @@ -202,7 +206,7 @@ impl Processor for BenchProcessor<'_> { res.0.stats.retain(|key, _| key.starts_with("size:")); } - self.insert_stats(collection, scenario, profile, data.backend, data.compiler_target, res.0) + self.insert_stats(collection, scenario, profile, data.backend, data.target, res.0) .await; Ok(Retry::No) diff --git a/collector/src/compile/execute/mod.rs b/collector/src/compile/execute/mod.rs index 0e884b5b8..34aa23482 100644 --- a/collector/src/compile/execute/mod.rs +++ b/collector/src/compile/execute/mod.rs @@ -4,7 +4,7 @@ use crate::compile::benchmark::codegen_backend::CodegenBackend; use crate::compile::benchmark::patch::Patch; use crate::compile::benchmark::profile::Profile; use 
crate::compile::benchmark::scenario::Scenario; -use crate::compile::benchmark::compiler_target::CompilerTarget; +use crate::compile::benchmark::target::Target; use crate::compile::benchmark::BenchmarkName; use crate::toolchain::Toolchain; use crate::utils::fs::EnsureImmutableFile; @@ -131,7 +131,7 @@ pub struct CargoProcess<'a> { pub rustc_args: Vec, pub touch_file: Option, pub jobserver: Option, - pub compiler_target: &'a CompilerTarget, + pub target: Target, } /// Returns an optional list of Performance CPU cores, if the system has P and E cores. /// This list *should* be in a format suitable for the `taskset` command. @@ -276,13 +276,13 @@ impl<'a> CargoProcess<'a> { // really. pub async fn run_rustc(&mut self, needs_final: bool) -> anyhow::Result<()> { log::info!( - "run_rustc with incremental={}, profile={:?}, scenario={:?}, patch={:?}, backend={:?}, compiler_target={:?}, phase={}", + "run_rustc with incremental={}, profile={:?}, scenario={:?}, patch={:?}, backend={:?}, target={:?}, phase={}", self.incremental, self.profile, self.processor_etc.as_ref().map(|v| v.1), self.processor_etc.as_ref().and_then(|v| v.3), self.backend, - self.compiler_target, + self.target, if needs_final { "benchmark" } else { "dependencies" } ); @@ -424,7 +424,7 @@ impl<'a> CargoProcess<'a> { scenario_str, patch, backend: self.backend, - compiler_target: self.compiler_target, + target: self.target, }; match processor.process_output(&data, output).await { Ok(Retry::No) => return Ok(()), @@ -489,7 +489,7 @@ pub struct ProcessOutputData<'a> { scenario_str: &'a str, patch: Option<&'a Patch>, backend: CodegenBackend, - compiler_target: &'a CompilerTarget, + target: Target, } /// Trait used by `Benchmark::measure()` to provide different kinds of diff --git a/database/schema.md b/database/schema.md index 7395ffe7b..6ab447101 100644 --- a/database/schema.md +++ b/database/schema.md @@ -152,7 +152,7 @@ many times in the `pstat` table. ``` sqlite> select * from pstat_series limit 1; -id crate profile scenario backend metric compiler_target +id crate profile scenario backend metric target ---------- ---------- ---------- ---------- ------- ------------ ------------ 1 helloworld check full llvm task-clock:u x86_64-linux-unknown-gnu ``` diff --git a/database/src/bin/import-sqlite.rs b/database/src/bin/import-sqlite.rs index 58bd73d08..83c9f967e 100644 --- a/database/src/bin/import-sqlite.rs +++ b/database/src/bin/import-sqlite.rs @@ -45,7 +45,7 @@ async fn main() { let sqlite_aid = sqlite_conn.artifact_id(&aid).await; let postgres_aid = postgres_conn.artifact_id(&aid).await; - for (&(benchmark, profile, scenario, backend, metric, ref compiler_target), id) in + for (&(benchmark, profile, scenario, backend, metric, target), id) in sqlite_idx.compile_statistic_descriptions() { if benchmarks.insert(benchmark) { @@ -76,7 +76,7 @@ async fn main() { backend, metric.as_str(), stat, - &compiler_target, + target, ) .await; } diff --git a/database/src/lib.rs b/database/src/lib.rs index ba3f97a14..1eb77d3b6 100644 --- a/database/src/lib.rs +++ b/database/src/lib.rs @@ -339,6 +339,37 @@ impl PartialOrd for Scenario { Some(self.cmp(other)) } } +/// The codegen backend used for compilation. 
+#[derive( + Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, serde::Serialize, serde::Deserialize, +)] +pub enum Target { + X86_64UnknownLinuxGnu +} + +impl Target { + pub fn as_str(self) -> &'static str { + match self { + Target::X86_64UnknownLinuxGnu => "x86_64-unknown-linux-gnu", + } + } +} + +impl FromStr for Target { + type Err = String; + fn from_str(s: &str) -> Result { + Ok(match s.to_ascii_lowercase().as_str() { + "x86_64-unknown-linux-gnu" => Target::X86_64UnknownLinuxGnu, + _ => return Err(format!("{} is not a valid target", s)), + }) + } +} + +impl fmt::Display for Target { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.as_str()) + } +} /// The codegen backend used for compilation. #[derive( @@ -472,7 +503,7 @@ pub struct Index { artifacts: Indexed>, /// Id lookup of compile stat description ids /// For legacy reasons called `pstat_series` in the database, and so the name is kept here. - pstat_series: Indexed<(Benchmark, Profile, Scenario, CodegenBackend, Metric, String)>, + pstat_series: Indexed<(Benchmark, Profile, Scenario, CodegenBackend, Metric, Target)>, /// Id lookup of runtime stat description ids runtime_pstat_series: Indexed<(Benchmark, Metric)>, } @@ -594,7 +625,7 @@ pub enum DbLabel { scenario: Scenario, backend: CodegenBackend, metric: Metric, - compiler_target: String, + target: Target, }, } @@ -613,10 +644,10 @@ impl Lookup for DbLabel { scenario, backend, metric, - compiler_target, + target, } => index .pstat_series - .get(&(*benchmark, *profile, *scenario, *backend, *metric, compiler_target.to_string())), + .get(&(*benchmark, *profile, *scenario, *backend, *metric, *target)), } } } @@ -692,7 +723,7 @@ impl Index { &self, ) -> impl Iterator< Item = ( - &(Benchmark, Profile, Scenario, CodegenBackend, Metric, String), + &(Benchmark, Profile, Scenario, CodegenBackend, Metric, Target), StatisticalDescriptionId, ), > + '_ { diff --git a/database/src/pool.rs b/database/src/pool.rs index 6be83c035..4dbef509d 100644 --- a/database/src/pool.rs +++ b/database/src/pool.rs @@ -1,4 +1,4 @@ -use crate::{ArtifactCollection, ArtifactId, ArtifactIdNumber, CodegenBackend, CompileBenchmark}; +use crate::{ArtifactCollection, ArtifactId, ArtifactIdNumber, CodegenBackend, CompileBenchmark, Target}; use crate::{CollectionId, Index, Profile, QueuedCommit, Scenario, Step}; use chrono::{DateTime, Utc}; use hashbrown::HashMap; @@ -47,7 +47,7 @@ pub trait Connection: Send + Sync { backend: CodegenBackend, metric: &str, value: f64, - compiler_target: &str, + target: Target, ); async fn record_runtime_statistic( &self, diff --git a/database/src/pool/postgres.rs b/database/src/pool/postgres.rs index e9dda5f24..d4b78ae3d 100644 --- a/database/src/pool/postgres.rs +++ b/database/src/pool/postgres.rs @@ -1,7 +1,7 @@ use crate::pool::{Connection, ConnectionManager, ManagedConnection, Transaction}; use crate::{ ArtifactCollection, ArtifactId, ArtifactIdNumber, Benchmark, CodegenBackend, CollectionId, - Commit, CommitType, CompileBenchmark, Date, Index, Profile, QueuedCommit, Scenario, + Commit, CommitType, CompileBenchmark, Date, Index, Profile, QueuedCommit, Scenario, Target, }; use anyhow::Context as _; use chrono::{DateTime, TimeZone, Utc}; @@ -278,11 +278,11 @@ static MIGRATIONS: &[&str] = &[ alter table pstat_series drop constraint pstat_series_crate_profile_cache_statistic_key; alter table pstat_series add constraint test_case UNIQUE(crate, profile, scenario, backend, metric); "#, - // A pstat series shows 1 compiler_target + // A pstat series 
shows 1 target r#" - alter table pstat_series add compiler_target text not null default 'x86_64-unknown-linux-gnu'; + alter table pstat_series add target text not null default 'x86_64-unknown-linux-gnu'; alter table pstat_series drop constraint test_case; - alter table pstat_series add constraint test_case UNIQUE(crate, profile, scenario, backend, compiler_target, metric); + alter table pstat_series add constraint test_case UNIQUE(crate, profile, scenario, backend, target, metric); "#, ]; @@ -471,8 +471,8 @@ impl PostgresConnection { .await .unwrap(), get_error: conn.prepare("select benchmark, error from error where aid = $1").await.unwrap(), - insert_pstat_series: conn.prepare("insert into pstat_series (crate, profile, scenario, backend, metric, compiler_target) VALUES ($1, $2, $3, $4, $5, $6) ON CONFLICT DO NOTHING RETURNING id").await.unwrap(), - select_pstat_series: conn.prepare("select id from pstat_series where crate = $1 and profile = $2 and scenario = $3 and backend = $4 and metric = $5 and compiler_target = $6").await.unwrap(), + insert_pstat_series: conn.prepare("insert into pstat_series (crate, profile, scenario, backend, metric, target) VALUES ($1, $2, $3, $4, $5, $6) ON CONFLICT DO NOTHING RETURNING id").await.unwrap(), + select_pstat_series: conn.prepare("select id from pstat_series where crate = $1 and profile = $2 and scenario = $3 and backend = $4 and metric = $5 and target = $6").await.unwrap(), collection_id: conn.prepare("insert into collection (perf_commit) VALUES ($1) returning id").await.unwrap(), record_duration: conn.prepare(" insert into artifact_collection_duration ( @@ -622,7 +622,7 @@ where pstat_series: self .conn() .query( - "select id, crate, profile, scenario, backend, metric, compiler_target, from pstat_series;", + "select id, crate, profile, scenario, backend, metric, target, from pstat_series;", &[], ) .await @@ -637,7 +637,7 @@ where row.get::<_, String>(3).as_str().parse().unwrap(), CodegenBackend::from_str(row.get::<_, String>(4).as_str()).unwrap(), row.get::<_, String>(5).as_str().into(), - row.get::<_, String>(6).as_str().parse().unwrap(), + Target::from_str(row.get::<_, String>(6).as_str()).unwrap(), ), ) }) @@ -837,17 +837,17 @@ where backend: CodegenBackend, metric: &str, stat: f64, - compiler_target: &str, + target: Target, ) { let profile = profile.to_string(); let scenario = scenario.to_string(); let backend = backend.to_string(); - let compiler_target = compiler_target.to_string(); + let target = target.to_string(); let sid = self .conn() .query_opt( &self.statements().select_pstat_series, - &[&benchmark, &profile, &scenario, &backend, &metric, &compiler_target], + &[&benchmark, &profile, &scenario, &backend, &metric, &target], ) .await .unwrap(); @@ -857,14 +857,14 @@ where self.conn() .query_opt( &self.statements().insert_pstat_series, - &[&benchmark, &profile, &scenario, &backend, &metric, &compiler_target], + &[&benchmark, &profile, &scenario, &backend, &metric, &target], ) .await .unwrap(); self.conn() .query_one( &self.statements().select_pstat_series, - &[&benchmark, &profile, &scenario, &backend, &metric, &compiler_target], + &[&benchmark, &profile, &scenario, &backend, &metric, &target], ) .await .unwrap() diff --git a/database/src/pool/sqlite.rs b/database/src/pool/sqlite.rs index fb32cbd0a..4be8dceac 100644 --- a/database/src/pool/sqlite.rs +++ b/database/src/pool/sqlite.rs @@ -1,7 +1,7 @@ use crate::pool::{Connection, ConnectionManager, ManagedConnection, Transaction}; use crate::{ ArtifactCollection, ArtifactId, Benchmark, 
CodegenBackend, CollectionId, Commit, CommitType, - CompileBenchmark, Date, Profile, + CompileBenchmark, Date, Profile, Target, }; use crate::{ArtifactIdNumber, Index, QueuedCommit}; use chrono::{DateTime, TimeZone, Utc}; @@ -385,22 +385,22 @@ static MIGRATIONS: &[Migration] = &[ alter table pstat_series_new rename to pstat_series; "#, ), - // Add compiler_target as a unique constraint, defaulting to 'x86_64-unknown-linux-gnu' + // Add target as a unique constraint, defaulting to 'x86_64-unknown-linux-gnu' Migration::without_foreign_key_constraints( r#" - create table pstat_series_with_compiler_target( + create table pstat_series_with_target( id integer primary key not null, crate text not null references benchmark(name) on delete cascade on update cascade, profile text not null, scenario text not null, backend text not null, metric text not null, - compiler_target text not null default 'x86_64-unknown-linux-gnu', - UNIQUE(crate, profile, scenario, backend, compiler_target, metric) + target text not null default 'x86_64-unknown-linux-gnu', + UNIQUE(crate, profile, scenario, backend, target, metric) ); - insert into pstat_series_with_compiler_target select id, crate, profile, scenario, backend, metric, 'x86_64-unknown-linux-gnu' from pstat_series; + insert into pstat_series_with_target select id, crate, profile, scenario, backend, metric, 'x86_64-unknown-linux-gnu' from pstat_series; drop table pstat_series; - alter table pstat_series_with_compiler_target rename to pstat_series; + alter table pstat_series_with_target rename to pstat_series; "#), ]; @@ -517,7 +517,7 @@ impl Connection for SqliteConnection { .collect(); let pstat_series = self .raw() - .prepare("select id, crate, profile, scenario, backend, metric, compiler_target from pstat_series;") + .prepare("select id, crate, profile, scenario, backend, metric, target from pstat_series;") .unwrap() .query_map(params![], |row| { Ok(( @@ -528,7 +528,7 @@ impl Connection for SqliteConnection { row.get::<_, String>(3)?.as_str().parse().unwrap(), CodegenBackend::from_str(row.get::<_, String>(4)?.as_str()).unwrap(), row.get::<_, String>(5)?.as_str().into(), - row.get::<_, String>(6)?.as_str().into(), + Target::from_str(row.get::<_, String>(6)?.as_str()).unwrap(), ), )) }) @@ -673,26 +673,27 @@ impl Connection for SqliteConnection { backend: CodegenBackend, metric: &str, value: f64, - compiler_target: &str, + target: Target, ) { let profile = profile.to_string(); let scenario = scenario.to_string(); let backend = backend.to_string(); - self.raw_ref().execute("insert or ignore into pstat_series (crate, profile, scenario, backend, metric, compiler_target) VALUES (?, ?, ?, ?, ?, ?)", params![ + let target = target.to_string(); + self.raw_ref().execute("insert or ignore into pstat_series (crate, profile, scenario, backend, metric, target) VALUES (?, ?, ?, ?, ?, ?)", params![ &benchmark, &profile, &scenario, &backend, &metric, - compiler_target, + &target, ]).unwrap(); - let sid: i32 = self.raw_ref().query_row("select id from pstat_series where crate = ? and profile = ? and scenario = ? and backend = ? and metric = ? and compiler_target = ?", params![ + let sid: i32 = self.raw_ref().query_row("select id from pstat_series where crate = ? and profile = ? and scenario = ? and backend = ? and metric = ? 
and target = ?", params![ &benchmark, &profile, &scenario, &backend, &metric, - compiler_target, + &target, ], |r| r.get(0)).unwrap(); self.raw_ref() .execute( diff --git a/database/src/selector.rs b/database/src/selector.rs index eb8a2ec11..b8547d178 100644 --- a/database/src/selector.rs +++ b/database/src/selector.rs @@ -192,7 +192,7 @@ pub struct CompileBenchmarkQuery { profile: Selector, backend: Selector, metric: Selector, - compiler_target: Selector, + target: Selector, } impl CompileBenchmarkQuery { @@ -223,7 +223,7 @@ impl CompileBenchmarkQuery { scenario: Selector::All, backend: Selector::All, metric: Selector::One(metric.as_str().into()), - compiler_target: Selector::All, + target: Selector::All, } } } @@ -236,7 +236,7 @@ impl Default for CompileBenchmarkQuery { profile: Selector::All, backend: Selector::All, metric: Selector::All, - compiler_target: Selector::All, + target: Selector::All, } } } @@ -252,22 +252,22 @@ impl BenchmarkQuery for CompileBenchmarkQuery { ) -> Result>, String> { let mut statistic_descriptions: Vec<_> = index .compile_statistic_descriptions() - .filter(|(&(b, p, s, backend, m, ref compiler_target), _)| { + .filter(|(&(b, p, s, backend, m, ref target), _)| { self.benchmark.matches(b) && self.profile.matches(p) && self.scenario.matches(s) && self.backend.matches(backend) && self.metric.matches(m) - && self.compiler_target.matches(compiler_target.to_string()) + && self.target.matches(target.to_string()) }) - .map(|(&(benchmark, profile, scenario, backend, metric, ref compiler_target), sid)| { + .map(|(&(benchmark, profile, scenario, backend, metric, ref target), sid)| { ( CompileTestCase { benchmark, profile, scenario, backend, - compiler_target: compiler_target.to_string(), + target: target.to_string(), }, metric, sid, @@ -322,7 +322,7 @@ pub struct CompileTestCase { pub profile: Profile, pub scenario: Scenario, pub backend: CodegenBackend, - pub compiler_target: String, + pub target: String, } impl TestCase for CompileTestCase {} From cabcfbadf3932475f3b61e8dba1ab27303df6db0 Mon Sep 17 00:00:00 2001 From: James Barford-Evans Date: Thu, 13 Mar 2025 07:15:17 +0000 Subject: [PATCH 3/7] pr feedback - reorder `target`, add documentation, add `target` to the sql conversion scripts --- collector/src/compile/benchmark/target.rs | 6 ++++++ collector/src/compile/execute/bencher.rs | 2 +- collector/src/compile/execute/mod.rs | 1 - database/schema.md | 2 +- database/src/bin/import-sqlite.rs | 4 ++-- database/src/bin/postgres-to-sqlite.rs | 5 +++-- database/src/bin/sqlite-to-postgres.rs | 8 +++++--- database/src/lib.rs | 16 +++++++++++----- database/src/pool.rs | 2 +- database/src/pool/postgres.rs | 17 +++++++++-------- database/src/pool/sqlite.rs | 16 ++++++++-------- database/src/selector.rs | 16 ++++++++-------- 12 files changed, 55 insertions(+), 40 deletions(-) diff --git a/collector/src/compile/benchmark/target.rs b/collector/src/compile/benchmark/target.rs index 36f7984d1..31626f392 100644 --- a/collector/src/compile/benchmark/target.rs +++ b/collector/src/compile/benchmark/target.rs @@ -1,5 +1,11 @@ +/// Target representing an Rust target tripple, for a full list of targets and +/// their support see; +/// https://doc.rust-lang.org/nightly/rustc/platform-support.html +/// +/// Presently we only support x86_64 #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, serde::Deserialize)] pub enum Target { + /// `x86_64-unknown-linux-gnu` X86_64UnknownLinuxGnu } diff --git a/collector/src/compile/execute/bencher.rs b/collector/src/compile/execute/bencher.rs index 
030fa0d23..cf4663be4 100644 --- a/collector/src/compile/execute/bencher.rs +++ b/collector/src/compile/execute/bencher.rs @@ -113,9 +113,9 @@ impl<'a> BenchProcessor<'a> { profile, scenario, backend, + target, stat, value, - target, )); } diff --git a/collector/src/compile/execute/mod.rs b/collector/src/compile/execute/mod.rs index 34aa23482..004a05c49 100644 --- a/collector/src/compile/execute/mod.rs +++ b/collector/src/compile/execute/mod.rs @@ -23,7 +23,6 @@ use std::process::{self, Command}; use std::str; use std::sync::LazyLock; - pub mod bencher; mod etw_parser; pub mod profiler; diff --git a/database/schema.md b/database/schema.md index 6ab447101..9bdbd5f79 100644 --- a/database/schema.md +++ b/database/schema.md @@ -35,7 +35,7 @@ Here is the diagram for compile-time benchmarks: │ scenario │ │ cid │ │ │ backend │ │ value ├───┘ │ metric │ └──────────┘ - │ compiler_tar..│ + │ target │ └───────────────┘ ``` diff --git a/database/src/bin/import-sqlite.rs b/database/src/bin/import-sqlite.rs index 83c9f967e..5159d4dae 100644 --- a/database/src/bin/import-sqlite.rs +++ b/database/src/bin/import-sqlite.rs @@ -45,7 +45,7 @@ async fn main() { let sqlite_aid = sqlite_conn.artifact_id(&aid).await; let postgres_aid = postgres_conn.artifact_id(&aid).await; - for (&(benchmark, profile, scenario, backend, metric, target), id) in + for (&(benchmark, profile, scenario, backend, target, metric), id) in sqlite_idx.compile_statistic_descriptions() { if benchmarks.insert(benchmark) { @@ -74,9 +74,9 @@ async fn main() { profile, scenario, backend, + target, metric.as_str(), stat, - target, ) .await; } diff --git a/database/src/bin/postgres-to-sqlite.rs b/database/src/bin/postgres-to-sqlite.rs index b45095fc8..fd120e440 100644 --- a/database/src/bin/postgres-to-sqlite.rs +++ b/database/src/bin/postgres-to-sqlite.rs @@ -246,11 +246,11 @@ impl Table for PstatSeries { } fn postgres_select_statement(&self, _since_weeks_ago: Option) -> String { - "select id, crate, profile, scenario, backend, metric from ".to_string() + self.name() + "select id, crate, profile, scenario, backend, target, metric from ".to_string() + self.name() } fn sqlite_insert_statement(&self) -> &'static str { - "insert into pstat_series (id, crate, profile, scenario, backend, metric) VALUES (?, ?, ?, ?, ?, ?)" + "insert into pstat_series (id, crate, profile, scenario, backend, target, metric) VALUES (?, ?, ?, ?, ?, ?, ?)" } fn sqlite_execute_insert(&self, statement: &mut rusqlite::Statement, row: tokio_postgres::Row) { @@ -262,6 +262,7 @@ impl Table for PstatSeries { row.get::<_, &str>(3), row.get::<_, &str>(4), row.get::<_, &str>(5), + row.get::<_, &str>(6), ]) .unwrap(); } diff --git a/database/src/bin/sqlite-to-postgres.rs b/database/src/bin/sqlite-to-postgres.rs index e2f01ac37..9ac7fb974 100644 --- a/database/src/bin/sqlite-to-postgres.rs +++ b/database/src/bin/sqlite-to-postgres.rs @@ -322,6 +322,7 @@ struct PstatSeriesRow<'a> { profile: &'a str, scenario: &'a str, backend: &'a str, + target: &'a str, metric: &'a str, } @@ -331,11 +332,11 @@ impl Table for PstatSeries { } fn sqlite_attributes() -> &'static str { - "id, crate, profile, scenario, backend, metric" + "id, crate, profile, scenario, backend, target, metric" } fn postgres_attributes() -> &'static str { - "id, crate, profile, scenario, backend, metric" + "id, crate, profile, scenario, backend, target, metric" } fn postgres_generated_id_attribute() -> Option<&'static str> { @@ -350,7 +351,8 @@ impl Table for PstatSeries { profile: row.get_ref(2).unwrap().as_str().unwrap(), 
scenario: row.get_ref(3).unwrap().as_str().unwrap(), backend: row.get_ref(4).unwrap().as_str().unwrap(), - metric: row.get_ref(5).unwrap().as_str().unwrap(), + target: row.get_ref(5).unwrap().as_str().unwrap(), + metric: row.get_ref(6).unwrap().as_str().unwrap(), }) .unwrap(); } diff --git a/database/src/lib.rs b/database/src/lib.rs index 1eb77d3b6..28f3eab6d 100644 --- a/database/src/lib.rs +++ b/database/src/lib.rs @@ -339,11 +339,17 @@ impl PartialOrd for Scenario { Some(self.cmp(other)) } } -/// The codegen backend used for compilation. + +/// Target representing an Rust target tripple, for a full list of targets and +/// their support see; +/// https://doc.rust-lang.org/nightly/rustc/platform-support.html +/// +/// Presently we only support x86_64 #[derive( Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, serde::Serialize, serde::Deserialize, )] pub enum Target { + /// `x86_64-unknown-linux-gnu` X86_64UnknownLinuxGnu } @@ -503,7 +509,7 @@ pub struct Index { artifacts: Indexed>, /// Id lookup of compile stat description ids /// For legacy reasons called `pstat_series` in the database, and so the name is kept here. - pstat_series: Indexed<(Benchmark, Profile, Scenario, CodegenBackend, Metric, Target)>, + pstat_series: Indexed<(Benchmark, Profile, Scenario, CodegenBackend, Target, Metric)>, /// Id lookup of runtime stat description ids runtime_pstat_series: Indexed<(Benchmark, Metric)>, } @@ -617,7 +623,7 @@ mod index_serde { } } -#[derive(Debug, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub enum DbLabel { StatisticDescription { benchmark: Benchmark, @@ -647,7 +653,7 @@ impl Lookup for DbLabel { target, } => index .pstat_series - .get(&(*benchmark, *profile, *scenario, *backend, *metric, *target)), + .get(&(*benchmark, *profile, *scenario, *backend, *target, *metric)), } } } @@ -723,7 +729,7 @@ impl Index { &self, ) -> impl Iterator< Item = ( - &(Benchmark, Profile, Scenario, CodegenBackend, Metric, Target), + &(Benchmark, Profile, Scenario, CodegenBackend, Target, Metric), StatisticalDescriptionId, ), > + '_ { diff --git a/database/src/pool.rs b/database/src/pool.rs index 4dbef509d..c1d332799 100644 --- a/database/src/pool.rs +++ b/database/src/pool.rs @@ -45,9 +45,9 @@ pub trait Connection: Send + Sync { profile: Profile, scenario: Scenario, backend: CodegenBackend, + target: Target, metric: &str, value: f64, - target: Target, ); async fn record_runtime_statistic( &self, diff --git a/database/src/pool/postgres.rs b/database/src/pool/postgres.rs index d4b78ae3d..e67a21414 100644 --- a/database/src/pool/postgres.rs +++ b/database/src/pool/postgres.rs @@ -278,6 +278,7 @@ static MIGRATIONS: &[&str] = &[ alter table pstat_series drop constraint pstat_series_crate_profile_cache_statistic_key; alter table pstat_series add constraint test_case UNIQUE(crate, profile, scenario, backend, metric); "#, + r#"alter table pull_request_build add column backends text;"#, // A pstat series shows 1 target r#" alter table pstat_series add target text not null default 'x86_64-unknown-linux-gnu'; @@ -471,8 +472,8 @@ impl PostgresConnection { .await .unwrap(), get_error: conn.prepare("select benchmark, error from error where aid = $1").await.unwrap(), - insert_pstat_series: conn.prepare("insert into pstat_series (crate, profile, scenario, backend, metric, target) VALUES ($1, $2, $3, $4, $5, $6) ON CONFLICT DO NOTHING RETURNING id").await.unwrap(), - select_pstat_series: conn.prepare("select id from pstat_series where crate = $1 and profile = $2 and scenario = $3 
and backend = $4 and metric = $5 and target = $6").await.unwrap(), + insert_pstat_series: conn.prepare("insert into pstat_series (crate, profile, scenario, backend, target, metric) VALUES ($1, $2, $3, $4, $5, $6) ON CONFLICT DO NOTHING RETURNING id").await.unwrap(), + select_pstat_series: conn.prepare("select id from pstat_series where crate = $1 and profile = $2 and scenario = $3 and backend = $4 and target = $5 and metric = $6").await.unwrap(), collection_id: conn.prepare("insert into collection (perf_commit) VALUES ($1) returning id").await.unwrap(), record_duration: conn.prepare(" insert into artifact_collection_duration ( @@ -622,7 +623,7 @@ where pstat_series: self .conn() .query( - "select id, crate, profile, scenario, backend, metric, target, from pstat_series;", + "select id, crate, profile, scenario, backend, target, metric from pstat_series;", &[], ) .await @@ -636,8 +637,8 @@ where Profile::from_str(row.get::<_, String>(2).as_str()).unwrap(), row.get::<_, String>(3).as_str().parse().unwrap(), CodegenBackend::from_str(row.get::<_, String>(4).as_str()).unwrap(), - row.get::<_, String>(5).as_str().into(), Target::from_str(row.get::<_, String>(6).as_str()).unwrap(), + row.get::<_, String>(5).as_str().into(), ), ) }) @@ -835,9 +836,9 @@ where profile: Profile, scenario: Scenario, backend: CodegenBackend, + target: Target, metric: &str, stat: f64, - target: Target, ) { let profile = profile.to_string(); let scenario = scenario.to_string(); @@ -847,7 +848,7 @@ where .conn() .query_opt( &self.statements().select_pstat_series, - &[&benchmark, &profile, &scenario, &backend, &metric, &target], + &[&benchmark, &profile, &scenario, &backend, &target, &metric], ) .await .unwrap(); @@ -857,14 +858,14 @@ where self.conn() .query_opt( &self.statements().insert_pstat_series, - &[&benchmark, &profile, &scenario, &backend, &metric, &target], + &[&benchmark, &profile, &scenario, &backend, &target, &metric], ) .await .unwrap(); self.conn() .query_one( &self.statements().select_pstat_series, - &[&benchmark, &profile, &scenario, &backend, &metric, &target], + &[&benchmark, &profile, &scenario, &backend, &target, &metric], ) .await .unwrap() diff --git a/database/src/pool/sqlite.rs b/database/src/pool/sqlite.rs index 4be8dceac..6608b80c4 100644 --- a/database/src/pool/sqlite.rs +++ b/database/src/pool/sqlite.rs @@ -517,7 +517,7 @@ impl Connection for SqliteConnection { .collect(); let pstat_series = self .raw() - .prepare("select id, crate, profile, scenario, backend, metric, target from pstat_series;") + .prepare("select id, crate, profile, scenario, backend, target, metric from pstat_series;") .unwrap() .query_map(params![], |row| { Ok(( @@ -527,8 +527,8 @@ impl Connection for SqliteConnection { Profile::from_str(row.get::<_, String>(2)?.as_str()).unwrap(), row.get::<_, String>(3)?.as_str().parse().unwrap(), CodegenBackend::from_str(row.get::<_, String>(4)?.as_str()).unwrap(), - row.get::<_, String>(5)?.as_str().into(), - Target::from_str(row.get::<_, String>(6)?.as_str()).unwrap(), + Target::from_str(row.get::<_, String>(5)?.as_str()).unwrap(), + row.get::<_, String>(6)?.as_str().into(), ), )) }) @@ -671,29 +671,29 @@ impl Connection for SqliteConnection { profile: Profile, scenario: crate::Scenario, backend: CodegenBackend, + target: Target, metric: &str, value: f64, - target: Target, ) { let profile = profile.to_string(); let scenario = scenario.to_string(); let backend = backend.to_string(); let target = target.to_string(); - self.raw_ref().execute("insert or ignore into pstat_series (crate, 
profile, scenario, backend, metric, target) VALUES (?, ?, ?, ?, ?, ?)", params![ + self.raw_ref().execute("insert or ignore into pstat_series (crate, profile, scenario, backend, target, metric) VALUES (?, ?, ?, ?, ?, ?)", params![ &benchmark, &profile, &scenario, &backend, - &metric, &target, + &metric, ]).unwrap(); - let sid: i32 = self.raw_ref().query_row("select id from pstat_series where crate = ? and profile = ? and scenario = ? and backend = ? and metric = ? and target = ?", params![ + let sid: i32 = self.raw_ref().query_row("select id from pstat_series where crate = ? and profile = ? and scenario = ? and backend = ? and target = ? and metric = ?", params![ &benchmark, &profile, &scenario, &backend, - &metric, &target, + &metric, ], |r| r.get(0)).unwrap(); self.raw_ref() .execute( diff --git a/database/src/selector.rs b/database/src/selector.rs index b8547d178..2afc63fed 100644 --- a/database/src/selector.rs +++ b/database/src/selector.rs @@ -28,7 +28,7 @@ use std::{ }; use crate::{ - interpolate::Interpolate, metric::Metric, ArtifactId, ArtifactIdIter, Benchmark, CodegenBackend, Connection, Index, Lookup, Profile, Scenario + interpolate::Interpolate, metric::Metric, ArtifactId, ArtifactIdIter, Benchmark, CodegenBackend, Connection, Index, Lookup, Profile, Scenario, Target }; #[derive(Debug)] @@ -192,7 +192,7 @@ pub struct CompileBenchmarkQuery { profile: Selector, backend: Selector, metric: Selector, - target: Selector, + target: Selector, } impl CompileBenchmarkQuery { @@ -252,22 +252,22 @@ impl BenchmarkQuery for CompileBenchmarkQuery { ) -> Result>, String> { let mut statistic_descriptions: Vec<_> = index .compile_statistic_descriptions() - .filter(|(&(b, p, s, backend, m, ref target), _)| { + .filter(|(&(b, p, s, backend, target, metric), _)| { self.benchmark.matches(b) && self.profile.matches(p) && self.scenario.matches(s) && self.backend.matches(backend) - && self.metric.matches(m) - && self.target.matches(target.to_string()) + && self.target.matches(target) + && self.metric.matches(metric) }) - .map(|(&(benchmark, profile, scenario, backend, metric, ref target), sid)| { + .map(|(&(benchmark, profile, scenario, backend, target, metric), sid)| { ( CompileTestCase { benchmark, profile, scenario, backend, - target: target.to_string(), + target, }, metric, sid, @@ -322,7 +322,7 @@ pub struct CompileTestCase { pub profile: Profile, pub scenario: Scenario, pub backend: CodegenBackend, - pub target: String, + pub target: Target, } impl TestCase for CompileTestCase {} From 46848af4c229a0516252f925041fee9777311c9b Mon Sep 17 00:00:00 2001 From: James Barford-Evans Date: Thu, 13 Mar 2025 12:22:51 +0000 Subject: [PATCH 4/7] fix clippy - too_many_arguments --- collector/src/compile/benchmark/mod.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/collector/src/compile/benchmark/mod.rs b/collector/src/compile/benchmark/mod.rs index cbd0c9f47..ec0359337 100644 --- a/collector/src/compile/benchmark/mod.rs +++ b/collector/src/compile/benchmark/mod.rs @@ -228,6 +228,7 @@ impl Benchmark { } /// Run a specific benchmark under a processor + profiler combination. 
+ #[allow(clippy::too_many_arguments)] pub async fn measure( &self, processor: &mut dyn Processor, From 2b775a8e01302ecdfac327494845b95b1f56fe6f Mon Sep 17 00:00:00 2001 From: James Barford-Evans Date: Fri, 14 Mar 2025 11:47:34 +0000 Subject: [PATCH 5/7] pr fixes - spelling + metric as the last column everywhere --- collector/src/compile/benchmark/target.rs | 8 ++++---- database/schema.md | 6 +++--- database/src/lib.rs | 8 ++++---- database/src/pool/postgres.rs | 4 ++-- database/src/pool/sqlite.rs | 13 ++++++++----- 5 files changed, 21 insertions(+), 18 deletions(-) diff --git a/collector/src/compile/benchmark/target.rs b/collector/src/compile/benchmark/target.rs index 31626f392..7af2213b6 100644 --- a/collector/src/compile/benchmark/target.rs +++ b/collector/src/compile/benchmark/target.rs @@ -1,4 +1,4 @@ -/// Target representing an Rust target tripple, for a full list of targets and +/// Target representing an Rust target triple, for a full list of targets and /// their support see; /// https://doc.rust-lang.org/nightly/rustc/platform-support.html /// @@ -6,11 +6,11 @@ #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, serde::Deserialize)] pub enum Target { /// `x86_64-unknown-linux-gnu` - X86_64UnknownLinuxGnu + X86_64UnknownLinuxGnu, } - impl Default for Target { +impl Default for Target { fn default() -> Self { Self::X86_64UnknownLinuxGnu - } + } } diff --git a/database/schema.md b/database/schema.md index 9bdbd5f79..8fb9d8657 100644 --- a/database/schema.md +++ b/database/schema.md @@ -152,9 +152,9 @@ many times in the `pstat` table. ``` sqlite> select * from pstat_series limit 1; -id crate profile scenario backend metric target ----------- ---------- ---------- ---------- ------- ------------ ------------ -1 helloworld check full llvm task-clock:u x86_64-linux-unknown-gnu +id crate profile scenario backend target metric +---------- ---------- ---------- ---------- ------- ------------ ------------ +1 helloworld check full llvm x86_64-linux-unknown-gnu task-clock:u ``` ### pstat diff --git a/database/src/lib.rs b/database/src/lib.rs index 28f3eab6d..618f0178f 100644 --- a/database/src/lib.rs +++ b/database/src/lib.rs @@ -340,7 +340,7 @@ impl PartialOrd for Scenario { } } -/// Target representing an Rust target tripple, for a full list of targets and +/// Target representing an Rust target triple, for a full list of targets and /// their support see; /// https://doc.rust-lang.org/nightly/rustc/platform-support.html /// @@ -350,7 +350,7 @@ impl PartialOrd for Scenario { )] pub enum Target { /// `x86_64-unknown-linux-gnu` - X86_64UnknownLinuxGnu + X86_64UnknownLinuxGnu, } impl Target { @@ -630,8 +630,8 @@ pub enum DbLabel { profile: Profile, scenario: Scenario, backend: CodegenBackend, - metric: Metric, target: Target, + metric: Metric, }, } @@ -703,7 +703,7 @@ impl Index { self.pstat_series .map .keys() - .map(|(_, _, _, _, metric, _)| metric) + .map(|(_, _, _, _, _, metric)| metric) .collect::>() .into_iter() .map(|s| s.to_string()) diff --git a/database/src/pool/postgres.rs b/database/src/pool/postgres.rs index e67a21414..f3501e716 100644 --- a/database/src/pool/postgres.rs +++ b/database/src/pool/postgres.rs @@ -637,8 +637,8 @@ where Profile::from_str(row.get::<_, String>(2).as_str()).unwrap(), row.get::<_, String>(3).as_str().parse().unwrap(), CodegenBackend::from_str(row.get::<_, String>(4).as_str()).unwrap(), - Target::from_str(row.get::<_, String>(6).as_str()).unwrap(), - row.get::<_, String>(5).as_str().into(), + Target::from_str(row.get::<_, String>(5).as_str()).unwrap(), + 
row.get::<_, String>(6).as_str().into(), ), ) }) diff --git a/database/src/pool/sqlite.rs b/database/src/pool/sqlite.rs index 6608b80c4..6f9f871eb 100644 --- a/database/src/pool/sqlite.rs +++ b/database/src/pool/sqlite.rs @@ -387,21 +387,22 @@ static MIGRATIONS: &[Migration] = &[ ), // Add target as a unique constraint, defaulting to 'x86_64-unknown-linux-gnu' Migration::without_foreign_key_constraints( - r#" + r#" create table pstat_series_with_target( id integer primary key not null, crate text not null references benchmark(name) on delete cascade on update cascade, profile text not null, scenario text not null, backend text not null, - metric text not null, target text not null default 'x86_64-unknown-linux-gnu', + metric text not null, UNIQUE(crate, profile, scenario, backend, target, metric) ); - insert into pstat_series_with_target select id, crate, profile, scenario, backend, metric, 'x86_64-unknown-linux-gnu' from pstat_series; + insert into pstat_series_with_target select id, crate, profile, scenario, backend, 'x86_64-unknown-linux-gnu', metric from pstat_series; drop table pstat_series; alter table pstat_series_with_target rename to pstat_series; - "#), + "#, + ), ]; #[async_trait::async_trait] @@ -517,7 +518,9 @@ impl Connection for SqliteConnection { .collect(); let pstat_series = self .raw() - .prepare("select id, crate, profile, scenario, backend, target, metric from pstat_series;") + .prepare( + "select id, crate, profile, scenario, backend, target, metric from pstat_series;", + ) .unwrap() .query_map(params![], |row| { Ok(( From e4bb4ad4c04bf216b798c43c572398122e88dd1c Mon Sep 17 00:00:00 2001 From: James Barford-Evans Date: Mon, 17 Mar 2025 10:51:47 +0000 Subject: [PATCH 6/7] Linter fix - format touched files --- collector/src/bin/collector.rs | 4 +-- collector/src/compile/benchmark/mod.rs | 21 +++++++++++----- collector/src/compile/execute/bencher.rs | 13 +++++++--- database/src/bin/postgres-to-sqlite.rs | 3 ++- database/src/pool.rs | 4 ++- database/src/selector.rs | 31 +++++++++++++----------- 6 files changed, 49 insertions(+), 27 deletions(-) diff --git a/collector/src/bin/collector.rs b/collector/src/bin/collector.rs index dba4c1770..c82929267 100644 --- a/collector/src/bin/collector.rs +++ b/collector/src/bin/collector.rs @@ -35,9 +35,9 @@ use collector::artifact_stats::{ use collector::codegen::{codegen_diff, CodegenType}; use collector::compile::benchmark::category::Category; use collector::compile::benchmark::codegen_backend::CodegenBackend; -use collector::compile::benchmark::target::Target; use collector::compile::benchmark::profile::Profile; use collector::compile::benchmark::scenario::Scenario; +use collector::compile::benchmark::target::Target; use collector::compile::benchmark::{ compile_benchmark_dir, get_compile_benchmarks, ArtifactType, Benchmark, BenchmarkName, }; @@ -202,7 +202,7 @@ fn profile_compile( scenarios: &[Scenario], backends: &[CodegenBackend], errors: &mut BenchmarkErrors, - targets: &[Target] + targets: &[Target], ) { eprintln!("Profiling {} with {:?}", toolchain.id, profiler); if let Profiler::SelfProfile = profiler { diff --git a/collector/src/compile/benchmark/mod.rs b/collector/src/compile/benchmark/mod.rs index ec0359337..7fb671dc3 100644 --- a/collector/src/compile/benchmark/mod.rs +++ b/collector/src/compile/benchmark/mod.rs @@ -237,7 +237,7 @@ impl Benchmark { backends: &[CodegenBackend], toolchain: &Toolchain, iterations: Option, - targets: &[Target] + targets: &[Target], ) -> anyhow::Result<()> { if self.config.disabled { 
eprintln!("Skipping {}: disabled", self.name); @@ -273,7 +273,10 @@ impl Benchmark { for backend in backends { for profile in &profiles { for target in targets { - target_dirs.push(((*backend, *profile, *target), self.make_temp_dir(&self.path)?)); + target_dirs.push(( + (*backend, *profile, *target), + self.make_temp_dir(&self.path)?, + )); } } } @@ -317,10 +320,16 @@ impl Benchmark { let thread = s.spawn::<_, anyhow::Result<()>>(move || { wait_for_future(async move { let server = server.clone(); - self.mk_cargo_process(toolchain, prep_dir.path(), *profile, *backend, *target) - .jobserver(server) - .run_rustc(false) - .await?; + self.mk_cargo_process( + toolchain, + prep_dir.path(), + *profile, + *backend, + *target, + ) + .jobserver(server) + .run_rustc(false) + .await?; Ok::<(), anyhow::Error>(()) })?; Ok(()) diff --git a/collector/src/compile/execute/bencher.rs b/collector/src/compile/execute/bencher.rs index cf4663be4..d113b9025 100644 --- a/collector/src/compile/execute/bencher.rs +++ b/collector/src/compile/execute/bencher.rs @@ -101,7 +101,7 @@ impl<'a> BenchProcessor<'a> { }; let target = match target { - Target::X86_64UnknownLinuxGnu => database::Target::X86_64UnknownLinuxGnu + Target::X86_64UnknownLinuxGnu => database::Target::X86_64UnknownLinuxGnu, }; let mut buf = FuturesUnordered::new(); @@ -206,8 +206,15 @@ impl Processor for BenchProcessor<'_> { res.0.stats.retain(|key, _| key.starts_with("size:")); } - self.insert_stats(collection, scenario, profile, data.backend, data.target, res.0) - .await; + self.insert_stats( + collection, + scenario, + profile, + data.backend, + data.target, + res.0, + ) + .await; Ok(Retry::No) } diff --git a/database/src/bin/postgres-to-sqlite.rs b/database/src/bin/postgres-to-sqlite.rs index fd120e440..c6299544e 100644 --- a/database/src/bin/postgres-to-sqlite.rs +++ b/database/src/bin/postgres-to-sqlite.rs @@ -246,7 +246,8 @@ impl Table for PstatSeries { } fn postgres_select_statement(&self, _since_weeks_ago: Option) -> String { - "select id, crate, profile, scenario, backend, target, metric from ".to_string() + self.name() + "select id, crate, profile, scenario, backend, target, metric from ".to_string() + + self.name() } fn sqlite_insert_statement(&self) -> &'static str { diff --git a/database/src/pool.rs b/database/src/pool.rs index c1d332799..d33f5b432 100644 --- a/database/src/pool.rs +++ b/database/src/pool.rs @@ -1,4 +1,6 @@ -use crate::{ArtifactCollection, ArtifactId, ArtifactIdNumber, CodegenBackend, CompileBenchmark, Target}; +use crate::{ + ArtifactCollection, ArtifactId, ArtifactIdNumber, CodegenBackend, CompileBenchmark, Target, +}; use crate::{CollectionId, Index, Profile, QueuedCommit, Scenario, Step}; use chrono::{DateTime, Utc}; use hashbrown::HashMap; diff --git a/database/src/selector.rs b/database/src/selector.rs index 2afc63fed..d4479b95c 100644 --- a/database/src/selector.rs +++ b/database/src/selector.rs @@ -28,7 +28,8 @@ use std::{ }; use crate::{ - interpolate::Interpolate, metric::Metric, ArtifactId, ArtifactIdIter, Benchmark, CodegenBackend, Connection, Index, Lookup, Profile, Scenario, Target + interpolate::Interpolate, metric::Metric, ArtifactId, ArtifactIdIter, Benchmark, + CodegenBackend, Connection, Index, Lookup, Profile, Scenario, Target, }; #[derive(Debug)] @@ -260,19 +261,21 @@ impl BenchmarkQuery for CompileBenchmarkQuery { && self.target.matches(target) && self.metric.matches(metric) }) - .map(|(&(benchmark, profile, scenario, backend, target, metric), sid)| { - ( - CompileTestCase { - benchmark, - profile, - 
+                            scenario,
+                            backend,
+                            target,
+                        },
+                        metric,
+                        sid,
+                    )
+                },
+            )
             .collect();
         statistic_descriptions.sort_unstable();

From 0b40d8f8ef96195a72daa6345bce43c682a23dac Mon Sep 17 00:00:00 2001
From: James Barford-Evans
Date: Mon, 17 Mar 2025 10:52:13 +0000
Subject: [PATCH 7/7] Fix - put back database migration

---
 database/src/pool/sqlite.rs | 1 +
 1 file changed, 1 insertion(+)

diff --git a/database/src/pool/sqlite.rs b/database/src/pool/sqlite.rs
index 6f9f871eb..27d6b46de 100644
--- a/database/src/pool/sqlite.rs
+++ b/database/src/pool/sqlite.rs
@@ -385,6 +385,7 @@ static MIGRATIONS: &[Migration] = &[
         alter table pstat_series_new rename to pstat_series;
         "#,
     ),
+    Migration::new("alter table pull_request_build add column backends text"),
     // Add target as a unique constraint, defaulting to 'x86_64-unknown-linux-gnu'
     Migration::without_foreign_key_constraints(
         r#"
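Note for readers of the series: the hunks above depend on `Target` round-tripping through its triple string (`Target::from_str(...)` when loading `pstat_series` rows, `target.to_string()` when inserting them), but those trait impls are not part of the diffs shown here. Below is a minimal, self-contained sketch of how such a round-trip could look, assuming a `FromStr`/`Display` pair keyed on the canonical `x86_64-unknown-linux-gnu` triple; it mirrors, rather than reproduces, the actual `database::Target` implementation.

```rust
use std::fmt;
use std::str::FromStr;

// Mirror of the single-variant `Target` enum added in this series.
// The trait impls are an assumed sketch: they are not shown in the
// patches above, only their call sites are.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum Target {
    /// `x86_64-unknown-linux-gnu`
    X86_64UnknownLinuxGnu,
}

impl Default for Target {
    fn default() -> Self {
        Self::X86_64UnknownLinuxGnu
    }
}

impl FromStr for Target {
    type Err = String;

    // Parse the triple text stored in the `target` column back into the enum.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "x86_64-unknown-linux-gnu" => Ok(Self::X86_64UnknownLinuxGnu),
            _ => Err(format!("unknown target `{s}`")),
        }
    }
}

impl fmt::Display for Target {
    // Render the enum as the triple text written into `pstat_series.target`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::X86_64UnknownLinuxGnu => f.write_str("x86_64-unknown-linux-gnu"),
        }
    }
}

fn main() {
    // The property the sqlite/postgres code above relies on: the stored
    // text column and the enum convert back and forth losslessly.
    let stored = Target::default().to_string();
    assert_eq!(stored, "x86_64-unknown-linux-gnu");
    assert_eq!(Target::from_str(&stored).unwrap(), Target::default());
}
```

Storing the canonical triple string, rather than an enum discriminant, is what lets the SQL default `'x86_64-unknown-linux-gnu'` used by the migrations above double as a valid parse input for the same type.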