Rename pipeline to tasks. (#8157)
### Description

Renaming the `pipeline` key to `tasks` in `turbo.json`.

We want this key to better express what Turborepo does with the configuration provided in it. We've seen that the term "pipeline" can confuse users about how `turbo` _really_ executes the task graph.

In reality, the keys in the `pipeline` object are the list of _tasks_ registered to `turbo` - so let's call it that.
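
For users, the change amounts to renaming the top-level key; the task definitions under it are untouched. A minimal sketch, using task entries borrowed from the test fixtures in this diff rather than a complete config:

```diff
 {
   "globalEnv": [],
-  "pipeline": {
+  "tasks": {
     "build": { "dependsOn": ["^build", "prepare"] },
     "test": { "dependsOn": ["^build", "prepare"] },
     "prepare": {}
   }
 }
```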

### Non-goals

This PR does not include updates to:
- The examples in the repo
- JS packages
- `eslint-plugin-turbo`

These updates will follow in subsequent PRs.

### Testing Instructions

Hopefully CI will do its job here - but, of course, I would appreciate a review of my changes to make sure my untrained eye isn't missing anything.

CLOSES TURBO-3225
anthonyshew authored and chris-olszewski committed May 29, 2024
1 parent 1787d85 commit b8e21df
Showing 102 changed files with 193 additions and 204 deletions.
2 changes: 1 addition & 1 deletion crates/turborepo-lib/src/commands/link.rs
@@ -720,7 +720,7 @@ mod test {

fs::write(
turbo_json_file.as_path(),
r#"{ "globalEnv": [], "pipeline": {} }"#,
r#"{ "globalEnv": [], "tasks": {} }"#,
)
.unwrap();

36 changes: 18 additions & 18 deletions crates/turborepo-lib/src/engine/builder.rs
@@ -389,8 +389,8 @@ impl<'a> EngineBuilder<'a> {
};

let task_id_as_name = task_id.as_task_name();
- if turbo_json.pipeline.contains_key(&task_id_as_name)
- || turbo_json.pipeline.contains_key(task_name)
+ if turbo_json.tasks.contains_key(&task_id_as_name)
+ || turbo_json.tasks.contains_key(task_name)
{
Ok(true)
} else if !matches!(workspace, PackageName::Root) {
@@ -441,7 +441,7 @@ impl<'a> EngineBuilder<'a> {
});
}

- if let Some(workspace_def) = workspace_json.pipeline.get(task_name) {
+ if let Some(workspace_def) = workspace_json.tasks.get(task_name) {
task_definitions.push(workspace_def.value.clone());
}
}
@@ -665,13 +665,13 @@ mod test {
);

a_turbo_json
- .create_with_contents(r#"{"pipeline": {"build": {}}}"#)
+ .create_with_contents(r#"{"tasks": {"build": {}}}"#)
.unwrap();

let turbo_json = engine_builder
.load_turbo_json(&PackageName::from("a"))
.unwrap();
- assert_eq!(turbo_json.pipeline.len(), 1);
+ assert_eq!(turbo_json.tasks.len(), 1);
}

fn turbo_json(value: serde_json::Value) -> TurboJson {
@@ -706,7 +706,7 @@ mod test {
(
PackageName::Root,
turbo_json(json!({
"pipeline": {
"tasks": {
"test": { "inputs": ["testing"] },
"build": { "inputs": ["primary"] },
"a#build": { "inputs": ["special"] },
@@ -716,7 +716,7 @@ mod test {
(
PackageName::from("b"),
turbo_json(json!({
"pipeline": {
"tasks": {
"build": { "inputs": ["outer"]},
}
})),
@@ -784,7 +784,7 @@ mod test {
let turbo_jsons = vec![(
PackageName::Root,
turbo_json(json!({
"pipeline": {
"tasks": {
"test": { "dependsOn": ["^build", "prepare"] },
"build": { "dependsOn": ["^build", "prepare"] },
"prepare": {},
@@ -843,7 +843,7 @@ mod test {
let turbo_jsons = vec![(
PackageName::Root,
turbo_json(json!({
"pipeline": {
"tasks": {
"test": { "dependsOn": ["^build"] },
"build": { "dependsOn": ["^build"] },
}
@@ -882,7 +882,7 @@ mod test {
let turbo_jsons = vec![(
PackageName::Root,
turbo_json(json!({
"pipeline": {
"tasks": {
"build": { "dependsOn": ["^build"] },
"app1#special": { "dependsOn": ["^build"] },
}
@@ -919,7 +919,7 @@ mod test {
let turbo_jsons = vec![(
PackageName::Root,
turbo_json(json!({
"pipeline": {
"tasks": {
"build": { "dependsOn": ["^build"] },
"test": { "dependsOn": ["^build"] },
"//#test": {},
@@ -972,7 +972,7 @@ mod test {
let turbo_jsons = vec![(
PackageName::Root,
turbo_json(json!({
"pipeline": {
"tasks": {
"build": { "dependsOn": ["^build"] },
"libA#build": { "dependsOn": ["//#root-task"] },
"//#root-task": {},
@@ -1016,7 +1016,7 @@ mod test {
let turbo_jsons = vec![(
PackageName::Root,
turbo_json(json!({
"pipeline": {
"tasks": {
"build": { "dependsOn": ["^build"] },
"libA#build": { "dependsOn": ["//#root-task"] },
}
@@ -1049,7 +1049,7 @@ mod test {
let turbo_jsons = vec![(
PackageName::Root,
turbo_json(json!({
"pipeline": {
"tasks": {
"libA#build": { "dependsOn": ["app1#compile", "app1#test"] },
"build": { "dependsOn": ["^build"] },
"compile": {},
@@ -1095,7 +1095,7 @@ mod test {
let turbo_jsons = vec![(
PackageName::Root,
turbo_json(json!({
"pipeline": {
"tasks": {
"build": { "dependsOn": ["^build"] },
"foo": {},
"libA#build": { "dependsOn": ["//#foo"] }
@@ -1134,7 +1134,7 @@ mod test {
let turbo_jsons = vec![(
PackageName::Root,
turbo_json(json!({
"pipeline": {
"tasks": {
"build": { "dependsOn": ["^build", "prepare"] },
"test": { "dependsOn": ["^build", "prepare"] },
"prepare": {},
@@ -1183,7 +1183,7 @@ mod test {
let turbo_jsons = vec![(
PackageName::Root,
turbo_json(json!({
"pipeline": {
"tasks": {
"build": { "dependsOn": ["^build"] },
}
})),
@@ -1221,7 +1221,7 @@ mod test {
let turbo_jsons = vec![(
PackageName::Root,
turbo_json(json!({
"pipeline": {
"tasks": {
"a#build": { },
"b#build": { "dependsOn": ["a#build"] }
}
4 changes: 2 additions & 2 deletions crates/turborepo-lib/src/run/builder.rs
@@ -358,7 +358,7 @@ impl RunBuilder {
task_name = task_name.into_root_task()
}

- if root_turbo_json.pipeline.contains_key(&task_name) {
+ if root_turbo_json.tasks.contains_key(&task_name) {
filtered_pkgs.insert(PackageName::Root);
break;
}
@@ -435,7 +435,7 @@ impl RunBuilder {
pkg_dep_graph,
self.opts.run_opts.single_package,
)
- .with_root_tasks(root_turbo_json.pipeline.keys().cloned())
+ .with_root_tasks(root_turbo_json.tasks.keys().cloned())
.with_turbo_jsons(Some(
Some((PackageName::Root, root_turbo_json.clone()))
.into_iter()
55 changes: 26 additions & 29 deletions crates/turborepo-lib/src/turbo_json/mod.rs
@@ -53,7 +53,7 @@ pub struct TurboJson {
pub(crate) global_dot_env: Option<Vec<RelativeUnixPathBuf>>,
pub(crate) global_env: Vec<String>,
pub(crate) global_pass_through_env: Option<Vec<String>>,
- pub(crate) pipeline: Pipeline,
+ pub(crate) tasks: Pipeline,
}

// Iterable is required to enumerate allowed keys
@@ -118,10 +118,10 @@ pub struct RawTurboJson {
// .env files to consider, in order.
#[serde(skip_serializing_if = "Option::is_none")]
global_dot_env: Option<Vec<UnescapedString>>,
- // Pipeline is a map of Turbo pipeline entries which define the task graph
+ // Tasks is a map of task entries which define the task graph
// and cache behavior on a per task or per package-task basis.
#[serde(skip_serializing_if = "Option::is_none")]
- pub pipeline: Option<Pipeline>,
+ pub tasks: Option<Pipeline>,
// Configuration options when interfacing with the remote cache
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) remote_cache: Option<RawRemoteCacheOptions>,
@@ -402,7 +402,7 @@ impl RawTurboJson {
/// workspaces
pub fn prune_tasks<S: AsRef<str>>(&self, workspaces: &[S]) -> Self {
let mut this = self.clone();
- if let Some(pipeline) = &mut this.pipeline {
+ if let Some(pipeline) = &mut this.tasks {
pipeline.0.retain(|task_name, _| {
task_name.in_workspace(ROOT_PKG_NAME)
|| workspaces
@@ -446,7 +446,7 @@ impl RawTurboJson {
}

Some(RawTurboJson {
- pipeline: Some(pipeline),
+ tasks: Some(pipeline),
..RawTurboJson::default()
})
}
@@ -521,7 +521,7 @@ impl TryFrom<RawTurboJson> for TurboJson {
Ok(global_dot_env)
})
.transpose()?,
- pipeline: raw_turbo.pipeline.unwrap_or_default(),
+ tasks: raw_turbo.tasks.unwrap_or_default(),
// copy these over, we don't need any changes here.
extends: raw_turbo
.extends
@@ -571,7 +571,7 @@ impl TurboJson {
// tasks
(true, Ok(mut turbo_from_files)) => {
let mut pipeline = Pipeline::default();
- for (task_name, task_definition) in turbo_from_files.pipeline {
+ for (task_name, task_definition) in turbo_from_files.tasks {
if task_name.is_package_task() {
let (span, text) = task_definition.span_and_text("turbo.json");

@@ -585,7 +585,7 @@
pipeline.insert(task_name.into_root_task(), task_definition);
}

- turbo_from_files.pipeline = pipeline;
+ turbo_from_files.tasks = pipeline;

turbo_from_files
}
@@ -599,7 +599,7 @@
// Explicitly set cache to Some(false) in this definition
// so we can pretend it was set on purpose. That way it
// won't get clobbered by the merge function.
- turbo_json.pipeline.insert(
+ turbo_json.tasks.insert(
task_name,
Spanned::new(RawTaskDefinition {
cache: Some(Spanned::new(false)),
@@ -613,7 +613,7 @@
}

fn has_task(&self, task_name: &TaskName) -> bool {
- for key in self.pipeline.keys() {
+ for key in self.tasks.keys() {
if key == task_name || (key.task() == task_name.task() && !task_name.is_package_task())
{
return true;
@@ -634,12 +634,9 @@ impl TurboJson {
}

pub fn task(&self, task_id: &TaskId, task_name: &TaskName) -> Option<RawTaskDefinition> {
- match self.pipeline.get(&task_id.as_task_name()) {
+ match self.tasks.get(&task_id.as_task_name()) {
Some(entry) => Some(entry.value.clone()),
- None => self
- .pipeline
- .get(task_name)
- .map(|entry| entry.value.clone()),
+ None => self.tasks.get(task_name).map(|entry| entry.value.clone()),
}
}

@@ -656,7 +653,7 @@ impl TurboJson {
}

pub fn has_root_tasks(&self) -> bool {
- self.pipeline
+ self.tasks
.iter()
.any(|(task_name, _)| task_name.package() == Some(ROOT_PKG_NAME))
}
@@ -666,7 +663,7 @@ type TurboJSONValidation = fn(&TurboJson) -> Vec<Error>;

pub fn validate_no_package_task_syntax(turbo_json: &TurboJson) -> Vec<Error> {
turbo_json
- .pipeline
+ .tasks
.iter()
.filter(|(task_name, _)| task_name.is_package_task())
.map(|(task_name, entry)| {
@@ -815,7 +812,7 @@ mod tests {
..PackageJson::default()
},
TurboJson {
- pipeline: Pipeline([(
+ tasks: Pipeline([(
"//#build".into(),
Spanned::new(RawTaskDefinition {
cache: Some(Spanned::new(false)),
@@ -828,7 +825,7 @@ mod tests {
)]
#[test_case(
Some(r#"{
"pipeline": {
"tasks": {
"build": {
"cache": true
}
@@ -839,12 +836,12 @@ mod tests {
..PackageJson::default()
},
TurboJson {
- pipeline: Pipeline([(
+ tasks: Pipeline([(
"//#build".into(),
Spanned::new(RawTaskDefinition {
- cache: Some(Spanned::new(true).with_range(84..88)),
+ cache: Some(Spanned::new(true).with_range(81..85)),
..RawTaskDefinition::default()
- }).with_range(53..106)
+ }).with_range(50..103)
),
(
"//#test".into(),
@@ -876,7 +873,7 @@ mod tests {
)?;
turbo_json.text = None;
turbo_json.path = None;
- for (_, task_definition) in turbo_json.pipeline.iter_mut() {
+ for (_, task_definition) in turbo_json.tasks.iter_mut() {
task_definition.path = None;
task_definition.text = None;
}
@@ -1057,7 +1054,7 @@ mod tests {
#[test]
fn test_turbo_task_pruning() {
let json = RawTurboJson::parse_from_serde(json!({
"pipeline": {
"tasks": {
"//#top": {},
"build": {},
"a#build": {},
@@ -1067,7 +1064,7 @@ mod tests {
.unwrap();
let pruned_json = json.prune_tasks(&["a"]);
let expected: RawTurboJson = RawTurboJson::parse_from_serde(json!({
"pipeline": {
"tasks": {
"//#top": {},
"build": {},
"a#build": {},
@@ -1076,8 +1073,8 @@ mod tests {
.unwrap();
// We do this comparison manually so we don't compare the `task_name_range`
// fields, which are expected to be different
- let pruned_pipeline = pruned_json.pipeline.unwrap();
- let expected_pipeline = expected.pipeline.unwrap();
+ let pruned_pipeline = pruned_json.tasks.unwrap();
+ let expected_pipeline = expected.tasks.unwrap();
for (
(pruned_task_name, pruned_pipeline_entry),
(expected_task_name, expected_pipeline_entry),
@@ -1098,7 +1095,7 @@ mod tests {
#[test_case("junk", None ; "invalid value")]
fn test_parsing_output_logs_mode(output_logs: &str, expected: Option<OutputLogsMode>) {
let json: Result<RawTurboJson, _> = RawTurboJson::parse_from_serde(json!({
"pipeline": {
"tasks": {
"build": {
"outputLogs": output_logs,
}
@@ -1108,7 +1105,7 @@ mod tests {
let actual = json
.as_ref()
.ok()
- .and_then(|j| j.pipeline.as_ref())
+ .and_then(|j| j.tasks.as_ref())
.and_then(|pipeline| pipeline.0.get(&TaskName::from("build")))
.and_then(|build| build.value.output_logs.clone())
.map(|mode| mode.into_inner());
