diff --git a/docs/automatic_linting.md b/docs/automatic_linting.md
index 06be5705..d1a1e046 100644
--- a/docs/automatic_linting.md
+++ b/docs/automatic_linting.md
@@ -10,4 +10,4 @@ To enable automatic linting with the YAML language server, you need to add the f
 # yaml-language-server: $schema=https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json
 ```
 
-**Alternatively**, if you prefer not to add this line to your file, you can install the [JSON Schema Store Catalog extension](https://marketplace.visualstudio.com/items?itemName=remcohaszing.schemastore). This extension will also enable automatic linting for your recipe files.
\ No newline at end of file
+**Alternatively**, if you prefer not to add this line to your file, you can install the [JSON Schema Store Catalog extension](https://marketplace.visualstudio.com/items?itemName=remcohaszing.schemastore). This extension will also enable automatic linting for your recipe files.
diff --git a/docs/build_options.md b/docs/build_options.md
index fe47072a..c56de4c1 100644
--- a/docs/build_options.md
+++ b/docs/build_options.md
@@ -8,25 +8,80 @@ There are some specialized build options to control various features:
 
 These are all found under the `build` key in the `recipe.yaml`.
 
-## Always include and always copy files
+## Include only certain files in the package
 
-There are some options that control the inclusion of files in the final package.
+Sometimes you may want to include only a subset of the files installed by the
+build process in your package. For this, the `files` key can be used. Only _new_
+files are considered for inclusion (i.e. files that were not in the host
+environment beforehand).
 
-The `always_include_files` option can be used to include files even if they are
-already in the environment as part of some other host dependency. This is normally
-"clobbering" and should be used with caution (since packages should not have any overlapping files).
+```yaml title="recipe.yaml"
+build:
+  # select files to be included in the package
+  # this can be used to remove files from the package, even if they are installed in the
+  # environment
+  files: list of globs
+```
+
+For example, to include only the header files in a package, you could use:
+
+```yaml title="recipe.yaml"
+build:
+  files:
+    - include/**/*.h
+```
+
+Glob patterns throughout the recipe file can also use a flexible `include` /
+`exclude` pair, such as:
+
+```yaml title="recipe.yaml"
+build:
+  files:
+    include:
+      - include/**/*.h
+    exclude:
+      - include/**/private.h
+```
+
+### Glob evaluation
+
+Glob patterns are used throughout the build options to specify files. The
+patterns are matched against the relative path of the file in the build
+directory. Patterns can contain `*` to match any number of characters, `?` to
+match a single character, and `**` to match any number of directories.
+
+For example:
+
+- `*.txt` matches all files ending in `.txt`
+- `**/*.txt` matches all files ending in `.txt` in any directory
+- `**/test_*.txt` matches all files starting with `test_` and ending in `.txt`
+  in any directory
+- `foo/` matches all files under the `foo` directory
+
+The globs are always evaluated relative to the prefix directory. If you have no
+`include` globs, but an `exclude` glob, then all files are included except those
+that match the `exclude` glob. This is equivalent to `include: ['**']`.
 
-The `always_copy_files` option can be used to copy files instead of linking them.
-This is useful for files that might be modified inside the environment (e.g. 
-configuration files).
-Normally, files are linked from a central cache into the environment to save space – that means
-that files modified in one environment will be modified in all environments. This is not always
-desirable, and in that case you can use the `always_copy_files` option.
+## Always include and always copy files
 
-??? note "How `always_copy_files` works"
-    The `always_copy_files` option works by setting the `no_link` option in the
-    `info/paths.json` to `true` for the files in question. This means that the
-    files are copied instead of linked when the package is installed.
+There are some options that control the inclusion of files in the final package.
 
+The `always_include_files` option can be used to include files even if they are
+already in the environment as part of some other host dependency. This is
+normally "clobbering" and should be used with caution (since packages should not
+have any overlapping files).
+
+The `always_copy_files` option can be used to copy files instead of linking
+them. This is useful for files that might be modified inside the environment
+(e.g. configuration files). Normally, files are linked from a central cache into
+the environment to save space – that means that files modified in one
+environment will be modified in all environments. This is not always desirable,
+and in that case you can use the `always_copy_files` option.
+
+??? note "How `always_copy_files` works"
+    The `always_copy_files` option works by setting the `no_link` option in the
+    `info/paths.json` to `true` for the files in question. This means that the
+    files are copied instead of linked when the package is installed.
 
 ```yaml title="recipe.yaml"
 build:
@@ -38,19 +93,6 @@ build:
   always_copy_files: list of globs
 ```
 
-!!! note "Glob patterns"
-    Glob patterns are used througout the build options to specify files. The
-    patterns are matched against the relative path of the file in the build
-    directory.
-    Patterns can contain `*` to match any number of characters, `?` to match a
-    single character, and `**` to match any number of directories.
-
-    For example:
-
-    - `*.txt` matches all files ending in `.txt`
-    - `**/*.txt` matches all files ending in `.txt` in any directory
-    - `**/test_*.txt` matches all files starting with `test_` and ending in `.txt` in any directory
-
 ## Merge build and host environments
 
 In very rare cases you might want to merge the build and host environments to
@@ -135,7 +177,6 @@ build:
 
   # used to prefer this variant less
   down_prioritize_variant: integer (defaults to 0, higher is less preferred)
-
 ```
 
 ## Dynamic linking configuration
@@ -160,8 +201,8 @@ If you want to stop `rattler-build` from relocating the binaries, you can set
 `binary_relocation` to `false`. If you want to only relocate some binaries, you
 can select the relevant ones with a glob pattern.
 
-To read more about `rpath`s and how rattler-build creates relocatable binary packages,
-see the [internals](internals.md) docs.
+To read more about `rpath`s and how rattler-build creates relocatable binary
+packages, see the [internals](internals.md) docs.
 
 If you link against some libraries (possibly even outside of the prefix, in a
 system location), then you can use the `missing_dso_allowlist` to allow linking
@@ -169,12 +210,12 @@ against these and suppress any warnings. This list is pre-populated with a
 list of known system libraries on the different operating systems.
 
 As part of the post-processing, `rattler-build` checks for overlinking and
-overdepending. 
"Overlinking" is when a binary links against a library that is not -specified in the run requirements. This is usually a mistake because the library -would not be present in the environment when the package is installed. +overdepending. "Overlinking" is when a binary links against a library that is +not specified in the run requirements. This is usually a mistake because the +library would not be present in the environment when the package is installed. -Conversely, "overdepending" is when a library is part of the run requirements, but -is not actually used by any of the binaries/libraries in the package. +Conversely, "overdepending" is when a library is part of the run requirements, +but is not actually used by any of the binaries/libraries in the package. ```yaml title="recipe.yaml" build: diff --git a/docs/highlevel.md b/docs/highlevel.md index a2ec0f76..dae124ed 100644 --- a/docs/highlevel.md +++ b/docs/highlevel.md @@ -19,7 +19,7 @@ dependencies are. From the recipe file, `rattler-build` executes several steps: 2. **Fetch source**: - Retrieve specified source files, such as `.tar.gz` files, `git` repositories, local paths. + Retrieve specified source files, such as `.tar.gz` files, `git` repositories, local paths. Additionally, this step will apply patches that can be specified alongside the source file. 3. **Install build environments**: diff --git a/docs/tutorials/python.md b/docs/tutorials/python.md index b637eb99..da237dab 100644 --- a/docs/tutorials/python.md +++ b/docs/tutorials/python.md @@ -1,10 +1,10 @@ # Writing a Python package -Writing a Python package is fairly straightforward, especially for "Python-only" packages. +Writing a Python package is fairly straightforward, especially for "Python-only" packages. In the second example we will build a package for `numpy` which contains compiled code. ## A Python-only package -The following recipe uses the `noarch: python` setting to build a `noarch` package that can be installed on any platform without modification. +The following recipe uses the `noarch: python` setting to build a `noarch` package that can be installed on any platform without modification. This is very handy for packages that are pure Python and do not contain any compiled extensions. Additionally, `noarch: python` packages work with a range of Python versions (contrary to packages with compiled extensions that are tied to a specific Python version). @@ -72,7 +72,7 @@ rattler-build build --recipe ./ipywidgets ## A Python package with compiled extensions We will build a package for `numpy` – which contains compiled code. -Since compiled code is `python` version-specific, we will need to specify the `python` version explicitly. +Since compiled code is `python` version-specific, we will need to specify the `python` version explicitly. The best way to do this is with a "variant_config.yaml" file: ```yaml title="variant_config.yaml" diff --git a/src/linux/link.rs b/src/linux/link.rs index 17dff1f3..e23c2fc6 100644 --- a/src/linux/link.rs +++ b/src/linux/link.rs @@ -1,6 +1,5 @@ //! 
Relink shared objects to use an relative path prefix -use globset::GlobSet; use goblin::elf::{Dyn, Elf}; use goblin::elf64::header::ELFMAG; use goblin::strtab::Strtab; @@ -14,6 +13,7 @@ use std::io::Read; use std::path::{Path, PathBuf}; use crate::post_process::relink::{RelinkError, Relinker}; +use crate::recipe::parser::GlobVec; use crate::system_tools::{SystemTools, Tool}; use crate::utils::to_lexical_absolute; @@ -135,7 +135,7 @@ impl Relinker for SharedObject { prefix: &Path, encoded_prefix: &Path, custom_rpaths: &[String], - rpath_allowlist: Option<&GlobSet>, + rpath_allowlist: &GlobVec, system_tools: &SystemTools, ) -> Result<(), RelinkError> { if !self.has_dynamic { @@ -172,7 +172,7 @@ impl Relinker for SharedObject { let resolved = self.resolve_rpath(rpath, prefix, encoded_prefix); if resolved.starts_with(encoded_prefix) { final_rpaths.push(rpath.clone()); - } else if rpath_allowlist.map(|g| g.is_match(rpath)).unwrap_or(false) { + } else if rpath_allowlist.is_match(rpath) { tracing::info!("Rpath in allow list: {}", rpath.display()); final_rpaths.push(rpath.clone()); } @@ -197,10 +197,7 @@ impl Relinker for SharedObject { "$ORIGIN/{}", relative_path.to_string_lossy() ))); - } else if rpath_allowlist - .map(|glob| glob.is_match(rpath)) - .unwrap_or(false) - { + } else if rpath_allowlist.is_match(rpath) { tracing::info!("rpath ({:?}) for {:?} found in allowlist", rpath, self.path); final_rpaths.push(rpath.clone()); } else { @@ -402,7 +399,6 @@ fn builtin_relink(elf_path: &Path, new_rpath: &[PathBuf]) -> Result<(), RelinkEr #[cfg(test)] mod test { use super::*; - use globset::{Glob, GlobSetBuilder}; use std::{fs, path::Path}; use tempfile::tempdir_in; @@ -426,10 +422,7 @@ mod test { let binary_path = tmp_dir.join("zlink"); fs::copy(prefix.join("zlink"), &binary_path)?; - let globset = GlobSetBuilder::new() - .add(Glob::new("/usr/lib/custom**").unwrap()) - .build() - .unwrap(); + let globvec = GlobVec::from_vec(vec!["/usr/lib/custom**"], None); // default rpaths of the test binary are: // - /rattler-build_zlink/host_env_placehold/lib @@ -441,7 +434,7 @@ mod test { &prefix, encoded_prefix, &[], - Some(&globset), + &globvec, &SystemTools::default(), )?; let object = SharedObject::new(&binary_path)?; @@ -487,7 +480,7 @@ mod test { &prefix, encoded_prefix, &[String::from("lib/")], - None, + &GlobVec::default(), &SystemTools::default(), )?; let object = SharedObject::new(&binary_path)?; diff --git a/src/macos/link.rs b/src/macos/link.rs index ab9ee353..fe023ff5 100644 --- a/src/macos/link.rs +++ b/src/macos/link.rs @@ -1,5 +1,4 @@ //! 
Relink a dylib to use relative paths for rpaths -use globset::GlobSet; use goblin::mach::Mach; use memmap2::MmapMut; use scroll::Pread; @@ -10,6 +9,7 @@ use std::io::Read; use std::path::{Path, PathBuf}; use crate::post_process::relink::{RelinkError, Relinker}; +use crate::recipe::parser::GlobVec; use crate::system_tools::{SystemTools, Tool}; use crate::utils::to_lexical_absolute; @@ -149,7 +149,7 @@ impl Relinker for Dylib { prefix: &Path, encoded_prefix: &Path, custom_rpaths: &[String], - rpath_allowlist: Option<&GlobSet>, + rpath_allowlist: &GlobVec, system_tools: &SystemTools, ) -> Result<(), RelinkError> { let mut changes = DylibChanges::default(); @@ -177,7 +177,7 @@ impl Relinker for Dylib { let resolved = self.resolve_rpath(rpath, prefix, encoded_prefix); if resolved.starts_with(encoded_prefix) { final_rpaths.push(rpath.clone()); - } else if rpath_allowlist.map(|g| g.is_match(rpath)).unwrap_or(false) { + } else if rpath_allowlist.is_match(rpath) { tracing::info!("Rpath in allow list: {}", rpath.display()); final_rpaths.push(rpath.clone()); } @@ -203,7 +203,7 @@ impl Relinker for Dylib { final_rpaths.push(new_rpath.clone()); // changes.change_rpath.insert(rpath.clone(), new_rpath); // modified = true; - } else if rpath_allowlist.map(|g| g.is_match(rpath)).unwrap_or(false) { + } else if rpath_allowlist.is_match(rpath) { tracing::info!("Allowlisted rpath: {}", rpath.display()); final_rpaths.push(rpath.clone()); } else { @@ -533,11 +533,11 @@ mod tests { use tempfile::tempdir_in; use super::{install_name_tool, RelinkError}; - use crate::post_process::relink::Relinker; use crate::{ macos::link::{Dylib, DylibChanges}, system_tools::SystemTools, }; + use crate::{post_process::relink::Relinker, recipe::parser::GlobVec}; #[test] fn test_relink_builtin() -> Result<(), RelinkError> { @@ -663,7 +663,7 @@ mod tests { tmp_prefix, &encoded_prefix, &[], - None, + &GlobVec::default(), &SystemTools::default(), ) .unwrap(); diff --git a/src/package_test/content_test.rs b/src/package_test/content_test.rs index 2a0b2025..ac5bbe32 100644 --- a/src/package_test/content_test.rs +++ b/src/package_test/content_test.rs @@ -31,7 +31,7 @@ impl PackageContentsTest { target_platform: &Platform, ) -> Result, globset::Error> { let mut result = Vec::new(); - for include in self.include.globs() { + for include in self.include.include_globs() { let glob = if target_platform.is_windows() { format!("Library/include/{include}") } else { @@ -54,7 +54,7 @@ impl PackageContentsTest { ) -> Result, globset::Error> { let mut result = Vec::new(); - for bin in self.bin.globs() { + for bin in self.bin.include_globs() { let globset = if target_platform.is_windows() { // This is usually encoded as `PATHEXT` in the environment let path_ext = "{,.exe,.bat,.cmd,.com,.ps1}"; @@ -89,7 +89,7 @@ impl PackageContentsTest { if target_platform.is_windows() { // Windows is special because it requires both a `.dll` and a `.bin` file - for lib in self.lib.globs() { + for lib in self.lib.include_globs() { if lib.glob().ends_with(".dll") { result.push(( lib.glob().to_string(), @@ -120,7 +120,7 @@ impl PackageContentsTest { } } } else { - for lib in self.lib.globs() { + for lib in self.lib.include_globs() { let globset = if target_platform.is_osx() { if lib.glob().ends_with(".dylib") || lib.glob().ends_with(".a") { GlobSet::builder() @@ -173,7 +173,7 @@ impl PackageContentsTest { "lib/python*/site-packages" }; - for site_package in self.site_packages.globs() { + for site_package in self.site_packages.include_globs() { let mut globset = 
GlobSet::builder(); if site_package.glob().contains('/') { @@ -205,7 +205,7 @@ impl PackageContentsTest { pub fn files_as_globs(&self) -> Result, globset::Error> { let mut result = Vec::new(); - for file in self.files.globs() { + for file in self.files.include_globs() { let globset = GlobSet::builder().add(file.clone()).build()?; result.push((file.glob().to_string(), globset)); } @@ -362,7 +362,7 @@ mod tests { #[test] fn test_include_globs() { let package_contents = PackageContentsTest { - include: GlobVec::from_vec(vec!["foo", "bar"]), + include: GlobVec::from_vec(vec!["foo", "bar"], None), ..Default::default() }; @@ -374,7 +374,7 @@ mod tests { test_glob_matches(&globs, paths).unwrap(); let package_contents = PackageContentsTest { - include: GlobVec::from_vec(vec!["foo", "bar"]), + include: GlobVec::from_vec(vec!["foo", "bar"], None), ..Default::default() }; diff --git a/src/package_test/serialize_test.rs b/src/package_test/serialize_test.rs index 22015341..2a1b70d2 100644 --- a/src/package_test/serialize_test.rs +++ b/src/package_test/serialize_test.rs @@ -64,7 +64,7 @@ impl CommandsTest { &output.build_configuration.directories.recipe_dir, folder, ) - .with_parse_globs(globs.iter().map(AsRef::as_ref)) + .with_globvec(globs) .use_gitignore(true) .run()?; @@ -77,7 +77,7 @@ impl CommandsTest { &output.build_configuration.directories.work_dir, folder, ) - .with_parse_globs(globs.iter().map(AsRef::as_ref)) + .with_globvec(globs) .use_gitignore(true) .run()?; diff --git a/src/packaging.rs b/src/packaging.rs index c5ca37f5..42fb5103 100644 --- a/src/packaging.rs +++ b/src/packaging.rs @@ -82,8 +82,6 @@ fn copy_license_files( if output.recipe.about().license_file.is_empty() { Ok(None) } else { - let license_globs = output.recipe.about().license_file.clone(); - let licenses_folder = tmp_dir_path.join("info/licenses/"); fs::create_dir_all(&licenses_folder)?; @@ -91,7 +89,7 @@ fn copy_license_files( &output.build_configuration.directories.recipe_dir, &licenses_folder, ) - .with_parse_globs(license_globs.iter().map(AsRef::as_ref)) + .with_globvec(&output.recipe.about().license_file) .use_gitignore(false) .run()?; @@ -102,7 +100,7 @@ fn copy_license_files( &output.build_configuration.directories.work_dir, &licenses_folder, ) - .with_parse_globs(license_globs.iter().map(AsRef::as_ref)) + .with_globvec(&output.recipe.about().license_file) .use_gitignore(false) .run()?; @@ -359,6 +357,7 @@ impl Output { let files_after = Files::from_prefix( &self.build_configuration.directories.host_prefix, self.recipe.build().always_include_files(), + self.recipe.build().files(), )?; package_conda(self, tool_configuration, &files_after) diff --git a/src/packaging/file_finder.rs b/src/packaging/file_finder.rs index 25da9f07..d4f95320 100644 --- a/src/packaging/file_finder.rs +++ b/src/packaging/file_finder.rs @@ -1,6 +1,5 @@ use content_inspector::ContentType; use fs_err as fs; -use globset::GlobSet; use rattler_conda_types::PrefixRecord; use std::{ collections::{HashMap, HashSet}, @@ -10,7 +9,7 @@ use std::{ use tempfile::TempDir; use walkdir::WalkDir; -use crate::metadata::Output; +use crate::{metadata::Output, recipe::parser::GlobVec}; use super::{file_mapper, PackagingError}; @@ -65,7 +64,11 @@ impl Files { /// Find all files in the given (host) prefix and remove all previously installed files (based on the PrefixRecord /// of the conda environment). If always_include is Some, then all files matching the glob pattern will be included /// in the new_files set. 
- pub fn from_prefix(prefix: &Path, always_include: Option<&GlobSet>) -> Result { + pub fn from_prefix( + prefix: &Path, + always_include: &GlobVec, + files: &GlobVec, + ) -> Result { if !prefix.exists() { return Ok(Files { new_files: HashSet::new(), @@ -94,10 +97,15 @@ impl Files { let mut difference = current_files .difference(&previous_files) + // If we have an files glob, we only include files that match the glob + .filter(|f| { + files.is_empty() + || files.is_match(f.strip_prefix(prefix).expect("File should be in prefix")) + }) .cloned() .collect::>(); - if let Some(always_include) = always_include { + if !always_include.is_empty() { for file in current_files { let file_without_prefix = file.strip_prefix(prefix).expect("File should be in prefix"); diff --git a/src/packaging/metadata.rs b/src/packaging/metadata.rs index 88bcfebf..c716d046 100644 --- a/src/packaging/metadata.rs +++ b/src/packaging/metadata.rs @@ -383,10 +383,7 @@ impl Output { )?; let digest = compute_file_digest::(p)?; - let no_link = always_copy_files - .as_ref() - .map(|g| g.is_match(&relative_path)) - .unwrap_or(false); + let no_link = always_copy_files.is_match(&relative_path); paths_json.paths.push(PathsEntry { sha256: Some(digest), relative_path, diff --git a/src/post_process/checks.rs b/src/post_process/checks.rs index 111bcc80..2c551343 100644 --- a/src/post_process/checks.rs +++ b/src/post_process/checks.rs @@ -316,11 +316,7 @@ pub fn perform_linking_checks( } // Check if we allow overlinking. - if dynamic_linking - .missing_dso_allowlist() - .map(|v| v.is_match(lib)) - .unwrap_or(false) - { + if dynamic_linking.missing_dso_allowlist().is_match(lib) { tracing::info!( "{lib:?} is missing in run dependencies for {:?}, \ yet it is included in the allow list. Skipping...", diff --git a/src/post_process/python.rs b/src/post_process/python.rs index ae1d8dab..8cd03d8b 100644 --- a/src/post_process/python.rs +++ b/src/post_process/python.rs @@ -5,7 +5,6 @@ //! - Compiling `.py` files to `.pyc` files //! - Replacing the contents of `.dist-info/INSTALLER` files with "conda" use fs_err as fs; -use globset::GlobSet; use rattler::install::{get_windows_launcher, python_entry_point_template, PythonInfo}; use rattler_conda_types::Platform; use std::collections::HashSet; @@ -15,6 +14,7 @@ use std::process::Command; use crate::metadata::Output; use crate::packaging::{PackagingError, TempFiles}; +use crate::recipe::parser::GlobVec; use crate::utils::to_forward_slash_lossy; pub fn python_bin(prefix: &Path, target_platform: &Platform) -> PathBuf { @@ -31,7 +31,7 @@ pub fn compile_pyc( output: &Output, paths: &HashSet, base_path: &Path, - skip_paths: Option<&GlobSet>, + skip_paths: &GlobVec, ) -> Result, PackagingError> { let build_config = &output.build_configuration; let python_interpreter = if output.build_configuration.cross_compilation() { @@ -97,7 +97,7 @@ pub fn compile_pyc( } }); - if let Some(skip_paths) = skip_paths { + if !skip_paths.is_empty() { py_files.retain(|p| { !skip_paths.is_match( p.strip_prefix(base_path) @@ -163,12 +163,7 @@ pub fn python(temp_files: &TempFiles, output: &Output) -> Result, + rpath_allowlist: &GlobVec, system_tools: &SystemTools, ) -> Result<(), RelinkError>; } diff --git a/src/recipe/parser/about.rs b/src/recipe/parser/about.rs index 24978366..97943b21 100644 --- a/src/recipe/parser/about.rs +++ b/src/recipe/parser/about.rs @@ -19,7 +19,7 @@ use crate::{ validate_keys, }; -use super::FlattenErrors; +use super::{FlattenErrors, GlobVec}; /// About information. 
#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)] @@ -40,8 +40,8 @@ pub struct About { #[serde(skip_serializing_if = "Option::is_none")] pub license_family: Option, /// The license file(s) of the package. - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub license_file: Vec, + #[serde(default, skip_serializing_if = "GlobVec::is_empty")] + pub license_file: GlobVec, /// The license URL of the package. #[serde(skip_serializing_if = "Option::is_none")] pub license_url: Option, diff --git a/src/recipe/parser/build.rs b/src/recipe/parser/build.rs index b37c0731..880168da 100644 --- a/src/recipe/parser/build.rs +++ b/src/recipe/parser/build.rs @@ -1,6 +1,5 @@ use std::str::FromStr; -use globset::GlobSet; use rattler_conda_types::{package::EntryPoint, NoArchType}; use serde::{Deserialize, Serialize}; @@ -103,10 +102,15 @@ pub struct Build { /// Variant ignore and use keys #[serde(default, skip_serializing_if = "VariantKeyUsage::is_default")] pub(super) variant: VariantKeyUsage, + /// Prefix detection settings #[serde(default, skip_serializing_if = "PrefixDetection::is_default")] pub(super) prefix_detection: PrefixDetection, + /// Post-process operations for regex based replacements #[serde(default, skip_serializing_if = "Vec::is_empty")] pub(super) post_process: Vec, + /// Include files in the package + #[serde(default, skip_serializing_if = "GlobVec::is_empty")] + pub(super) files: GlobVec, } /// Post process operations for regex based replacements @@ -164,13 +168,18 @@ impl Build { } /// Get the always copy files settings. - pub fn always_copy_files(&self) -> Option<&GlobSet> { - self.always_copy_files.globset() + pub fn always_copy_files(&self) -> &GlobVec { + &self.always_copy_files } /// Get the always include files settings. - pub fn always_include_files(&self) -> Option<&GlobSet> { - self.always_include_files.globset() + pub fn always_include_files(&self) -> &GlobVec { + &self.always_include_files + } + + /// Get the include files settings. + pub fn files(&self) -> &GlobVec { + &self.files } /// Get the prefix detection settings. @@ -211,7 +220,8 @@ impl TryConvertNode for RenderedMappingNode { merge_build_and_host_envs, variant, prefix_detection, - post_process + post_process, + files } Ok(build) @@ -262,13 +272,13 @@ impl DynamicLinking { } /// Get the missing DSO allowlist. - pub fn missing_dso_allowlist(&self) -> Option<&GlobSet> { - self.missing_dso_allowlist.globset() + pub fn missing_dso_allowlist(&self) -> &GlobVec { + &self.missing_dso_allowlist } /// Get the rpath allow list. - pub fn rpath_allowlist(&self) -> Option<&GlobSet> { - self.rpath_allowlist.globset() + pub fn rpath_allowlist(&self) -> &GlobVec { + &self.rpath_allowlist } /// Get the overdepending behavior. 
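To make the semantics of the new include/exclude globs concrete, here is a minimal, self-contained sketch of the matching rule that the `GlobVec::is_match` implementation in `glob_vec.rs` below encodes (the helper names `build_set` and `matches` are illustrative, not part of this PR):

```rust
// Standalone sketch (not code from this PR) of the include/exclude rule:
// with no globs at all nothing matches, an empty include list means
// "include everything", and any exclude match wins.
use globset::{Glob, GlobSet, GlobSetBuilder};
use std::path::Path;

fn build_set(patterns: &[&str]) -> GlobSet {
    let mut builder = GlobSetBuilder::new();
    for pattern in patterns {
        builder.add(Glob::new(pattern).expect("invalid glob"));
    }
    builder.build().expect("failed to build glob set")
}

fn matches(include: &[&str], exclude: &[&str], path: &Path) -> bool {
    if include.is_empty() && exclude.is_empty() {
        return false; // no globs at all: match nothing
    }
    let included = include.is_empty() || build_set(include).is_match(path);
    let excluded = !exclude.is_empty() && build_set(exclude).is_match(path);
    included && !excluded
}

fn main() {
    // plain include list
    assert!(matches(&["include/**/*.h"], &[], Path::new("include/foo/bar.h")));
    // an exclude match wins over an include match
    assert!(!matches(
        &["include/**/*.h"],
        &["include/**/private.h"],
        Path::new("include/detail/private.h")
    ));
    // an empty include list behaves like `include: ['**']`
    assert!(matches(&[], &["*.txt"], Path::new("lib/libfoo.so")));
    assert!(!matches(&[], &["*.txt"], Path::new("notes.txt")));
    println!("glob matching rules hold");
}
```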
diff --git a/src/recipe/parser/glob_vec.rs b/src/recipe/parser/glob_vec.rs index 0ffd9a5b..96779fe2 100644 --- a/src/recipe/parser/glob_vec.rs +++ b/src/recipe/parser/glob_vec.rs @@ -1,44 +1,111 @@ use std::fmt::{self, Debug, Formatter}; +use std::ops::Deref; use std::path::Path; use globset::{Glob, GlobSet}; -use serde::ser::SerializeSeq; +use serde::ser::{SerializeMap, SerializeSeq}; use serde::{Deserialize, Serialize}; use crate::_partialerror; use crate::recipe::custom_yaml::{ - HasSpan, RenderedNode, RenderedScalarNode, RenderedSequenceNode, TryConvertNode, + HasSpan, RenderedMappingNode, RenderedNode, RenderedScalarNode, RenderedSequenceNode, + TryConvertNode, }; use crate::recipe::error::{ErrorKind, PartialParsingError}; +/// Wrapper type to simplify serialization of Vec +#[derive(Debug, Clone, PartialEq, Eq, Default)] +struct InnerGlobVec(Vec); + +impl Deref for InnerGlobVec { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl InnerGlobVec { + fn globset(&self) -> Result { + let mut globset_builder = globset::GlobSetBuilder::new(); + for glob in self.iter() { + globset_builder.add(glob.clone()); + } + globset_builder.build() + } +} + +impl From> for InnerGlobVec { + fn from(vec: Vec) -> Self { + let vec = vec + .into_iter() + .map(|glob| to_glob(&glob).expect("glob parsing failed")) + .collect(); + Self(vec) + } +} + +impl From> for InnerGlobVec { + fn from(vec: Vec) -> Self { + Self(vec) + } +} + +fn to_glob(glob: &str) -> Result { + if glob.ends_with('/') && !glob.contains('*') { + // we treat folders as globs that match everything in the folder + Glob::new(&format!("{}**", glob)) + } else { + Glob::new(glob) + } +} + /// A vector of globs that is also immediately converted to a globset /// to enhance parser errors. #[derive(Default, Clone)] -pub struct GlobVec(Vec, Option); +pub struct GlobVec { + include: InnerGlobVec, + exclude: InnerGlobVec, + include_globset: GlobSet, + exclude_globset: GlobSet, +} impl PartialEq for GlobVec { fn eq(&self, other: &Self) -> bool { - self.0 == other.0 + self.include == other.include && self.exclude == other.exclude } } impl Eq for GlobVec {} -impl Serialize for GlobVec { +impl Serialize for InnerGlobVec { fn serialize(&self, serializer: S) -> Result { - let mut seq = serializer.serialize_seq(Some(self.0.len()))?; - for glob in self.0.iter() { + let mut seq = serializer.serialize_seq(Some(self.len()))?; + for glob in self.iter() { seq.serialize_element(glob.glob())?; } seq.end() } } +impl Serialize for GlobVec { + fn serialize(&self, serializer: S) -> Result { + if self.exclude.is_empty() { + self.include.serialize(serializer) + } else { + let mut map = serializer.serialize_map(Some(2))?; + map.serialize_entry("include", &self.include)?; + map.serialize_entry("exclude", &self.exclude)?; + map.end() + } + } +} + impl Debug for GlobVec { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.debug_list() - .entries(self.0.iter().map(|glob| glob.glob())) + .entries(self.include.iter().map(|glob| glob.glob())) .finish() } } @@ -48,113 +115,176 @@ impl<'de> Deserialize<'de> for GlobVec { where D: serde::Deserializer<'de>, { - let mut raw_globs: Vec = Vec::deserialize(deserializer)?; - let mut globs = Vec::with_capacity(raw_globs.len()); - for raw in raw_globs.drain(..) 
{ - let glob = Glob::new(&raw).map_err(serde::de::Error::custom)?; - globs.push(glob); + #[derive(Deserialize)] + #[serde(untagged)] + enum GlobVecInput { + List(Vec), + Map { + include: Vec, + exclude: Vec, + }, } - if globs.is_empty() { - Ok(Self(globs, None)) - } else { - let mut globset_builder = globset::GlobSetBuilder::new(); - for glob in globs.iter() { - globset_builder.add(glob.clone()); - } - let globset = globset_builder.build().map_err(serde::de::Error::custom)?; + let input = GlobVecInput::deserialize(deserializer)?; + let (include, exclude) = match input { + GlobVecInput::List(list) => (list, Vec::new()), + GlobVecInput::Map { include, exclude } => (include, exclude), + }; - Ok(Self(globs, Some(globset))) - } + GlobVec::new(include.into(), exclude.into()) + .map_err(|e| serde::de::Error::custom(e.to_string())) } } impl GlobVec { + /// Create a new GlobVec from a vector of globs + fn new(include: InnerGlobVec, exclude: InnerGlobVec) -> Result { + let include_globset = include.globset()?; + let exclude_globset = exclude.globset()?; + Ok(Self { + include, + exclude, + include_globset, + exclude_globset, + }) + } + /// Returns true if the globvec is empty pub fn is_empty(&self) -> bool { - self.0.is_empty() + self.include.is_empty() && self.exclude.is_empty() } /// Returns an iterator over the globs - pub fn globs(&self) -> impl Iterator { - self.0.iter() + pub fn include_globs(&self) -> &Vec { + &self.include } - /// Returns the globset if it exists - pub fn globset(&self) -> Option<&GlobSet> { - self.1.as_ref() + /// Returns an iterator over the globs + pub fn exclude_globs(&self) -> &Vec { + &self.exclude } - /// Returns true if the path matches any of the globs + /// Returns true if the path matches any include glob and does not match any exclude glob + /// If there are no globs at all, we match nothing. + /// If there is no include glob, we match everything except the exclude globs. pub fn is_match(&self, path: &Path) -> bool { - if let Some(globset) = self.1.as_ref() { - globset.is_match(path) - } else { - false + // if both include & exclude are empty, we match nothing + if self.is_empty() { + return false; } + // if include is empty, it matches everything. Otherwise we check! + let is_match = self.include.is_empty() || self.include_globset.is_match(path); + // if exclude is empty, it matches everything. Otherwise we check! 
+ is_match && (self.exclude.is_empty() || !self.exclude_globset.is_match(path)) } /// Only used for testing #[cfg(test)] - pub fn from_vec(vec: Vec<&str>) -> Self { - let mut glob_vec = Vec::with_capacity(vec.len()); - for glob in vec.into_iter() { - glob_vec.push(Glob::new(glob).unwrap()); - } - - if glob_vec.is_empty() { - Self(glob_vec, None) - } else { - let mut globset_builder = globset::GlobSetBuilder::new(); - for glob in glob_vec.iter() { - globset_builder.add(glob.clone()); - } - let globset = globset_builder.build().unwrap(); - - Self(glob_vec, Some(globset)) + pub fn from_vec(include: Vec<&str>, exclude: Option>) -> Self { + let include_vec: Vec = include + .into_iter() + .map(|glob| to_glob(glob).unwrap()) + .collect(); + let exclude_vec: Vec = exclude + .unwrap_or_default() + .into_iter() + .map(|glob| to_glob(glob).unwrap()) + .collect(); + + let include = InnerGlobVec(include_vec); + let globset = include.globset().unwrap(); + let exclude = InnerGlobVec(exclude_vec); + let exclude_globset = exclude.globset().unwrap(); + + Self { + include, + exclude, + include_globset: globset, + exclude_globset: exclude_globset, } } } impl TryConvertNode for RenderedNode { fn try_convert(&self, name: &str) -> Result> { - self.as_sequence() - .ok_or_else(|| { - vec![_partialerror!( - *self.span(), - ErrorKind::ExpectedSequence, - label = format!("expected a list of globs strings for '{}'", name) - )] - }) - .and_then(|s| s.try_convert(name)) + match self { + RenderedNode::Sequence(sequence) => sequence.try_convert(name), + RenderedNode::Mapping(mapping) => mapping.try_convert(name), + RenderedNode::Scalar(scalar) => scalar.try_convert(name), + _ => Err(vec![_partialerror!( + *self.span(), + ErrorKind::ExpectedSequence, + label = "expected a list of globs strings" + )]), + } + } +} + +fn to_vector_of_globs( + sequence: &RenderedSequenceNode, +) -> Result, Vec> { + let mut vec = Vec::with_capacity(sequence.len()); + for item in sequence.iter() { + let str: String = item.try_convert("globs")?; + vec.push( + to_glob(&str) + .map_err(|err| vec![_partialerror!(*item.span(), ErrorKind::GlobParsing(err),)])?, + ); + } + Ok(vec) +} + +impl TryConvertNode for RenderedScalarNode { + fn try_convert(&self, _name: &str) -> Result> { + let vec = vec![to_glob(self.as_str()) + .map_err(|err| vec![_partialerror!(*self.span(), ErrorKind::GlobParsing(err),)])?]; + GlobVec::new(vec.into(), InnerGlobVec::default()) + .map_err(|err| vec![_partialerror!(*self.span(), ErrorKind::GlobParsing(err),)]) } } impl TryConvertNode for RenderedSequenceNode { fn try_convert(&self, _name: &str) -> Result> { - let mut vec = Vec::with_capacity(self.len()); - for item in self.iter() { - let str: String = item.try_convert(_name)?; - vec.push( - Glob::new(&str).map_err(|err| { - vec![_partialerror!(*item.span(), ErrorKind::GlobParsing(err),)] - })?, - ); - } + let vec = to_vector_of_globs(self)?; + GlobVec::new(vec.into(), InnerGlobVec::default()) + .map_err(|err| vec![_partialerror!(*self.span(), ErrorKind::GlobParsing(err),)]) + } +} - if vec.is_empty() { - Ok(GlobVec(vec, None)) - } else { - let mut globset_builder = globset::GlobSetBuilder::new(); - for glob in vec.iter() { - globset_builder.add(glob.clone()); +impl TryConvertNode for RenderedMappingNode { + fn try_convert(&self, name: &str) -> Result> { + // find the `include` and `exclude` keys + let mut include = Vec::new(); + let mut exclude = Vec::new(); + + for (key, value) in self.iter() { + let key_str = key.as_str(); + match (key_str, value) { + ("include", 
RenderedNode::Sequence(seq)) => { + include = to_vector_of_globs(seq)?; + } + ("exclude", RenderedNode::Sequence(seq)) => { + exclude = to_vector_of_globs(seq)?; + } + ("include" | "exclude", _) => { + return Err(vec![_partialerror!( + *value.span(), + ErrorKind::ExpectedSequence, + label = "expected a list of globs strings for `include` or `exclude`" + )]); + } + _ => { + return Err(vec![_partialerror!( + *key.span(), + ErrorKind::InvalidField(key_str.to_string().into()), + help = format!("valid options for {} are `include` and `exclude`", name) + )]); + } } - let globset = globset_builder - .build() - .map_err(|err| vec![_partialerror!(*self.span(), ErrorKind::GlobParsing(err),)])?; - - Ok(GlobVec(vec, Some(globset))) } + + GlobVec::new(include.into(), exclude.into()) + .map_err(|err| vec![_partialerror!(*self.span(), ErrorKind::GlobParsing(err),)]) } } @@ -252,14 +382,69 @@ mod tests { .unwrap(); let tests_node = yaml_root.as_mapping().unwrap().get("globs").unwrap(); let globvec: GlobVec = tests_node.try_convert("globs").unwrap(); - assert_eq!(globvec.0.len(), 3); - assert_eq!(globvec.1.as_ref().unwrap().len(), 3); + assert_eq!(globvec.include.len(), 3); + assert_eq!(globvec.include_globset.len(), 3); let as_yaml = serde_yaml::to_string(&globvec).unwrap(); insta::assert_snapshot!(&as_yaml); let parsed_again: GlobVec = serde_yaml::from_str(&as_yaml).unwrap(); - assert_eq!(parsed_again.0.len(), 3); - assert_eq!(parsed_again.1.as_ref().unwrap().len(), 3); + assert_eq!(parsed_again.include.len(), 3); + assert_eq!(parsed_again.include_globset.len(), 3); + + let yaml = r#"globs: + include: ["foo/", "bar", "baz/**/qux"] + exclude: ["foo/bar", "bar/*.txt"] + "#; + + let yaml_root = RenderedNode::parse_yaml(0, yaml) + .map_err(|err| vec![err]) + .unwrap(); + let tests_node = yaml_root.as_mapping().unwrap().get("globs").unwrap(); + let globvec: GlobVec = tests_node.try_convert("globs").unwrap(); + assert_eq!(globvec.include.len(), 3); + assert_eq!(globvec.include_globset.len(), 3); + assert_eq!(globvec.exclude.len(), 2); + assert_eq!(globvec.exclude_globset.len(), 2); + + let as_yaml = serde_yaml::to_string(&globvec).unwrap(); + insta::assert_snapshot!(&as_yaml); + let parsed_again: GlobVec = serde_yaml::from_str(&as_yaml).unwrap(); + assert_eq!(parsed_again.include.len(), 3); + assert_eq!(parsed_again.include_globset.len(), 3); + assert_eq!(parsed_again.exclude.len(), 2); + assert_eq!(parsed_again.exclude_globset.len(), 2); + } + + #[test] + fn test_glob_match_folder() { + let globvec = GlobVec::from_vec(vec!["foo/"], None); + assert!(globvec.is_match(Path::new("foo/bar"))); + assert!(globvec.is_match(Path::new("foo/bla"))); + assert!(globvec.is_match(Path::new("foo/bla/bar"))); + assert!(!globvec.is_match(Path::new("bar"))); + assert!(!globvec.is_match(Path::new("bla"))); + } + + #[test] + fn test_glob_match_all_except() { + let globvec = GlobVec::from_vec(vec!["**"], Some(vec!["*.txt"])); + assert!(!globvec.is_match(Path::new("foo/bar.txt"))); + assert!(globvec.is_match(Path::new("foo/bla"))); + assert!(globvec.is_match(Path::new("foo/bla/bar"))); + assert!(!globvec.is_match(Path::new("bar.txt"))); + assert!(globvec.is_match(Path::new("bla"))); + + // empty include should be the same + let globvec = GlobVec::from_vec(vec![], Some(vec!["*.txt"])); + assert!(!globvec.is_match(Path::new("foo/bar.txt"))); + assert!(globvec.is_match(Path::new("foo/bla"))); + assert!(globvec.is_match(Path::new("foo/bla/bar"))); + assert!(!globvec.is_match(Path::new("bar.txt"))); + 
assert!(globvec.is_match(Path::new("bla"))); + + // empty everything should match nothing + let globvec = GlobVec::from_vec(vec![], None); + assert!(!globvec.is_match(Path::new("foo/bar.txt"))); } #[test] diff --git a/src/recipe/parser/snapshots/rattler_build__recipe__parser__glob_vec__tests__parsing_globvec-2.snap b/src/recipe/parser/snapshots/rattler_build__recipe__parser__glob_vec__tests__parsing_globvec-2.snap new file mode 100644 index 00000000..fb247828 --- /dev/null +++ b/src/recipe/parser/snapshots/rattler_build__recipe__parser__glob_vec__tests__parsing_globvec-2.snap @@ -0,0 +1,11 @@ +--- +source: src/recipe/parser/glob_vec.rs +expression: "&as_yaml" +--- +include: +- foo/** +- bar +- baz/**/qux +exclude: +- foo/bar +- bar/*.txt diff --git a/src/recipe/parser/test.rs b/src/recipe/parser/test.rs index 2687d2a8..5590616e 100644 --- a/src/recipe/parser/test.rs +++ b/src/recipe/parser/test.rs @@ -30,14 +30,13 @@ pub struct CommandsTestRequirements { /// The files that should be copied to the test directory (they are stored in the package) #[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize)] pub struct CommandsTestFiles { - // TODO parse as globs /// Files to be copied from the source directory to the test directory. - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub source: Vec, + #[serde(default, skip_serializing_if = "GlobVec::is_empty")] + pub source: GlobVec, /// Files to be copied from the recipe directory to the test directory. - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub recipe: Vec, + #[serde(default, skip_serializing_if = "GlobVec::is_empty")] + pub recipe: GlobVec, } /// A test that executes a script in a freshly created environment diff --git a/src/recipe/snapshots/rattler_build__recipe__parser__tests__complete_recipe.snap b/src/recipe/snapshots/rattler_build__recipe__parser__tests__complete_recipe.snap index 0871d3cf..b15156c1 100644 --- a/src/recipe/snapshots/rattler_build__recipe__parser__tests__complete_recipe.snap +++ b/src/recipe/snapshots/rattler_build__recipe__parser__tests__complete_recipe.snap @@ -79,9 +79,19 @@ build: post_process: - files: - '*.cmake' - - bla/ + - bla/** regex: complicated(.*)regex replacement: simple_string + files: + include: + - include/* + - '**/foo.txt' + - x.txt + - out/** + exclude: + - foo/* + - bar.txt + - baz/** requirements: build: - gcc_linux-64 @@ -109,6 +119,42 @@ requirements: - python from_package: - python +tests: +- test_type: command + script: + - echo FOO + requirements: + run: + - bash + build: + - bash + files: + source: + - include/** + - src/*.c + - src_file.txt +- test_type: package_contents + files: + - test.txt + - license.txt + site_packages: + - numpy + - numpy/foo/__init__.py + bin: + - test + - test.exe + - foo* + lib: + - test + - test.so.* + include: + - test.h + - test.h* +- test_type: python + imports: + - numpy + - pypy + pip_check: false about: homepage: https://cool-website.com/ repository: https://github.com/some-repo/some-package diff --git a/src/recipe/snapshots/rattler_build__recipe__parser__tests__recipe_windows.snap b/src/recipe/snapshots/rattler_build__recipe__parser__tests__recipe_windows.snap index f570ef74..a3747042 100644 --- a/src/recipe/snapshots/rattler_build__recipe__parser__tests__recipe_windows.snap +++ b/src/recipe/snapshots/rattler_build__recipe__parser__tests__recipe_windows.snap @@ -144,6 +144,7 @@ Recipe { ignore_binary_files: false, }, post_process: [], + files: [], }, requirements: Requirements { build: [ diff --git 
a/src/recipe/snapshots/rattler_build__recipe__parser__tests__unix_recipe.snap b/src/recipe/snapshots/rattler_build__recipe__parser__tests__unix_recipe.snap index 0354c40b..2de961be 100644 --- a/src/recipe/snapshots/rattler_build__recipe__parser__tests__unix_recipe.snap +++ b/src/recipe/snapshots/rattler_build__recipe__parser__tests__unix_recipe.snap @@ -144,6 +144,7 @@ Recipe { ignore_binary_files: false, }, post_process: [], + files: [], }, requirements: Requirements { build: [ diff --git a/src/source/copy_dir.rs b/src/source/copy_dir.rs index 57434d1f..a253e7cc 100644 --- a/src/source/copy_dir.rs +++ b/src/source/copy_dir.rs @@ -2,14 +2,16 @@ use std::{ collections::{HashMap, HashSet}, path::{Path, PathBuf}, - sync::Arc, }; use fs_err::create_dir_all; +use globset::Glob; use ignore::WalkBuilder; use rayon::iter::{ParallelBridge, ParallelIterator}; +use crate::recipe::parser::GlobVec; + use super::SourceError; /// The copy options for the copy_dir function. @@ -46,8 +48,7 @@ impl Default for CopyOptions { pub(crate) struct CopyDir<'a> { from_path: &'a Path, to_path: &'a Path, - include_globs: Vec<&'a str>, - exclude_globs: Vec<&'a str>, + globvec: GlobVec, use_gitignore: bool, use_git_global: bool, hidden: bool, @@ -59,8 +60,7 @@ impl<'a> CopyDir<'a> { Self { from_path, to_path, - include_globs: Vec::new(), - exclude_globs: Vec::new(), + globvec: GlobVec::default(), use_gitignore: false, use_git_global: false, hidden: false, @@ -68,54 +68,8 @@ impl<'a> CopyDir<'a> { } } - /// Parse the iterator of &str as globs - /// - /// This is a conveniance helper for parsing an iterator of &str as include and exclude globs. - /// - /// # Note - /// - /// Uses '~' as negation character (exclude globs) - pub fn with_parse_globs(mut self, globs: I) -> Self - where - I: IntoIterator, - { - let (exclude_globs, include_globs): (Vec<_>, Vec<_>) = globs - .into_iter() - .partition(|g| g.trim_start().starts_with('~')); - - self.include_globs.extend(include_globs); - self.exclude_globs - .extend(exclude_globs.into_iter().map(|g| g.trim_start_matches('~'))); - self - } - - #[allow(unused)] - pub fn with_include_glob(mut self, include: &'a str) -> Self { - self.include_globs.push(include); - self - } - - #[allow(unused)] - pub fn with_include_globs(mut self, includes: I) -> Self - where - I: IntoIterator, - { - self.include_globs.extend(includes); - self - } - - #[allow(unused)] - pub fn with_exclude_glob(mut self, exclude: &'a str) -> Self { - self.exclude_globs.push(exclude); - self - } - - #[allow(unused)] - pub fn with_exclude_globs(mut self, excludes: I) -> Self - where - I: IntoIterator, - { - self.exclude_globs.extend(excludes); + pub fn with_globvec(mut self, globvec: &GlobVec) -> Self { + self.globvec = globvec.clone(); self } @@ -150,21 +104,14 @@ impl<'a> CopyDir<'a> { self } - pub fn run(self) -> Result, SourceError> { + pub fn run(self) -> Result { // Create the to path because we're going to copy the contents only create_dir_all(self.to_path)?; - let (folders, globs) = self - .include_globs - .into_iter() - .partition::, _>(|glob| glob.ends_with('/') && !glob.contains('*')); - - let folders = Arc::new(folders.into_iter().map(PathBuf::from).collect::>()); - let mut result = CopyDirResult { copied_paths: Vec::with_capacity(0), // do not allocate as we overwrite this anyways - include_globs: make_glob_match_map(globs)?, - exclude_globs: make_glob_match_map(self.exclude_globs)?, + include_globs: make_glob_match_map(self.globvec.include_globs())?, + exclude_globs: 
make_glob_match_map(self.globvec.exclude_globs())?, }; let copied_pathes = WalkBuilder::new(self.from_path) @@ -179,13 +126,13 @@ impl<'a> CopyDir<'a> { Err(e) => return Some(Err(e)), }; - // if the entry is a directory, ignore it for the final output - if entry + let is_dir = entry .file_type() .as_ref() .map(|ft| ft.is_dir()) - .unwrap_or(false) - { + .unwrap_or(false); + // if the entry is a directory, ignore it for the final output + if is_dir { // if the dir is empty, check if we should create it anyways if entry.path().read_dir().ok()?.next().is_some() || !result.include_globs().is_empty() @@ -206,6 +153,7 @@ impl<'a> CopyDir<'a> { components.iter().collect() }; + // include everything let include = result.include_globs().is_empty(); let include = include @@ -217,9 +165,6 @@ impl<'a> CopyDir<'a> { .count() != 0; - let include = - include || folders.clone().iter().any(|f| stripped_path.starts_with(f)); - let exclude = result .exclude_globs_mut() .iter_mut() @@ -371,22 +316,22 @@ where Ok(()) } -pub(crate) struct CopyDirResult<'a> { +pub(crate) struct CopyDirResult { copied_paths: Vec, - include_globs: HashMap, Match>, - exclude_globs: HashMap, Match>, + include_globs: HashMap, + exclude_globs: HashMap, } -impl<'a> CopyDirResult<'a> { +impl CopyDirResult { pub fn copied_paths(&self) -> &[PathBuf] { &self.copied_paths } - pub fn include_globs(&self) -> &HashMap, Match> { + pub fn include_globs(&self) -> &HashMap { &self.include_globs } - fn include_globs_mut(&mut self) -> &mut HashMap, Match> { + fn include_globs_mut(&mut self) -> &mut HashMap { &mut self.include_globs } @@ -395,11 +340,11 @@ impl<'a> CopyDirResult<'a> { } #[allow(unused)] - pub fn exclude_globs(&self) -> &HashMap, Match> { + pub fn exclude_globs(&self) -> &HashMap { &self.exclude_globs } - fn exclude_globs_mut(&mut self) -> &mut HashMap, Match> { + fn exclude_globs_mut(&mut self) -> &mut HashMap { &mut self.exclude_globs } @@ -409,32 +354,16 @@ impl<'a> CopyDirResult<'a> { } } -fn make_glob_match_map(globs: Vec<&str>) -> Result, SourceError> { +fn make_glob_match_map(globs: &[Glob]) -> Result, SourceError> { globs - .into_iter() - .map(|gl| { - let glob = Glob::new(gl)?; - let match_ = Match::new(&glob); - Ok((glob, match_)) + .iter() + .map(|glob| { + let matcher = Match::new(glob); + Ok(((*glob).clone(), matcher)) }) .collect() } -#[derive(Hash, Eq, PartialEq)] -pub(crate) struct Glob<'a> { - s: &'a str, - g: globset::Glob, -} - -impl<'a> Glob<'a> { - fn new(s: &'a str) -> Result { - Ok(Self { - s, - g: globset::Glob::new(s)?, - }) - } -} - pub(crate) struct Match { matcher: globset::GlobMatcher, matched: bool, @@ -443,7 +372,7 @@ pub(crate) struct Match { impl Match { fn new(glob: &Glob) -> Self { Self { - matcher: glob.g.compile_matcher(), + matcher: glob.compile_matcher(), matched: false, } } @@ -468,6 +397,8 @@ impl Match { mod test { use std::{collections::HashSet, fs, fs::File}; + use crate::recipe::parser::GlobVec; + #[test] fn test_copy_dir() { let tmp_dir = tempfile::TempDir::new().unwrap(); @@ -501,7 +432,7 @@ mod test { let dest_dir_2 = tmp_dir_path.as_path().join("test_copy_dir_dest_2"); // ignore all txt files let copy_dir = super::CopyDir::new(&dir, &dest_dir_2) - .with_include_glob("*.txt") + .with_globvec(&GlobVec::from_vec(vec!["*.txt"], None)) .use_gitignore(false) .run() .unwrap(); @@ -510,13 +441,14 @@ mod test { assert_eq!(copy_dir.copied_paths()[0], dest_dir_2.join("test.txt")); let dest_dir_3 = tmp_dir_path.as_path().join("test_copy_dir_dest_3"); + // ignore all txt files let copy_dir = 
super::CopyDir::new(&dir, &dest_dir_3) - .with_exclude_glob("*.txt") + .with_globvec(&GlobVec::from_vec(vec![], Some(vec!["*.txt"]))) .use_gitignore(false) .run() .unwrap(); - + println!("{:?}", copy_dir.copied_paths()); assert_eq!(copy_dir.copied_paths().len(), 2); let expected = [ dest_dir_3.join("test_dir/test.md"), @@ -539,7 +471,7 @@ mod test { let dest_dir = tempfile::TempDir::new().unwrap(); let copy_dir = super::CopyDir::new(tmp_dir.path(), dest_dir.path()) - .with_include_glob("test_copy_dir/") + .with_globvec(&GlobVec::from_vec(vec!["test_copy_dir/"], None)) .use_gitignore(false) .run() .unwrap(); @@ -548,8 +480,10 @@ mod test { fs_err::remove_dir_all(&dest_dir).unwrap(); fs_err::create_dir_all(&dest_dir).unwrap(); let copy_dir = super::CopyDir::new(tmp_dir.path(), dest_dir.path()) - .with_include_glob("test_copy_dir/") - .with_exclude_glob("*.rst") + .with_globvec(&GlobVec::from_vec( + vec!["test_copy_dir/test_1.txt"], + Some(vec!["*.rst"]), + )) .use_gitignore(false) .run() .unwrap(); @@ -562,7 +496,7 @@ mod test { fs_err::remove_dir_all(&dest_dir).unwrap(); fs_err::create_dir_all(&dest_dir).unwrap(); let copy_dir = super::CopyDir::new(tmp_dir.path(), dest_dir.path()) - .with_include_glob("test_copy_dir/test_1.txt") + .with_globvec(&GlobVec::from_vec(vec!["test_copy_dir/test_1.txt"], None)) .use_gitignore(false) .run() .unwrap(); diff --git a/src/windows/link.rs b/src/windows/link.rs index f0757467..3e5d5a83 100644 --- a/src/windows/link.rs +++ b/src/windows/link.rs @@ -10,7 +10,10 @@ use fs_err::File; use goblin::pe::{header::DOS_MAGIC, PE}; use scroll::Pread; -use crate::post_process::relink::{RelinkError, Relinker}; +use crate::{ + post_process::relink::{RelinkError, Relinker}, + recipe::parser::GlobVec, +}; #[derive(Debug)] struct Dll { @@ -100,7 +103,7 @@ impl Relinker for Dll { _prefix: &Path, _encoded_prefix: &Path, _custom_rpaths: &[String], - _rpath_allowlist: Option<&globset::GlobSet>, + _rpath_allowlist: &GlobVec, _system_tools: &crate::system_tools::SystemTools, ) -> Result<(), crate::post_process::relink::RelinkError> { // On Windows, we don't need to relink anything diff --git a/test-data/recipes/include_files/recipe.yaml b/test-data/recipes/include_files/recipe.yaml new file mode 100644 index 00000000..cdfeb568 --- /dev/null +++ b/test-data/recipes/include_files/recipe.yaml @@ -0,0 +1,18 @@ +package: + name: include_files + version: 1.0.0 + +build: + files: + include: + - include/* + exclude: + - "*.exe" + + script: | + mkdir -p $PREFIX/include + mkdir -p $PREFIX/lib + touch $PREFIX/include/include_file.h + touch $PREFIX/include/include_file.c + touch $PREFIX/include/include_file.exe + touch $PREFIX/lib/lib_foo.so diff --git a/test-data/recipes/test-parsing/single_output.yaml b/test-data/recipes/test-parsing/single_output.yaml index 22d99eff..8b34b6ba 100644 --- a/test-data/recipes/test-parsing/single_output.yaml +++ b/test-data/recipes/test-parsing/single_output.yaml @@ -37,6 +37,16 @@ build: - false noarch: generic merge_build_and_host_envs: true + files: + include: + - include/* + - "**/foo.txt" + - x.txt + - out/** + exclude: + - foo/* + - bar.txt + - baz/ script: env: TEST: MYENV_VAR @@ -121,6 +131,41 @@ requirements: by_name: - python +tests: + - script: | + echo FOO + requirements: + run: + - bash + build: + - bash + files: + source: + - include/ + - src/*.c + - src_file.txt + - package_contents: + lib: + - test + - test.so.* + include: + - test.h + - test.h* + bin: + - test + - test.exe + - foo* + site_packages: + - numpy + - numpy/foo/__init__.py + 
files: + - test.txt + - license.txt + - python: + imports: + - numpy + - pypy + pip_check: false about: homepage: https://cool-website.com diff --git a/test/end-to-end/test_simple.py b/test/end-to-end/test_simple.py index c4bdc5bd..7728ec54 100644 --- a/test/end-to-end/test_simple.py +++ b/test/end-to-end/test_simple.py @@ -855,3 +855,23 @@ def test_post_link(rattler_build: RattlerBuild, recipes: Path, tmp_path: Path): pp = paths["paths"] assert len(pp) == 1 assert pp[0]["_path"] == "bin/.postlink-post-link.sh" + + +@pytest.mark.skipif( + os.name == "nt", reason="recipe does not support execution on windows" +) +def test_include_files(rattler_build: RattlerBuild, recipes: Path, tmp_path: Path): + path_to_recipe = recipes / "include_files" + args = rattler_build.build_args( + path_to_recipe, + tmp_path, + ) + rattler_build(*args) + + pkg = get_extracted_package(tmp_path, "include_files") + + paths = json.loads((pkg / "info/paths.json").read_text()) + pp = paths["paths"] + assert len(pp) == 2 + assert pp[0]["_path"] == "include/include_file.c" + assert pp[1]["_path"] == "include/include_file.h"
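As a closing note, the folder-handling convention introduced in `glob_vec.rs` (a glob ending in `/` with no wildcard is expanded to `<folder>**`) can be exercised on its own. The following standalone sketch mirrors the added `to_glob` helper using only the `globset` crate:

```rust
// Mirrors the `to_glob` helper added in `src/recipe/parser/glob_vec.rs`:
// a pattern ending in `/` with no wildcard is treated as "everything under
// that folder" by appending `**` before compiling it.
use globset::Glob;

fn to_glob(pattern: &str) -> Result<Glob, globset::Error> {
    if pattern.ends_with('/') && !pattern.contains('*') {
        Glob::new(&format!("{}**", pattern))
    } else {
        Glob::new(pattern)
    }
}

fn main() -> Result<(), globset::Error> {
    let folder = to_glob("foo/")?.compile_matcher();
    assert!(folder.is_match("foo/bar"));
    assert!(folder.is_match("foo/bla/bar"));
    assert!(!folder.is_match("bar"));

    let plain = to_glob("*.txt")?.compile_matcher();
    assert!(plain.is_match("notes.txt"));
    println!("folder globs expand to `<folder>**` as expected");
    Ok(())
}
```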