diff --git a/.ci/build.sh b/.ci/build.sh
deleted file mode 100755
index 43f1ab9c..00000000
--- a/.ci/build.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/bash
-
-set -e
-
-case "$1" in
-  format)
-    cargo +nightly fmt --all -- --check
-    ;;
-  build)
-    cargo build --all
-    cargo test --all --all-targets
-    # https://github.com/rust-lang/cargo/issues/6669
-    cargo test --all --doc
-    ;;
-  doc)
-    cargo doc --all
-    ;;
-  *)
-    echo "unknown mode" >&2
-    exit 1
-    ;;
-esac
diff --git a/.ci/release.sh b/.ci/release.sh
deleted file mode 100755
index 5f9ceed3..00000000
--- a/.ci/release.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/bash
-set -eu
-set -o pipefail
-(cd starlark && cargo package && cargo publish)
-(cd starlark-repl && cargo package && cargo publish)
diff --git a/.ci/setup.sh b/.ci/setup.sh
deleted file mode 100755
index 308296a2..00000000
--- a/.ci/setup.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-
-set -e
-
-case "$1" in
-  format)
-    echo "Installing rustfmt..."
-    rustup component add --toolchain nightly rustfmt-preview
-    which rustfmt || cargo install --force rustfmt-nightly
-    cargo +nightly fmt -- --version
-    ;;
-esac
diff --git a/.editorconfig b/.editorconfig
deleted file mode 100644
index a8addd66..00000000
--- a/.editorconfig
+++ /dev/null
@@ -1,6 +0,0 @@
-[*]
-end_of_line = lf
-insert_final_newline = true
-charset = utf-8
-indent_style = space
-indent_size = 4
diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index fa8d85ac..00000000
--- a/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-Cargo.lock
-target
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index e40a29e7..00000000
--- a/.travis.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-language: rust
-rust:
-  - stable
-  - beta
-  - nightly
-env: ACTION=build
-before_script: ./.ci/setup.sh "${ACTION}"
-script: ./.ci/build.sh "${ACTION}"
-matrix:
-  include:
-    - name: "Rust: format"
-      env: ACTION=format
-      rust: nightly
-    - name: "Rust: doc"
-      env: ACTION=doc
-      rust: stable
-  allow_failures:
-    - rust: nightly
-      env: ACTION=build
-  fast_finish: true
-# Kill cache because otherwise travis randomly times out too often
-#cache: cargo
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
deleted file mode 100644
index ae319c70..00000000
--- a/CONTRIBUTING.md
+++ /dev/null
@@ -1,23 +0,0 @@
-# How to Contribute
-
-We'd love to accept your patches and contributions to this project. There are
-just a few small guidelines you need to follow.
-
-## Contributor License Agreement
-
-Contributions to this project must be accompanied by a Contributor License
-Agreement. You (or your employer) retain the copyright to your contribution,
-this simply gives us permission to use and redistribute your contributions as
-part of the project. Head over to <https://cla.developers.google.com/> to see
-your current agreements on file or to sign a new one.
-
-You generally only need to submit a CLA once, so if you've already submitted one
-(even if it was for a different project), you probably don't need to do it
-again.
-
-## Code reviews
-
-All submissions, including submissions by project members, require review. We
-use GitHub pull requests for this purpose. Consult
-[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
-information on using pull requests.
diff --git a/Cargo.toml b/Cargo.toml
deleted file mode 100644
index c95acada..00000000
--- a/Cargo.toml
+++ /dev/null
@@ -1,2 +0,0 @@
-[workspace]
-members = ["starlark", "starlark-repl", "starlark-test"]
diff --git a/README.md b/README.md
index b8135f8a..cdfa550f 100644
--- a/README.md
+++ b/README.md
@@ -1,89 +1 @@
-# Starlark in Rust
-_An implementation in Rust of the Starlark language_
-
-[![Build
-Status](https://travis-ci.org/google/starlark-rust.svg?branch=master)](https://travis-ci.org/google/starlark-rust)
-
-**Disclaimer:** This is not an officially supported Google product. This project is supported
-on a best-effort basis and [welcome contributions](CONTRIBUTING.md).
-
-[Starlark](https://github.com/bazelbuild/starlark), formerly codenamed Skylark, is a non-Turing
-complete language based on Python that was made for the [Bazel build system](https://bazel.build) to
-define compilation plugin.
-
-Starlark has at least 3 implementations: a [Java one for Bazel](
-https://github.com/bazelbuild/bazel/tree/master/src/main/java/com/google/devtools/skylark),
-a [Go one](https://github.com/google/skylark) and this one.
-
-This interpreter was made using the [specification from the go version](
-https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md)
-and the Python 3 documentation when things were unclear.
-
-This interpreter does not support most of the go extensions (e.g. bitwise
-operator or floating point). It optionally includes a `set` type
-(by explicitly including `starlark::linked_hash_set::global()` environment),
-as an extension which is not specified in [the
-official Starlark specification](https://github.com/bazelbuild/starlark/blob/master/spec.md), but note that this
-is just an insertion-order-preserving set, and does not have optimisations for nesting as can be found in the
-starlark Java implementation's [depset](https://docs.bazel.build/versions/master/skylark/lib/depset.html) implementation.
-It uses signed 64-bit integers.
-
-## Usage
-
-### Crate
-
-You can depend on the `starlark` crate, it is documented using [docs.rs](https://docs.rs/crate/starlark).
-Examples are listed under [starlark/examples](starlark/examples). You can run the examples
-using `cargo run --example`, such as
-
-```sh
-echo "str([x * 2 for x in range(10)])" | cargo run --example starlark-simple-cli
-```
-
-### Command line REPL
-
-A command line interpreter is also provided by this project under [starlark-repl](starlark-repl),
-it can interpret files passed at the command line and also start a REPL (Read-Eval-Print Loop).
-The usage of this program is:
-
-```
-Starlark in Rust interpreter
-
-USAGE:
-    starlark-rust [FLAGS] [OPTIONS] [file]...
-
-FLAGS:
-    -a, --ast           Parse and print AST instead of evaluating.
-    -b, --build-file    Parse the build file format instead of full Starlark. See https://docs.rs/starlark/0.3.0-
-                        pre/starlark/eval/index.html#build_file
-    -h, --help          Prints help information
-    -r, --repl          Run a REPL after files have been parsed.
-    -V, --version       Prints version information
-
-OPTIONS:
-    -c <command>        Starlark command to run after files have been parsed.
-
-ARGS:
-    <file>...    Files to interpret
-```
-
-## Development
-
-### Build
-
-This project build with [Cargo](https://doc.rust-lang.org/stable/cargo/). Simply
-run `cargo test` to test it, `cargo build --release` to build a release version
-and `cargo run` to run the command-line interpreter.
-
-### Possible improvements and optimizations
-
-* Errors:
-  - When an identifier is not found, we can suggest close identifier / keyword.
-  - Fix suggestions maybe?
-  - Better error spans.
-  - Recoverable errors (don't stop at the first error, continue parsing).
-* Evaluation:
-  - Static rewrite of the AST before evaluation (e.g. for constant values)
-* Awesome feature:
-  - Implement a debugging protocol server side (compatible with the Java one,
-    see (bazelbuild/vscode-bazel#6)).
+# This repository has been replaced by https://github.com/facebookexperimental/starlark-rust and is no longer maintained.
\ No newline at end of file
diff --git a/rustfmt.toml b/rustfmt.toml
deleted file mode 100644
index caa5583c..00000000
--- a/rustfmt.toml
+++ /dev/null
@@ -1,2 +0,0 @@
-unstable_features = true
-edition = "2018"
diff --git a/starlark-repl/Cargo.toml b/starlark-repl/Cargo.toml
deleted file mode 100644
index 0f162995..00000000
--- a/starlark-repl/Cargo.toml
+++ /dev/null
@@ -1,42 +0,0 @@
-[package]
-name = "starlark-repl"
-edition = "2018"
-version = "0.3.2-pre"
-authors = [
-    "Damien Martin-Guillerez ",
-    "Stepan Koltsov ",
-]
-
-description = "A REPL for the implementation in Rust of the Starlark language."
-documentation = "https://docs.rs/crate/starlark-repl"
-homepage = "https://github.com/google/starlark-rust"
-repository = "https://github.com/google/starlark-rust"
-readme = "README.md"
-keywords = ["starlark", "skylark", "bazel", "language", "interpreter"]
-categories = ["development-tools"]
-license = "Apache-2.0"
-
-[badges]
-travis-ci = { repository = "google/starlark-rust", branch = "master" }
-maintenance = { status = "passively-maintained" }
-
-[dependencies]
-codemap = "0.1.1"
-codemap-diagnostic = "0.1.1"
-linefeed = "0.5.3"
-starlark = { path = "../starlark" }
-structopt = "0.3.0"
-
-[dev-dependencies]
-assert_cmd = "0.10.2"
-predicates = "1"
-# 3.0.5 bumps rand to 0.6 which causes problems with other deps.
-tempfile = ">=3, <3.0.5"
-
-[lib]
-bench = false
-
-[[bin]]
-name = "starlark-rust"
-bench = false
-path = "bin/starlark-rust.rs"
diff --git a/starlark-repl/LICENSE b/starlark-repl/LICENSE
deleted file mode 100644
index d6456956..00000000
--- a/starlark-repl/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/starlark-repl/README.md b/starlark-repl/README.md deleted file mode 100644 index 932f8939..00000000 --- a/starlark-repl/README.md +++ /dev/null @@ -1,38 +0,0 @@ -# Starlark in Rust - REPL -_A REPL for the Starlark language in Rust_ - -**Disclaimer:** This is not an officially supported Google product. This project is supported -on a best-effort basis and [welcome contributions](CONTRIBUTING.md). - -[Starlark](https://github.com/bazelbuild/starlark), formerly codenamed Skylark, is a non-Turing -complete language based on Python that was made for the [Bazel build system](https://bazel.build) to -define compilation plugin. - -This REPL uses [starlark](https://crates.io/crates/starlark) crates. - -## Usage - -A command line interpreter is provided by this project, it can interpret files -passed at the command line and also start a REPL (Read-Eval-Print Loop). -The usage of this program is: - -``` -Starlark in Rust interpreter - -USAGE: - starlark-rust [FLAGS] [OPTIONS] [file]... 
- -FLAGS: - -a, --ast Parse and print AST instead of evaluating. - -b, --build-file Parse the build file format instead of full Starlark. See https://docs.rs/starlark/0.3.0- - pre/starlark/eval/index.html#build_file - -h, --help Prints help information - -r, --repl Run a REPL after files have been parsed. - -V, --version Prints version information - -OPTIONS: - -c Starlark command to run after files have been parsed. - -ARGS: - ... Files to interpret -``` diff --git a/starlark-repl/bin/starlark-rust.rs b/starlark-repl/bin/starlark-rust.rs deleted file mode 100644 index 7346bfad..00000000 --- a/starlark-repl/bin/starlark-rust.rs +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! A command line interpreter for Starlark, provide a REPL. - -extern crate structopt; - -use starlark::eval::interactive::{eval, eval_file, EvalError}; -use starlark::stdlib::global_environment_for_repl_and_tests; -use starlark::syntax::dialect::Dialect; -use starlark::values::Value; -use starlark_repl::{print_function, repl}; -use std::process::exit; -use structopt::clap::AppSettings; -use structopt::StructOpt; - -const EXIT_CODE_FAILURE: i32 = 2; - -#[derive(Debug, StructOpt)] -#[structopt( - name = "starlark-repl", - about = "Starlark in Rust interpreter", - global_settings(&[AppSettings::ColoredHelp]), -)] -pub struct Opt { - #[structopt( - short = "b", - long, - help = concat!( - "Parse the build file format instead of full Starlark. See https://docs.rs/starlark/", - env!("CARGO_PKG_VERSION"), - "/starlark/eval/index.html#build_file", - ) - )] - build_file: bool, - - #[structopt( - short = "c", - help = "Starlark command to run after files have been parsed." - )] - command: Option, - - #[structopt( - short = "r", - long, - conflicts_with = "command", - help = "Run a REPL after files have been parsed." 
- )] - repl: bool, - - #[structopt(name = "FILE", help = "Files to interpret")] - files: Vec, -} - -fn main() { - let opt = Opt::from_args(); - - let command = opt.command; - - let (mut global, mut type_values) = global_environment_for_repl_and_tests(); - - print_function(&mut global, &mut type_values); - global.freeze(); - - let dialect = if opt.build_file { - Dialect::Build - } else { - Dialect::Bzl - }; - let free_args_empty = opt.files.is_empty(); - for i in opt.files.into_iter() { - maybe_print_or_exit(eval_file( - &i, - dialect, - &mut global.child(&i), - &type_values, - global.clone(), - )); - } - if opt.repl || (free_args_empty && command.is_none()) { - println!("Welcome to Starlark REPL, press Ctrl+D to exit."); - repl(&mut global, &type_values, dialect); - } - if let Some(command) = command { - maybe_print_or_exit(eval( - "[command flag]", - &command, - dialect, - &mut global.child("[command flag]"), - &type_values, - global.clone(), - )); - } -} - -fn maybe_print_or_exit(result: Result, EvalError>) { - match result { - Ok(Some(value)) => println!("{}", value.to_repr()), - Err(err) => { - err.write_to_stderr(); - exit(EXIT_CODE_FAILURE); - } - Ok(None) => {} - } -} diff --git a/starlark-repl/src/lib.rs b/starlark-repl/src/lib.rs deleted file mode 100644 index bb66a6ed..00000000 --- a/starlark-repl/src/lib.rs +++ /dev/null @@ -1,182 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! a Read-Eval-Print Loop (REPL) for Starlark. -//! -//! Starlark, formerly codenamed Skylark, is a non-Turing complete language based on Python that -//! was made for the [Bazel build system](https://bazel.build) to define compilation plugin. -//! -//! See the [starlark](https://docs.rs/crate/starlark) crate documentation for more information -//! about Starlark. -//! -//! # Usage -//! -//! One can call the [repl] method to run the repl inside a program or simply run the [starlark-repl] -//! binary: -//! ```sh -//! $ starlark-repl --help -//! [Starlark in Rust interpretor] -//! -//! Usage: starlark-repl [options] [file1..filen] -//! -//! -//! Options: -//! -b, --build_file Parse the build file format instead of full Starlark. -//! -h, --help Show the usage of this program. -//! -r, --repl Run a REPL after files have been parsed. -//! 
``` -use codemap; - -#[macro_use] -extern crate starlark; - -use codemap_diagnostic::{ColorConfig, Emitter}; -use linefeed::{Interface, ReadResult}; -use starlark::environment::{Environment, TypeValues}; -use starlark::eval::eval_lexer; -use starlark::eval::simple::SimpleFileLoader; -use starlark::syntax::dialect::Dialect; -use starlark::syntax::lexer::{BufferedLexer, LexerIntoIter, LexerItem}; -use starlark::values::none::NoneType; -use starlark::values::Value; -use std::env; -use std::path::PathBuf; -use std::sync::{Arc, Mutex}; - -fn print_eval, T2: LexerIntoIter>( - map: Arc>, - filename: &str, - content: &str, - lexer: T2, - dialect: Dialect, - env: &mut Environment, - type_values: &TypeValues, - file_loader_env: Environment, -) { - match eval_lexer( - &map, - filename, - content, - dialect, - lexer, - env, - type_values, - &SimpleFileLoader::new(&map.clone(), file_loader_env), - ) { - Ok(v) => { - if v.get_type() != "NoneType" { - println!("{}", v.to_repr()) - } - } - Err(p) => Emitter::stderr(ColorConfig::Always, Some(&map.lock().unwrap())).emit(&[p]), - } -} - -starlark_module! {print_function => - /// print: print an object string representation to stderr. - /// - /// Examples: - /// ```python - /// print("some message") # Will print "some message" to stderr - /// ``` - print(*args) { - let mut r = String::new(); - let mut first = true; - for arg in args { - if !first { - r.push_str(" "); - } - first = false; - r.push_str(&arg.to_str()); - } - eprintln!("{}", r); - Ok(Value::new(NoneType::None)) - } -} - -/// A REPL (Read-Eval-Print Loop) for Starlark. -/// -/// This method run a REPL until the user hit Ctrl+D. It can be used for interactive use where the -/// parent enviroment offer side-effect methods. -/// -/// # Parameters: -/// -/// * global_environment: the parent enviroment for the loop. -/// * dialect: Starlark language dialect. -/// * ast: print AST instead of evaluating. -pub fn repl(global_environment: &mut Environment, type_values: &TypeValues, dialect: Dialect) { - let map = Arc::new(Mutex::new(codemap::CodeMap::new())); - let reader = Interface::new("Starlark").unwrap(); - let mut env = global_environment.child("repl"); - let mut n = 0; - - // Linefeed default history size is unlimited, - // but since we write history to disk, we better limit it. - reader.set_history_size(100_000); - - let histfile = env::var_os("STARLARK_RUST_HISTFILE").map(PathBuf::from); - - if let Some(ref histfile) = histfile { - if histfile.exists() { - if let Err(e) = reader.load_history(histfile) { - eprintln!("Failed to load history from {}: {}", histfile.display(), e); - } - } - } - - reader.set_prompt(">>> ").unwrap(); - - while let Ok(ReadResult::Input(input)) = reader.read_line() { - if !input.is_empty() { - reader.set_prompt("... 
").unwrap(); - n += 1; - let input = input + "\n"; - let mut lexer = BufferedLexer::new(&input); - let mut content = input; - while lexer.need_more() { - if let Ok(ReadResult::Input(input)) = reader.read_line() { - let input = input + "\n"; - content += &input; - lexer.input(&input); - } else { - break; - } - } - let mut hist = content.clone(); - hist.pop(); - reader.add_history(hist); - print_eval( - map.clone(), - &format!("<{}>", n), - &content, - lexer, - dialect, - &mut env, - type_values, - global_environment.clone(), - ) - } - reader.set_prompt(">>> ").unwrap(); - } - - println!(); - - if let Some(ref histfile) = histfile { - if let Err(e) = reader.save_history(histfile) { - eprintln!("Failed to save history to {}: {}", histfile.display(), e); - } - } - - println!("Goodbye!"); -} diff --git a/starlark-repl/tests/integration.rs b/starlark-repl/tests/integration.rs deleted file mode 100644 index 4d614196..00000000 --- a/starlark-repl/tests/integration.rs +++ /dev/null @@ -1,80 +0,0 @@ -use assert_cmd::prelude::*; -use predicates::str::contains; -use std::io::Write; -use std::process::Command; - -// Copied from starlark::environment - not currently public because of uncertainty around how to -// expose it. -const NOT_FOUND_ERROR_CODE: &str = "CM01"; - -#[test] -fn outputs_last_command_value() { - Command::main_binary() - .unwrap() - .arg("-c") - .arg("5\n1 + 1") - .assert() - .success() - .stdout("2\n"); -} - -#[test] -fn outputs_last_file_values() { - let f1 = make_file("0"); - let f2 = make_file(""); - let f3 = make_file("None"); - let f4 = make_file("2\n3\n\"Hello\""); - - Command::main_binary() - .unwrap() - .arg(f1.path()) - .arg(f2.path()) - .arg(f3.path()) - .arg(f4.path()) - .assert() - .success() - .stdout("0\n\"Hello\"\n"); -} - -#[test] -fn error_in_command() { - Command::main_binary() - .unwrap() - .arg("-c") - .arg("x") - .assert() - .code(2) - .stderr(contains(NOT_FOUND_ERROR_CODE)); -} - -#[test] -fn error_in_file() { - let f = make_file("x"); - - Command::main_binary() - .unwrap() - .arg(f.path()) - .assert() - .code(2) - .stderr(contains(NOT_FOUND_ERROR_CODE)); -} - -#[test] -fn files_environments_are_isolated() { - let f1 = make_file("x = 1"); - let f2 = make_file("x"); - - Command::main_binary() - .unwrap() - .arg(f1.path()) - .arg(f2.path()) - .assert() - .code(2) - .stderr(contains(NOT_FOUND_ERROR_CODE)); -} - -fn make_file(content: &str) -> tempfile::NamedTempFile { - let mut file = tempfile::NamedTempFile::new().unwrap(); - writeln!(file, "{}", content).unwrap(); - file -} diff --git a/starlark-test/Cargo.toml b/starlark-test/Cargo.toml deleted file mode 100644 index 82bfdeb3..00000000 --- a/starlark-test/Cargo.toml +++ /dev/null @@ -1,31 +0,0 @@ -[package] -name = "starlark-test" -edition = "2018" -version = "0.0.0" -authors = [ - "Damien Martin-Guillerez ", - "Stepan Koltsov ", -] -build = "build.rs" -publish = false - -description = "Tests for starlark crate." 
-documentation = "https://docs.rs/crate/starlark" -homepage = "https://github.com/google/starlark-rust" -repository = "https://github.com/google/starlark-rust" -readme = "README.md" -keywords = ["starlark", "skylark", "bazel", "language", "interpreter"] -categories = ["development-tools"] -license = "Apache-2.0" - -[dependencies] -starlark = { path = "../starlark" } -codemap = "0.1.1" -codemap-diagnostic = "0.1.1" -linked-hash-map = "0.5.1" - -[lib] -bench = false -test = false -doctest = false -doc = false diff --git a/starlark-test/benches/rust-benches/bubble_sort.sky b/starlark-test/benches/rust-benches/bubble_sort.sky deleted file mode 100644 index f338be44..00000000 --- a/starlark-test/benches/rust-benches/bubble_sort.sky +++ /dev/null @@ -1,11 +0,0 @@ -def bubble_sort(array): - array = list(array) - for i in range(len(array)): - # TODO: https://github.com/google/starlark-rust/issues/98 - for j in range((len(array) - i) - 1): - if array[j] > array[j + 1]: - array[j], array[j + 1] = array[j + 1], array[j] - return array - -def bench(): - assert_eq([2, 3, 4, 5, 6, 7, 9], bubble_sort([9, 3, 5, 4, 7, 2, 6])) diff --git a/starlark-test/benches/rust-benches/empty.sky b/starlark-test/benches/rust-benches/empty.sky deleted file mode 100644 index 20e2b194..00000000 --- a/starlark-test/benches/rust-benches/empty.sky +++ /dev/null @@ -1,4 +0,0 @@ -# Benching evaluation of empty function - -def bench(): - pass diff --git a/starlark-test/benches/rust-benches/integer_add.sky b/starlark-test/benches/rust-benches/integer_add.sky deleted file mode 100644 index a5fc6e37..00000000 --- a/starlark-test/benches/rust-benches/integer_add.sky +++ /dev/null @@ -1,23 +0,0 @@ -def bench(): - return ( - (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + 13 + 14 + 15 + 16) + - (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + 13 + 14 + 15 + 16) + - (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + 13 + 14 + 15 + 16) + - (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + 13 + 14 + 15 + 16) + - (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + 13 + 14 + 15 + 16) + - (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + 13 + 14 + 15 + 16) + - (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + 13 + 14 + 15 + 16) + - (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + 13 + 14 + 15 + 16) + - (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + 13 + 14 + 15 + 16) + - (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + 13 + 14 + 15 + 16) + - (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + 13 + 14 + 15 + 16) + - (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + 13 + 14 + 15 + 16) + - (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + 13 + 14 + 15 + 16) + - (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + 13 + 14 + 15 + 16) + - (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + 13 + 14 + 15 + 16) + - (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + 13 + 14 + 15 + 16) + - (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + 13 + 14 + 15 + 16) + - (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + 13 + 14 + 15 + 16) + - (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + 13 + 14 + 15 + 16) + - 0 - ) diff --git a/starlark-test/benches/rust-benches/list.sky b/starlark-test/benches/rust-benches/list.sky deleted file mode 100644 index 910b0637..00000000 --- a/starlark-test/benches/rust-benches/list.sky +++ /dev/null @@ -1,2 +0,0 @@ -def bench(): - return list([1, 2, 3]) diff --git a/starlark-test/benches/rust-benches/named_args.sky b/starlark-test/benches/rust-benches/named_args.sky deleted file mode 100644 
index 616b9ce6..00000000 --- a/starlark-test/benches/rust-benches/named_args.sky +++ /dev/null @@ -1,16 +0,0 @@ -def takes_named_args(**kwargs): - return len(kwargs) - -def bench(): - return takes_named_args( - a0=0, - a1=1, - a2=2, - a3=3, - a4=4, - a5=5, - a6=6, - a7=7, - a8=8, - a9=9, - ) diff --git a/starlark-test/benches/rust_benches.rs b/starlark-test/benches/rust_benches.rs deleted file mode 100644 index 49291f9d..00000000 --- a/starlark-test/benches/rust_benches.rs +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// `#[feature(test)]` only works in nightly -#![cfg(rustc_nightly)] -#![feature(test)] - -extern crate test; -use starlark_test::do_bench; -use test::Bencher; - -include!(concat!(env!("OUT_DIR"), "/benches/rust-benches.rs")); diff --git a/starlark-test/build.rs b/starlark-test/build.rs deleted file mode 100644 index 475dc618..00000000 --- a/starlark-test/build.rs +++ /dev/null @@ -1,138 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -use std::fs; -use std::fs::File; -use std::io::prelude::*; -use std::path::Path; -use std::{env, process}; - -fn main() { - let nightly = query_rust_version_is_nightly(); - - test_cases("tests/java-testcases", &TestOrBench::Test); - test_cases("tests/rust-testcases", &TestOrBench::Test); - test_cases("tests/go-testcases", &TestOrBench::Test); - if nightly { - println!("cargo:rustc-cfg=rustc_nightly"); - // Benches only work in nightly - test_cases("benches/rust-benches", &TestOrBench::Bench); - } -} - -fn version_is_nightly(version: &str) -> bool { - version.contains("nightly") -} - -fn query_rust_version_is_nightly() -> bool { - let rustc = env::var("RUSTC").expect("RUSTC unset"); - - let mut child = process::Command::new(rustc) - .args(&["--version"]) - .stdin(process::Stdio::null()) - .stdout(process::Stdio::piped()) - .spawn() - .expect("spawn rustc"); - - let mut rustc_version = String::new(); - - child - .stdout - .as_mut() - .expect("stdout") - .read_to_string(&mut rustc_version) - .expect("read_to_string"); - assert!(child.wait().expect("wait").success()); - - version_is_nightly(&rustc_version) -} - -enum TestOrBench { - Test, - Bench, -} - -/// Load a file and convert it to a vector of string (separated by ---) to be evaluated separately. 
-fn read_input(path: &Path) -> Vec<(usize, String)> { - let mut content = String::new(); - let mut file = File::open(path).unwrap(); - file.read_to_string(&mut content).unwrap(); - let mut v: Vec<(usize, String)> = content - .split("\n---\n") - .map(|x| (0, x.to_owned())) - .collect(); - let mut idx = 0; - for mut el in &mut v { - el.0 = idx; - idx += el.1.chars().filter(|x| *x == '\n').count() + 2 // 2 = separator new lines - } - v -} - -fn format_test_content(path: &Path) -> String { - let test_name = path.file_stem().unwrap().to_str().unwrap(); - let mut r = String::new(); - for (offset, content) in read_input(path).into_iter() { - let content = std::iter::repeat("\n").take(offset).collect::() + &content; - r.push_str(&format!( - r#" -#[test] -fn test_{}_{}() {{ - do_conformance_test("{}", {:?}) -}} -"#, - test_name, - offset + 1, - path.to_str().unwrap(), - content, - )); - } - r -} - -fn format_test_or_bench_content(path: &Path, test_or_bench: &TestOrBench) -> String { - let test_name = path.file_stem().unwrap().to_str().unwrap(); - match test_or_bench { - TestOrBench::Test => format_test_content(path), - TestOrBench::Bench => format!( - r#" -#[bench] -fn bench_{}(bencher: &mut Bencher) {{ - do_bench(bencher, "{}") -}} -"#, - test_name, - path.to_str().unwrap(), - ), - } -} - -fn test_cases(path: &str, test_or_bench: &TestOrBench) { - println!("cargo:rerun-if-changed={}", path); - let outfile_path = Path::new(&env::var("OUT_DIR").unwrap()).join(format!("{}.rs", path)); - fs::create_dir_all(outfile_path.parent().unwrap()).unwrap(); - let mut outfile = File::create(outfile_path).unwrap(); - let cargo_dir = env::var("CARGO_MANIFEST_DIR").unwrap(); - let base = Path::new(&cargo_dir); - let d = base.join(path); - let paths = fs::read_dir(d).unwrap(); - for p in paths { - let path_entry = p.unwrap().path(); - if path_entry.extension().unwrap().to_str().unwrap() != "md" { - // Exclude markdown files - let content = - format_test_or_bench_content(path_entry.strip_prefix(base).unwrap(), test_or_bench); - outfile.write(content.as_bytes()).unwrap(); - } - } -} diff --git a/starlark-test/src/lib.rs b/starlark-test/src/lib.rs deleted file mode 100644 index 4ee8dad9..00000000 --- a/starlark-test/src/lib.rs +++ /dev/null @@ -1,288 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! 
Utility to test the tests and benches - -#![cfg_attr(rustc_nightly, feature(test))] - -#[cfg(rustc_nightly)] -extern crate test; - -use codemap::CodeMap; -use codemap_diagnostic::ColorConfig; -use codemap_diagnostic::Diagnostic; -use codemap_diagnostic::Emitter; -use codemap_diagnostic::Level; -use linked_hash_map::LinkedHashMap; -use starlark::environment::Environment; -use starlark::environment::TypeValues; -use starlark::eval::call_stack::CallStack; -use starlark::eval::eval; -use starlark::eval::noload; -use starlark::eval::EvalException; -use starlark::eval::FileLoader; -use starlark::stdlib::global_environment_for_repl_and_tests; -use starlark::syntax::dialect::Dialect; -use starlark::values::error::ValueError; -use std::collections::HashMap; -use std::fs::File; -use std::io::prelude::*; -use std::io::{self, Write}; -use std::sync::{Arc, Mutex}; -#[cfg(rustc_nightly)] -use test::Bencher; - -fn assert_diagnostic( - d: Diagnostic, - expected: &str, - path: &str, - offset: usize, - map: &Arc>, -) -> bool { - let expected = expected.to_lowercase(); - let msg = if d.spans.is_empty() || d.spans[0].label.is_none() { - d.message.clone() - } else { - let label = d.spans[0].label.clone(); - let error_code = d.code.clone().unwrap_or_else(|| "".to_owned()); - format!("[{}] {} ({})", error_code, d.message, label.unwrap()) - }; - if !msg.to_lowercase().contains(&expected) { - io::stderr() - .write_all( - &format!( - "Expected error '{}' at {}:{}, got {}\n", - expected, path, offset, msg, - ) - .into_bytes(), - ) - .unwrap(); - Emitter::stderr(ColorConfig::Always, Some(&map.lock().unwrap())).emit(&[d]); - false - } else { - true - } -} - -#[derive(Default)] -struct ParsedTest { - error: Option<(usize, String)>, - files: HashMap, -} - -fn parse_test(content: &str) -> ParsedTest { - let mut r = ParsedTest::default(); - let mut current_file = "main.sky".to_owned(); - for (i, line) in content.lines().enumerate() { - let file_prefix = "# file: "; - if line.starts_with(file_prefix) { - current_file = line[file_prefix.len()..].to_owned(); - } else { - r.files - .entry(current_file.clone()) - .or_default() - .push_str(&format!("{}\n", line)); - - if let Some(x) = line.find("###") { - assert!(r.error.is_none(), "test may contain at most one error"); - r.error = Some((i + 1, line.get(x + 3..).unwrap().trim().to_owned())) - } - } - } - r -} - -#[derive(Clone)] -struct HashMapFileLoader { - parent: Environment, - files: HashMap, - map: Arc>, -} - -impl FileLoader for HashMapFileLoader { - fn load(&self, path: &str, type_values: &TypeValues) -> Result { - let mut env = self.parent.child(path); - let content = match self.files.get(path) { - Some(content) => content, - None => { - return Err(EvalException::DiagnosedError(Diagnostic { - level: Level::Bug, - message: format!("file not found"), - code: None, - spans: Vec::new(), - })) - } - }; - eval( - &self.map, - path, - content, - Dialect::Bzl, - &mut env, - type_values, - self, - )?; - Ok(env) - } -} - -pub fn do_conformance_test(path: &str, content: &str) { - let map = Arc::new(Mutex::new(CodeMap::new())); - let (global, type_values) = global_environment_for_repl_and_tests(); - global.freeze(); - let mut prelude = global.child("PRELUDE"); - noload::eval( - &map, - "PRELUDE", - r#" -def assert_eq(x, y): - if x != y: - fail("%r != %r" % (x, y)) - -def assert_(cond, msg="assertion failed"): - if not cond: - fail(msg) -"#, - starlark::syntax::dialect::Dialect::Bzl, - &mut prelude, - &type_values, - ) - .unwrap(); - prelude.freeze(); - - let test = 
parse_test(content); - - let build = test.files.get("main.sky").expect(&format!( - "test must contain main.sky file: {:?}", - test.files.keys().collect::>() - )); - - match eval( - &map, - path, - build, - starlark::syntax::dialect::Dialect::Bzl, - &mut prelude.child(path), - &type_values, - &HashMapFileLoader { - parent: prelude.clone(), - files: test.files.clone(), - map: map.clone(), - }, - ) { - Err(p) => match &test.error { - Some((offset, err)) => { - if !assert_diagnostic(p, err, "test", *offset, &map) { - panic!(); - } - } - None => { - Emitter::stderr(ColorConfig::Always, Some(&map.lock().unwrap())).emit(&[p]); - panic!(); - } - }, - _ => { - if let Some((offset, err)) = test.error { - io::stderr() - .write_all( - &format!( - "Expected error '{}' at {}:{}, got success", - err, path, offset - ) - .into_bytes(), - ) - .unwrap(); - panic!(); - } - } - } -} - -#[cfg(not(rustc_nightly))] -pub struct Bencher {} - -#[cfg(not(rustc_nightly))] -impl Bencher { - pub fn iter(&mut self, mut _inner: F) - where - F: FnMut() -> T, - { - // Bencher included here to typecheck `do_bench` function - // in stable and also to mute unused imports warnings - panic!("Bencher available only in nightly"); - } -} - -pub fn do_bench(bencher: &mut Bencher, path: &str) { - let mut content = String::new(); - let mut file = File::open(path).unwrap(); - file.read_to_string(&mut content).unwrap(); - drop(file); - - let map = Arc::new(Mutex::new(CodeMap::new())); - let (global, type_values) = global_environment_for_repl_and_tests(); - global.freeze(); - let mut prelude = global.child("PRELUDE"); - noload::eval( - &map, - "PRELUDE", - r#" -def assert_eq(x, y): - if x != y: - fail("%r != %r" % (x, y)) - -def assert_(cond, msg="assertion failed"): - if not cond: - fail(msg) -"#, - starlark::syntax::dialect::Dialect::Bzl, - &mut prelude, - &type_values, - ) - .unwrap(); - prelude.freeze(); - - let mut env = prelude.child("run"); - match noload::eval(&map, path, &content, Dialect::Bzl, &mut env, &type_values) { - Ok(_) => (), - Err(p) => { - Emitter::stderr(ColorConfig::Always, Some(&map.lock().unwrap())).emit(&[p]); - panic!(); - } - } - - env.freeze(); - - let bench_func = env.get("bench").expect("bench function is not found"); - - bencher.iter(|| { - match bench_func.call( - &mut CallStack::default(), - &type_values, - Vec::new(), - LinkedHashMap::new(), - None, - None, - ) { - Ok(r) => r, - Err(ValueError::DiagnosedError(e)) => { - Emitter::stderr(ColorConfig::Always, Some(&map.lock().unwrap())).emit(&[e]); - panic!(); - } - Err(e) => { - panic!("{:?}", e); - } - } - }); -} diff --git a/starlark-test/tests/README.md b/starlark-test/tests/README.md deleted file mode 100644 index 801b6dff..00000000 --- a/starlark-test/tests/README.md +++ /dev/null @@ -1,9 +0,0 @@ -## Rust Starlark implementation test - -* `go-testcases` are taken from - [Go implementation](https://github.com/google/starlark-go/tree/master/starlark/testdata) -* `java-testcases` are taken from - [Java implementation](https://github.com/bazelbuild/bazel/tree/master/src/test/starlark/testdata) -* `rust-testcases` are written for this project - -Certain Java and Go testcases were modified or removed to match this implementation. diff --git a/starlark-test/tests/go-testcases/assign.sky b/starlark-test/tests/go-testcases/assign.sky deleted file mode 100644 index e76e8325..00000000 --- a/starlark-test/tests/go-testcases/assign.sky +++ /dev/null @@ -1,205 +0,0 @@ -# Tests of Skylark assignment. 
- -# This is a "chunked" file: each "---" effectively starts a new file. - -a, b, c = 1, 2, 3 -assert_eq(a, 1) -assert_eq(b, 2) -assert_eq(c, 3) - ---- -(x,) = 1 ### The type 'int' is not iterable ---- -a, b, c = 1, 2 ### Unpacked ---- -a, b = 1, 2, 3 ### Unpacked ---- -a, b = (1,) ### Unpacked ---- -(a,) = [1, 2, 3] ### Unpacked ---- -[a, b, c] = [1, 2, 3] -assert_eq(a, 1) -assert_eq(b, 2) -assert_eq(c, 3) ---- -[a, b, c,] = 1 ### The type 'int' is not iterable ---- -[a, b, c] = 1, 2 ### Unpacked ---- -[a, b] = 1, 2, 3 ### Unpacked ---- -[a, b] = (1,) ### Unpacked ---- -[a, b, c] = (1, 2, 3) -assert_eq(a, 1) -assert_eq(b, 2) -assert_eq(c, 3) - -(d, e, f) = [1, 2, 3] -assert_eq(d, 1) -assert_eq(e, 2) -assert_eq(f, 3) - -[g, h, (i, j)] = (1, 2, [3, 4]) -assert_eq(g, 1) -assert_eq(h, 2) -assert_eq(i, 3) -assert_eq(j, 4) - -(k, l, [m, n]) = [1, 2, (3, 4)] -assert_eq(k, 1) -assert_eq(l, 2) -assert_eq(m, 3) -assert_eq(n, 4) - ---- -def assignment(): - a = [1, 2, 3] - a[1] = 5 - assert_eq(a, [1, 5, 3]) - a[-2] = 2 - assert_eq(a, [1, 2, 3]) - assert_eq("%d %d" % (5, 7), "5 7") - x={} - x[1] = 2 - x[1] += 3 - assert_eq(x[1], 5) - -assignment() ---- -x = {} -x[(1, "abc", {})] = 1 ### not hashable ---- -# augmented assignment - -def f(): - x = 1 - x += 1 - assert_eq(x, 2) - x *= 3 - assert_eq(x, 6) -f() - ---- -# effects of evaluating LHS occur only once - -count = [0] # count[0] is the number of calls to f - -def f(): - count[0] += 1 - return count[0] - -x = [1, 2, 3] -x[f()] += 1 - -assert_eq(x, [1, 3, 3]) # sole call to f returned 1 -assert_eq(count[0], 1) # f was called only once - ---- -# Order of evaluation. - -calls = [] - -def f(name, result): - calls.append(name) - return result - -# The right side is evaluated before the left in an ordinary assignment. -calls.clear() -f("array", [0])[f("index", 0)] = f("rhs", 0) -assert_eq(calls, ["rhs", "array", "index"]) - -calls.clear() -f("lhs1", [0])[0], f("lhs2", [0])[0] = f("rhs1", 0), f("rhs2", 0) -assert_eq(calls, ["rhs1", "rhs2", "lhs1", "lhs2"]) - -# Left side is evaluated first (and only once) in an augmented assignment. -calls.clear() -f("array", [0])[f("index", 0)] += f("addend", 1) -assert_eq(calls, ["array", "index", "addend"]) - ---- -# global referenced before assignment - -def f(): - return g ### Variable was not found - -f() - -g = 1 - ---- -printok = [False] - -def use_before_def(): - print(x) - -use_before_def() ### Variable was not found - ---- -x = [1] -x.extend([2]) # ok - -def f(): - x += [4] ### Local variable referenced before assignment - -f() - ---- - -z += 3 ### Augmented assignment is a binding and not allowed on a global variable - ---- -# It's ok to define a global that shadows a built-in. - - -assert_eq(type(list), "function") -list = [] -assert_eq(type(list), "list") - ---- -# Is that something specific to Go? Is it actually in the java implem too? - -# All 'in x' expressions in a comprehension are evaluated -# in the comprehension's lexical block. -# -# By contrast, Python yields [[1, 2], [1, 2]] because it evaluates -# the first 'in x' in the environment enclosing the comprehension. -x = [[1, 2]] -_ = [x for x in x for y in x] # # # "local variable x referenced before assignment" - ---- -# A comprehension establishes a single new lexical block, -# not one per 'for' clause. 
-x = [1, 2] -_ = [x for _ in [3] for x in x] # # # "local variable x referenced before assignment" - ---- -# assign singleton sequence to 1-tuple -(x,) = (1,) -assert_eq(x, 1) -(y,) = [1] -assert_eq(y, 1) - -# assign 1-tuple to variable -z = (1,) -assert_eq(type(z), "tuple") -assert_eq(len(z), 1) -assert_eq(z[0], 1) - ---- -# destucturing assigmnent in a for loop. -def f(): - res = [] - for (x, y), z in [(["a", "b"], 3), (["c", "d"], 4)]: - res.append((x, y, z)) - return res -assert_eq(f(), [("a", "b", 3), ("c", "d", 4)]) - -def g(): - a = {} - for i, a[i] in [("one", 1), ("two", 2)]: - pass - return a -assert_eq(g(), {"one": 1, "two": 2}) diff --git a/starlark-test/tests/go-testcases/bool.sky b/starlark-test/tests/go-testcases/bool.sky deleted file mode 100644 index 32cdf807..00000000 --- a/starlark-test/tests/go-testcases/bool.sky +++ /dev/null @@ -1,44 +0,0 @@ -# Tests of Skylark 'bool' - -# truth -assert_(True) -assert_(not False) - -# bool conversion -assert_eq([bool(), bool(1), bool(0), bool("hello"), bool("")], - [False, True, False, True, False]) - -# comparison -assert_(None == None) -assert_(None != False) -assert_(None != True) -assert_eq(1==1, True) -assert_eq(1==2, False) -assert_(False == False) -assert_(True == True) - -# ordered comparison -assert_(False < True) -assert_(False <= True) -assert_(False <= False) -assert_(True > False) -assert_(True >= False) -assert_(True >= True) - -# conditional expression -assert_eq(1 if 3 > 2 else 0, 1) -assert_eq(1 if "foo" else 0, 1) -assert_eq(1 if "" else 0, 0) - -# short-circuit evaluation of 'and' and 'or': -# 'or' yields the first true operand, or the last if all are false. -assert_eq(0 or "" or [] or 0, 0) -assert_eq(0 or "" or [] or 123 or 1//0, 123) ---- -0 or "" or [] or 0 or 1//0 ### division by zero ---- -# 'and' yields the first false operand, or the last if all are true. 
-assert_eq(1 and "a" and [1] and 123, 123) -assert_eq(1 and "a" and [1] and 0 and 1//0, 0) ---- -1 and "a" and [1] and 123 and 1//0 ### division by zero diff --git a/starlark-test/tests/go-testcases/builtins.sky b/starlark-test/tests/go-testcases/builtins.sky deleted file mode 100644 index 74a68632..00000000 --- a/starlark-test/tests/go-testcases/builtins.sky +++ /dev/null @@ -1,207 +0,0 @@ -# Tests of Skylark built-in functions - -# len -assert_eq(len([1, 2, 3]), 3) -assert_eq(len((1, 2, 3)), 3) -assert_eq(len({1: 2}), 1) ---- -len(1) ### len() not supported for type int ---- - -# and, or -assert_eq(123 or "foo", 123) -assert_eq(0 or "foo", "foo") -assert_eq(123 and "foo", "foo") -assert_eq(0 and "foo", 0) -none = None -_1 = none and none[0] # rhs is not evaluated -_2 = (not none) or none[0] # rhs is not evaluated - -# any, all -assert_(all([])) -assert_(all([1, True, "foo"])) -assert_(not all([1, True, ""])) -assert_(not any([])) -assert_(any([0, False, "foo"])) -assert_(not any([0, False, ""])) - -# in -assert_(3 in [1, 2, 3]) -assert_(4 not in [1, 2, 3]) -assert_(3 in (1, 2, 3)) -assert_(4 not in (1, 2, 3)) ---- -3 in "foo" ### Type of parameters mismatch ---- -assert_(123 in {123: ""}) -assert_(456 not in {123:""}) ---- -assert_([] not in {123: ""}) ### Not hashable ---- - -# sorted -assert_eq(sorted([42, 123, 3]), [3, 42, 123]) -assert_eq(sorted([42, 123, 3], reverse=True), [123, 42, 3]) -assert_eq(sorted(["wiz", "foo", "bar"]), ["bar", "foo", "wiz"]) -assert_eq(sorted(["wiz", "foo", "bar"], reverse=True), ["wiz", "foo", "bar"]) -sorted([1, 2, None, 3]) ### compare not supported for types ---- -sorted([1, "one"]) ### compare not supported for types ---- -# custom key function -assert_eq(sorted(["two", "three", "four"], key=len), - ["two", "four", "three"]) -assert_eq(sorted(["two", "three", "four"], key=len, reverse=True), - ["three", "four", "two"]) -sorted([1, 2, 3], key=None) ### call() not supported for type NoneType ---- -# sort is stable -pairs = [(4, 0), (3, 1), (4, 2), (2, 3), (3, 4), (1, 5), (2, 6), (3, 7)] -def first(t): return t[0] -assert_eq(sorted(pairs, key=first), - [(1, 5), - (2, 3), (2, 6), - (3, 1), (3, 4), (3, 7), - (4, 0), (4, 2)]) - -sorted(1) ### The type 'int' is not iterable - ---- - -# reversed -assert_eq(reversed([1, 144, 81, 16]), [16, 81, 144, 1]) - -# dict -assert_eq(dict([(1, 2), (3, 4)]), {1: 2, 3: 4}) -assert_eq(dict([(1, 2), (3, 4)], foo="bar"), {1: 2, 3: 4, "foo": "bar"}) -assert_eq(dict({1:2, 3:4}), {1: 2, 3: 4}) -assert_eq(dict({1:2, 3:4}.items()), {1: 2, 3: 4}) - -# range -assert_eq("range", type(range(10))) -assert_eq("range(10)", str(range(0, 10, 1))) -assert_eq("range(1, 10)", str(range(1, 10))) -assert_eq(range(0, 5, 10), range(0, 5, 11)) -assert_eq("range(0, 10, -1)", str(range(0, 10, -1))) ---- -{range(10): 10} ### CV04 ---- -assert_(bool(range(1, 2))) -assert_(not(range(2, 1))) # an empty range is false -assert_eq([x*x for x in range(5)], [0, 1, 4, 9, 16]) -assert_eq(list(range(5)), [0, 1, 2, 3, 4]) -assert_eq(list(range(-5)), []) -assert_eq(list(range(2, 5)), [2, 3, 4]) -assert_eq(list(range(5, 2)), []) -assert_eq(list(range(-2, -5)), []) -assert_eq(list(range(-5, -2)), [-5, -4, -3]) -assert_eq(list(range(2, 10, 3)), [2, 5, 8]) -assert_eq(list(range(10, 2, -3)), [10, 7, 4]) -assert_eq(list(range(-2, -10, -3)), [-2, -5, -8]) -assert_eq(list(range(-10, -2, 3)), [-10, -7, -4]) -assert_eq(list(range(10, 2, -1)), [10, 9, 8, 7, 6, 5, 4, 3]) -assert_eq(list(range(5)[1:]), [1, 2, 3, 4]) -assert_eq(len(range(5)[1:]), 4) 
-assert_eq(list(range(5)[:2]), [0, 1]) -assert_eq(list(range(10)[1:]), [1, 2, 3, 4, 5, 6, 7, 8, 9]) -assert_eq(list(range(10)[1:9:2]), [1, 3, 5, 7]) -assert_eq(list(range(10)[1:10:2]), [1, 3, 5, 7, 9]) -assert_eq(list(range(10)[1:11:2]), [1, 3, 5, 7, 9]) -assert_eq(list(range(10)[::-2]), [9, 7, 5, 3, 1]) -assert_eq(list(range(0, 10, 2)[::2]), [0, 4, 8]) -assert_eq(list(range(0, 10, 2)[::-2]), [8, 4, 0]) -# Works fine in Starlark Rust -# assert.fails(lambda: range(3000000000), "3000000000 out of range") # signed 32-bit values only -assert_eq(len(range(0x7fffffff)), 0x7fffffff) # O(1) -# Two ranges compare equal if they denote the same sequence: -assert_eq(range(0), range(2, 1, 3)) # [] -assert_eq(range(0, 3, 2), range(0, 4, 2)) # [0, 2] -assert_(range(1, 10) != range(2, 10)) -# Should not be comparable -# assert.fails(lambda: range(0) < range(0), "range < range not implemented") -# in -assert_(1 in range(3)) ---- -assert_(True not in range(3)) # The go implementation does not support that but python returns True. ---- -assert_("one" not in range(10)) ---- -range(0, 0, 2)[:][0] ### Index out of bound ---- - -# list -assert_eq(sorted(list({"a": 1, "b": 2})), ['a', 'b']) - -# min, max -assert_eq(min(5, -2, 1, 7, 3), -2) -assert_eq(max(5, -2, 1, 7, 3), 7) -assert_eq(min([5, -2, 1, 7, 3]), -2) -assert_eq(min("one", "two", "three", "four"), "four") -assert_eq(max("one", "two", "three", "four"), "two") ---- -min() ### min() expect a non empty iterable ---- -min(1) ### not iterable ---- -min([]) ### empty ---- -def m1(x): return x*x -def m2(x): return -x -assert_eq(min(5, -2, 1, 7, 3, key=m1), 1) # min absolute value -assert_eq(min(5, -2, 1, 7, 3, key=m2), 7) # min negated value - -# enumerate -assert_eq(enumerate([False, True, None], 42), [(42, False), (43, True), (44, None)]) - -# zip -assert_eq(zip(), []) -assert_eq(zip([]), []) -assert_eq(zip([1, 2, 3]), [(1,), (2,), (3,)]) -z1 = [1] -assert_eq(zip(z1), [(1,)]) -z1.append(2) -assert_eq(zip(z1), [(1,), (2,)]) -zip(z1, 1) ### 'int' is not iterable -z1.append(3) - -# dir for builtin_function_or_method -assert_eq(dir(None), []) -assert_eq(dir({})[:3], ["clear", "get", "items"]) # etc -assert_eq(dir(1), []) -assert_eq(dir([])[:3], ["append", "clear", "extend"]) # etc - -# hasattr, getattr, dir -# hasfields is an application-defined type defined in eval_test.go. -## We would need to define that type to works, so ignore that test for now -## TODO: Add hasfields for those test -# hf = hasfields() -# assert_eq(dir(hf), []) -# assert_(not hasattr(hf, "x")) -# --- -# getattr(hf, "x") # # # no .x field or method -# --- -# assert_eq(getattr(hf, "x", 42), 42) -# hf.x = 1 -# assert_(hasattr(hf, "x")) -# assert_eq(getattr(hf, "x"), 1) -# assert_eq(hf.x, 1) -# hf.x = 2 -# assert_eq(getattr(hf, "x"), 2) -# assert_eq(hf.x, 2) -# built-in types can have attributes (methods) too. 
-myset = dict() -assert_eq(dir(myset), ["union"]) -assert_(hasattr(myset, "union")) -assert_(not hasattr(myset, "onion")) -assert_eq(str(getattr(myset, "union")), "") ---- -myset = dict() -getattr(myset, "onion") ### .onion not supported for type dict ---- -myset = dict() -assert_eq(getattr(myset, "onion", 42), 42) - -# repr -assert_eq(repr(1), "1") -assert_eq(repr("x"), '"x"') -assert_eq(repr(["x", 1]), '["x", 1]') diff --git a/starlark-test/tests/go-testcases/control.sky b/starlark-test/tests/go-testcases/control.sky deleted file mode 100644 index 99758ee7..00000000 --- a/starlark-test/tests/go-testcases/control.sky +++ /dev/null @@ -1,52 +0,0 @@ -# Tests of Skylark control flow - -def controlflow(): - # elif - x = 0 - if True: - x=1 - elif False: - fail("else of true") - else: - fail("else of else of true") - assert_(x) - - x = 0 - if False: - fail("then of false") - elif True: - x = 1 - else: - fail("else of true") - assert_(x) - - x = 0 - if False: - fail("then of false") - elif False: - fail("then of false") - else: - x = 1 - assert_(x) -controlflow() - -def loops(): - y = "" - for x in [1, 2, 3, 4, 5]: - if x == 2: - continue - if x == 4: - break - y = y + str(x) - return y -assert_eq(loops(), "13") - -# return -g = 123 -def f(x): - for g in (1, 2, 3): - if g == x: - return g -assert_eq(f(2), 2) -assert_eq(f(4), None) # falling off end => return None -assert_eq(g, 123) # unchanged by local use of g in function diff --git a/starlark-test/tests/go-testcases/dict.sky b/starlark-test/tests/go-testcases/dict.sky deleted file mode 100644 index ddbbfdec..00000000 --- a/starlark-test/tests/go-testcases/dict.sky +++ /dev/null @@ -1,207 +0,0 @@ -# Tests of Skylark 'dict' - -# literals -assert_eq({}, {}) -assert_eq({"a": 1}, {"a": 1}) -assert_eq({"a": 1,}, {"a": 1}) - -# truth -assert_({False: False}) -assert_(not {}) - -# dict + dict (undocumented and deprecated; see b/36360157). -assert_eq({"a": 1, "b": 2} + {"a": 3, "c": 4}, {"a": 3, "b": 2, "c": 4}) - -# dict comprehension -assert_eq({x: x*x for x in range(3)}, {0: 0, 1: 1, 2: 4}) - -# dict.pop -x6 = {"a": 1, "b": 2} -assert_eq(x6.pop("a"), 1) -assert_eq(str(x6), '{"b": 2}') -x6.pop("c") ### not found ---- -x6 = {"b": 2} -assert_eq(x6.pop("c", 3), 3) -assert_eq(x6.pop("b"), 2) -assert_eq(len(x6), 0) - ---- -# dict.popitem -x7 = {"a": 1, "b": 2} -assert_eq([x7.popitem(), x7.popitem()], [("a", 1), ("b", 2)]) -assert_eq(len(x7), 0) -x7.popitem() ### empty dict ---- - -# dict.keys, dict.values -x8 = {"a": 1, "b": 2} -assert_eq(x8.keys(), ["a", "b"]) -assert_eq(x8.values(), [1, 2]) - -# equality -assert_eq({"a": 1, "b": 2}, {"a": 1, "b": 2}) -assert_eq({"a": 1, "b": 2,}, {"a": 1, "b": 2}) -assert_eq({"a": 1, "b": 2}, {"b": 2, "a": 1}) - -# insertion order is preserved -assert_eq(dict([("a", 0), ("b", 1), ("c", 2), ("b", 3)]).keys(), ["a", "b", "c"]) -assert_eq(dict([("b", 0), ("a", 1), ("b", 2), ("c", 3)]).keys(), ["b", "a", "c"]) -assert_eq(dict([("b", 0), ("a", 1), ("b", 2), ("c", 3)])["b"], 2) -# ...even after rehashing (which currently occurs after key 'i'): -small = dict([("a", 0), ("b", 1), ("c", 2)]) -small.update([("d", 4), ("e", 5), ("f", 6), ("g", 7), ("h", 8), ("i", 9), ("j", 10), ("k", 11)]) -assert_eq(small.keys(), ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k"]) - ---- -# duplicate keys are not permitted in dictionary expressions (see b/35698444). 
-{"aa": 1, "bb": 2, "cc": 3, "bb": 4} # TODO(dmarting): duplicate key: "bb"
----
-
-# index
-x9 = {}
-x9["a"] ### Not found
----
-x9 = {}
-x9["a"] = 1
-assert_eq(x9["a"], 1)
-assert_eq(x9, {"a": 1})
----
-x9 = {}
-x9[[]] = 2 ### Not hashable
----
-
-x9a = {}
-x9a[1, 2] = 3 # unparenthesized tuple is allowed here
-assert_eq(x9a.keys()[0], (1, 2))
-
-# dict.get
-x9 = {"a": 1}
-assert_eq(x9.get("a"), 1)
-assert_eq(x9.get("b"), None)
-assert_eq(x9.get("a", 2), 1)
-assert_eq(x9.get("b", 2), 2)
-
----
-# dict.clear
-x10 = {"a": 1}
-assert_("a" in x10)
-assert_eq(x10["a"], 1)
-x10.clear()
-assert_("a" not in x10)
-x10["a"] ### Not found
----
-
-# dict.setdefault
-x12 = {"a": 1}
-assert_eq(x12.setdefault("a"), 1)
-assert_eq(x12["a"], 1)
-assert_eq(x12.setdefault("b"), None)
-assert_eq(x12["b"], None)
-assert_eq(x12.setdefault("c", 2), 2)
-assert_eq(x12["c"], 2)
-assert_eq(x12.setdefault("c", 3), 2)
-assert_eq(x12["c"], 2)
-
-# dict.update
-x13 = {"a": 1}
-x13.update(a=2, b=3)
-assert_eq(x13, {"a": 2, "b": 3})
-x13.update([("b", 4), ("c", 5)])
-assert_eq(x13, {"a": 2, "b": 4, "c": 5})
-x13.update({"c": 6, "d": 7})
-assert_eq(x13, {"a": 2, "b": 4, "c": 6, "d": 7})
-
-# dict as a sequence
-#
-# for loop
-x14 = {1:2, 3:4}
-def keys(dict):
-  keys = []
-  for k in dict: keys.append(k)
-  return keys
-assert_eq(keys(x14), [1, 3])
-#
-# comprehension
-assert_eq([x for x in x14], [1, 3])
-#
-# varargs
-def varargs(*args): return args
-x15 = {"one": 1}
-assert_eq(varargs(*x15), ["one"])
-
-# kwargs parameter does not alias the **kwargs dict
-def kwargs(**kwargs): return kwargs
-x16 = kwargs(**x15)
-assert_eq(x16, x15)
-x15["two"] = 2 # mutate
-assert_(x16 != x15)
----
-# iterator invalidation
-def iterator1():
-  dict = {1:1, 2:1}
-  for k in dict:
-    dict[2*k] = dict[k]
-iterator1() ### Cannot mutate an iterable while iterating
----
-def iterator2():
-  dict = {1:1, 2:1}
-  for k in dict:
-    dict.pop(k)
-iterator2() ### Cannot mutate an iterable while iterating
----
-def f(d):
-  d[3] = 3
-
-def iterator3():
-  dict = {1:1, 2:1}
-  _ = [f(dict) for x in dict]
-iterator3() ### Cannot mutate an iterable while iterating
----
-# This assignment is not a modification-during-iteration:
-# the sequence x should be completely iterated before
-# the assignment occurs.
-def f(): - x = {1:2, 2:4} - a, x[0] = x - # There are two possible outcomes, depending on iteration order: - if not (a == 1 and x == {0: 2, 1: 2, 2: 4} or - a == 2 and x == {0: 1, 1: 2, 2: 4}): - fail("unexpected results: a=%s x=%s" % (a, x)) -f() - -# Regression test for a bug in hashtable.delete -def test_delete(): - d = {} - - # delete tail first - d["one"] = 1 - d["two"] = 2 - assert_eq(str(d), '{"one": 1, "two": 2}') - d.pop("two") - assert_eq(str(d), '{"one": 1}') - d.pop("one") - assert_eq(str(d), '{}') - - # delete head first - d["one"] = 1 - d["two"] = 2 - assert_eq(str(d), '{"one": 1, "two": 2}') - d.pop("one") - assert_eq(str(d), '{"two": 2}') - d.pop("two") - assert_eq(str(d), '{}') - - # delete middle - d["one"] = 1 - d["two"] = 2 - d["three"] = 3 - assert_eq(str(d), '{"one": 1, "two": 2, "three": 3}') - d.pop("two") - assert_eq(str(d), '{"one": 1, "three": 3}') - d.pop("three") - assert_eq(str(d), '{"one": 1}') - d.pop("one") - assert_eq(str(d), '{}') - -test_delete() diff --git a/starlark-test/tests/go-testcases/function.sky b/starlark-test/tests/go-testcases/function.sky deleted file mode 100644 index 0637bde7..00000000 --- a/starlark-test/tests/go-testcases/function.sky +++ /dev/null @@ -1,105 +0,0 @@ -# Tests of Skylark 'function' - -# TODO(adonovan): -# - add some introspection functions for looking at function values -# and test that functions have correct position, free vars, names of locals, etc. -# - move the hard-coded tests of parameter passing from eval_test.go to here. - ---- -# recursion detection, simple -def fib(x): - if x < 2: - return x - return fib(x-2) + fib(x-1) -fib(10) ### Recursive call ---- - -# call of function not through its name -# (regression test for parsing suffixes of primary expressions) -hf = {"x": [len]} -assert_eq(hf["x"][0]("abc"), 3) -def fone(): return 1 -def f(): - return fone -assert_eq(f()(), 1) -assert_eq(["abc"][0][0].upper(), "A") - -# functions may be recursively defined, -# so long as they don't dynamically recur. -calls = [] -def yin(x): - calls.append("yin") - if x: - yang(False) - -def yang(x): - calls.append("yang") - if x: - yin(False) - -yin(True) -assert_eq(calls, ["yin", "yang"]) - -calls.clear() -yang(True) -assert_eq(calls, ["yang", "yin"]) - - ---- -# Default values of function parameters are mutable. - -def f(x=[0]): - return x - -assert_eq(f(), [0]) - -f().append(1) -assert_eq(f(), [0, 1]) - ---- -# Missing parameters are correctly reported -# in functions of more than 64 parameters. 
- -def f(a, b, c, d, e, f, g, h, - i, j, k, l, m, n, o, p, - q, r, s, t, u, v, w, x, - y, z, A, B, C, D, E, F, - G, H, I, J, K, L, M, N, - O, P, Q, R, S, T, U, V, - W, X, Y, Z, aa, bb, cc, dd, - ee, ff, gg, hh, ii, jj, kk, ll, - mm): - pass - -f( - 1, 2, 3, 4, 5, 6, 7, 8, - 9, 10, 11, 12, 13, 14, 15, 16, - 17, 18, 19, 20, 21, 22, 23, 24, - 25, 26, 27, 28, 29, 30, 31, 32, - 33, 34, 35, 36, 37, 38, 39, 40, - 41, 42, 43, 44, 45, 46, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 56, - 57, 58, 59, 60, 61, 62, 63, 64) ### not enough parameters ---- - -def f(a, b, c, d, e, f, g, h, - i, j, k, l, m, n, o, p, - q, r, s, t, u, v, w, x, - y, z, A, B, C, D, E, F, - G, H, I, J, K, L, M, N, - O, P, Q, R, S, T, U, V, - W, X, Y, Z, aa, bb, cc, dd, - ee, ff, gg, hh, ii, jj, kk, ll, - mm): - pass - -f( - 1, 2, 3, 4, 5, 6, 7, 8, - 9, 10, 11, 12, 13, 14, 15, 16, - 17, 18, 19, 20, 21, 22, 23, 24, - 25, 26, 27, 28, 29, 30, 31, 32, - 33, 34, 35, 36, 37, 38, 39, 40, - 41, 42, 43, 44, 45, 46, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 56, - 57, 58, 59, 60, 61, 62, 63, 64, 65, - mm = 100) ### Extraneous parameter diff --git a/starlark-test/tests/go-testcases/int.sky b/starlark-test/tests/go-testcases/int.sky deleted file mode 100644 index 60322bb4..00000000 --- a/starlark-test/tests/go-testcases/int.sky +++ /dev/null @@ -1,169 +0,0 @@ -# Tests of Skylark 'int' - -# basic arithmetic -assert_eq(0 - 1, -1) -assert_eq(0 + 1, +1) -assert_eq(1 + 1, 2) -assert_eq(5 + 7, 12) -assert_eq(5 * 7, 35) -assert_eq(5 - 7, -2) - -# truth -assert_(123) -assert_(-1) -assert_(not 0) - -# floored division -# (For real division, see float.sky.) -assert_eq(100 // 7, 14) -assert_eq(100 // -7, -15) -assert_eq(-100 // 7, -15) # NB: different from Go/Java -assert_eq(-100 // -7, 14) # NB: different from Go/Java -assert_eq(98 // 7, 14) -assert_eq(98 // -7, -14) -assert_eq(-98 // 7, -14) -assert_eq(-98 // -7, 14) - -# remainder -assert_eq(100 % 7, 2) -assert_eq(100 % -7, -5) # NB: different from Go/Java -assert_eq(-100 % 7, 5) # NB: different from Go/Java -assert_eq(-100 % -7, -2) -assert_eq(98 % 7, 0) -assert_eq(98 % -7, 0) -assert_eq(-98 % 7, 0) -assert_eq(-98 % -7, 0) - -# compound assignment -def compound(): - x = 1 - x += 1 - assert_eq(x, 2) - x -= 3 - assert_eq(x, -1) - x *= 39 - assert_eq(x, -39) - x //= 4 - assert_eq(x, -10) - x /= -2 - assert_eq(x, 5) - x %= 3 - assert_eq(x, 2) - -compound() - -# int conversion -# See float.sky for float-to-int conversions. -# We follow Python 3 here, but I can't see the method in its madness. -# int from bool/int/float -assert_eq(int(False), 0) -assert_eq(int(True), 1) -assert_eq(int(3), 3) ---- -int(3, base=10) ### non-string with explicit base ---- -int(True, 10) ### non-string with explicit base ---- -# int from string, base implicitly 10 -# The original number here was taking 72bits, Does go version has int that large? -assert_eq(int("10000000000000000"), 100000000 * 100000000) -assert_eq(int("-10000000000000000"), -100000000 * 100000000) -assert_eq(int("123"), 123) -assert_eq(int("-123"), -123) -assert_eq(int("0123"), 123) # not octal -assert_eq(int("-0123"), -123) -# The followup conversions are considered errors in the go version -# but are actually ok according to the java implementation. The go -# version use base = 10 by default whereas the java implementation -# use base = 0 by default. 
-assert_eq(int("0x12"), 0x12)
-assert_eq(int("0o123"), 0o123)
-assert_eq(int("-0x12"), -0x12)
-assert_eq(int("-0o123"), -0o123)
-# int from string, explicit base
-assert_eq(int("11", base=9), 10)
-assert_eq(int("-11", base=9), -10)
-assert_eq(int("10011", base=2), 19)
-assert_eq(int("-10011", base=2), -19)
-assert_eq(int("123", 8), 83)
-assert_eq(int("-123", 8), -83)
-assert_eq(int("0123", 8), 83) # redundant zeros permitted
-assert_eq(int("-0123", 8), -83)
-assert_eq(int("00123", 8), 83)
-assert_eq(int("-00123", 8), -83)
-assert_eq(int("0o123", 8), 83)
-assert_eq(int("-0o123", 8), -83)
-assert_eq(int("123", 7), 66) # 1*7*7 + 2*7 + 3
-assert_eq(int("-123", 7), -66)
-assert_eq(int("12", 16), 18)
-assert_eq(int("-12", 16), -18)
-assert_eq(int("0x12", 16), 18)
-assert_eq(int("-0x12", 16), -18)
-assert_eq(0x10000001 * 0x10000001, 0x100000020000001)
-assert_eq(int("1010", 2), 10)
-assert_eq(int("111111101", 2), 509)
-assert_eq(int("0b0101", 0), 5)
-assert_eq(int("0b00000", 0), 0)
-assert_eq(11111111 * 11111111, 123456787654321)
----
-int("0x123", 8) ### Not a base 8 integer
----
-int("-0x123", 8) ### Not a base 8 integer
----
-int("0o123", 16) ### Not a base 16 integer
----
-int("-0o123", 16) ### Not a base 16 integer
----
-int("0x110", 2) ### Not a base 2 integer
----
-# int from string, auto detect base
-assert_eq(int("123", 0), 123)
-assert_eq(int("+123", 0), +123)
-assert_eq(int("-123", 0), -123)
-assert_eq(int("0x12", 0), 18)
-assert_eq(int("+0x12", 0), +18)
-assert_eq(int("-0x12", 0), -18)
-assert_eq(int("0o123", 0), 83)
-assert_eq(int("+0o123", 0), +83)
-assert_eq(int("-0o123", 0), -83)
-# The go implementation doesn't support the following, why?
-assert_eq(int("0123", 0), 123)
-assert_eq(int("-0123", 0), -123)
-
-# comparisons
-# TODO(adonovan): test: < > == != etc
-assert_(-2 < -1)
-assert_(-1 < 0)
-assert_(0 < 1)
-assert_(1 < 2)
-assert_(2 >= 2)
-assert_(2 > 1)
-assert_(1 >= 1)
-assert_(1 > 0)
-assert_(0 >= 0)
-assert_(0 > -1)
-assert_(-1 >= -1)
-assert_(-1 > -2)
-
-# precision
-maxint64 = 9223372036854775807 # = 2^63-1
-minint64 = -maxint64 - 1 # = -2^63
-assert_eq(str(maxint64), "9223372036854775807")
-# Overflow
-# assert_eq(str(maxint64+1), "9223372036854775808")
-assert_eq(str(minint64), "-9223372036854775808")
-# Overflow
-# assert_eq(str(minint64-1), "-9223372036854775809")
-# Overflow
-# assert_eq(str(minint64 * minint64), "85070591730234615865843651857942052864")
-
-# string formatting
-assert_eq("%o %x %d" % (0o755, 0xDEADBEEF, 42), "755 deadbeef 42")
-nums = [-95, -1, 0, +1, +95]
-assert_eq(' '.join(["%o" % x for x in nums]), "-137 -1 0 1 137")
-assert_eq(' '.join(["%d" % x for x in nums]), "-95 -1 0 1 95")
-assert_eq(' '.join(["%i" % x for x in nums]), "-95 -1 0 1 95")
-assert_eq(' '.join(["%x" % x for x in nums]), "-5f -1 0 1 5f")
-assert_eq(' '.join(["%X" % x for x in nums]), "-5F -1 0 1 5F")
-assert_eq("%o %x %d" % (123, 123, 123), "173 7b 123")
-assert_eq("%d" % True, "1")
diff --git a/starlark-test/tests/go-testcases/list.sky b/starlark-test/tests/go-testcases/list.sky
deleted file mode 100644
index fac359ad..00000000
--- a/starlark-test/tests/go-testcases/list.sky
+++ /dev/null
@@ -1,253 +0,0 @@
-# Tests of Skylark 'list'
-
-# literals
-assert_eq([], [])
-assert_eq([1], [1])
-assert_eq([1,], [1])
-assert_eq([1, 2], [1, 2])
-assert_([1, 2, 3] != [1, 2, 4])
-
-# truth
-assert_([0])
-assert_(not [])
-
-# indexing, x[i]
----
-abc = list("abc".split_codepoints())
-abc[-4] ### out of bound
----
-abc = list("abc".split_codepoints())
-assert_eq(abc[-3], "a")
-assert_eq(abc[-2], "b") -assert_eq(abc[-1], "c") -assert_eq(abc[0], "a") -assert_eq(abc[1], "b") -assert_eq(abc[2], "c") ---- -abc = list("abc".split_codepoints()) -abc[3] ### out of bound ---- -# x[i] = ... -x3 = [0, 1, 2] -x3[1] = 2 -x3[2] += 3 -assert_eq(x3, [0, 2, 5]) -def f2(): x3[3] = 4 -f2() ### Out of bound ---- - -# list + list -assert_eq([1, 2, 3] + [3, 4, 5], [1, 2, 3, 3, 4, 5]) ---- -[1, 2] + (3, 4) ### Type of parameters mismatch ---- -(1, 2) + [3, 4] ### Type of parameters mismatch ---- - -# list * int, int * list -abc = list("abc".split_codepoints()) -assert_eq(abc * 0, []) -assert_eq(abc * -1, []) -assert_eq(abc * 1, abc) -assert_eq(abc * 3, ["a", "b", "c", "a", "b", "c", "a", "b", "c"]) -assert_eq(0 * abc, []) -assert_eq(-1 * abc, []) -assert_eq(1 * abc, abc) -assert_eq(3 * abc, ["a", "b", "c", "a", "b", "c", "a", "b", "c"]) - -# list comprehensions -assert_eq([2 * x for x in [1, 2, 3]], [2, 4, 6]) -assert_eq([2 * x for x in [1, 2, 3] if x > 1], [4, 6]) -assert_eq([(x, y) for x in [1, 2] for y in [3, 4]], - [(1, 3), (1, 4), (2, 3), (2, 4)]) -assert_eq([(x, y) for x in [1, 2] if x == 2 for y in [3, 4]],[(2, 3), (2, 4)]) -assert_eq([2 * x for x in (1, 2, 3)], [2, 4, 6]) -assert_eq([x for x in "abc".split_codepoints()], ["a", "b", "c"]) -assert_eq([x for x in {"a": 1, "b": 2}], ["a", "b"]) -assert_eq([(y, x) for x, y in {1: 2, 3: 4}.items()], [(2, 1), (4, 3)]) -# corner cases of parsing: -assert_eq([x for x in range(12) if x%2 == 0 if x%3 == 0], [0, 6]) - -# list function -assert_eq(list(), []) -assert_eq(list("ab".split_codepoints()), ["a", "b"]) - -# A list comprehension defines a separate lexical block, -# whether at top-level... -a = [1, 2] -b = [a for a in [3, 4]] -assert_eq(a, [1, 2]) -assert_eq(b, [3, 4]) -# ...or local to a function. -def listcompblock(): - c = [1, 2] - d = [c for c in [3, 4]] - assert_eq(c, [1, 2]) - assert_eq(d, [3, 4]) -listcompblock() - -# list.pop -x4 = [1,2,3,4,5] -assert_eq(x4.pop(), 5) -assert_eq(x4, [1,2,3,4]) -assert_eq(x4.pop(1), 2) -assert_eq(x4, [1,3,4]) -assert_eq(x4.pop(0), 1) -assert_eq(x4, [3,4]) - -# TODO(adonovan): test uses of list as sequence -# (for loop, comprehension, library functions). - -# x += y for lists is equivalent to x.extend(y). -# y may be a sequence. -# TODO: Test that side-effects of 'x' occur only once. -def list_extend(): - a = [1, 2, 3] - b = a - a = a + [4] # creates a new list - assert_eq(a, [1, 2, 3, 4]) - assert_eq(b, [1, 2, 3]) # b is unchanged - - a = [1, 2, 3] - b = a - # += <=> a = a + [4], hence creating a new list. The go implem does it differently... - a += [4] # create a new list - assert_eq(a, [1, 2, 3, 4]) - assert_eq(b, [1, 2, 3]) # b is unchanged - - a = [1, 2, 3] - b = a - a.extend([4]) # updates existing list - assert_eq(a, [1, 2, 3, 4]) - assert_eq(b, [1, 2, 3, 4]) # alias observes the change -list_extend() - ---- -# Unlike list.extend(iterable), list += iterable makes its LHS name local. 
-a_list = [] -def f4(): - a_list += [1] ### Local variable referenced before assignment -f4() ---- -# list += -def f5(): - x = [] - x += 1 -f5() ### Type of parameters mismatch ---- - -# append -x5 = [1, 2, 3] -x5.append(4) -x5.append("abc") -assert_eq(x5, [1, 2, 3, 4, "abc"]) - -# extend -x5a = [1, 2, 3] -x5a.extend("abc".split_codepoints()) # string -x5a.extend((True, False)) # tuple -assert_eq(x5a, [1, 2, 3, "a", "b", "c", True, False]) - -# list.insert -def insert_at(index): - x = list(range(3)) - x.insert(index, 42) - return x -assert_eq(insert_at(-99), [42, 0, 1, 2]) -assert_eq(insert_at(-2), [0, 42, 1, 2]) -assert_eq(insert_at(-1), [0, 1, 42, 2]) -assert_eq(insert_at( 0), [42, 0, 1, 2]) -assert_eq(insert_at( 1), [0, 42, 1, 2]) -assert_eq(insert_at( 2), [0, 1, 42, 2]) -assert_eq(insert_at( 3), [0, 1, 2, 42]) -assert_eq(insert_at( 4), [0, 1, 2, 42]) - -# list.remove ---- -def remove(v): - x = [3, 1, 4, 1] - x.remove(v) - return x -assert_eq(remove(3), [1, 4, 1]) -assert_eq(remove(1), [3, 4, 1]) -assert_eq(remove(4), [3, 1, 1]) -[3, 1, 4, 1].remove(42) ### Not found ---- - -# list.index -bananas = list("bananas".split_codepoints()) -assert_eq(bananas.index('a'), 1) # bAnanas -bananas.index('d') ### not found ---- -bananas = list("bananas".split_codepoints()) -# start -assert_eq(bananas.index('a', -1000), 1) # bAnanas -assert_eq(bananas.index('a', 0), 1) # bAnanas -assert_eq(bananas.index('a', 1), 1) # bAnanas -assert_eq(bananas.index('a', 2), 3) # banAnas -assert_eq(bananas.index('a', 3), 3) # banAnas -assert_eq(bananas.index('b', 0), 0) # Bananas -assert_eq(bananas.index('n', -3), 4) # banaNas -assert_eq(bananas.index('s', -2), 6) # bananaS -# start, end -assert_eq(bananas.index('s', -1000, 7), 6) # bananaS ---- -bananas = list("bananas".split_codepoints()) -bananas.index('n', -2) ### not found ---- -bananas = list("bananas".split_codepoints()) -bananas.index('b', 1) ### not found ---- -bananas = list("bananas".split_codepoints()) -bananas.index('s', -1000, 6) ### not found ---- -bananas = list("bananas".split_codepoints()) -bananas.index('d', -1000, 1000) ### not found ---- - -# slicing, x[i:j:k] -bananas = list("bananas".split_codepoints()) -assert_eq(bananas[6::-2], list("snnb".split_codepoints())) -assert_eq(bananas[5::-2], list("aaa".split_codepoints())) -assert_eq(bananas[4::-2], list("nnb".split_codepoints())) -assert_eq(bananas[99::-2], list("snnb".split_codepoints())) -assert_eq(bananas[100::-2], list("snnb".split_codepoints())) -# TODO(adonovan): many more tests ---- -# iterator invalidation -def iterator1(): - list = [0, 1, 2] - for x in list: - list[x] = 2 * x - return list -# Updating elements of a list while iterating is allowed in the go -# implementation but following the specification -# (https://github.com/bazelbuild/starlark/blob/815aed90b552fa70adca4dc18d73082fae83b538/design.md#no-mutation-during-iteration) -# a immutable list element like a number does not match the definition of -# "deep content" so it is disallowed in our implementation. 
-iterator1() ### Cannot mutate an iterable while iterating ---- -def iterator2(): - list = [0, 1, 2] - for x in list: - list.remove(x) -iterator2() ### Cannot mutate an iterable while iterating ---- -def iterator3(): - list = [0, 1, 2] - for x in list: - list.append(3) -iterator3() ### Cannot mutate an iterable while iterating ---- -def iterator4(): - list = [0, 1, 2] - for x in list: - list.extend([3, 4]) -iterator4() ### Cannot mutate an iterable while iterating ---- -def fff(x): - x.append(4) -def iterator5(): - list = [1, 2, 3] - _ = [fff(list) for x in list] -iterator5() ### Cannot mutate an iterable while iterating diff --git a/starlark-test/tests/go-testcases/misc.sky b/starlark-test/tests/go-testcases/misc.sky deleted file mode 100644 index 5ef706d1..00000000 --- a/starlark-test/tests/go-testcases/misc.sky +++ /dev/null @@ -1,89 +0,0 @@ -# Miscellaneous tests of Skylark evaluation. -# This is a "chunked" file: each "---" effectively starts a new file. - -# TODO(adonovan): move these tests into more appropriate files. -# TODO(adonovan): test coverage: -# - stmts: pass; if cond fail; += and failures; -# for x fail; for x not iterable; for can't assign; for -# error in loop body -# - subassign fail -# - x[i]=x fail in both operands; frozen x; list index not int; boundscheck -# - x.f = ... -# - failure in list expr [...]; tuple expr; dict expr (bad key) -# - cond expr semantics; failures -# - x[i] failures in both args; dict and iterator key and range checks; -# unhandled operand types -# - +: list/list, int/int, string/string, tuple+tuple, dict/dict; -# - * and ** calls: various errors -# - call of non-function -# - slice x[ijk] -# - comprehension: unhashable dict key; -# scope of vars (local and toplevel); noniterable for clause -# - unknown unary op -# - ordering of values -# - freeze, transitivity of its effect. -# - add an application-defined type to the environment so we can test it. -# - even more: -# -# eval -# pass statement -# assign to tuple l-value -- illegal -# assign to list l-value -- illegal -# assign to field -# tuple + tuple -# call with *args, **kwargs -# slice with step -# tuple slice -# interpolate with %c, %% - -def lam(): None - -# Ordered comparisons require values of the same type. -None < False ### compare not supported for types NoneType and bool ---- -False < list ### compare not supported for types bool and function ---- -list < {} ### compare not supported for types function and dict ---- -0 < [] ### compare not supported for types int and list ---- -[] < "" ### compare not supported for types list and str ---- -"" < () ### compare not supported for types string and tuple - ---- -# cyclic data structures - -cyclic = [1, 2, 3] # list cycle -cyclic[1] = cyclic ### Unsupported recursive data structure ---- -cyclic2 = [1, 2, 3] -cyclic2[1] = cyclic2 ### Unsupported recursive data structure ---- - -cyclic3 = [1, [2, 3]] # list-list cycle -cyclic3[1][0] = cyclic3 ### Unsupported recursive data structure ---- -cyclic4 = {"x": 1} -cyclic4["x"] = cyclic4 ### Unsupported recursive data structure ---- -cyclic5 = [0, {"x": 1}] # list-dict cycle -cyclic5[1]["x"] = cyclic5 ### Unsupported recursive data structure ---- -cyclic6 = [0, {"x": 1}] -cyclic6[1]["x"] = cyclic6 ### Unsupported recursive data structure ---- -# was a parse error: -assert_eq(("ababab"[2:]).replace("b", "c"), "acac") -assert_eq("ababab"[2:].replace("b", "c"), "acac") - -# test parsing of line continuation, at toplevel and in expression. 
-three = 1 + \
-  2
-assert_eq(1 + \
-  2, three)
-
----
-# A regression test for error position information.
-
-_ = {}.get(1, default=2) ### Extraneous parameter
diff --git a/starlark-test/tests/go-testcases/string.sky b/starlark-test/tests/go-testcases/string.sky
deleted file mode 100644
index 948512dd..00000000
--- a/starlark-test/tests/go-testcases/string.sky
+++ /dev/null
@@ -1,392 +0,0 @@
-# Tests of Skylark 'string'
-
-# raw string literals:
-assert_eq(r'a\bc', "a\\bc")
-
-# truth
-assert_("abc")
-assert_("\0")
-assert_(not "")
-
-# str + str
-assert_eq("a"+"b"+"c", "abc")
-
-# str * int, int * str
-assert_eq("abc" * 0, "")
-assert_eq("abc" * -1, "")
-assert_eq("abc" * 1, "abc")
-assert_eq("abc" * 5, "abcabcabcabcabc")
-assert_eq(0 * "abc", "")
-assert_eq(-1 * "abc", "")
-assert_eq(1 * "abc", "abc")
-assert_eq(5 * "abc", "abcabcabcabcabc")
-
-# len
-# Note that the Go implementation returns a number of bytes and Python 3 a number of chars...
-assert_eq(len("Hello, 世界!"), 10) # The Go implementation returns 14 (bytes), which is not consistent with Python 3.
-assert_eq(len("𐐷"), 1) # U+10437 has a 4-byte UTF-8 encoding (and a 2-code UTF-16 encoding)
-
-# chr & ord
-assert_eq(chr(65), "A") # 1-byte UTF-8 encoding
-assert_eq(chr(1049), "Й") # 2-byte UTF-8 encoding
-assert_eq(chr(0x1F63F), "😿") # 4-byte UTF-8 encoding
----
-chr(-1) ### not a valid UTF-8 codepoint
----
-chr(0x110000) ### not a valid UTF-8 codepoint
----
-assert_eq(ord("A"), 65)
-assert_eq(ord("Й"), 1049)
-assert_eq(ord("😿"), 0x1F63F)
-assert_eq(ord("Й"), 1049)
----
-ord("abc") ### not a one character string
----
-ord("") ### not a one character string
----
-ord("😿"[1:]) ### not a one character string
----
-
-# string.codepoints
-assert_eq(type("abcЙ😿".codepoints()), "list")
-assert_eq(str("abcЙ😿".codepoints()), '[97, 98, 99, 1049, 128575]')
-assert_eq(list("abcЙ😿".codepoints()), [97, 98, 99, 1049, 128575])
-assert_eq(list("".codepoints()), [])
-
-# string.split_codepoints
-assert_eq(type("abcЙ😿".split_codepoints()), "list")
-assert_eq(str("abcЙ😿".split_codepoints()), '["a", "b", "c", "Й", "😿"]')
-assert_eq(list("abcЙ😿".split_codepoints()), ["a", "b", "c", "Й", "😿"])
-assert_eq(list("".split_codepoints()), [])
-
-# string.elems
-assert_eq(type("abcЙ😿".elems()), "list")
-assert_eq(str("abcЙ😿".elems()), '[97, 98, 99, 208, 153, 240, 159, 152, 191]')
-assert_eq(list("abcЙ😿".elems()), [97, 98, 99, 208, 153, 240, 159, 152, 191])
-assert_eq(list("".elems()), [])
-
-# indexing, x[i]
-assert_eq("Hello, 世界!"[0], "H")
-assert_eq("Hello, 世界!"[7], "世")
-assert_eq("Hello, 世界!"[9], "!")
----
-"abc"[-4] ### out of bound
----
-assert_eq("abc"[-3], "a")
-assert_eq("abc"[-2], "b")
-assert_eq("abc"[-1], "c")
-assert_eq("abc"[0], "a")
-assert_eq("abc"[1], "b")
-assert_eq("abc"[2], "c")
----
-"abc"[4] ### out of bound
----
-
-# x[i] = ...
-x2 = "abc" -def f(): x2[1] = 'B' -f() ### [] = not supported for types string and int ---- - -# slicing, x[i:j] -assert_eq("abc"[:], "abc") -assert_eq("abc"[-4:], "abc") -assert_eq("abc"[-3:], "abc") -assert_eq("abc"[-2:], "bc") -assert_eq("abc"[-1:], "c") -assert_eq("abc"[0:], "abc") -assert_eq("abc"[1:], "bc") -assert_eq("abc"[2:], "c") -assert_eq("abc"[3:], "") -assert_eq("abc"[4:], "") -assert_eq("abc"[:-4], "") -assert_eq("abc"[:-3], "") -assert_eq("abc"[:-2], "a") -assert_eq("abc"[:-1], "ab") -assert_eq("abc"[:0], "") -assert_eq("abc"[:1], "a") -assert_eq("abc"[:2], "ab") -assert_eq("abc"[:3], "abc") -assert_eq("abc"[:4], "abc") -assert_eq("abc"[1:2], "b") -assert_eq("abc"[2:1], "") -# non-unit strides -assert_eq("abcd"[0:4:1], "abcd") -assert_eq("abcd"[::2], "ac") -assert_eq("abcd"[1::2], "bd") -assert_eq("abcd"[4:0:-1], "dcb") -assert_eq("banana"[7::-2], "aaa") -assert_eq("banana"[6::-2], "aaa") -assert_eq("banana"[5::-2], "aaa") -assert_eq("banana"[4::-2], "nnb") -assert_eq("banana"[::-1], "ananab") -assert_eq("banana"[None:None:-2], "aaa") ---- -"banana"[:"":] ### Type of parameters mismatch ---- -"banana"[:"":True] ### Type of parameters mismatch ---- - -# in, not in -assert_("oo" in "food") -assert_("ox" not in "food") -assert_("" in "food") -assert_("" in "") ---- -1 in "" ### Type of parameters mismatch ---- -"" in 1 ### in not supported for types string and int ---- - -# ==, != -assert_eq("hello", "he"+"llo") -assert_("hello" != "Hello") - -# TODO(adonovan): ordered comparisons - -# string % tuple formatting -assert_eq("A %d %x Z" % (123, 456), "A 123 1c8 Z") -assert_eq("A %(foo)d %(bar)s Z" % {"foo": 123, "bar":"hi"}, "A 123 hi Z") -assert_eq("%s %r" % ("hi", "hi"), 'hi "hi"') -assert_eq("%%d %d" % 1, "%d 1") ---- -"%d %d" % 1 ### The type 'int' is not iterable ---- -"%d %d" % (1, 2, 3) ### too many arguments for format string ---- -# %c -assert_eq("%c" % 65, "A") -assert_eq("%c" % 0x3b1, "α") -assert_eq("%c" % "A", "A") -assert_eq("%c" % "α", "α") ---- -"%c" % "abc" ### requires a single-character string ---- -"%c" % 10000000 ### Invalid codepoint ---- -"%c" % -1 ### Invalid codepoint ---- -# TODO(adonovan): more tests - -# str.format -assert_eq("a{}b".format(123), "a123b") -assert_eq("a{}b{}c{}d{}".format(1, 2, 3, 4), "a1b2c3d4") -assert_eq("a{{b".format(), "a{b") -assert_eq("a}}b".format(), "a}b") -assert_eq("a{{b}}c".format(), "a{b}c") -assert_eq("a{x}b{y}c{}".format(1, x=2, y=3), "a2b3c1") ---- -"a{z}b".format(x=1) ### key not found ---- -"{-1}".format(1) ### key not found ---- -"{-0}".format(1) ### key not found ---- -'{0,1} and {1}'.format(1, 2) ### invalid character ',' ---- -"a{123}b".format() ### index out of bound ---- -"a{}b{}c".format(1) ### Not enough parameters ---- -assert_eq("a{010}b".format(0,1,2,3,4,5,6,7,8,9,10), "a10b") # index is decimal ---- -"a{}b{1}c".format(1, 2) ### Mixed manual and automatic field numbering ---- -assert_eq("a{!s}c".format("b"), "abc") -assert_eq("a{!r}c".format("b"), r'a"b"c') -assert_eq("a{x!r}c".format(x='b'), r'a"b"c') ---- -"{x!}".format(x=1) ### Invalid format string specifier ---- -"{x!:}".format(x=1) ### Invalid format string specifier ---- -'{a.b}'.format(1) ### Invalid character ---- -'{a[0]}'.format(1) ### Invalid character ---- -'{ {} }'.format(1) ### unmatched '{' ---- -'{{}'.format(1) ### standalone '}' ---- -'{}}'.format(1) ### standalone '}' ---- -'}}{'.format(1) ### unmatched '{' ---- -'}{{'.format(1) ### standalone '}' ---- - -# str.split, str.rsplit -assert_eq("a.b.c.d".split("."), ["a", "b", "c", "d"]) 
-assert_eq("a.b.c.d".rsplit("."), ["a", "b", "c", "d"]) -assert_eq("a.b.c.d".split(".", -1), ["a", "b", "c", "d"]) -assert_eq("a.b.c.d".rsplit(".", -1), ["a", "b", "c", "d"]) -assert_eq("a.b.c.d".split(".", 0), ["a.b.c.d"]) -assert_eq("a.b.c.d".rsplit(".", 0), ["a.b.c.d"]) -assert_eq("a.b.c.d".split(".", 1), ["a", "b.c.d"]) -assert_eq("a.b.c.d".rsplit(".", 1), ["a.b.c", "d"]) -assert_eq("a.b.c.d".split(".", 2), ["a", "b", "c.d"]) -assert_eq("a.b.c.d".rsplit(".", 2), ["a.b", "c", "d"]) - -# {,r}split on white space: -assert_eq(" a bc\n def \t ghi".split(), ["a", "bc", "def", "ghi"]) -assert_eq(" a bc\n def \t ghi".split(None), ["a", "bc", "def", "ghi"]) -assert_eq(" a bc\n def \t ghi".split(None, 0), ["a bc\n def \t ghi"]) -assert_eq(" a bc\n def \t ghi".rsplit(None, 0), [" a bc\n def \t ghi"]) -assert_eq(" a bc\n def \t ghi".split(None, 1), ["a", "bc\n def \t ghi"]) -assert_eq(" a bc\n def \t ghi".rsplit(None, 1), [" a bc\n def", "ghi"]) -assert_eq(" a bc\n def \t ghi".split(None, 2), ["a", "bc", "def \t ghi"]) -assert_eq(" a bc\n def \t ghi".rsplit(None, 2), [" a bc", "def", "ghi"]) -assert_eq(" a bc\n def \t ghi".split(None, 3), ["a", "bc", "def", "ghi"]) -assert_eq(" a bc\n def \t ghi".rsplit(None, 3), [" a", "bc", "def", "ghi"]) -assert_eq(" a bc\n def \t ghi".split(None, 4), ["a", "bc", "def", "ghi"]) -assert_eq(" a bc\n def \t ghi".rsplit(None, 4), ["a", "bc", "def", "ghi"]) -assert_eq(" a bc\n def \t ghi".rsplit(None, 5), ["a", "bc", "def", "ghi"]) - -assert_eq(" a bc\n def \t ghi ".split(None, 0), ["a bc\n def \t ghi "]) -assert_eq(" a bc\n def \t ghi ".rsplit(None, 0), [" a bc\n def \t ghi"]) -assert_eq(" a bc\n def \t ghi ".split(None, 1), ["a", "bc\n def \t ghi "]) -assert_eq(" a bc\n def \t ghi ".rsplit(None, 1), [" a bc\n def", "ghi"]) - -# Observe the algorithmic difference when splitting on spaces versus other delimiters. -assert_eq('--aa--bb--cc--'.split('-', 0), ['--aa--bb--cc--']) # contrast this -assert_eq(' aa bb cc '.split(None, 0), ['aa bb cc ']) # with this -assert_eq('--aa--bb--cc--'.rsplit('-', 0), ['--aa--bb--cc--']) # ditto this -assert_eq(' aa bb cc '.rsplit(None, 0), [' aa bb cc']) # and this -# -assert_eq('--aa--bb--cc--'.split('-', 1), ['', '-aa--bb--cc--']) -assert_eq('--aa--bb--cc--'.rsplit('-', 1), ['--aa--bb--cc-', '']) -assert_eq(' aa bb cc '.split(None, 1), ['aa', 'bb cc ']) -assert_eq(' aa bb cc '.rsplit(None, 1), [' aa bb', 'cc']) -# -assert_eq('--aa--bb--cc--'.split('-', -1), ['', '', 'aa', '', 'bb', '', 'cc', '', '']) -assert_eq('--aa--bb--cc--'.rsplit('-', -1), ['', '', 'aa', '', 'bb', '', 'cc', '', '']) -assert_eq(' aa bb cc '.split(None, -1), ['aa', 'bb', 'cc']) -assert_eq(' aa bb cc '.rsplit(None, -1), ['aa', 'bb', 'cc']) - -assert_eq("localhost:80".rsplit(":", 1)[-1], "80") - -# str.splitlines -assert_eq("\nabc\ndef".splitlines(), ["", "abc", "def"]) -assert_eq("\nabc\ndef\n".splitlines(), ["", "abc", "def"]) -assert_eq("\nabc\ndef".splitlines(True), ["\n", "abc\n", "def"]) -assert_eq("\nabc\ndef\n".splitlines(True), ["\n", "abc\n", "def\n"]) - -# str.{,l,r}strip -assert_eq(" \tfoo\n ".strip(), "foo") -assert_eq(" \tfoo\n ".lstrip(), "foo\n ") -assert_eq(" \tfoo\n ".rstrip(), " \tfoo") -# This syntax is undocumented... 
-# assert_eq(" \tfoo\n ".strip(""), "foo") -# assert_eq(" \tfoo\n ".lstrip(""), "foo\n ") -# assert_eq(" \tfoo\n ".rstrip(""), " \tfoo") -# assert_eq("blah.h".strip("b.h"), "la") -# assert_eq("blah.h".lstrip("b.h"), "lah.h") -# assert_eq("blah.h".rstrip("b.h"), "bla") - -# str.count -assert_eq("banana".count("a"), 3) -assert_eq("banana".count("a", 2), 2) -assert_eq("banana".count("a", -4, -2), 1) -assert_eq("banana".count("a", 1, 4), 2) -assert_eq("banana".count("a", 0, -100), 0) - -# str.{starts,ends}with -assert_("foo".endswith("oo")) -assert_(not "foo".endswith("x")) -assert_("foo".startswith("fo")) -assert_(not "foo".startswith("x")) ---- -"foo".startswith(1) ### type int while expected string ---- - -# str.replace -assert_eq("banana".replace("a", "o", 1), "bonana") -assert_eq("banana".replace("a", "o"), "bonono") -# TODO(adonovan): more tests - -# str.{,r}find -assert_eq("foofoo".find("oo"), 1) -assert_eq("foofoo".find("ox"), -1) -assert_eq("foofoo".find("oo", 2), 4) -assert_eq("foofoo".rfind("oo"), 4) -assert_eq("foofoo".rfind("ox"), -1) -assert_eq("foofoo".rfind("oo", 1, 4), 1) -assert_eq("foofoo".find(""), 0) -assert_eq("foofoo".rfind(""), 6) - -# str.{,r}partition -assert_eq("foo/bar/wiz".partition("/"), ("foo", "/", "bar/wiz")) -assert_eq("foo/bar/wiz".rpartition("/"), ("foo/bar", "/", "wiz")) -assert_eq("foo/bar/wiz".partition("."), ("foo/bar/wiz", "", "")) -assert_eq("foo/bar/wiz".rpartition("."), ("", "", "foo/bar/wiz")) ---- -"foo/bar/wiz".partition("") ### empty separator ---- -"foo/bar/wiz".rpartition("") ### empty separator ---- - -assert_eq('?'.join(["foo", "a/b/c.go".rpartition("/")[0]]), 'foo?a/b') - -# str.is{alpha,...} -def test_predicates(): - predicates = ["alnum", "alpha", "digit", "lower", "space", "title", "upper"] - table = { - "Hello, World!": "title", - "hello, world!": "lower", - "base64": "alnum lower", - "HAL-9000": "upper", - "Catch-22": "title", - "": "", - "\n\t\r": "space", - "abc": "alnum alpha lower", - "ABC": "alnum alpha upper", - "123": "alnum digit", - } - for str, want in table.items(): - got = ' '.join([name for name in predicates if getattr(str, "is"+name)()]) - if got != want: - fail("%r matched [%s], want [%s]" % (str, want, got)) -test_predicates() - -# Strings are not iterable. -# ok -assert_eq(len("abc"), 3) # len -assert_("a" in "abc") # str in str -assert_eq("abc"[1], "b") # indexing ---- -def args(*args): return args -args(*"abc") ### *args is not iterable ---- -list("abc") ### Not iterable ---- -tuple("abc") ### Not iterable ---- -enumerate("ab") ### Not iterable ---- -sorted("abc") ### Not iterable ---- -[].extend("bc") ### type ---- -",".join("abc") ### Not iterable ---- -dict(["ab"]) ### Non-pair element ---- -# The Java implementation does not correctly reject the following cases: -# (See Google Issue b/34385336) -# not ok -def for_string(): - for x in "abc": - pass -for_string() ### Not iterable ---- -[x for x in "abc"] ### Not iterable ---- -all("abc") ### Not iterable ---- -any("abc") ### Not iterable ---- -reversed("abc") ### Not iterable ---- -zip("ab", "cd") ### Not iterable - -# TODO(adonovan): tests for: {,r}index join {capitalize,lower,title,upper} diff --git a/starlark-test/tests/go-testcases/struct.sky b/starlark-test/tests/go-testcases/struct.sky deleted file mode 100644 index 63501de4..00000000 --- a/starlark-test/tests/go-testcases/struct.sky +++ /dev/null @@ -1,20 +0,0 @@ -# Tests of Starlark 'struct' extension. -# This is not a standard feature and the Go and Starlark APIs may yet change. 
- -assert_(str(struct), "") - -# struct is a constructor for "unbranded" structs. -s = struct(host = "localhost", port = 80) -assert_(s, s) -assert_(s, struct(host = "localhost", port = 80)) -assert_(s != struct(host = "localhost", port = 81)) -assert_(type(s), "struct") -assert_(str(s), 'struct(host = "localhost", port = 80)') -assert_(s.host, "localhost") -assert_(s.port, 80) -s.protocol ### protocol ---- -s = struct(host = "localhost", port = 80) -assert_(dir(s), ["host", "port"]) - -# The rest are tests for `gensym` which is not implemented diff --git a/starlark-test/tests/go-testcases/tuple.sky b/starlark-test/tests/go-testcases/tuple.sky deleted file mode 100644 index f30011f7..00000000 --- a/starlark-test/tests/go-testcases/tuple.sky +++ /dev/null @@ -1,53 +0,0 @@ -# Tests of Skylark 'tuple' - -# literal -assert_eq((), ()) -assert_eq((1), 1) -assert_eq((1,), (1,)) -assert_((1) != (1,)) -assert_eq((1, 2), (1, 2)) -assert_eq((1, 2, 3, 4, 5), (1, 2, 3, 4, 5)) -assert_((1, 2, 3) != (1, 2, 4)) - -# truth -assert_((False,)) -assert_((False, False)) -assert_(not ()) - -# indexing, x[i] -assert_eq(("a", "b")[0], "a") -assert_eq(("a", "b")[1], "b") - -# slicing, x[i:j] -assert_eq("abcd"[0:4:1], "abcd") -assert_eq("abcd"[::2], "ac") -assert_eq("abcd"[1::2], "bd") -assert_eq("abcd"[4:0:-1], "dcb") -banana = tuple("banana".split_codepoints()) -assert_eq(banana[7::-2], tuple("aaa".split_codepoints())) -assert_eq(banana[6::-2], tuple("aaa".split_codepoints())) -assert_eq(banana[5::-2], tuple("aaa".split_codepoints())) -assert_eq(banana[4::-2], tuple("nnb".split_codepoints())) - -# tuple -assert_eq(tuple(), ()) -assert_eq(tuple("abc".split_codepoints()), ("a", "b", "c")) -assert_eq(tuple(["a", "b", "c"]), ("a", "b", "c")) -assert_eq(tuple([1]), (1,)) ---- -tuple(1) ### type 'int' is not iterable ---- - -# tuple * int, int * tuple -abc = tuple("abc".split_codepoints()) -assert_eq(abc * 0, ()) -assert_eq(abc * -1, ()) -assert_eq(abc * 1, abc) -assert_eq(abc * 3, ("a", "b", "c", "a", "b", "c", "a", "b", "c")) -assert_eq(0 * abc, ()) -assert_eq(-1 * abc, ()) -assert_eq(1 * abc, abc) -assert_eq(3 * abc, ("a", "b", "c", "a", "b", "c", "a", "b", "c")) - -# TODO(adonovan): test use of tuple as sequence -# (for loop, comprehension, library functions). diff --git a/starlark-test/tests/go_conformance_tests.rs b/starlark-test/tests/go_conformance_tests.rs deleted file mode 100644 index 42fc8e07..00000000 --- a/starlark-test/tests/go_conformance_tests.rs +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -use starlark_test::do_conformance_test; - -include!(concat!(env!("OUT_DIR"), "/tests/go-testcases.rs")); diff --git a/starlark-test/tests/java-testcases/README.md b/starlark-test/tests/java-testcases/README.md deleted file mode 100644 index a89deaae..00000000 --- a/starlark-test/tests/java-testcases/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# Conformance test cases - -Those files are coming from the Bazel repository at https://github.com/bazelbuild/bazel in the -[`src/test/skylark`](https://github.com/bazelbuild/bazel/tree/9b96c0b37da7dfbb6f590c3da65413d921b83eb5/src/test/skylark) -directory. diff --git a/starlark-test/tests/java-testcases/all_any.sky b/starlark-test/tests/java-testcases/all_any.sky deleted file mode 100644 index 3194b42a..00000000 --- a/starlark-test/tests/java-testcases/all_any.sky +++ /dev/null @@ -1,46 +0,0 @@ -# All with empty value -assert_eq(all(''.split_codepoints()), True) -assert_eq(all([]), True) - -# All with list -assert_eq(all('test'.split_codepoints()), True) -assert_eq(all([False]), False) -assert_eq(all([True, False]), False) -assert_eq(all([False, False]), False) -assert_eq(all([False, True]), False) -assert_eq(all(['', True]), False) -assert_eq(all([0, True]), False) -assert_eq(all([[], True]), False) -assert_eq(all([True, 't', 1]), True) - -# All with dict -assert_eq(all({1 : None}), True) -assert_eq(all({None : 1}), False) - -# Any with empty value -assert_eq(any(''.split_codepoints()), False) -assert_eq(any([]), False) - -# Any with list -assert_eq(any('test'.split_codepoints()), True) -assert_eq(any([False]), False) -assert_eq(any([0]), False) -assert_eq(any(['']), False) -assert_eq(any([[]]), False) -assert_eq(any([True, False]), True) -assert_eq(any([False, False]), False) -assert_eq(any([False, '', 0]), False) -assert_eq(any([False, '', 42]), True) - -# Any with dict -assert_eq(any({1 : None, '' : None}), True) -assert_eq(any({None : 1, '' : 2}), False) - ---- -all(None) ### type 'NoneType' is not iterable ---- -any(None) ### type 'NoneType' is not iterable ---- -any(1) ### type 'int' is not iterable ---- -all(1) ### type 'int' is not iterable diff --git a/starlark-test/tests/java-testcases/and_or_not.sky b/starlark-test/tests/java-testcases/and_or_not.sky deleted file mode 100644 index aa4ab784..00000000 --- a/starlark-test/tests/java-testcases/and_or_not.sky +++ /dev/null @@ -1,42 +0,0 @@ -assert_eq(8 or 9, 8) -assert_eq(0 or 9, 9) -assert_eq(8 and 9, 9) -assert_eq(0 and 9, 0) - -assert_eq(1 and 2 or 3, 2) -assert_eq(0 and 2 or 3, 3) -assert_eq(1 and 0 or 3, 3) - -assert_eq(1 or 2 and 3, 1) -assert_eq(0 or 2 and 3, 3) -assert_eq(0 or 0 and 3, 0) -assert_eq(1 or 0 and 3, 1) - -assert_eq(None and 1, None) -assert_eq("" or 9, 9) -assert_eq("abc" or 9, "abc") - -# check that fail() is not evaluated -assert_eq(8 or fail("do not execute"), 8) -assert_eq(0 and fail("do not execute"), 0) - -assert_eq(not 1, False) -assert_eq(not "", True) - -assert_eq(not 0 + 0, True) -assert_eq(not 2 - 1, False) - -assert_eq(not (0 and 0), True) -assert_eq(not (1 or 0), False) - -assert_eq(0 and not 0, 0) -assert_eq(not 0 and 0, 0) - -assert_eq(1 and not 0, True) -assert_eq(not 0 or 0, True) - -assert_eq(not 1 or 0, 0) -assert_eq(not 1 or 1, 1) - -assert_eq(not [], True) -assert_eq(not {"a": 1}, False) diff --git a/starlark-test/tests/java-testcases/equality.sky b/starlark-test/tests/java-testcases/equality.sky deleted file mode 100644 index 89a6009c..00000000 --- a/starlark-test/tests/java-testcases/equality.sky +++ /dev/null @@ -1,69 +0,0 @@ -# == operator 
-assert_eq(1 == 1, True) -assert_eq(1 == 2, False) -assert_eq('hello' == 'hel' + 'lo', True) -assert_eq('hello' == 'bye', False) -assert_eq(None == None, True) -assert_eq([1, 2] == [1, 2], True) -assert_eq([1, 2] == [2, 1], False) -assert_eq({'a': 1, 'b': 2} == {'b': 2, 'a': 1}, True) -assert_eq({'a': 1, 'b': 2} == {'a': 1}, False) -assert_eq({'a': 1, 'b': 2} == {'a': 1, 'b': 2, 'c': 3}, False) -assert_eq({'a': 1, 'b': 2} == {'a': 1, 'b': 3}, False) - -# != operator -assert_eq(1 != 1, False) -assert_eq(1 != 2, True) -assert_eq('hello' != 'hel' + 'lo', False) -assert_eq('hello' != 'bye', True) -assert_eq([1, 2] != [1, 2], False) -assert_eq([1, 2] != [2, 1], True) -assert_eq({'a': 1, 'b': 2} != {'b': 2, 'a': 1}, False) -assert_eq({'a': 1, 'b': 2} != {'a': 1}, True) -assert_eq({'a': 1, 'b': 2} != {'a': 1, 'b': 2, 'c': 3}, True) -assert_eq({'a': 1, 'b': 2} != {'a': 1, 'b': 3}, True); - -# equality precedence -assert_eq(1 + 3 == 2 + 2, True) -assert_eq(not 1 == 2, True) -assert_eq(not 1 != 2, False) -assert_eq(2 and 3 == 3 or 1, True) -assert_eq(2 or 3 == 3 and 1, 2); - -# < operator -assert_eq(1 <= 1, True) -assert_eq(1 < 1, False) -assert_eq('a' <= 'b', True) -assert_eq('c' < 'a', False); - -# <= and < operators -assert_eq(1 <= 1, True) -assert_eq(1 < 1, False) -assert_eq('a' <= 'b', True) -assert_eq('c' < 'a', False); - -# >= and > operators -assert_eq(1 >= 1, True) -assert_eq(1 > 1, False) -assert_eq('a' >= 'b', False) -assert_eq('c' > 'a', True); - -# list/tuple comparison -assert_eq([] < [1], True) -assert_eq([1] < [1, 1], True) -assert_eq([1, 1] < [1, 2], True) -assert_eq([1, 2] < [1, 2, 3], True) -assert_eq([1, 2, 3] <= [1, 2, 3], True) - -assert_eq(['a', 'b'] > ['a'], True) -assert_eq(['a', 'b'] >= ['a'], True) -assert_eq(['a', 'b'] < ['a'], False) -assert_eq(['a', 'b'] <= ['a'], False) - -assert_eq(('a', 'b') > ('a', 'b'), False) -assert_eq(('a', 'b') >= ('a', 'b'), True) -assert_eq(('a', 'b') < ('a', 'b'), False) -assert_eq(('a', 'b') <= ('a', 'b'), True) - -assert_eq([[1, 1]] > [[1, 1], []], False) -assert_eq([[1, 1]] < [[1, 1], []], True) diff --git a/starlark-test/tests/java-testcases/int.sky b/starlark-test/tests/java-testcases/int.sky deleted file mode 100644 index 25624bbb..00000000 --- a/starlark-test/tests/java-testcases/int.sky +++ /dev/null @@ -1,66 +0,0 @@ -# Tests of Skylark 'int' - -# basic arithmetic -assert_eq(0 - 1, -1) -assert_eq(1 + 1, 2) -assert_eq(5 + 7, 12) -assert_eq(5 * 7, 35) -assert_eq(5 - 7, -2) - -# truth -assert_(123) -assert_(-1) -assert_(not 0) - -# comparisons -assert_(5 > 2) -assert_(2 + 1 == 3) -assert_(2 + 1 >= 3) -assert_(not (2 + 1 > 3)) -assert_(2 + 2 <= 5) -assert_(not (2 + 1 < 3)) - -# division -assert_eq(100 // 7, 14) -assert_eq(100 // -7, -15) -assert_eq(-100 // 7, -15) # NB: different from Go / Java -assert_eq(-100 // -7, 14) # NB: different from Go / Java -assert_eq(98 // 7, 14) -assert_eq(98 // -7, -14) -assert_eq(-98 // 7, -14) -assert_eq(-98 // -7, 14) - -# remainder -assert_eq(100 % 7, 2) -assert_eq(100 % -7, -5) # NB: different from Go / Java -assert_eq(-100 % 7, 5) # NB: different from Go / Java -assert_eq(-100 % -7, -2) -assert_eq(98 % 7, 0) -assert_eq(98 % -7, 0) -assert_eq(-98 % 7, 0) -assert_eq(-98 % -7, 0) - -# precedence -assert_eq(5 - 7 * 2 + 3, -6) -assert_eq(4 * 5 // 2 + 5 // 2 * 4, 18) - -# compound assignment -def compound(): - x = 1 - x += 1 - assert_eq(x, 2) - x -= 3 - assert_eq(x, -1) - x *= 10 - assert_eq(x, -10) - x /= -2 - assert_eq(x, 5) - x %= 3 - assert_eq(x, 2) - -compound() - ---- -1 // 0 ### divide by zero ---- 
-1 % 0 ### divide by zero diff --git a/starlark-test/tests/java-testcases/int_constructor.sky b/starlark-test/tests/java-testcases/int_constructor.sky deleted file mode 100644 index 5e01857b..00000000 --- a/starlark-test/tests/java-testcases/int_constructor.sky +++ /dev/null @@ -1,44 +0,0 @@ -assert_eq(int('1'), 1) -assert_eq(int('-1234'), -1234) -assert_eq(int(42), 42) -assert_eq(int(-1), -1) -assert_eq(int(True), 1) -assert_eq(int(False), 0) -assert_eq(int('11', 2), 3) -assert_eq(int('11', 9), 10) -assert_eq(int('AF', 16), 175) -assert_eq(int('11', 36), 37) -assert_eq(int('az', 36), 395) -assert_eq(int('11', 10), 11) -assert_eq(int('11', 0), 11) -assert_eq(int('0b11', 0), 3) -assert_eq(int('0B11', 2), 3) -assert_eq(int('0o11', 0), 9) -assert_eq(int('0O11', 8), 9) -assert_eq(int('0XFF', 0), 255) -assert_eq(int('0xFF', 16), 255) - ---- -int('1.5') ### invalid digit found in string ---- -int('ab') ### invalid digit found in string ---- -int(None) ### Cannot int() on type NoneType ---- -int('123', 3) ### invalid digit found in string ---- -int('FF', 15) ### invalid digit found in string ---- -int('123', -1) ### int() base must be >= 2 and <= 36 ---- -int('123', 1) ### int() base must be >= 2 and <= 36 ---- -int('123', 37) ### int() base must be >= 2 and <= 36 ---- -int('0xFF', 8) ### invalid digit found in string ---- -int(True, 2) ### int() cannot convert non-string with explicit base ---- -int(1, 2) ### int() cannot convert non-string with explicit base ---- -int(True, 10) ### int() cannot convert non-string with explicit base diff --git a/starlark-test/tests/java-testcases/list_slices.sky b/starlark-test/tests/java-testcases/list_slices.sky deleted file mode 100644 index 6e928be5..00000000 --- a/starlark-test/tests/java-testcases/list_slices.sky +++ /dev/null @@ -1,62 +0,0 @@ -# Without step -assert_eq([0, 1, 2, 3][0:-1], [0, 1, 2]) -assert_eq([0, 1, 2, 3, 4, 5][2:4], [2, 3]) -assert_eq([0, 1, 2, 3, 4, 5][-2:-1], [4]) -assert_eq([][1:2], []) -assert_eq([0, 1, 2, 3][-10:10], [0, 1, 2, 3]) - -# With step -assert_eq([1, 2, 3, 4, 5][::1], [1, 2, 3, 4, 5]) -assert_eq([1, 2, 3, 4, 5][1::1], [2, 3, 4, 5]) -assert_eq([1, 2, 3, 4, 5][:2:1], [1, 2]) -assert_eq([1, 2, 3, 4, 5][1:3:1], [2, 3]) -assert_eq([1, 2, 3, 4, 5][-4:-2:1], [2, 3]) -assert_eq([1, 2, 3, 4, 5][-10:10:1], [1, 2, 3, 4, 5]) -assert_eq([1, 2, 3, 4, 5][::42], [1]) -assert_eq([][::1], []) -assert_eq([][::-1], []) -assert_eq([1, 2, 3, 4, 5, 6, 7][::3], [1, 4, 7]) -assert_eq([1, 2, 3, 4, 5, 6, 7, 8, 9][1:7:3], [2, 5]) -assert_eq([1, 2, 3][3:1:1], []) -assert_eq([1, 2, 3][1:3:-1], []) - -# Negative step -assert_eq([1, 2, 3, 4, 5][::-1], [5, 4, 3, 2, 1]) -assert_eq([1, 2, 3, 4, 5][4::-1], [5, 4, 3, 2, 1]) -assert_eq([1, 2, 3, 4, 5][:0:-1], [5, 4, 3, 2]) -assert_eq([1, 2, 3, 4, 5][3:1:-1], [4, 3]) -assert_eq([1, 2, 3, 4, 5][::-2], [5, 3, 1]) -assert_eq([1, 2, 3, 4, 5][::-10], [5]) - -# None -assert_eq([1, 2, 3][None:None:None], [1, 2, 3]) -assert_eq([1, 2, 3][None:None], [1, 2, 3]) -assert_eq([1, 2, 3][None:2:None], [1, 2]) - -# Tuples -assert_eq(()[1:2], ()) -assert_eq(()[::1], ()) -assert_eq((0, 1, 2, 3)[0:-1], (0, 1, 2)) -assert_eq((0, 1, 2, 3, 4, 5)[2:4], (2, 3)) -assert_eq((0, 1, 2, 3)[-10:10], (0, 1, 2, 3)) -assert_eq((1, 2, 3, 4, 5)[-10:10:1], (1, 2, 3, 4, 5)) -assert_eq((1, 2, 3, 4, 5, 6, 7, 8, 9)[1:7:3], (2, 5)) -assert_eq((1, 2, 3, 4, 5)[::-1], (5, 4, 3, 2, 1)) -assert_eq((1, 2, 3, 4, 5)[3:1:-1], (4, 3)) -assert_eq((1, 2, 3, 4, 5)[::-2], (5, 3, 1)) -assert_eq((1, 2, 3, 4, 5)[::-10], (5,)) - ---- -'123'['a'::] ### Type of 
parameters mismatch ---- -'123'[:'b':] ### Type of parameters mismatch ---- -(1, 2, 3)[1::0] ### Index 0 is out of bound ---- -[1, 2, 3][::0] ### Index 0 is out of bound ---- -[1, 2, 3][1::0] ### Index 0 is out of bound ---- -[1, 2, 3][:3:0] ### Index 0 is out of bound ---- -[1, 2, 3][1:3:0] ### Index 0 is out of bound diff --git a/starlark-test/tests/java-testcases/min_max.sky b/starlark-test/tests/java-testcases/min_max.sky deleted file mode 100644 index 7996c2ff..00000000 --- a/starlark-test/tests/java-testcases/min_max.sky +++ /dev/null @@ -1,47 +0,0 @@ -# min / max - -assert_eq(min("abcdefxyz".split_codepoints()), "a") -assert_eq(min("test", "xyz"), "test") - -assert_eq(min([4, 5], [1]), [1]) -assert_eq(min([1, 2], [3]), [1, 2]) -assert_eq(min([1, 5], [1, 6], [2, 4], [0, 6]), [0, 6]) -assert_eq(min([-1]), -1) -assert_eq(min([5, 2, 3]), 2) -assert_eq(min({1: 2, -1 : 3}), -1) -assert_eq(min({2: None}), 2) -assert_eq(min(-1, 2), -1) -assert_eq(min(5, 2, 3), 2) -assert_eq(min(1, 1, 1, 1, 1, 1), 1) -assert_eq(min([1, 1, 1, 1, 1, 1]), 1) - -assert_eq(max("abcdefxyz".split_codepoints()), "z") -assert_eq(max("test", "xyz"), "xyz") -assert_eq(max("test", "xyz"), "xyz") -assert_eq(max([1, 2], [5]), [5]) -assert_eq(max([-1]), -1) -assert_eq(max([5, 2, 3]), 5) -assert_eq(max({1: 2, -1 : 3}), 1) -assert_eq(max({2: None}), 2) -assert_eq(max(-1, 2), 2) -assert_eq(max(5, 2, 3), 5) -assert_eq(max(1, 1, 1, 1, 1, 1), 1) -assert_eq(max([1, 1, 1, 1, 1, 1]), 1) - ---- -min(1) ### type 'int' is not iterable ---- -min([]) ### Argument is an empty iterable, min() expect a non empty iterable ---- -assert_eq(min(1, "2", True), 1) ### compare not supported for types ---- -assert_eq(min([1, "2", True]), 1) ### compare not supported for types ---- -max(1) ### type 'int' is not iterable ---- -max([]) ### Argument is an empty iterable, max() expect a non empty iterable ---- -assert_eq(max(1, '2', True), '2') ### compare not supported for types ---- -assert_eq(max([1, '2', True]), '2') ### compare not supported for types ---- diff --git a/starlark-test/tests/java-testcases/string_format.sky b/starlark-test/tests/java-testcases/string_format.sky deleted file mode 100644 index 1bd32b36..00000000 --- a/starlark-test/tests/java-testcases/string_format.sky +++ /dev/null @@ -1,93 +0,0 @@ -assert_eq('abc'.format(), "abc") - -# named arguments -assert_eq('x{key}x'.format(key = 2), "x2x") -assert_eq('x{key}x'.format(key = 'abc'), "xabcx") -assert_eq('{a}{b}{a}{b}'.format(a = 3, b = True), "3True3True") -assert_eq('{a}{b}{a}{b}'.format(a = 3, b = True), "3True3True") -assert_eq('{s1}{s2}'.format(s1 = ['a'], s2 = 'a'), '["a"]a') -assert_eq('{a}'.format(a = '$'), "$") -assert_eq('{a}'.format(a = '$a'), "$a") -assert_eq('{a}$'.format(a = '$a'), "$a$") -assert_eq('{(}'.format(**{'(': 2}), "2") - -# curly brace escaping -assert_eq('{{}}'.format(), "{}") -assert_eq('{{}}'.format(42), "{}") -assert_eq('{{ }}'.format(), "{ }") -assert_eq('{{ }}'.format(42), "{ }") -assert_eq('{{{{}}}}'.format(), "{{}}") -assert_eq('{{{{}}}}'.format(42), "{{}}") -assert_eq('{{0}}'.format(42), "{0}") -assert_eq('{{}}'.format(42), "{}") -assert_eq('{{{}}}'.format(42), "{42}") -assert_eq('{{ '.format(42), "{ " ) -assert_eq(' }}'.format(42), " }") -assert_eq('{{ {}'.format(42), "{ 42") -assert_eq('{} }}'.format(42), "42 }") -assert_eq('{{0}}'.format(42), "{0}") -assert_eq('{{{0}}}'.format(42), "{42}") -assert_eq('{{ 0'.format(42), "{ 0") -assert_eq('0 }}'.format(42), "0 }") -assert_eq('{{ {0}'.format(42), "{ 42") -assert_eq('{0} }}'.format(42), "42 }") 
-assert_eq('{{test}}'.format(test = 42), "{test}") -assert_eq('{{{test}}}'.format(test = 42), "{42}") -assert_eq('{{ test'.format(test = 42), "{ test") -assert_eq('test }}'.format(test = 42), "test }") -assert_eq('{{ {test}'.format(test = 42), "{ 42") -assert_eq('{test} }}'.format(test = 42), "42 }") - - -# Automatic positionals -assert_eq('{}, {} {} {} test'.format('hi', 'this', 'is', 'a'), "hi, this is a test") -assert_eq('skip some {}'.format('arguments', 'obsolete', 'deprecated'), "skip some arguments") - -# with numbered positions -assert_eq('{0}, {1} {2} {3} test'.format('hi', 'this', 'is', 'a'), "hi, this is a test") -assert_eq('{3}, {2} {1} {0} test'.format('a', 'is', 'this', 'hi'), "hi, this is a test") -assert_eq('skip some {0}'.format('arguments', 'obsolete', 'deprecated'), "skip some arguments") -assert_eq('{0} can be reused: {0}'.format('this', 'obsolete'), "this can be reused: this") - -# Mixed fields -assert_eq('{test} and {}'.format(2, test = 1), "1 and 2") -assert_eq('{test} and {0}'.format(2, test = 1), "1 and 2") - ---- -'{{}'.format(1) ### Standalone '}' ---- -'{}}'.format(1) ### Standalone '}' ---- -'{0}'.format() ### Index 0 is out of bound ---- -'{0} and {1}'.format('this') ### Index 1 is out of bound ---- -'{0} and {2}'.format('this', 'that') ### Index 2 is out of bound ---- -'{-0} and {-1}'.format('this', 'that') ### Key '-0' was not found ---- -'{0,1} and {1}'.format('this', 'that') ### Invalid character ',' inside replacement field ---- -'{0.1} and {1}'.format('this', 'that') ### Invalid character '.' inside replacement field ---- -'{}'.format() ### Not enough parameters in format string ---- -'{} and {}'.format('this') ### Not enough parameters in format string ---- -'{test} and {}'.format(test = 1, 2) ### Parse error ---- -'{test} and {0}'.format(test = 1, 2) ### Parse error ---- -'{} and {1}'.format(1, 2) ### Cannot mix manual field specification and automatic field numbering in format string ---- -'{1} and {}'.format(1, 2) ### Cannot mix manual field specification and automatic field numbering in format string ---- -'{test.}'.format(test = 1) ### Invalid character '.' 
inside replacement field ---- -'{test[}'.format(test = 1) ### Invalid character '[' inside replacement field ---- -'{test,}'.format(test = 1) ### Invalid character ',' inside replacement field ---- -'{ {} }'.format(42) ### Unmatched '{' ---- -'{a}{b}'.format(a = 5) ### Key 'b' was not found diff --git a/starlark-test/tests/java-testcases/string_partition.sky b/starlark-test/tests/java-testcases/string_partition.sky deleted file mode 100644 index dc2d5be9..00000000 --- a/starlark-test/tests/java-testcases/string_partition.sky +++ /dev/null @@ -1,40 +0,0 @@ -assert_eq('lawl'.partition('a'), ('l', 'a', 'wl')) -assert_eq('lawl'.rpartition('a'), ('l', 'a', 'wl')) -assert_eq('google'.partition('o'), ('g', 'o', 'ogle')) -assert_eq('google'.rpartition('o'), ('go', 'o', 'gle')) -assert_eq('xxx'.partition('x'), ('', 'x', 'xx')) -assert_eq('xxx'.rpartition('x'), ('xx', 'x', '')) -assert_eq(''.partition('a'), ('', '', '')) -assert_eq(''.rpartition('a'), ('', '', '')) - -# default separator -assert_eq('hi this is a test'.partition(), ('hi', ' ', 'this is a test')) -assert_eq('hi this is a test'.rpartition(), ('hi this is a', ' ', 'test')) -assert_eq('google'.partition(), ('google', '', '')) -assert_eq('google'.rpartition(), ('', '', 'google')) - -# no match -assert_eq('google'.partition('x'), ('google', '', '')) -assert_eq('google'.rpartition('x'), ('', '', 'google')) - -# at word boundaries -assert_eq('goog'.partition('g'), ('', 'g', 'oog')) -assert_eq('goog'.rpartition('g'), ('goo', 'g', '')) -assert_eq('plex'.partition('p'), ('', 'p', 'lex')) -assert_eq('plex'.rpartition('p'), ('', 'p', 'lex')) -assert_eq('plex'.partition('x'), ('ple', 'x', '')) -assert_eq('plex'.rpartition('x'), ('ple', 'x', '')) - -assert_eq('google'.partition('oog'), ('g', 'oog', 'le')) -assert_eq('google'.rpartition('oog'), ('g', 'oog', 'le')) -assert_eq('lolgooglolgooglolgooglol'.partition('goog'), ('lol', 'goog', 'lolgooglolgooglol')) -assert_eq('lolgooglolgooglolgooglol'.rpartition('goog'), ('lolgooglolgooglol', 'goog', 'lol')) - -# full string -assert_eq('google'.partition('google'), ('', 'google', '')) -assert_eq('google'.rpartition('google'), ('', 'google', '')) - ---- -'google'.partition('') ### Empty separator ---- -'google'.rpartition('') ### Empty separator diff --git a/starlark-test/tests/java-testcases/string_split.sky b/starlark-test/tests/java-testcases/string_split.sky deleted file mode 100644 index e8c5ba10..00000000 --- a/starlark-test/tests/java-testcases/string_split.sky +++ /dev/null @@ -1,45 +0,0 @@ -# split -assert_eq('h i'.split(' '), ['h', 'i']) -assert_eq('h i p'.split(' '), ['h', 'i', 'p']) -assert_eq('a,e,i,o,u'.split(',', 2), ['a', 'e', 'i,o,u']) -assert_eq(' 1 2 3 '.split(' '), ['', '', '1', '', '2', '', '3', '', '']) - -# rsplit -assert_eq('abcdabef'.rsplit('ab'), ['', 'cd', 'ef']) -assert_eq('google_or_gogol'.rsplit('go'), ['', 'ogle_or_', '', 'l']) - -# rsplit regex -assert_eq('foo/bar.lisp'.rsplit('.'), ['foo/bar', 'lisp']) -assert_eq('foo/bar.?lisp'.rsplit('.?'), ['foo/bar', 'lisp']) -assert_eq('fwe$foo'.rsplit('$'), ['fwe', 'foo']) -assert_eq('windows'.rsplit('\w'), ['windows']) - -# rsplit no match -assert_eq(''.rsplit('o'), ['']) -assert_eq('google'.rsplit('x'), ['google']) - -# rsplit separator -assert_eq('xxxxxx'.rsplit('x'), ['', '', '', '', '', '', '']) -assert_eq('xxxxxx'.rsplit('x', 1), ['xxxxx', '']) -assert_eq('xxxxxx'.rsplit('x', 2), ['xxxx', '', '']) -assert_eq('xxxxxx'.rsplit('x', 3), ['xxx', '', '', '']) -assert_eq('xxxxxx'.rsplit('x', 4), ['xx', '', '', '', '']) 
-assert_eq('xxxxxx'.rsplit('x', 5), ['x', '', '', '', '', '']) -assert_eq('xxxxxx'.rsplit('x', 6), ['', '', '', '', '', '', '']) -assert_eq('xxxxxx'.rsplit('x', 7), ['', '', '', '', '', '', '']) - -# split max split -assert_eq('google'.rsplit('o'), ['g', '', 'gle']) -assert_eq('google'.rsplit('o'), ['g', '', 'gle']) -assert_eq('google'.rsplit('o', 1), ['go', 'gle']) -assert_eq('google'.rsplit('o', 2), ['g', '', 'gle']) -assert_eq('google'.rsplit('o', 3), ['g', '', 'gle']) -assert_eq('ogooglo'.rsplit('o'), ['', 'g', '', 'gl', '']) -assert_eq('ogooglo'.rsplit('o', 1), ['ogoogl', '']) -assert_eq('ogooglo'.rsplit('o', 2), ['ogo', 'gl', '']) -assert_eq('ogooglo'.rsplit('o', 3), ['og', '', 'gl', '']) -assert_eq('ogooglo'.rsplit('o', 4), ['', 'g', '', 'gl', '']) -assert_eq('ogooglo'.rsplit('o', 5), ['', 'g', '', 'gl', '']) -assert_eq('google'.rsplit('google'), ['', '']) -assert_eq('google'.rsplit('google', 1), ['', '']) -assert_eq('google'.rsplit('google', 2), ['', '']) diff --git a/starlark-test/tests/java-testcases/string_splitlines.sky b/starlark-test/tests/java-testcases/string_splitlines.sky deleted file mode 100644 index 8590513f..00000000 --- a/starlark-test/tests/java-testcases/string_splitlines.sky +++ /dev/null @@ -1,29 +0,0 @@ -# Empty line -assert_eq(''.splitlines(), []) -assert_eq('\n'.splitlines(), ['']) - -# Starts with line break -assert_eq('\ntest'.splitlines(), ['', 'test']) - -# Ends with line break -assert_eq('test\n'.splitlines(), ['test']) - -# Different line breaks -assert_eq('this\nis\na\ntest'.splitlines(), ['this', 'is', 'a', 'test']) - -# Only line breaks -assert_eq('\n\n\n'.splitlines(), ['', '', '']) -assert_eq('\r\r\r'.splitlines(), ['', '', '']) -assert_eq('\n\r\n\r'.splitlines(), ['', '', '']) -assert_eq('\r\n\r\n\r\n'.splitlines(), ['', '', '']) - -# Escaped sequences -assert_eq('\n\\n\\\n'.splitlines(), ['', '\\n\\']) - -# KeepEnds -assert_eq(''.splitlines(True), []) -assert_eq('\n'.splitlines(True), ['\n']) -assert_eq('this\nis\r\na\rtest'.splitlines(True), ['this\n', 'is\r\n', 'a\r', 'test']) -assert_eq('\ntest'.splitlines(True), ['\n', 'test']) -assert_eq('test\n'.splitlines(True), ['test\n']) -assert_eq('\n\\n\\\n'.splitlines(True), ['\n', '\\n\\\n']) diff --git a/starlark-test/tests/java-testcases/string_test_characters.sky b/starlark-test/tests/java-testcases/string_test_characters.sky deleted file mode 100644 index df81d375..00000000 --- a/starlark-test/tests/java-testcases/string_test_characters.sky +++ /dev/null @@ -1,54 +0,0 @@ -# isalnum -assert_eq(''.isalnum(), False) -assert_eq('a0 33'.isalnum(), False) -assert_eq('1'.isalnum(), True) -assert_eq('a033'.isalnum(), True) - -# isdigit -assert_eq(''.isdigit(), False) -assert_eq(' '.isdigit(), False) -assert_eq('a'.isdigit(), False) -assert_eq('0234325.33'.isdigit(), False) -assert_eq('1'.isdigit(), True) -assert_eq('033'.isdigit(), True) - -# isspace -assert_eq(''.isspace(), False) -assert_eq('a'.isspace(), False) -assert_eq('1'.isspace(), False) -assert_eq('\ta\n'.isspace(), False) -assert_eq(' '.isspace(), True) -assert_eq('\t\n'.isspace(), True) - -# islower -assert_eq(''.islower(), False) -assert_eq(' '.islower(), False) -assert_eq('1'.islower(), False) -assert_eq('Almost'.islower(), False) -assert_eq('abc'.islower(), True) -assert_eq(' \nabc'.islower(), True) -assert_eq('abc def\n'.islower(), True) -assert_eq('\ta\n'.islower(), True) - -# isupper -assert_eq(''.isupper(), False) -assert_eq(' '.isupper(), False) -assert_eq('1'.isupper(), False) -assert_eq('aLMOST'.isupper(), False) 
-assert_eq('ABC'.isupper(), True) -assert_eq(' \nABC'.isupper(), True) -assert_eq('ABC DEF\n'.isupper(), True) -assert_eq('\tA\n'.isupper(), True) - -# istitle -assert_eq(''.istitle(), False) -assert_eq(' '.istitle(), False) -assert_eq('134'.istitle(), False) -assert_eq('almost Correct'.istitle(), False) -assert_eq('1nope Nope Nope'.istitle(), False) -assert_eq('NO Way'.istitle(), False) -assert_eq('T'.istitle(), True) -assert_eq('Correct'.istitle(), True) -assert_eq('Very Correct! Yes\nIndeed1X'.istitle(), True) -assert_eq('1234Ab Ab'.istitle(), True) -assert_eq('\tA\n'.istitle(), True) diff --git a/starlark-test/tests/java_conformance_tests.rs b/starlark-test/tests/java_conformance_tests.rs deleted file mode 100644 index 63af21d1..00000000 --- a/starlark-test/tests/java_conformance_tests.rs +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -use starlark_test::do_conformance_test; - -include!(concat!(env!("OUT_DIR"), "/tests/java-testcases.rs")); diff --git a/starlark-test/tests/rust-testcases/bool.sky b/starlark-test/tests/rust-testcases/bool.sky deleted file mode 100644 index 7dcc52c3..00000000 --- a/starlark-test/tests/rust-testcases/bool.sky +++ /dev/null @@ -1,7 +0,0 @@ -# Boolean tests - -True + 9223372036854775807 ### Type of parameters mismatch ---- -[] * True ### Type of parameters mismatch ---- -() * False ### Type of parameters mismatch diff --git a/starlark-test/tests/rust-testcases/compr.sky b/starlark-test/tests/rust-testcases/compr.sky deleted file mode 100644 index 608a9943..00000000 --- a/starlark-test/tests/rust-testcases/compr.sky +++ /dev/null @@ -1,8 +0,0 @@ -# Test for for comprehensions - -# Comprehension in comprehension -a = [x for x in [y for y in range(3)]] -assert_eq([0, 1, 2], a) - -b = [x for x in [0, 1, 2] if x] -assert_eq([1, 2], b) diff --git a/starlark-test/tests/rust-testcases/control.sky b/starlark-test/tests/rust-testcases/control.sky deleted file mode 100644 index f8d3df39..00000000 --- a/starlark-test/tests/rust-testcases/control.sky +++ /dev/null @@ -1,20 +0,0 @@ -# `break` or `continue` cannot be used outside of loop -# even if code is not executed (static error) -def zzz(): - return - break ### break cannot be used outside of loop ---- -def qqq(): - # note this is a parse time error - [] += [] ### incorrect augmented assignment target ---- -def qqq(): - a = [] - (a,) += [[]] ### incorrect augmented assignment target ---- -def rrr(): - # note this is a parse time error - [a, 1] = [3, 1] ### incorrect assignment target ---- -def rrr(): - (a, foo()) = [3, 1] ### incorrect assignment target diff --git a/starlark-test/tests/rust-testcases/dict.sky b/starlark-test/tests/rust-testcases/dict.sky deleted file mode 100644 index c3b46f34..00000000 --- a/starlark-test/tests/rust-testcases/dict.sky +++ /dev/null @@ -1,3 +0,0 @@ -# Dict tests - -{[]: 1 for x in [1]} ### Value is not hashable diff --git a/starlark-test/tests/rust-testcases/freeze.sky 
b/starlark-test/tests/rust-testcases/freeze.sky deleted file mode 100644 index 1cfd524d..00000000 --- a/starlark-test/tests/rust-testcases/freeze.sky +++ /dev/null @@ -1,36 +0,0 @@ -def two_iterations(): - l = [1] - for x in l: - # Second freeze of `l` must not fail - for y in l: - pass - - # Assert successfully unfrozen after two iterations - l.append(2) - -two_iterations() - ---- - -def mutate_after_second_in_first(): - l = [1] - for x in l: - for y in l: - pass - - # Test second iteration does not unfreeze - l.append(2) ### Cannot mutate an iterable while iterating - -mutate_after_second_in_first() - ---- -# test imported objects are frozen -# file: imported.sky - -a = [[]] - -# file: main.sky - -load("imported.sky", "a") - -a[0].append(1) ### Cannot mutate value diff --git a/starlark-test/tests/rust-testcases/inspect.sky b/starlark-test/tests/rust-testcases/inspect.sky deleted file mode 100644 index 1a1610f2..00000000 --- a/starlark-test/tests/rust-testcases/inspect.sky +++ /dev/null @@ -1,2 +0,0 @@ -a = "" -assert_("String" in inspect(a).rust_type_name) diff --git a/starlark-test/tests/rust-testcases/int.sky b/starlark-test/tests/rust-testcases/int.sky deleted file mode 100644 index 5d7ea1a7..00000000 --- a/starlark-test/tests/rust-testcases/int.sky +++ /dev/null @@ -1,28 +0,0 @@ -# Integer tests - -9223372036854775807 + 1 ### Integer overflow ---- --9223372036854775807 - 2 ### Integer overflow ---- -9223372036854775807 * 2 ### Integer overflow ---- -int_min = -9223372036854775807 - 1 --int_min ### Integer overflow ---- -int_min = -9223372036854775807 - 1 -int_min // -1 ### Integer overflow ---- -int_min = -9223372036854775807 - 1 -assert_eq(0, int_min % -1) -assert_eq(0, int_min % int_min) -assert_eq(9223372036854775806, int_min % 9223372036854775807) -assert_eq(-1, 9223372036854775807 % int_min) - - -# Issue #98 -assert_eq(4, 7 - 2 - 1) - -# Issue 152 -assert_eq(0, int("0", 8)) -assert_eq(0, int("-0", 8)) -assert_eq(0, int("+0", 8)) diff --git a/starlark-test/tests/rust-testcases/josharian_fuzzing.sky b/starlark-test/tests/rust-testcases/josharian_fuzzing.sky deleted file mode 100644 index 1e1ef1e3..00000000 --- a/starlark-test/tests/rust-testcases/josharian_fuzzing.sky +++ /dev/null @@ -1,25 +0,0 @@ -# This file contains the list of example reported by https://github.com/josharian -# as part of his fuzzing of starlark-rust - -# https://github.com/google/starlark-rust/issues/44: whitespace isn't required between some tokens -assert_eq(6or(), 6) -# 6burgle still generates a parse error. -6burgle ### [CP01] ---- -# https://github.com/google/starlark-rust/issues/56: Non whitespace after 0 should be allowed. 
-assert_eq(0in[1,2,3], False) ---- -# https://github.com/google/starlark-rust/issues/61: panic on bad range using string.index -assert_eq('a'.find('', 1, 0), -1) -assert_eq('a'.rfind('', 1, 0), -1) -'a'.index('', 1, 0) ### [UF00] ---- -assert_eq('a'.find('', 1, 0), -1) -assert_eq('a'.rfind('', 1, 0), -1) -'a'.rindex('', 1, 0) ### [UF00] ---- -# https://github.com/google/starlark-rust/issues/64: alphabetize dir entries -assert_eq(dir(""), sorted(dir(""))) ---- -# https://github.com/google/starlark-rust/issues/66: / is only for floats (which we don't support) -1 / 1 ### [CV00] diff --git a/starlark-test/tests/rust-testcases/module.sky b/starlark-test/tests/rust-testcases/module.sky deleted file mode 100644 index 4cb457b2..00000000 --- a/starlark-test/tests/rust-testcases/module.sky +++ /dev/null @@ -1,11 +0,0 @@ -# file: util.bzl - -def add_one(x): - return x + 1 - -# file: main.sky - -load("util.bzl", "add_one") - -assert_eq(5, add_one(4)) - diff --git a/starlark-test/tests/rust-testcases/mutation_during_iteration.sky b/starlark-test/tests/rust-testcases/mutation_during_iteration.sky deleted file mode 100644 index 0a101a3e..00000000 --- a/starlark-test/tests/rust-testcases/mutation_during_iteration.sky +++ /dev/null @@ -1,24 +0,0 @@ -# Test for disallowing mutation during iteration. -# https://github.com/bazelbuild/starlark/blob/815aed90b552fa70adca4dc18d73082fae83b538/design.md#no-mutation-during-iteration -a = [1, 2, 3] -def fun(): - for x in a: - a.append(1) - -fun() ### Cannot mutate an iterable while iterating ---- -def increment_values(dict): - for k in dict: - dict[k] += 1 - -dict = {"one": 1, "two": 2} -increment_values(dict) ### Cannot mutate an iterable while iterating ---- -# modifying deep content is allowed -def modify_deep_content(): - list = [[0], [1], [2]] - for x in list: - list[x[0]][0] = 2 * x[0] - return list -assert_eq(modify_deep_content(), [[0], [2], [4]]) ---- diff --git a/starlark-test/tests/rust-testcases/range.sky b/starlark-test/tests/rust-testcases/range.sky deleted file mode 100644 index 929dd1e0..00000000 --- a/starlark-test/tests/rust-testcases/range.sky +++ /dev/null @@ -1,25 +0,0 @@ -# Range tests - -# Content matters, not how range is created -assert_eq(range(1), range(0, -1, -1)) - -assert_eq(list(range(10)), [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]) -assert_eq(list(range(3, 10)), [3, 4, 5, 6, 7, 8, 9]) -assert_eq(list(range(3, 10, 2)), [3, 5, 7, 9]) -assert_eq(list(range(10, 3, -2)), [10, 8, 6, 4]) - -def f(): - # Largest possible range, can create, can't really do much with it - r = range(-9223372036854775807-1, 9223372036854775807) - l = [] - for x in r: - l += [x] - if len(l) == 3: - break - assert_eq([-9223372036854775807-1, -9223372036854775807, -9223372036854775807+1], l) -f() - ---- -len(range(-9223372036854775807-1, 9223372036854775807)) ### Integer overflow ---- -assert_eq(9223372036854775807, len(range(-9223372036854775807, 9223372036854775807, 2))) diff --git a/starlark-test/tests/rust-testcases/regression.sky b/starlark-test/tests/rust-testcases/regression.sky deleted file mode 100644 index 264a5bb1..00000000 --- a/starlark-test/tests/rust-testcases/regression.sky +++ /dev/null @@ -1,6 +0,0 @@ -# Regression for https://github.com/google/starlark-rust/issues/10 -"abc" * True ### Type of parameters mismatch ---- -# Make sure int * string works as well as string * int -assert_eq(3 * "abc", "abcabcabc") -assert_eq("abc" * 3, "abcabcabc") \ No newline at end of file diff --git a/starlark-test/tests/rust-testcases/string.sky 
b/starlark-test/tests/rust-testcases/string.sky deleted file mode 100644 index ae5a2b24..00000000 --- a/starlark-test/tests/rust-testcases/string.sky +++ /dev/null @@ -1,19 +0,0 @@ -# String tests - - -# From Starlark spec - -# The conversion's operand is the next element of args, which must be a tuple -# with exactly one component per conversion, - -assert_eq("ab1cd2ef", "ab%scd%sef" % [1, 2]) - -# ... unless the format string contains -# only a single conversion, in which case args itself is its operand. -assert_eq("ab[1]cd", "ab%scd" % [1]) - - -# Issue #43 -''%(0) ### The type 'int' is not iterable ---- -''%(0,) ### Too many arguments for format string diff --git a/starlark-test/tests/rust-testcases/struct.sky b/starlark-test/tests/rust-testcases/struct.sky deleted file mode 100644 index 135b71ed..00000000 --- a/starlark-test/tests/rust-testcases/struct.sky +++ /dev/null @@ -1,16 +0,0 @@ -# Struct tests - -# Comparison -assert_(struct() == struct()) -assert_(struct(a=1) == struct(a=1)) -assert_(struct(a=1, b=False) == struct(a=1, b=False)) - -# Order of fields is not important for comparison -assert_(struct(a=1, b=2) == struct(b=2, a=1)) - -# Inequality -assert_(struct(a=2) != struct()) -assert_(struct() != struct(a=2)) -assert_(struct(a=2) != struct(a=1)) -assert_(struct(a=2) != struct(b=1)) -assert_(struct(a=1, b=2) != struct(a=1, b="2")) diff --git a/starlark-test/tests/rust_tests.rs b/starlark-test/tests/rust_tests.rs deleted file mode 100644 index f822a10d..00000000 --- a/starlark-test/tests/rust_tests.rs +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -use starlark_test::do_conformance_test; - -include!(concat!(env!("OUT_DIR"), "/tests/rust-testcases.rs")); diff --git a/starlark/Cargo.toml b/starlark/Cargo.toml deleted file mode 100644 index 3051591c..00000000 --- a/starlark/Cargo.toml +++ /dev/null @@ -1,37 +0,0 @@ -[package] -name = "starlark" -edition = "2018" -version = "0.3.2-pre" -authors = [ - "Damien Martin-Guillerez ", - "Stepan Koltsov ", -] -build = "build.rs" - -description = "An implementation in Rust of the Starlark language." 
-documentation = "https://docs.rs/crate/starlark" -homepage = "https://github.com/google/starlark-rust" -repository = "https://github.com/google/starlark-rust" -readme = "README.md" -keywords = ["starlark", "skylark", "bazel", "language", "interpreter"] -categories = ["development-tools"] -license = "Apache-2.0" - -[badges] -travis-ci = { repository = "google/starlark-rust", branch = "master" } -maintenance = { status = "passively-maintained" } - -[build-dependencies] -lalrpop = "0.19.2" - -[dependencies] -codemap = "0.1.1" -codemap-diagnostic = "0.1.1" -lalrpop-util = "0.19.2" -linked-hash-map = "0.5.1" - -[lib] -bench = false - -[features] -trace = [] diff --git a/starlark/LICENSE b/starlark/LICENSE deleted file mode 100644 index d6456956..00000000 --- a/starlark/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/starlark/README.md b/starlark/README.md deleted file mode 100644 index 9c8cc434..00000000 --- a/starlark/README.md +++ /dev/null @@ -1,19 +0,0 @@ -# Starlark in Rust -_An implementation in Rust of the Starlark language_ - -[Starlark](https://github.com/bazelbuild/starlark), formerly codenamed Skylark, is a non-Turing -complete language based on Python that was made for the [Bazel build system](https://bazel.build) to -define compilation plugin. - -Starlark has at least 3 implementations: a [Java one for Bazel]( -https://github.com/bazelbuild/bazel/tree/master/src/main/java/com/google/devtools/skylark), -a [Go one](https://github.com/google/skylark) and this one. - -This interpreter was made using the [specification from the go version]( -https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md) -and the Python 3 documentation when things were unclear. - -This interpreter does not support most of the go extensions (e.g. bitwise -operator or floating point). It does not include the `set()` type either ([the -official Starlark specification](https://github.com/bazelbuild/starlark/blob/master/spec.md) -does not have them either). It uses signed 64-bit integers. diff --git a/starlark/build.rs b/starlark/build.rs deleted file mode 100644 index 83cf06ef..00000000 --- a/starlark/build.rs +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-extern crate lalrpop; - -fn main() { - lalrpop(); -} - -fn lalrpop() { - // A test - println!("cargo:rerun-if-changed=src/syntax/grammar.lalrpop"); - lalrpop::Configuration::new() - .use_cargo_dir_conventions() - .always_use_colors() - .emit_report(true) - .process_file("src/syntax/grammar.lalrpop") - .unwrap(); -} diff --git a/starlark/examples/starlark-simple-cli.rs b/starlark/examples/starlark-simple-cli.rs deleted file mode 100644 index 574a47fd..00000000 --- a/starlark/examples/starlark-simple-cli.rs +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! An example binary demonstrating how to use the starlark-rust crate. -//! -//! This program demonstrates how to set up a `codemap` and `Environment` required for -//! `eval()`, as well as how to use [values](values) returned by `eval()`. It accepts -//! a Starlark program on standard input and prints the result if the result is a string. - -extern crate codemap; -extern crate codemap_diagnostic; -extern crate starlark; - -use std::io::{self, Read}; -use std::process::exit; -use std::sync::{Arc, Mutex}; - -use codemap_diagnostic::{ColorConfig, Emitter}; -use starlark::eval::simple::eval; -use starlark::stdlib::global_environment; -use starlark::syntax::dialect::Dialect; - -pub fn simple_evaluation(starlark_input: &String) -> Result { - // Create a new global environment populated with the stdlib. - let (global_env, type_values) = global_environment(); - // Extra symbols can be added to the global environment before freezing if desired. - global_env.freeze(); - // Create our own local copy of the global environment. - let mut env = global_env.child("simple-cli"); - - // Create a codemap to record the raw source of all code executed, including code - // introduced by a Starlark load() call. - let map = Arc::new(Mutex::new(codemap::CodeMap::new())); - - // We don't have a filename since we're not reading from a file, so call it "stdin". - let result = eval( - &map, - "stdin", - &starlark_input, - Dialect::Bzl, - &mut env, - &type_values, - global_env.clone(), - ); - - match result { - Ok(res) => match res.get_type() { - "string" => Ok(res.to_str()), - _ => Err(format!( - "Error interpreting '{}': result must be string! (was {})", - starlark_input, - res.get_type() - )), - }, - Err(diagnostic) => { - // Get the lock to the codemap and unlock it so we can use it. - let cloned_map_lock = Arc::clone(&map); - let unlocked_map = cloned_map_lock.lock().unwrap(); - - // Emit code diagnostic information to standard error. 
- Emitter::stderr(ColorConfig::Always, Some(&unlocked_map)).emit(&[diagnostic]); - Err(format!("Error interpreting '{}'", starlark_input)) - } - } -} - -fn main() { - let mut starlark_input = String::new(); - io::stdin() - .read_to_string(&mut starlark_input) - .expect("Error reading from stdin"); - let starlark_input = starlark_input.trim().to_owned(); - - match simple_evaluation(&starlark_input) { - Ok(result_string) => println!("{}", result_string), - Err(error_string) => { - println!("{}", error_string); - exit(2); - } - } -} diff --git a/starlark/src/environment/mod.rs b/starlark/src/environment/mod.rs deleted file mode 100644 index 3e75c262..00000000 --- a/starlark/src/environment/mod.rs +++ /dev/null @@ -1,280 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! The enviroment, called "Module" in [this spec]( -//! https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md) -//! is the list of variable in the current scope. It can be frozen, after which all values from -//! this environment become immutable. - -use crate::values::cell::error::ObjectBorrowMutError; -use crate::values::cell::ObjectCell; -use crate::values::error::{RuntimeError, ValueError}; -use crate::values::string::rc::RcString; -use crate::values::*; -use std::collections::HashMap; -use std::rc::Rc; - -// TODO: move that code in some common error code list? -// CM prefix = Critical Module -const __RESERVED_CM00: &str = "CM00"; -const NOT_FOUND_ERROR_CODE: &str = "CM01"; -const LOCAL_VARIABLE_REFERENCED_BEFORE_ASSIGNMENT: &str = "CM03"; -pub(crate) const LOAD_NOT_SUPPORTED_ERROR_CODE: &str = "CM02"; -const CANNOT_IMPORT_ERROR_CODE: &str = "CE02"; -const BORROW_ERROR_CODE: &str = "CE03"; - -#[derive(Debug)] -#[doc(hidden)] -pub enum EnvironmentError { - /// Variables was no found. - VariableNotFound(String), - LocalVariableReferencedBeforeAssignment(String), - /// Cannot import private symbol, i.e. underscore prefixed - CannotImportPrivateSymbol(String), - BorrowMut(ObjectBorrowMutError), -} - -impl From for EnvironmentError { - fn from(e: ObjectBorrowMutError) -> EnvironmentError { - EnvironmentError::BorrowMut(e) - } -} - -impl Into for EnvironmentError { - fn into(self) -> RuntimeError { - RuntimeError { - code: match self { - EnvironmentError::VariableNotFound(..) => NOT_FOUND_ERROR_CODE, - EnvironmentError::CannotImportPrivateSymbol(..) => CANNOT_IMPORT_ERROR_CODE, - EnvironmentError::LocalVariableReferencedBeforeAssignment(..) => { - LOCAL_VARIABLE_REFERENCED_BEFORE_ASSIGNMENT - } - EnvironmentError::BorrowMut(..) => BORROW_ERROR_CODE, - }, - label: match self { - EnvironmentError::VariableNotFound(..) => "Variable was not found".to_owned(), - EnvironmentError::LocalVariableReferencedBeforeAssignment(..) 
=> { - "Local variable referenced before assignment".to_owned() - } - EnvironmentError::CannotImportPrivateSymbol(ref s) => { - format!("Symbol '{}' is private", s) - } - EnvironmentError::BorrowMut(ref e) => format!("{}", e), - }, - message: match self { - EnvironmentError::VariableNotFound(s) => format!("Variable '{}' not found", s), - EnvironmentError::LocalVariableReferencedBeforeAssignment(ref s) => { - format!("Local variable '{}' referenced before assignment", s) - } - EnvironmentError::CannotImportPrivateSymbol(s) => { - format!("Cannot import private symbol '{}'", s) - } - EnvironmentError::BorrowMut(ref e) => { - format!("Cannot borrow environment mutably: {}", e) - } - }, - } - } -} - -impl From for ValueError { - fn from(e: EnvironmentError) -> Self { - ValueError::Runtime(e.into()) - } -} - -#[derive(Clone, Debug)] -pub struct Environment { - env: Rc>, -} - -#[derive(Debug)] -struct EnvironmentContent { - /// A name for this environment, used mainly for debugging. - name_: RcString, - /// Super environment that represent a higher scope than the current one - parent: Option, - /// List of variable bindings - /// - /// These bindings include methods for native types, e.g. `string.isalnum`. - variables: HashMap, - /// When `true`, set `{foo, bar}` literals are allowed. - set_literals: bool, -} - -impl Environment { - /// Create a new environment - pub fn new(name: &str) -> Environment { - Environment { - env: Rc::new(ObjectCell::new_mutable(EnvironmentContent { - name_: name.into(), - parent: None, - variables: HashMap::new(), - set_literals: false, - })), - } - } - - /// Create a new child environment for this environment - pub fn child(&self, name: &str) -> Environment { - self.freeze(); - Environment { - env: Rc::new(ObjectCell::new_mutable(EnvironmentContent { - name_: name.into(), - parent: Some(self.clone()), - variables: HashMap::new(), - set_literals: self.env.borrow().set_literals, - })), - } - } - - /// Create a new child environment - /// Freeze the environment, all its value will become immutable after that - pub fn freeze(&self) -> &Self { - if !self.env.get_header_copy().is_mutable_frozen() { - self.env.borrow_mut().freeze(); - self.env.freeze(); - } - self - } - - /// Return the name of this module - pub fn name(&self) -> RcString { - self.env.borrow().name_.clone() - } - - /// Set the value of a variable in that environment. - pub fn set(&self, name: &str, value: Value) -> Result<(), EnvironmentError> { - self.env.try_borrow_mut()?.set(name, value) - } - - /// Get the value of the variable `name` - pub fn get(&self, name: &str) -> Result { - self.env.borrow().get(name) - } - - pub fn import_symbol( - &self, - env: &Environment, - symbol: &str, - new_name: &str, - ) -> Result<(), EnvironmentError> { - let first = symbol.chars().next(); - match first { - Some('_') | None => Err(EnvironmentError::CannotImportPrivateSymbol( - symbol.to_owned(), - )), - _ => self.set(new_name, env.get(symbol)?), - } - } - - /// Return the parent environment (or `None` if there is no parent). - pub fn get_parent(&self) -> Option { - self.env.borrow().get_parent() - } - - /// Set the function which will be used to instantiate set literals encountered when evaluating - /// in this `Environment`. Set literals are {}s with one or more elements between, separated by - /// commas, e.g. `{1, 2, "three"}`. - /// - /// If this function is not called on the `Environment`, its parent's set constructor will be - /// used when set literals are encountered. 
If neither this `Environment`, nor any of its - /// transitive parents, have a set constructor defined, attempts to evaluate set literals will - /// raise and error. - /// - /// The `Value` returned by this function is expected to be a one-dimensional collection - /// containing no duplicates. - pub fn enable_set_literals(&self) { - self.env.borrow_mut().set_literals = true; - } - - /// Is it OK to have set literals? - pub(crate) fn set_literals_emabled(&self) -> bool { - self.env.borrow().set_literals - } -} - -impl EnvironmentContent { - /// Create a new child environment - /// Freeze the environment, all its value will become immutable after that - pub fn freeze(&mut self) { - for v in self.variables.values_mut() { - v.freeze(); - } - } - - /// Set the value of a variable in that environment. - pub fn set(&mut self, name: &str, value: Value) -> Result<(), EnvironmentError> { - self.variables.insert(name.to_string(), value); - Ok(()) - } - - /// Get the value of the variable `name` - pub fn get(&self, name: &str) -> Result { - if self.variables.contains_key(name) { - Ok(self.variables[name].clone()) - } else { - match self.parent { - Some(ref p) => p.get(name), - None => Err(EnvironmentError::VariableNotFound(name.to_owned())), - } - } - } - - /// Return the parent environment (or `None` if there is no parent). - pub fn get_parent(&self) -> Option { - self.parent.clone() - } -} - -/// Environment passed to `call` calls. -/// -/// Function implementations are only allowed to access -/// type values from "type values" from the caller context, -/// so this struct is passed instead of full `Environment`. -#[derive(Clone, Default, Debug)] -pub struct TypeValues { - /// List of static values of an object per type - type_objs: HashMap>, -} - -impl TypeValues { - /// Get a type value if it exists (e.g. list.index). - pub fn get_type_value(&self, obj: &Value, id: &str) -> Option { - self.type_objs - .get(obj.get_type()) - .and_then(|o| o.get(id)) - .cloned() - } - - /// List the attribute of a type - pub fn list_type_value(&self, obj: &Value) -> Vec { - self.type_objs - .get(obj.get_type()) - .into_iter() - .flat_map(|o| o.keys().cloned()) - .collect() - } - - /// Get the object of type `obj_type`, and create it if none exists - pub fn add_type_value(&mut self, obj: &str, attr: &str, value: Value) { - if let Some(ref mut v) = self.type_objs.get_mut(obj) { - v.insert(attr.into(), value); - } else { - let mut dict = HashMap::new(); - dict.insert(attr.into(), value); - self.type_objs.insert(obj.into(), dict); - } - } -} diff --git a/starlark/src/eval/call_stack.rs b/starlark/src/eval/call_stack.rs deleted file mode 100644 index 6da56b44..00000000 --- a/starlark/src/eval/call_stack.rs +++ /dev/null @@ -1,146 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -//! Starlark call stack. 
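[Editorial note, not part of this change] The environment module removed above documents the freeze semantics (a frozen `Environment` makes all of its values immutable) and the `new`/`child`/`set`/`get` API. Below is a minimal sketch of driving that API through the public `eval` entry point from the deleted `starlark-simple-cli` example. The module paths and signatures are the ones visible in this diff; the expectation that mutating a frozen list is rejected with a diagnostic is an assumption, consistent with the `freeze.sky` cases above rather than confirmed by the sources here.

```rust
// Editorial sketch only; mirrors the API of the deleted sources in this diff.
extern crate codemap;
extern crate starlark;

use std::sync::{Arc, Mutex};

use starlark::eval::simple::eval;
use starlark::stdlib::global_environment;
use starlark::syntax::dialect::Dialect;

fn main() {
    // Global environment with the stdlib, frozen before children are created.
    let (global_env, type_values) = global_environment();
    global_env.freeze();

    // A module-level child environment; `eval` needs a codemap for diagnostics.
    let mut module = global_env.child("module");
    let map = Arc::new(Mutex::new(codemap::CodeMap::new()));
    let list = eval(
        &map, "module", "[1, 2, 3]", Dialect::Bzl,
        &mut module, &type_values, global_env.clone(),
    )
    .ok()
    .expect("evaluation failed");
    module.set("a", list).expect("set failed");

    // Freezing makes `a` immutable for every environment derived from `module`.
    module.freeze();
    let mut child = module.child("child");
    let mutation = eval(
        &map, "child", "a.append(4)", Dialect::Bzl,
        &mut child, &type_values, global_env.clone(),
    );

    // Assumed to be rejected because `a` is frozen (compare the
    // "Cannot mutate" cases in freeze.sky above).
    println!("mutation of frozen list rejected: {}", mutation.is_err());
    println!("a = {}", child.get("a").expect("get failed").to_str());
}
```

Note that `child()` freezes the parent before creating the child environment, so values shared across modules are necessarily immutable, which matches the `load()` behaviour exercised in `freeze.sky` and `module.sky` above.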
- -use crate::values::error::ValueError; -use crate::values::{FunctionId, Value}; -use codemap::{CodeMap, Pos}; -use std::cell::Cell; -use std::fmt; -use std::sync::{Arc, Mutex}; - -#[derive(Clone)] -struct Frame(Value, Arc>, Pos); - -impl fmt::Debug for Frame { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_tuple("Frame").field(&self.0).finish() - } -} - -/// Starlark call stack. Used internally, opague for the public API. -// TODO: unimplement `Clone`, `Default`, users do not need to be able to create stacks -#[derive(Clone, Debug, Default)] -pub struct CallStack { - stack: Vec, -} - -impl CallStack { - /// Push an element to the stack - pub(crate) fn push(&mut self, function: Value, code_map: Arc>, pos: Pos) { - self.stack.push(Frame(function, code_map, pos)); - } - - /// Pop an element from the stack, panic if stack is already empty. - pub(crate) fn pop(&mut self) { - self.stack.pop().unwrap(); - } - - /// Test if call stack contains a function with given id. - pub(crate) fn contains(&self, function_id: FunctionId) -> bool { - self.stack - .iter() - .any(|&Frame(ref f, _, _)| f.function_id() == function_id) - } - - /// Print call stack as multiline string - /// with each line beginning with newline. - pub(crate) fn print_with_newline_before<'a>(&'a self) -> impl fmt::Display + 'a { - DisplayWithNewlineBefore { call_stack: self } - } -} - -struct DisplayWithNewlineBefore<'a> { - call_stack: &'a CallStack, -} - -impl<'a> fmt::Display for DisplayWithNewlineBefore<'a> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - for Frame(function, code_map, pos) in self.call_stack.stack.iter().rev() { - let loc = { code_map.lock().unwrap().look_up_pos(*pos) }; - write!( - f, - "\n call to {} at {}:{}", - function.to_str(), - loc.file.name(), - loc.position.line + 1, // line 1 is 0, so add 1 for human readable. - )?; - } - Ok(()) - } -} - -// Maximum recursion level for comparison -// TODO(dmarting): those are rather short, maybe make it configurable? -#[cfg(debug_assertions)] -const MAX_RECURSION: u32 = 200; - -#[cfg(not(debug_assertions))] -const MAX_RECURSION: u32 = 3000; - -// A thread-local counter is used to detect too deep recursion. -// -// Thread-local is chosen instead of explicit function "recursion" parameter -// for two reasons: -// * It's possible to propagate stack depth across external functions like -// `Display::to_string` where passing a stack depth parameter is hard -// * We need to guarantee that stack depth is not lost in complex invocation -// chains like function calls compare which calls native function which calls -// starlark function which calls to_str. We could change all evaluation stack -// signatures to accept some "context" parameters, but passing it as thread-local -// is easier. -thread_local!(static STACK_DEPTH: Cell = Cell::new(0)); - -/// Stored previous stack depth before calling `try_inc`. -/// -/// Stores that previous stack depths back to thread-local on drop. -#[must_use] -pub struct StackGuard { - prev_depth: u32, -} - -impl Drop for StackGuard { - fn drop(&mut self) { - STACK_DEPTH.with(|c| c.set(self.prev_depth)); - } -} - -/// Increment stack depth. -fn inc() -> StackGuard { - let prev_depth = STACK_DEPTH.with(|c| { - let prev = c.get(); - c.set(prev + 1); - prev - }); - StackGuard { prev_depth } -} - -/// Check stack depth does not exceed configured max stack depth. 
-fn check() -> Result<(), ValueError> { - if STACK_DEPTH.with(Cell::get) >= MAX_RECURSION { - return Err(ValueError::TooManyRecursionLevel); - } - Ok(()) -} - -/// Try increment stack depth. -/// -/// Return opaque `StackGuard` object which resets stack to previous value -/// on `drop`. -/// -/// If stack depth exceeds configured limit, return error. -pub fn try_inc() -> Result { - check()?; - Ok(inc()) -} diff --git a/starlark/src/eval/compiler.rs b/starlark/src/eval/compiler.rs deleted file mode 100644 index 96a096a5..00000000 --- a/starlark/src/eval/compiler.rs +++ /dev/null @@ -1,239 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Utilities for translation of AST into interpreter-friendly data structures - -use crate::eval::expr::AssignTargetExprCompiled; -use crate::eval::expr::AstClauseCompiled; -use crate::eval::expr::AstGlobalOrSlot; -use crate::eval::expr::ClauseCompiled; -use crate::eval::expr::ExprCompiled; -use crate::eval::expr::ExprLocal; -use crate::eval::expr::GlobalOrSlot; -use crate::eval::globals::Globals; -use crate::eval::locals::LocalsBuilder; -use crate::eval::locals::LocalsQuery; -use crate::syntax::ast::AstClause; -use crate::syntax::ast::AstExpr; -use crate::syntax::ast::AstString; -use crate::syntax::ast::Clause; -use crate::syntax::ast::Expr; -use codemap::Span; -use codemap::Spanned; -use codemap_diagnostic::Diagnostic; - -/// Encapsulate differences between compilation of module scope vs -/// function or comprehension scope -pub(crate) trait LocalOrGlobalCompiler { - /// Resolve identifier to either local slot or global name - fn ident(&mut self, ident: AstString) -> GlobalOrSlot; - - fn ast_ident(&mut self, ident: AstString) -> AstGlobalOrSlot { - Spanned { - span: ident.span, - node: self.ident(ident), - } - } - - /// Compile list comprehension - fn list_comprenesion( - &mut self, - span: Span, - expr: AstExpr, - clauses: Vec, - ) -> Result; - /// Compile set comprehension - fn set_comprenesion( - &mut self, - span: Span, - expr: AstExpr, - clauses: Vec, - ) -> Result; - /// Compile dict comprehension - fn dict_comprenesion( - &mut self, - span: Span, - key: AstExpr, - value: AstExpr, - clauses: Vec, - ) -> Result; -} - -pub(crate) struct LocalCompiler<'a> { - locals_query: &'a mut LocalsQuery<'a>, -} - -impl<'a> LocalCompiler<'a> { - pub fn new(locals_query: &'a mut LocalsQuery<'a>) -> LocalCompiler<'a> { - LocalCompiler { locals_query } - } -} - -impl<'a> LocalCompiler<'a> { - fn compile_clauses(&mut self, clauses: Vec, expr: E) -> Result - where - E: FnOnce(Vec, &mut LocalCompiler) -> Result, - { - let mut transformed_clauses = Vec::new(); - let mut scope_count = 0; - for clause in clauses { - transformed_clauses.push(Spanned { - span: clause.span, - node: match clause.node { - Clause::If(expr) => ClauseCompiled::If(ExprCompiled::compile(expr, self)?), - Clause::For(target, expr) => { - let expr = ExprCompiled::compile(expr, self)?; - self.locals_query.push_next_scope(); - scope_count 
+= 1; - let target = AssignTargetExprCompiled::compile(target, self)?; - ClauseCompiled::For(target, expr) - } - }, - }); - } - let r = expr(transformed_clauses, self)?; - for _ in 0..scope_count { - self.locals_query.pop_scope(); - } - Ok(r) - } -} - -impl<'a> LocalOrGlobalCompiler for LocalCompiler<'a> { - fn ident(&mut self, ident: AstString) -> GlobalOrSlot { - let (slot, local) = self.locals_query.slot(&ident.node); - GlobalOrSlot { - name: ident.node, - local, - slot, - } - } - - fn list_comprenesion( - &mut self, - _span: Span, - expr: AstExpr, - clauses: Vec, - ) -> Result { - self.compile_clauses(clauses, |clauses, compiler| { - let expr = ExprCompiled::compile(expr, compiler)?; - Ok(ExprCompiled::ListComprehension(expr, clauses)) - }) - } - - fn set_comprenesion( - &mut self, - _span: Span, - expr: AstExpr, - clauses: Vec, - ) -> Result { - self.compile_clauses(clauses, |clauses, compiler| { - let expr = ExprCompiled::compile(expr, compiler)?; - Ok(ExprCompiled::SetComprehension(expr, clauses)) - }) - } - - fn dict_comprenesion( - &mut self, - _span: Span, - key: AstExpr, - value: AstExpr, - clauses: Vec, - ) -> Result { - self.compile_clauses(clauses, |clauses, compiler| { - let key = ExprCompiled::compile(key, compiler)?; - let value = ExprCompiled::compile(value, compiler)?; - Ok(ExprCompiled::DictComprehension((key, value), clauses)) - }) - } -} - -pub(crate) struct GlobalCompiler<'a> { - globals: &'a mut Globals, -} - -impl<'a> GlobalCompiler<'a> { - pub fn new(globals: &'a mut Globals) -> GlobalCompiler<'a> { - GlobalCompiler { globals } - } - - fn compile_comprehension_in_global_scope( - &mut self, - expr: AstExpr, - ) -> Result { - let mut locals_builder = LocalsBuilder::default(); - - Expr::collect_locals(&expr, &mut locals_builder); - - let locals = locals_builder.build(); - // Note we are using private global index for comprehensions - let mut globals = Globals::default(); - - let mut locals_query = LocalsQuery::new(&locals, &mut globals); - - let expr = ExprCompiled::compile_local(expr, &mut locals_query)?; - - Ok(ExprCompiled::Local(ExprLocal { - expr, - locals, - globals, - })) - } -} - -impl<'a> LocalOrGlobalCompiler for GlobalCompiler<'a> { - fn ident(&mut self, ident: AstString) -> GlobalOrSlot { - GlobalOrSlot { - slot: self.globals.register_global(&ident.node), - name: ident.node, - local: false, - } - } - fn list_comprenesion( - &mut self, - span: Span, - expr: AstExpr, - clauses: Vec, - ) -> Result { - self.compile_comprehension_in_global_scope(Box::new(Spanned { - span, - node: Expr::ListComprehension(expr, clauses), - })) - } - - fn set_comprenesion( - &mut self, - span: Span, - expr: AstExpr, - clauses: Vec, - ) -> Result { - self.compile_comprehension_in_global_scope(Box::new(Spanned { - span, - node: Expr::SetComprehension(expr, clauses), - })) - } - - fn dict_comprenesion( - &mut self, - span: Span, - key: AstExpr, - value: AstExpr, - clauses: Vec, - ) -> Result { - self.compile_comprehension_in_global_scope(Box::new(Spanned { - span, - node: Expr::DictComprehension((key, value), clauses), - })) - } -} diff --git a/starlark/src/eval/compr.rs b/starlark/src/eval/compr.rs deleted file mode 100644 index d0d97d29..00000000 --- a/starlark/src/eval/compr.rs +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! List/dict/set comprenension evaluation. - -use crate::eval::eval_expr; -use crate::eval::expr::AstClauseCompiled; -use crate::eval::expr::ClauseCompiled; -use crate::eval::set_expr; -use crate::eval::t; -use crate::eval::EvalException; -use crate::values::context::EvaluationContext; -use crate::values::context::EvaluationContextEnvironment; - -pub(crate) fn eval_one_dimensional_comprehension< - E: EvaluationContextEnvironment, - F: FnMut(&mut EvaluationContext) -> Result<(), EvalException>, ->( - expr: &mut F, - clauses: &[AstClauseCompiled], - context: &mut EvaluationContext, -) -> Result<(), EvalException> { - if let Some((first, tl)) = clauses.split_first() { - match &first.node { - ClauseCompiled::If(ref cond) => { - if !eval_expr(cond, context)?.to_bool() { - return Ok(()); - } - eval_one_dimensional_comprehension(expr, tl, context) - } - ClauseCompiled::For(ref var, ref iter) => { - let iterable = eval_expr(iter, context)?; - for item in &t(iterable.iter(), iter)? { - set_expr(var, context, item)?; - - eval_one_dimensional_comprehension(expr, tl, context)?; - } - - Ok(()) - } - } - } else { - expr(context) - } -} diff --git a/starlark/src/eval/def.rs b/starlark/src/eval/def.rs deleted file mode 100644 index 12ac3353..00000000 --- a/starlark/src/eval/def.rs +++ /dev/null @@ -1,325 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Implementation of `def`. 
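The `eval_one_dimensional_comprehension` function above drives every list/set/dict comprehension by recursing over the compiled clauses: an `If` clause filters the current bindings, a `For` clause iterates and rebinds, and only the innermost call evaluates the body expression. Below is a minimal standalone sketch of that control flow; the `Clause` enum and `eval_comprehension` function are hypothetical simplifications, not the crate's `AstClauseCompiled`/`EvaluationContext` machinery, which threads bindings through the context via `set_expr` instead of passing a value down the recursion.

    // Simplified model: a binding is a single i64 threaded through the
    // recursion; `For` rebinds it, `If` filters on it, and the body runs once
    // per surviving binding.
    enum Clause {
        For(Vec<i64>),
        If(fn(i64) -> bool),
    }

    fn eval_comprehension(body: &mut dyn FnMut(i64), clauses: &[Clause], current: i64) {
        match clauses.split_first() {
            None => body(current),
            Some((Clause::If(pred), rest)) => {
                if pred(current) {
                    eval_comprehension(body, rest, current);
                }
            }
            Some((Clause::For(items), rest)) => {
                for &item in items {
                    eval_comprehension(body, rest, item);
                }
            }
        }
    }

    fn main() {
        // Roughly `[x * 2 for x in [1, 2, 3, 4] if x % 2 == 0]`.
        let mut out = Vec::new();
        eval_comprehension(
            &mut |x| out.push(x * 2),
            &[Clause::For(vec![1, 2, 3, 4]), Clause::If(|x| x % 2 == 0)],
            0,
        );
        assert_eq!(out, vec![4, 8]);
    }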
- -use crate::environment::{Environment, TypeValues}; -use crate::eval::call_stack::CallStack; -use crate::eval::compiler::LocalCompiler; -use crate::eval::eval_block; -use crate::eval::expr::AstExprCompiled; -use crate::eval::expr::ExprCompiled; -use crate::eval::globals::Globals; -use crate::eval::locals::Locals; -use crate::eval::locals::LocalsBuilder; -use crate::eval::locals::LocalsQuery; -use crate::eval::stmt::BlockCompiled; -use crate::eval::EvalException; -use crate::eval::EvaluationContext; -use crate::eval::EvaluationContextEnvironment; -use crate::eval::IndexedGlobals; -use crate::eval::IndexedLocals; -use crate::stdlib::structs::StarlarkStruct; -use crate::syntax::ast::AssignTargetExpr; -use crate::syntax::ast::AstParameter; -use crate::syntax::ast::AstStatement; -use crate::syntax::ast::AstString; -use crate::syntax::ast::AugmentedAssignTargetExpr; -use crate::syntax::ast::Expr; -use crate::syntax::ast::Parameter; -use crate::syntax::ast::Statement; -use crate::values::context::EvaluationContextEnvironmentLocal; -use crate::values::error::ValueError; -use crate::values::function; -use crate::values::function::FunctionParameter; -use crate::values::function::FunctionSignature; -use crate::values::function::FunctionType; -use crate::values::function::StrOrRepr; -use crate::values::inspect::Inspectable; -use crate::values::none::NoneType; -use crate::values::string::rc::RcString; -use crate::values::Immutable; -use crate::values::TypedValue; -use crate::values::Value; -use crate::values::ValueOther; -use crate::values::ValueResult; -use codemap::CodeMap; -use codemap::Spanned; -use codemap_diagnostic::Diagnostic; -use linked_hash_map::LinkedHashMap; -use std::convert::TryInto; -use std::fmt; -use std::iter; -use std::sync::{Arc, Mutex}; - -#[derive(Debug, Clone)] -pub(crate) enum ParameterCompiled { - Normal(AstString), - WithDefaultValue(AstString, AstExprCompiled), - Args(AstString), - KWArgs(AstString), -} -pub(crate) type AstParameterCompiled = Spanned; - -impl ParameterCompiled { - fn compile( - param: AstParameter, - globals: &mut Globals, - ) -> Result { - Ok(Spanned { - span: param.span, - node: match param.node { - Parameter::Normal(n) => ParameterCompiled::Normal(n), - Parameter::WithDefaultValue(n, d) => ParameterCompiled::WithDefaultValue( - n, - ExprCompiled::compile_global(d, globals)?, - ), - Parameter::Args(args) => ParameterCompiled::Args(args), - Parameter::KWArgs(args) => ParameterCompiled::KWArgs(args), - }, - }) - } -} - -/// `def` AST with post-processing suitable for faster excecution -#[doc(hidden)] -#[derive(Debug, Clone)] -pub struct DefCompiled { - pub(crate) name: AstString, - pub(crate) slot: usize, - pub(crate) params: Vec, - pub(crate) suite: BlockCompiled, - locals: Locals, - globals: Globals, -} - -impl DefCompiled { - pub fn new( - name: AstString, - slot: usize, - params: Vec, - suite: AstStatement, - ) -> Result { - let mut locals_builder = LocalsBuilder::default(); - let mut globals = Globals::default(); - - for p in ¶ms { - locals_builder.register_local(p.name()); - } - - let params = params - .into_iter() - .map(|p| ParameterCompiled::compile(p, &mut globals)) - .collect::>()?; - - DefCompiled::collect_locals(&suite, &mut locals_builder); - - let locals = locals_builder.build(); - - let mut locals_query = LocalsQuery::new(&locals, &mut globals); - - let mut local_compiler = LocalCompiler::new(&mut locals_query); - - let suite = BlockCompiled::compile_local(suite, &mut local_compiler)?; - - Ok(DefCompiled { - name, - slot, - params, - suite, 
- locals, - globals, - }) - } - - fn collect_locals(stmt: &AstStatement, locals_builder: &mut LocalsBuilder) { - match stmt.node { - Statement::Assign(ref dest, ref source) => { - AssignTargetExpr::collect_locals_from_assign_expr(dest, locals_builder); - Expr::collect_locals(source, locals_builder); - } - Statement::AugmentedAssign(ref dest, _op, ref source) => { - AugmentedAssignTargetExpr::collect_locals_from_assign_expr(dest, locals_builder); - Expr::collect_locals(source, locals_builder); - } - Statement::For(ref dest, ref iter, ref body) => { - AssignTargetExpr::collect_locals_from_assign_expr(dest, locals_builder); - Expr::collect_locals(iter, locals_builder); - DefCompiled::collect_locals(body, locals_builder); - } - Statement::Statements(ref stmts) => { - for stmt in stmts { - DefCompiled::collect_locals(stmt, locals_builder); - } - } - Statement::If(ref cond, ref then_block) => { - Expr::collect_locals(cond, locals_builder); - DefCompiled::collect_locals(then_block, locals_builder); - } - Statement::IfElse(ref cond, ref then_block, ref else_block) => { - Expr::collect_locals(cond, locals_builder); - DefCompiled::collect_locals(then_block, locals_builder); - DefCompiled::collect_locals(else_block, locals_builder); - } - Statement::Return(ref expr) => { - if let Some(expr) = expr { - Expr::collect_locals(expr, locals_builder); - } - } - Statement::Expression(ref expr) => { - Expr::collect_locals(expr, locals_builder); - } - Statement::Break | Statement::Continue | Statement::Pass => {} - Statement::Load(..) | Statement::Def(..) => unreachable!(), - } - } -} - -impl Inspectable for DefCompiled { - fn inspect(&self) -> Value { - let mut fields = LinkedHashMap::::new(); - fields.insert("name".into(), self.name.node.clone().into()); - fields.insert("locals".into(), self.locals.inspect()); - fields.insert("suite".into(), self.suite.inspect()); - Value::new(StarlarkStruct::new(fields)) - } -} - -/// Starlark function internal representation and implementation of [`TypedValue`]. -pub(crate) struct Def { - signature: FunctionSignature, - function_type: FunctionType, - captured_env: Environment, - map: Arc>, - stmt: DefCompiled, -} - -impl Def { - pub fn new( - module: RcString, - signature: FunctionSignature, - stmt: DefCompiled, - map: Arc>, - env: Environment, - ) -> ValueOther { - // This can be implemented by delegating to `Function::new`, - // but having a separate type allows slight more efficient implementation - // and optimizations in the future. 
- ValueOther::new(Def { - function_type: FunctionType::Def(stmt.name.node.clone(), module), - signature, - stmt, - captured_env: env, - map, - }) - } -} - -impl TypedValue for Def { - type Holder = Immutable; - - const TYPE: &'static str = "function"; - - fn values_for_descendant_check_and_freeze<'a>( - &'a self, - ) -> Box + 'a> { - Box::new(iter::empty()) - } - - fn to_str_impl(&self, buf: &mut String) -> fmt::Result { - function::str_impl(buf, &self.function_type, &self.signature, StrOrRepr::Str) - } - - fn to_repr_impl(&self, buf: &mut String) -> fmt::Result { - function::str_impl(buf, &self.function_type, &self.signature, StrOrRepr::Repr) - } - - fn call( - &self, - call_stack: &mut CallStack, - type_values: &TypeValues, - positional: Vec, - named: LinkedHashMap, - args: Option, - kwargs: Option, - ) -> ValueResult { - // argument binding - let mut ctx = EvaluationContext { - call_stack, - env: EvaluationContextEnvironmentLocal { - globals: IndexedGlobals::new(&self.stmt.globals, self.captured_env.clone()), - locals: IndexedLocals::new(&self.stmt.locals), - }, - type_values, - map: self.map.clone(), - }; - - let mut parser = function::ParameterParser::new( - &self.signature, - &self.function_type, - positional, - named, - args, - kwargs, - )?; - - for (i, (s, positional_only)) in self.signature.iter().enumerate() { - let (name, v) = match s { - FunctionParameter::Normal(ref name) => { - (name, parser.next_normal(name, positional_only)?) - } - FunctionParameter::WithDefaultValue(ref name, ref default_value) => ( - name, - parser.next_with_default_value(name, positional_only, default_value), - ), - FunctionParameter::ArgsArray(ref name) => (name, parser.next_args_array().into()), - FunctionParameter::KWArgsDict(ref name) => { - (name, parser.next_kwargs_dict().try_into().unwrap()) - } - FunctionParameter::Optional(..) => { - unreachable!("optional parameters only exist in native functions") - } - }; - - // tricky part: we know that we assign locals for function parameters - // sequentially starting from 0 - if cfg!(debug_assertions) { - assert_eq!( - i, - ctx.env - .locals - .local_defs - .top_level_name_to_slot(name) - .unwrap() - ); - } - ctx.env.set_local(i, name, v); - } - - parser.check_no_more_args()?; - - match eval_block(&self.stmt.suite, &mut ctx) { - Err(EvalException::Return(_s, ret)) => Ok(ret), - Err(x) => Err(ValueError::DiagnosedError(x.into())), - Ok(..) => Ok(Value::new(NoneType::None)), - } - } - - fn inspect_custom(&self) -> Value { - let mut fields = LinkedHashMap::::new(); - fields.insert("captured_env".into(), self.captured_env.name().into()); - fields.insert("stmt".into(), self.stmt.inspect()); - Value::new(StarlarkStruct::new(fields)) - } -} diff --git a/starlark/src/eval/expr.rs b/starlark/src/eval/expr.rs deleted file mode 100644 index 9c05a61d..00000000 --- a/starlark/src/eval/expr.rs +++ /dev/null @@ -1,392 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! 
Interpreter-ready expr - -use crate::eval::compiler::GlobalCompiler; -use crate::eval::compiler::LocalCompiler; -use crate::eval::compiler::LocalOrGlobalCompiler; -use crate::eval::globals::Globals; -use crate::eval::locals::Locals; -use crate::eval::locals::LocalsQuery; -use crate::stdlib::structs::StarlarkStruct; -use crate::syntax::ast::AssignTargetExpr; -use crate::syntax::ast::AstAssignTargetExpr; -use crate::syntax::ast::AstAugmentedAssignTargetExpr; -use crate::syntax::ast::AstExpr; -use crate::syntax::ast::AstString; -use crate::syntax::ast::AugmentedAssignTargetExpr; -use crate::syntax::ast::BinOp; -use crate::syntax::ast::Expr; -use crate::syntax::ast::UnOp; -use crate::values::frozen::FrozenValue; -use crate::values::inspect::Inspectable; -use crate::values::string::rc::RcString; -use crate::values::Value; -use codemap::Spanned; -use codemap_diagnostic::Diagnostic; -use linked_hash_map::LinkedHashMap; - -/// After syntax check each variable is resolved to either global or slot -#[derive(Debug, Clone)] -pub(crate) struct GlobalOrSlot { - pub name: RcString, - pub local: bool, - pub slot: usize, -} -pub(crate) type AstGlobalOrSlot = Spanned; - -#[derive(Debug, Clone)] -pub(crate) enum AssignTargetExprCompiled { - Name(AstGlobalOrSlot), - Dot(AstExprCompiled, AstString), - ArrayIndirection(AstExprCompiled, AstExprCompiled), - Subtargets(Vec), -} -pub(crate) type AstAssignTargetExprCompiled = Spanned; - -#[derive(Debug, Clone)] -pub(crate) enum AugmentedAssignTargetExprCompiled { - // there's no augmented assignment for globals - Slot(usize, AstString), - Dot(AstExprCompiled, AstString), - ArrayIndirection(AstExprCompiled, AstExprCompiled), -} -pub(crate) type AstAugmentedAssignTargetExprCompiled = Spanned; - -#[doc(hidden)] -#[derive(Debug, Clone)] -pub(crate) enum ClauseCompiled { - For(AstAssignTargetExprCompiled, AstExprCompiled), - If(AstExprCompiled), -} -pub(crate) type AstClauseCompiled = Spanned; - -/// Expression wrapper which creates own local context. -/// Used to evaluate comprehensions -#[derive(Debug, Clone)] -pub(crate) struct ExprLocal { - pub expr: AstExprCompiled, - pub locals: Locals, - pub globals: Globals, -} - -impl Inspectable for ExprLocal { - fn inspect(&self) -> Value { - let mut fields = LinkedHashMap::::new(); - fields.insert("expr".into(), self.expr.inspect()); - fields.insert("locals".into(), self.locals.inspect()); - fields.insert("globals".into(), self.globals.inspect()); - Value::new(StarlarkStruct::new(fields)) - } -} - -/// Interperter-ready version of [`Expr`](crate::syntax::ast::Expr) -#[derive(Debug, Clone)] -pub(crate) enum ExprCompiled { - Tuple(Vec), - Dot(AstExprCompiled, AstString), - Call( - AstExprCompiled, - Vec, - Vec<(AstString, AstExprCompiled)>, - Option, - Option, - ), - ArrayIndirection(AstExprCompiled, AstExprCompiled), - Slice( - AstExprCompiled, - Option, - Option, - Option, - ), - Name(AstGlobalOrSlot), - Value(FrozenValue), - Not(AstExprCompiled), - And(AstExprCompiled, AstExprCompiled), - Or(AstExprCompiled, AstExprCompiled), - BinOp(BinOp, AstExprCompiled, AstExprCompiled), - UnOp(UnOp, AstExprCompiled), - If(AstExprCompiled, AstExprCompiled, AstExprCompiled), // Order: condition, v1, v2 <=> v1 if condition else v2 - List(Vec), - Set(Vec), - Dict(Vec<(AstExprCompiled, AstExprCompiled)>), - ListComprehension(AstExprCompiled, Vec), - SetComprehension(AstExprCompiled, Vec), - DictComprehension((AstExprCompiled, AstExprCompiled), Vec), - /// Creates a local scope for evaluation of subexpression in global scope. 
- /// Used for evaluate comprehensions in global scope. - Local(ExprLocal), -} - -#[doc(hidden)] -pub(crate) type AstExprCompiled = Box>; - -impl GlobalOrSlot { - pub fn inspect(&self) -> Value { - let GlobalOrSlot { name, local, slot } = self; - Value::from((name.clone(), if *local { "local" } else { "global" }, *slot)) - } -} - -impl ExprCompiled { - pub(crate) fn compile( - expr: AstExpr, - compiler: &mut C, - ) -> Result { - Ok(Box::new(Spanned { - span: expr.span, - node: match expr.node { - Expr::Tuple(args) => ExprCompiled::Tuple( - args.into_iter() - .map(|a| Self::compile(a, compiler)) - .collect::>()?, - ), - Expr::List(args) => ExprCompiled::List( - args.into_iter() - .map(|a| Self::compile(a, compiler)) - .collect::>()?, - ), - Expr::Set(args) => ExprCompiled::Set( - args.into_iter() - .map(|a| Self::compile(a, compiler)) - .collect::>()?, - ), - Expr::Dict(args) => ExprCompiled::Dict( - args.into_iter() - .map(|(k, v)| { - Ok((Self::compile(k, compiler)?, Self::compile(v, compiler)?)) - }) - .collect::>()?, - ), - Expr::Identifier(ident) => ExprCompiled::Name(Spanned { - span: ident.span, - node: compiler.ident(ident), - }), - Expr::Dot(object, field) => { - ExprCompiled::Dot(Self::compile(object, compiler)?, field) - } - Expr::ArrayIndirection(array, index) => ExprCompiled::ArrayIndirection( - Self::compile(array, compiler)?, - Self::compile(index, compiler)?, - ), - Expr::Call(f, args, kwargs, star, star_star) => ExprCompiled::Call( - Self::compile(f, compiler)?, - args.into_iter() - .map(|a| Self::compile(a, compiler)) - .collect::>()?, - kwargs - .into_iter() - .map(|(k, v)| Ok((k, Self::compile(v, compiler)?))) - .collect::>()?, - star.map(|e| Self::compile(e, compiler)).transpose()?, - star_star.map(|e| Self::compile(e, compiler)).transpose()?, - ), - Expr::Slice(array, a, b, c) => ExprCompiled::Slice( - Self::compile(array, compiler)?, - a.map(|e| Self::compile(e, compiler)).transpose()?, - b.map(|e| Self::compile(e, compiler)).transpose()?, - c.map(|e| Self::compile(e, compiler)).transpose()?, - ), - Expr::IntLiteral(i) => ExprCompiled::Value(FrozenValue::from(i.node)), - Expr::StringLiteral(s) => { - ExprCompiled::Value(FrozenValue::new(s.node.into()).unwrap()) - } - Expr::Not(e) => ExprCompiled::Not(Self::compile(e, compiler)?), - Expr::And(lhs, rhs) => { - ExprCompiled::And(Self::compile(lhs, compiler)?, Self::compile(rhs, compiler)?) - } - Expr::Or(lhs, rhs) => { - ExprCompiled::Or(Self::compile(lhs, compiler)?, Self::compile(rhs, compiler)?) - } - Expr::BinOp(op, lhs, rhs) => ExprCompiled::BinOp( - op, - Self::compile(lhs, compiler)?, - Self::compile(rhs, compiler)?, - ), - Expr::UnOp(op, e) => ExprCompiled::UnOp(op, Self::compile(e, compiler)?), - Expr::If(cond, then_expr, else_expr) => ExprCompiled::If( - Self::compile(cond, compiler)?, - Self::compile(then_expr, compiler)?, - Self::compile(else_expr, compiler)?, - ), - Expr::ListComprehension(expr, clauses) => { - compiler.list_comprenesion(expr.span, expr, clauses)? - } - Expr::SetComprehension(expr, clauses) => { - compiler.set_comprenesion(expr.span, expr, clauses)? - } - Expr::DictComprehension((key, value), clauses) => { - compiler.dict_comprenesion(expr.span, key, value, clauses)? 
- } - }, - })) - } - - pub(crate) fn compile_local<'a>( - expr: AstExpr, - locals_query: &'a mut LocalsQuery<'a>, - ) -> Result { - Self::compile(expr, &mut LocalCompiler::new(locals_query)) - } - - pub(crate) fn compile_global( - expr: AstExpr, - globals: &mut Globals, - ) -> Result { - Self::compile(expr, &mut GlobalCompiler::new(globals)) - } -} - -impl Inspectable for ExprCompiled { - fn inspect(&self) -> Value { - let (name, param): (&str, Value) = match &self { - ExprCompiled::Dot(object, field) => ("dot", (object.inspect(), field.inspect()).into()), - ExprCompiled::ArrayIndirection(array, index) => ( - "array_indirection", - (array.inspect(), index.inspect()).into(), - ), - ExprCompiled::Call(expr, args, kwargs, star, star_star) => { - ("call", (expr, args, kwargs, star, star_star).inspect()) - } - ExprCompiled::Slice(array, a, b, c) => ("slice", (array, a, b, c).inspect()), - ExprCompiled::Name(n) => ("name", n.node.inspect()), - ExprCompiled::Value(v) => ("value", Value::from(v.clone())), - ExprCompiled::Not(e) => ("not", e.inspect()), - ExprCompiled::And(l, r) => ("and", (l, r).inspect()), - ExprCompiled::Or(l, r) => ("or", (l, r).inspect()), - ExprCompiled::BinOp(op, l, r) => ("bin_op", (format!("{:?}", op), l, r).inspect()), - ExprCompiled::UnOp(op, e) => ("un_op", (format!("{:?}", op), e).inspect()), - ExprCompiled::If(cond, then_expr, else_expr) => { - ("if", (cond, then_expr, else_expr).inspect()) - } - ExprCompiled::List(e) => ("list", e.inspect()), - ExprCompiled::Tuple(e) => ("tuple", e.inspect()), - ExprCompiled::Set(e) => ("set", e.inspect()), - ExprCompiled::Dict(d) => ("dict", d.inspect()), - ExprCompiled::ListComprehension(expr, clauses) => { - ("list_comprehension", (expr, clauses).inspect()) - } - ExprCompiled::DictComprehension(expr, clauses) => { - ("dict_comprehension", (expr, clauses).inspect()) - } - ExprCompiled::SetComprehension(expr, clauses) => { - ("set_comprehension", (expr, clauses).inspect()) - } - ExprCompiled::Local(e) => ("local", e.inspect()), - }; - Value::from((Value::from(name), param)) - } -} - -impl AssignTargetExprCompiled { - pub(crate) fn compile( - expr: AstAssignTargetExpr, - compiler: &mut C, - ) -> Result { - Ok(Spanned { - span: expr.span, - node: match expr.node { - AssignTargetExpr::Identifier(a) => { - AssignTargetExprCompiled::Name(compiler.ast_ident(a)) - } - AssignTargetExpr::ArrayIndirection(array, index) => { - AssignTargetExprCompiled::ArrayIndirection( - ExprCompiled::compile(array, compiler)?, - ExprCompiled::compile(index, compiler)?, - ) - } - AssignTargetExpr::Dot(object, field) => { - AssignTargetExprCompiled::Dot(ExprCompiled::compile(object, compiler)?, field) - } - AssignTargetExpr::Subtargets(subtargets) => AssignTargetExprCompiled::Subtargets( - subtargets - .into_iter() - .map(|t| AssignTargetExprCompiled::compile(t, compiler)) - .collect::>()?, - ), - }, - }) - } -} - -impl AugmentedAssignTargetExprCompiled { - pub(crate) fn compile_impl( - expr: AstAugmentedAssignTargetExpr, - compiler: &mut C, - ) -> Result { - Ok(Spanned { - span: expr.span, - node: match expr.node { - AugmentedAssignTargetExpr::Identifier(a) => { - let span = a.span; - let GlobalOrSlot { slot, local, name } = compiler.ident(a); - assert!(local, "global must be filtered out at parse level"); - AugmentedAssignTargetExprCompiled::Slot(slot, Spanned { span, node: name }) - } - AugmentedAssignTargetExpr::ArrayIndirection(array, index) => { - AugmentedAssignTargetExprCompiled::ArrayIndirection( - ExprCompiled::compile(array, compiler)?, - 
ExprCompiled::compile(index, compiler)?, - ) - } - AugmentedAssignTargetExpr::Dot(object, field) => { - AugmentedAssignTargetExprCompiled::Dot( - ExprCompiled::compile(object, compiler)?, - field, - ) - } - }, - }) - } -} - -impl Inspectable for AssignTargetExprCompiled { - fn inspect(&self) -> Value { - let (name, param): (&str, Value) = match self { - AssignTargetExprCompiled::Dot(object, field) => ("dot", (object, field).inspect()), - AssignTargetExprCompiled::ArrayIndirection(array, index) => { - ("array_indirection", (array, index).inspect()) - } - AssignTargetExprCompiled::Name(name) => ("name", name.node.inspect()), - AssignTargetExprCompiled::Subtargets(st) => ("subtargets", st.inspect()), - }; - Value::from((name, param)) - } -} - -impl Inspectable for AugmentedAssignTargetExprCompiled { - fn inspect(&self) -> Value { - let (name, param): (&str, Value) = match self { - AugmentedAssignTargetExprCompiled::Slot(slot, name) => ("slot", (slot, name).inspect()), - AugmentedAssignTargetExprCompiled::ArrayIndirection(array, index) => { - ("array_indirection", (array, index).inspect()) - } - AugmentedAssignTargetExprCompiled::Dot(object, field) => { - ("dot", (object, field).inspect()) - } - }; - Value::from((name, param)) - } -} - -impl Inspectable for ClauseCompiled { - fn inspect(&self) -> Value { - let (name, param): (&str, Value) = match self { - ClauseCompiled::If(cond) => ("if", cond.inspect()), - ClauseCompiled::For(var, over) => ("for", (var, over).inspect()), - }; - Value::from((name, param)) - } -} diff --git a/starlark/src/eval/globals.rs b/starlark/src/eval/globals.rs deleted file mode 100644 index e37d06d3..00000000 --- a/starlark/src/eval/globals.rs +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Utilities to work with scope global variables. 
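The `Globals` table defined just below maps each distinct global name to a dense index, so compiled code can address globals by slot rather than by string lookup. A self-contained sketch of that interning pattern follows; `SlotTable` is a hypothetical stand-in for the crate's `Globals` and mirrors its `register_global` logic.

    use std::collections::HashMap;

    // First registration of a name hands out the next free index; repeated
    // registrations return the same index.
    #[derive(Default)]
    struct SlotTable {
        name_to_index: HashMap<String, usize>,
    }

    impl SlotTable {
        fn register(&mut self, name: &str) -> usize {
            let next = self.name_to_index.len();
            *self.name_to_index.entry(name.to_owned()).or_insert(next)
        }
    }

    fn main() {
        let mut table = SlotTable::default();
        assert_eq!(table.register("x"), 0);
        assert_eq!(table.register("y"), 1);
        assert_eq!(table.register("x"), 0); // same slot on re-registration
    }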
- -use crate::stdlib::structs::StarlarkStruct; -use crate::values::inspect::Inspectable; -use crate::values::string::rc::RcString; -use crate::values::Value; -use linked_hash_map::LinkedHashMap; -use std::collections::HashMap; - -#[derive(Default, Debug, Clone)] -pub(crate) struct Globals { - name_to_index: HashMap, -} - -impl Globals { - pub fn register_global(&mut self, name: &str) -> usize { - let global_count = self.name_to_index.len(); - *self - .name_to_index - .entry(name.to_owned()) - .or_insert(global_count) - } - - /// Return the number of global variable slots - pub fn len(&self) -> usize { - self.name_to_index.len() - } -} - -impl Inspectable for Globals { - fn inspect(&self) -> Value { - let mut fields = LinkedHashMap::::new(); - fields.insert("name_to_index".into(), self.name_to_index.inspect()); - Value::new(StarlarkStruct::new(fields)) - } -} diff --git a/starlark/src/eval/interactive.rs b/starlark/src/eval/interactive.rs deleted file mode 100644 index 3834de83..00000000 --- a/starlark/src/eval/interactive.rs +++ /dev/null @@ -1,108 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Defines very basic versions of the evaluation functions that are suitable for interactive use: -//! they output diagnostic to stderr and the result value to stdout. -use crate::environment::{Environment, TypeValues}; -use crate::syntax::dialect::Dialect; -use crate::values::Value; -use codemap::CodeMap; -use codemap_diagnostic::{ColorConfig, Diagnostic, Emitter}; -use std::sync::{Arc, Mutex}; - -pub struct EvalError { - codemap: Arc>, - diagnostic: Diagnostic, -} - -impl EvalError { - pub fn write_to_stderr(self) { - Emitter::stderr(ColorConfig::Auto, Some(&self.codemap.lock().unwrap())) - .emit(&[self.diagnostic]) - } -} - -/// Evaluate a string content, mutate the environment accordingly, and return the value of the last -/// statement, or a printable error. -/// -/// # Arguments -/// -/// __This version uses the [`SimpleFileLoader`](crate::eval::simple::SimpleFileLoader) -/// implementation for the file loader__ -/// -/// * path: the name of the file being evaluated, for diagnostics -/// * content: the content to evaluate -/// * dialect: starlark language dialect -/// * env: the environment to mutate during the evaluation -pub fn eval( - path: &str, - content: &str, - dialect: Dialect, - env: &mut Environment, - type_values: &TypeValues, - file_loader_env: Environment, -) -> Result, EvalError> { - let map = Arc::new(Mutex::new(CodeMap::new())); - transform_result( - super::simple::eval( - &map, - path, - content, - dialect, - env, - type_values, - file_loader_env, - ), - map, - ) -} - -/// Evaluate a file, mutate the environment accordingly, and return the value of the last -/// statement, or a printable error. 
-/// -/// __This version uses the [`SimpleFileLoader`](crate::eval::simple::SimpleFileLoader) -/// implementation for the file loader__ -/// -/// # Arguments -/// -/// * path: the file to parse and evaluate -/// * dialect: Starlark language dialect -/// * env: the environment to mutate during the evaluation -pub fn eval_file( - path: &str, - dialect: Dialect, - env: &mut Environment, - type_values: &TypeValues, - file_loader_env: Environment, -) -> Result, EvalError> { - let map = Arc::new(Mutex::new(CodeMap::new())); - transform_result( - super::simple::eval_file(&map, path, dialect, env, type_values, file_loader_env), - map, - ) -} - -fn transform_result( - result: Result, - codemap: Arc>, -) -> Result, EvalError> { - match result { - Ok(ref v) if v.get_type() == "NoneType" => Ok(None), - Ok(v) => Ok(Some(v)), - Err(diagnostic) => Err(EvalError { - codemap, - diagnostic, - }), - } -} diff --git a/starlark/src/eval/locals.rs b/starlark/src/eval/locals.rs deleted file mode 100644 index fc3bce71..00000000 --- a/starlark/src/eval/locals.rs +++ /dev/null @@ -1,262 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Utilities to work with scope local variables. - -use crate::eval::globals::Globals; -use crate::stdlib::structs::StarlarkStruct; -use crate::values::dict::Dictionary; -use crate::values::inspect::Inspectable; -use crate::values::string::rc::RcString; -use crate::values::Value; -use linked_hash_map::LinkedHashMap; -use std::collections::hash_map; -use std::collections::HashMap; - -#[derive(Default, Debug, Clone)] -struct Scope { - /// Name to slot mapping in current scope - name_to_slot: HashMap, - nested_scopes: Vec, -} - -/// Mapping of local variables and scopes to local variable slots -#[derive(Default, Debug, Clone)] -#[doc(hidden)] -pub struct Locals { - locals: Scope, - local_count: usize, -} - -/// Utility to assign slots to local variables -#[derive(Default)] -pub(crate) struct LocalsBuilder { - locals: Locals, - current_scope_path: Vec, -} - -/// Utility to query slots assigned to local variables -pub(crate) struct LocalsQuery<'a> { - locals: &'a Locals, - globals: &'a mut Globals, - current_scope_path: Vec, - next: usize, -} - -impl Scope { - /// Find local variable index in given scope - fn local_index(&self, name: &str, scope_path: &[usize]) -> Option { - let deepest_index = if let Some((first, rem)) = scope_path.split_first() { - self.nested_scopes[*first].local_index(name, rem) - } else { - None - }; - match deepest_index { - Some(index) => Some(index), - None => self.name_to_slot.get(name).cloned(), - } - } - - fn scope_by_path<'a>(&'a self, path: &[usize]) -> &'a Scope { - match path.split_first() { - Some((&first, rem)) => self.nested_scopes[first].scope_by_path(rem), - None => self, - } - } -} - -impl Inspectable for Scope { - fn inspect(&self) -> Value { - let mut fields = LinkedHashMap::::new(); - - let mut name_to_slot = Dictionary::new_typed(); - for (n, s) in &self.name_to_slot { - 
name_to_slot.insert(n.as_str().into(), (*s).into()).unwrap(); - } - fields.insert("name_to_slot".into(), Value::new(name_to_slot)); - - fields.insert("nested_scopes".into(), self.nested_scopes.inspect()); - - Value::new(StarlarkStruct::new(fields)) - } -} - -impl Locals { - /// Return the number of local variable slots - pub fn len(&self) -> usize { - self.local_count - } - - pub fn top_level_name_to_slot(&self, name: &str) -> Option { - self.locals.local_index(name, &[]) - } -} - -impl Inspectable for Locals { - fn inspect(&self) -> Value { - let mut fields = LinkedHashMap::::new(); - fields.insert("count".into(), (self.local_count as i64).into()); - fields.insert("locals".into(), self.locals.inspect()); - Value::new(StarlarkStruct::new(fields)) - } -} - -impl LocalsBuilder { - fn current_locals(&mut self) -> &mut Scope { - let mut locals = &mut self.locals.locals; - for &index in &self.current_scope_path { - locals = &mut locals.nested_scopes[index]; - } - locals - } - - /// Create a new nested scope - pub fn push_scope(&mut self) { - let locals = self.current_locals(); - locals.nested_scopes.push(Scope::default()); - let n = locals.nested_scopes.len() - 1; - self.current_scope_path.push(n); - } - - /// Go to one scope down - pub fn pop_scope(&mut self) { - self.current_scope_path.pop().unwrap(); - } - - /// Register a variable in current scope - pub fn register_local(&mut self, name: RcString) { - let local_count = self.locals.local_count; - if let hash_map::Entry::Vacant(e) = self.current_locals().name_to_slot.entry(name) { - e.insert(local_count); - } - self.locals.local_count += 1; - } - - /// Finish the building - pub fn build(self) -> Locals { - // sanity check - assert!(self.current_scope_path.is_empty()); - - self.locals - } -} - -impl<'a> LocalsQuery<'a> { - pub fn new(locals: &'a Locals, globals: &'a mut Globals) -> LocalsQuery<'a> { - LocalsQuery { - locals, - globals, - current_scope_path: Vec::new(), - next: 0, - } - } - - /// Return a slot for a variable visible in current scope. - /// Local could be registered in current scope or in parent scopes, - /// but not in nested scopes. 
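The `slot` query documented above resolves a name by walking the current scope path from the innermost nested scope outwards (the lookup itself is `Scope::local_index` earlier in this file) and, if nothing matches, falls back to registering a global slot. A standalone sketch of that resolution order, with a hypothetical `MiniScope` in place of the crate's `Scope`:

    use std::collections::HashMap;

    struct MiniScope {
        name_to_slot: HashMap<String, usize>,
        nested: Vec<MiniScope>,
    }

    // Innermost scope on the path wins; outer scopes are consulted on the way
    // back out; `None` means the caller should treat the name as a global.
    fn resolve(scope: &MiniScope, path: &[usize], name: &str) -> Option<usize> {
        let deeper = path
            .split_first()
            .and_then(|(&first, rest)| resolve(&scope.nested[first], rest, name));
        deeper.or_else(|| scope.name_to_slot.get(name).copied())
    }

    fn main() {
        let inner = MiniScope {
            name_to_slot: HashMap::from([("a".to_string(), 1usize)]),
            nested: vec![],
        };
        let top = MiniScope {
            name_to_slot: HashMap::from([("a".to_string(), 0usize)]),
            nested: vec![inner],
        };
        assert_eq!(resolve(&top, &[0], "a"), Some(1)); // innermost wins
        assert_eq!(resolve(&top, &[], "a"), Some(0));
        assert_eq!(resolve(&top, &[], "b"), None); // would become a global slot
    }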
- pub fn slot(&mut self, name: &str) -> (usize, bool) { - match self - .locals - .locals - .local_index(name, &self.current_scope_path) - { - Some(slot) => (slot, true), - None => (self.globals.register_global(name), false), - } - } - - /// Go to the next nested scope - pub fn push_next_scope(&mut self) { - self.current_scope_path.push(self.next); - self.next = 0; - } - - /// Pop a scope - pub fn pop_scope(&mut self) { - // We must not leave the current scope if - // nested scopes were not traversed - assert_eq!( - self.next, - self.locals - .locals - .scope_by_path(&self.current_scope_path) - .nested_scopes - .len() - ); - - self.next = self.current_scope_path.pop().unwrap() + 1; - } -} - -#[cfg(test)] -mod test { - use super::*; - - #[test] - fn one_level() { - let mut builder = LocalsBuilder::default(); - builder.register_local("a".into()); - builder.register_local("b".into()); - builder.register_local("a".into()); - let locals = builder.build(); - let mut globals = Globals::default(); - let mut query = LocalsQuery::new(&locals, &mut globals); - assert_eq!((0, true), query.slot("a")); - assert_eq!((1, true), query.slot("b")); - assert_eq!((0, false), query.slot("c")); - } - - #[test] - fn override_on_second_level() { - let mut builder = LocalsBuilder::default(); - builder.register_local("a".into()); - builder.push_scope(); - builder.register_local("a".into()); - builder.pop_scope(); - let locals = builder.build(); - let mut globals = Globals::default(); - let mut query = LocalsQuery::new(&locals, &mut globals); - assert_eq!((0, true), query.slot("a")); - query.push_next_scope(); - assert_eq!((1, true), query.slot("a")); - query.pop_scope(); - assert_eq!((0, true), query.slot("a")); - } - - #[test] - fn overrride_twice_on_second_level() { - // Here we have three distinct `a` variables: - // in the top scope, and in two nested scopes - let mut builder = LocalsBuilder::default(); - builder.register_local("a".into()); - builder.push_scope(); - builder.register_local("a".into()); - builder.pop_scope(); - builder.push_scope(); - builder.register_local("a".into()); - builder.pop_scope(); - let locals = builder.build(); - let mut globals = Globals::default(); - let mut query = LocalsQuery::new(&locals, &mut globals); - assert_eq!((0, true), query.slot("a")); - query.push_next_scope(); - assert_eq!((1, true), query.slot("a")); - query.pop_scope(); - assert_eq!((0, true), query.slot("a")); - query.push_next_scope(); - assert_eq!((2, true), query.slot("a")); - query.pop_scope(); - assert_eq!((0, true), query.slot("a")); - } -} diff --git a/starlark/src/eval/mod.rs b/starlark/src/eval/mod.rs deleted file mode 100644 index 081ddec1..00000000 --- a/starlark/src/eval/mod.rs +++ /dev/null @@ -1,891 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Evaluation environment, provide converters from Ast* element to value. -//! -//! # Starlark and BUILD dialect -//! -//! All evaluation function can evaluate the full Starlark language (i.e. 
Bazel's -//! .bzl files) or the BUILD file dialect (i.e. used to interpret Bazel's BUILD file). -//! The BUILD dialect does not allow `def` statements. -use crate::environment::Environment; -use crate::environment::TypeValues; -use crate::eval::call_stack::CallStack; -use crate::eval::compr::eval_one_dimensional_comprehension; -use crate::eval::def::Def; -use crate::eval::def::ParameterCompiled; -use crate::eval::expr::AssignTargetExprCompiled; -use crate::eval::expr::AstAssignTargetExprCompiled; -use crate::eval::expr::AstAugmentedAssignTargetExprCompiled; -use crate::eval::expr::AstExprCompiled; -use crate::eval::expr::AugmentedAssignTargetExprCompiled; -use crate::eval::expr::ExprCompiled; -use crate::eval::expr::ExprLocal; -use crate::eval::module::Module; -use crate::eval::stmt::AstStatementCompiled; -use crate::eval::stmt::BlockCompiled; -use crate::eval::stmt::StatementCompiled; -use crate::linked_hash_set::value::Set; -use crate::syntax::ast::BinOp; -use crate::syntax::ast::UnOp; -use crate::syntax::ast::*; -use crate::syntax::dialect::Dialect; -use crate::syntax::errors::SyntaxError; -use crate::syntax::lexer::{LexerIntoIter, LexerItem}; -use crate::syntax::parser::{parse, parse_file, parse_lexer}; -use crate::values::context::EvaluationContext; -use crate::values::context::EvaluationContextEnvironment; -use crate::values::context::EvaluationContextEnvironmentLocal; -use crate::values::context::EvaluationContextEnvironmentModule; -use crate::values::context::IndexedGlobals; -use crate::values::context::IndexedLocals; -use crate::values::dict::Dictionary; -use crate::values::error::UnsupportedOperation; -use crate::values::error::ValueError; -use crate::values::function::FunctionParameter; -use crate::values::function::FunctionSignature; -use crate::values::function::WrappedMethod; -use crate::values::none::NoneType; -use crate::values::string::rc::RcString; -use crate::values::*; -use codemap::{CodeMap, Span, Spanned}; -use codemap_diagnostic::{Diagnostic, Level, SpanLabel, SpanStyle}; -use linked_hash_map::LinkedHashMap; -use std::cmp::Ordering; -use std::sync::{Arc, Mutex}; - -fn eval_vector( - v: &[AstExprCompiled], - ctx: &mut EvaluationContext, -) -> Result, EvalException> { - v.into_iter().map(|s| eval_expr(s, ctx)).collect() -} - -// TODO: move that code in some common error code list? 
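The `eval_vector` helper above relies on the standard library's ability to collect an iterator of `Result`s into a `Result` of a collection, short-circuiting on the first error. A tiny self-contained example of the same idiom, using unrelated toy types rather than the crate's `Value`/`EvalException`:

    use std::num::ParseIntError;

    // Collecting Iterator<Item = Result<T, E>> into Result<Vec<T>, E> stops at
    // the first Err, which is how eval_vector propagates the first failing
    // sub-expression.
    fn parse_all(inputs: &[&str]) -> Result<Vec<i64>, ParseIntError> {
        inputs.iter().map(|s| s.parse::<i64>()).collect()
    }

    fn main() {
        assert_eq!(parse_all(&["1", "2", "3"]), Ok(vec![1, 2, 3]));
        assert!(parse_all(&["1", "oops"]).is_err());
    }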
-// CE prefix = Critical Evaluation -#[doc(hidden)] -pub const BREAK_ERROR_CODE: &str = "CE00"; -#[doc(hidden)] -pub const CONTINUE_ERROR_CODE: &str = "CE01"; -#[doc(hidden)] -pub const RETURN_ERROR_CODE: &str = "CE02"; -#[doc(hidden)] -pub const INCORRECT_LEFT_VALUE_ERROR_CODE: &str = "CE03"; -#[doc(hidden)] -pub const INCORRECT_UNPACK_ERROR_CODE: &str = "CE04"; -#[doc(hidden)] -pub const RECURSION_ERROR_CODE: &str = "CE05"; - -#[doc(hidden)] -#[derive(Debug, Clone)] -pub enum EvalException { - // Flow control statement reached - Break(Span), - Continue(Span), - Return(Span, Value), - // Error bubbling up as diagnostics - DiagnosedError(Diagnostic), - // Expression used as left value cannot be assigned - IncorrectLeftValue(Span), - // Incorrect number of value to unpack (expected, got) - IncorrectNumberOfValueToUnpack(Span, i64, i64), - // Recursion - Recursion(Span, String, CallStack), -} - -impl From for EvalException { - fn from(diagnostic: Diagnostic) -> Self { - EvalException::DiagnosedError(diagnostic) - } -} - -type EvalResult = Result; - -/// An object with [`Span`] -trait AsSpan { - fn as_span(&self) -> Span; -} - -impl AsSpan for Span { - fn as_span(&self) -> Span { - *self - } -} - -impl AsSpan for Spanned { - fn as_span(&self) -> Span { - self.span - } -} - -impl AsSpan for Box> { - fn as_span(&self) -> Span { - self.span - } -} - -/// Convert syntax error to spanned evaluation exception -fn t(r: Result, spanned: &S) -> Result { - match r { - Ok(v) => Ok(v), - Err(e) => Err(EvalException::DiagnosedError( - e.to_diagnostic(spanned.as_span()), - )), - } -} - -impl Into for EvalException { - fn into(self) -> Diagnostic { - match self { - EvalException::DiagnosedError(e) => e, - EvalException::Break(s) => Diagnostic { - level: Level::Error, - message: "Break statement used outside of a loop".to_owned(), - code: Some(BREAK_ERROR_CODE.to_owned()), - spans: vec![SpanLabel { - span: s, - style: SpanStyle::Primary, - label: None, - }], - }, - EvalException::Continue(s) => Diagnostic { - level: Level::Error, - message: "Continue statement used outside of a loop".to_owned(), - code: Some(CONTINUE_ERROR_CODE.to_owned()), - spans: vec![SpanLabel { - span: s, - style: SpanStyle::Primary, - label: None, - }], - }, - EvalException::Return(s, ..) 
=> Diagnostic { - level: Level::Error, - message: "Return statement used outside of a function call".to_owned(), - code: Some(RETURN_ERROR_CODE.to_owned()), - spans: vec![SpanLabel { - span: s, - style: SpanStyle::Primary, - label: None, - }], - }, - EvalException::IncorrectLeftValue(s) => Diagnostic { - level: Level::Error, - message: "Incorrect expression as left value".to_owned(), - code: Some(INCORRECT_LEFT_VALUE_ERROR_CODE.to_owned()), - spans: vec![SpanLabel { - span: s, - style: SpanStyle::Primary, - label: None, - }], - }, - EvalException::IncorrectNumberOfValueToUnpack(s, expected, got) => Diagnostic { - level: Level::Error, - message: format!("Unpacked {} values but expected {}", got, expected), - code: Some(INCORRECT_UNPACK_ERROR_CODE.to_owned()), - spans: vec![SpanLabel { - span: s, - style: SpanStyle::Primary, - label: None, - }], - }, - EvalException::Recursion(s, f, stack) => Diagnostic { - level: Level::Error, - message: format!( - "Function {} recursed, call stack:{}", - f, - stack.print_with_newline_before() - ), - code: Some(RECURSION_ERROR_CODE.to_owned()), - spans: vec![SpanLabel { - span: s, - style: SpanStyle::Primary, - label: Some("Recursive call".to_owned()), - }], - }, - } - } -} - -/// A trait for loading file using the load statement path. -pub trait FileLoader { - /// Open the file given by the load statement `path`. - fn load(&self, path: &str, type_values: &TypeValues) -> Result; -} - -fn eval_un_op(op: UnOp, v: Value) -> Result { - match op { - UnOp::Plus => v.plus(), - UnOp::Minus => v.minus(), - } -} - -fn eval_bin_op(op: BinOp, l: Value, r: Value) -> Result { - match op { - BinOp::EqualsTo => l.equals(&r).map(Value::new), - BinOp::Different => l.equals(&r).map(|b| Value::new(!b)), - BinOp::LowerThan => l.compare(&r).map(|c| Value::new(c == Ordering::Less)), - BinOp::GreaterThan => l.compare(&r).map(|c| Value::new(c == Ordering::Greater)), - BinOp::LowerOrEqual => l.compare(&r).map(|c| Value::new(c != Ordering::Greater)), - BinOp::GreaterOrEqual => l.compare(&r).map(|c| Value::new(c != Ordering::Less)), - BinOp::In => r.contains(&l).map(Value::new), - BinOp::NotIn => r.contains(&l).map(|r| Value::new(!r)), - BinOp::Substraction => l.sub(r), - BinOp::Addition => l.add(r), - BinOp::Multiplication => l.mul(r), - BinOp::Percent => l.percent(r), - BinOp::Division => { - // No types currently support / so always error. 
- return Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::Div, - left: l.get_type().to_string(), - right: Some(r.get_type().to_string()), - }); - } - BinOp::FloorDivision => l.floor_div(r), - BinOp::Pipe => l.pipe(r), - } -} - -fn eval_bin_op_expr( - expr: &AstExprCompiled, - op: BinOp, - l: &AstExprCompiled, - r: &AstExprCompiled, - context: &mut EvaluationContext, -) -> EvalResult { - let l = eval_expr(l, context)?; - let r = eval_expr(r, context)?; - - t(eval_bin_op(op, l, r), expr) -} - -fn eval_slice( - this: &AstExprCompiled, - a: &AstExprCompiled, - start: &Option, - stop: &Option, - stride: &Option, - context: &mut EvaluationContext, -) -> EvalResult { - let a = eval_expr(a, context)?; - let start = match start { - Some(ref e) => Some(eval_expr(e, context)?), - None => None, - }; - let stop = match stop { - Some(ref e) => Some(eval_expr(e, context)?), - None => None, - }; - let stride = match stride { - Some(ref e) => Some(eval_expr(e, context)?), - None => None, - }; - t(a.slice(start, stop, stride), this) -} - -fn eval_call( - this: &AstExprCompiled, - e: &AstExprCompiled, - pos: &[AstExprCompiled], - named: &[(AstString, AstExprCompiled)], - args: &Option, - kwargs: &Option, - context: &mut EvaluationContext, -) -> EvalResult { - let npos = eval_vector(pos, context)?; - let mut nnamed = LinkedHashMap::new(); - for &(ref k, ref v) in named.iter() { - nnamed.insert(k.node.clone(), eval_expr(v, context)?); - } - let nargs = if let Some(ref x) = args { - Some(eval_expr(x, context)?) - } else { - None - }; - let nkwargs = if let Some(ref x) = kwargs { - Some(eval_expr(x, context)?) - } else { - None - }; - let f = eval_expr(e, context)?; - if context.call_stack.contains(f.function_id()) { - let mut new_stack = context.call_stack.clone(); - new_stack.push(f.clone(), context.map.clone(), this.span.low()); - Err(EvalException::Recursion(this.span, f.to_repr(), new_stack)) - } else { - context - .call_stack - .push(f.clone(), context.map.clone(), this.span.low()); - let r = t( - eval_expr(e, context)?.call( - context.call_stack, - context.type_values, - npos, - nnamed, - nargs, - nkwargs, - ), - this, - ); - context.call_stack.pop(); - r - } -} - -fn eval_dot( - this: &AstExprCompiled, - e: &AstExprCompiled, - s: &AstString, - context: &mut EvaluationContext, -) -> EvalResult { - let left = eval_expr(e, context)?; - if let Some(v) = context.type_values.get_type_value(&left, &s.node) { - if v.get_type() == "function" { - // Insert self so the method see the object it is acting on - Ok(WrappedMethod::new(left, v)) - } else { - Ok(v) - } - } else { - t(left.get_attr(&s.node), this) - } -} - -enum TransformedExpr { - Dot(Value, RcString, Span), - ArrayIndirection(Value, Value, Span), - Slot(usize, AstString), -} - -fn set_transformed( - transformed: &TransformedExpr, - context: &mut EvaluationContext, - new_value: Value, -) -> EvalResult { - let ok = Ok(Value::new(NoneType::None)); - match transformed { - TransformedExpr::Dot(ref e, ref s, ref span) => { - t(e.clone().set_attr(&s, new_value), span)?; - ok - } - TransformedExpr::ArrayIndirection(ref e, ref idx, ref span) => { - t(e.clone().set_at(idx.clone(), new_value), span)?; - ok - } - TransformedExpr::Slot(slot, ident) => { - context.env.set_local(*slot, &ident.node, new_value); - ok - } - } -} - -fn eval_transformed( - transformed: &TransformedExpr, - context: &mut EvaluationContext, -) -> EvalResult { - match transformed { - TransformedExpr::Dot(ref left, ref s, ref span) => { - if let Some(v) = 
context.type_values.get_type_value(left, &s) { - if v.get_type() == "function" { - // Insert self so the method see the object it is acting on - Ok(WrappedMethod::new(left.clone(), v)) - } else { - Ok(v) - } - } else { - t(left.get_attr(&s), span) - } - } - TransformedExpr::ArrayIndirection(ref e, ref idx, ref span) => t(e.at(idx.clone()), span), - TransformedExpr::Slot(slot, ident) => t(context.env.get_local(*slot, &ident.node), ident), - } -} - -// An intermediate transformation that tries to evaluate parameters of function / indices. -// It is used to cache result of LHS in augmented assignment. -// This transformation by default should be a deep copy (clone). -fn transform( - expr: &AstAugmentedAssignTargetExprCompiled, - context: &mut EvaluationContext, -) -> Result { - match &expr.node { - AugmentedAssignTargetExprCompiled::Dot(ref e, ref s) => Ok(TransformedExpr::Dot( - eval_expr(e, context)?, - s.node.clone(), - expr.span, - )), - AugmentedAssignTargetExprCompiled::ArrayIndirection(ref e, ref idx) => { - Ok(TransformedExpr::ArrayIndirection( - eval_expr(e, context)?, - eval_expr(idx, context)?, - expr.span, - )) - } - AugmentedAssignTargetExprCompiled::Slot(index, ref ident) => { - Ok(TransformedExpr::Slot(*index, ident.clone())) - } - } -} - -// Evaluate the AST in global context, create local context, and continue evaluating in local -fn eval_expr_local( - local: &ExprLocal, - context: &mut EvaluationContext, -) -> EvalResult { - let mut ctx = EvaluationContext { - call_stack: context.call_stack, - env: EvaluationContextEnvironmentLocal { - // Note assertion that we where in module context - globals: IndexedGlobals::new(&local.globals, context.env.env().clone()), - locals: IndexedLocals::new(&local.locals), - }, - type_values: context.type_values, - map: context.map.clone(), - }; - eval_expr(&local.expr, &mut ctx) -} - -// Evaluate the AST element, i.e. mutate the environment and return an evaluation result -fn eval_expr( - expr: &AstExprCompiled, - context: &mut EvaluationContext, -) -> EvalResult { - match expr.node { - ExprCompiled::Tuple(ref v) => { - let r = eval_vector(v, context)?; - Ok(Value::new(tuple::Tuple::new(r))) - } - ExprCompiled::Dot(ref e, ref s) => eval_dot(expr, e, s, context), - ExprCompiled::Call(ref e, ref pos, ref named, ref args, ref kwargs) => { - eval_call(expr, e, pos, named, args, kwargs, context) - } - ExprCompiled::ArrayIndirection(ref e, ref idx) => { - let idx = eval_expr(idx, context)?; - t(eval_expr(e, context)?.at(idx), expr) - } - ExprCompiled::Slice(ref a, ref start, ref stop, ref stride) => { - eval_slice(expr, a, start, stop, stride, context) - } - ExprCompiled::Name(ref name) => t(context.env.get(&name.node), name), - ExprCompiled::Value(ref v) => Ok(v.clone().into()), - ExprCompiled::Not(ref s) => Ok(Value::new(!eval_expr(s, context)?.to_bool())), - ExprCompiled::UnOp(op, ref s) => { - let v = eval_expr(s, context)?; - t(eval_un_op(op, v), expr) - } - ExprCompiled::Or(ref l, ref r) => { - let l = eval_expr(l, context)?; - Ok(if l.to_bool() { - l - } else { - eval_expr(r, context)? - }) - } - ExprCompiled::And(ref l, ref r) => { - let l = eval_expr(l, context)?; - Ok(if !l.to_bool() { - l - } else { - eval_expr(r, context)? 
- }) - } - ExprCompiled::BinOp(op, ref l, ref r) => eval_bin_op_expr(expr, op, l, r, context), - ExprCompiled::If(ref cond, ref v1, ref v2) => { - if eval_expr(cond, context)?.to_bool() { - eval_expr(v1, context) - } else { - eval_expr(v2, context) - } - } - ExprCompiled::List(ref v) => { - let r = eval_vector(v, context)?; - Ok(Value::from(r)) - } - ExprCompiled::Dict(ref v) => { - let r = dict::Dictionary::new(); - for s in v.iter() { - t( - r.borrow_mut() - .set_at(eval_expr(&s.0, context)?, eval_expr(&s.1, context)?), - expr, - )? - } - Ok(r.into()) - } - ExprCompiled::Set(ref v) => { - if !context.env.env().set_literals_emabled() { - return t(Err(ValueError::TypeNotSupported("set".to_string())), expr); - } - let mut values = Vec::with_capacity(v.len()); - for s in v { - values.push(eval_expr(s, context)?); - } - t(Set::from(values), expr) - } - ExprCompiled::ListComprehension(ref expr, ref clauses) => { - let mut list = Vec::new(); - eval_one_dimensional_comprehension( - &mut |context| { - list.push(eval_expr(expr, context)?); - Ok(()) - }, - clauses, - context, - )?; - Ok(Value::from(list)) - } - ExprCompiled::SetComprehension(ref expr, ref clauses) => { - if !context.env.env().set_literals_emabled() { - return t(Err(ValueError::TypeNotSupported("set".to_string())), expr); - } - - let mut values = Vec::new(); - eval_one_dimensional_comprehension( - &mut |context| { - values.push(eval_expr(expr, context)?); - Ok(()) - }, - clauses, - context, - )?; - - t(Set::from(values), expr) - } - ExprCompiled::DictComprehension((ref k, ref v), ref clauses) => { - let mut dict = Dictionary::new_typed(); - eval_one_dimensional_comprehension( - &mut |context| { - t( - dict.insert(eval_expr(k, context)?, eval_expr(v, context)?), - &expr.span, - ) - }, - clauses, - context, - )?; - Ok(Value::new(dict)) - } - ExprCompiled::Local(ref local) => eval_expr_local(&local, context), - } -} - -// Perform an assignment on the LHS represented by this AST element -fn set_expr( - expr: &AstAssignTargetExprCompiled, - context: &mut EvaluationContext, - new_value: Value, -) -> EvalResult { - let ok = Ok(Value::new(NoneType::None)); - match expr.node { - AssignTargetExprCompiled::Subtargets(ref v) => { - // TODO: the span here should probably include the rvalue - let new_values: Vec = t(new_value.iter(), expr)?.iter().collect(); - let l = v.len(); - if new_values.len() != l { - Err(EvalException::IncorrectNumberOfValueToUnpack( - expr.span, - l as i64, - new_values.len() as i64, - )) - } else { - let mut it1 = v.iter(); - let mut it2 = new_values.into_iter(); - for _ in 0..l { - set_expr(it1.next().unwrap(), context, it2.next().unwrap())?; - } - ok - } - } - AssignTargetExprCompiled::Dot(ref e, ref s) => { - t(eval_expr(e, context)?.set_attr(&(s.node), new_value), expr)?; - ok - } - AssignTargetExprCompiled::Name(ref name) => { - t(context.env.set(&name.node, new_value), expr)?; - ok - } - AssignTargetExprCompiled::ArrayIndirection(ref e, ref idx) => { - t( - eval_expr(e, context)?.set_at(eval_expr(idx, context)?, new_value), - expr, - )?; - ok - } - } -} - -fn eval_assign_modify( - stmt: &AstStatementCompiled, - lhs: &AstAugmentedAssignTargetExprCompiled, - rhs: &AstExprCompiled, - context: &mut EvaluationContext, - op: AugmentedAssignOp, -) -> EvalResult -where -{ - let op = match op { - AugmentedAssignOp::Increment => Value::add, - AugmentedAssignOp::Decrement => Value::sub, - AugmentedAssignOp::Multiplier => Value::mul, - AugmentedAssignOp::Divider => Value::div, - AugmentedAssignOp::FloorDivider => 
Value::floor_div, - AugmentedAssignOp::Percent => Value::percent, - }; - - let lhs = transform(lhs, context)?; - let l = eval_transformed(&lhs, context)?; - let r = eval_expr(rhs, context)?; - set_transformed(&lhs, context, t(op(&l, r), stmt)?) -} - -fn eval_stmt( - stmt: &AstStatementCompiled, - context: &mut EvaluationContext, -) -> EvalResult { - match stmt.node { - StatementCompiled::Break => Err(EvalException::Break(stmt.span)), - StatementCompiled::Continue => Err(EvalException::Continue(stmt.span)), - StatementCompiled::Return(ref e) => { - Err(EvalException::Return(stmt.span, eval_expr(e, context)?)) - } - StatementCompiled::Expression(ref e) => eval_expr(e, context), - StatementCompiled::Assign(ref lhs, ref rhs) => { - let rhs = eval_expr(rhs, context)?; - set_expr(lhs, context, rhs) - } - StatementCompiled::AugmentedAssign(ref lhs, op, ref rhs) => { - eval_assign_modify(stmt, lhs, rhs, context, op) - } - StatementCompiled::IfElse(ref cond, ref st1, ref st2) => { - if eval_expr(cond, context)?.to_bool() { - eval_block(st1, context) - } else { - eval_block(st2, context) - } - } - StatementCompiled::For(ref e1, ref e2, ref st) => { - let iterable = eval_expr(e2, context)?; - let mut result = Ok(Value::new(NoneType::None)); - for v in &t(iterable.iter(), &e2.span)? { - set_expr(e1, context, v)?; - match eval_block(st, context) { - Err(EvalException::Break(..)) => break, - Err(EvalException::Continue(..)) => (), - Err(x) => { - result = Err(x); - break; - } - _ => (), - } - } - result - } - StatementCompiled::Def(ref stmt) => { - let mut p = Vec::new(); - for x in &stmt.params { - p.push(match x.node { - ParameterCompiled::Normal(ref n) => FunctionParameter::Normal(n.node.clone()), - ParameterCompiled::WithDefaultValue(ref n, ref v) => { - FunctionParameter::WithDefaultValue(n.node.clone(), eval_expr(v, context)?) - } - ParameterCompiled::Args(ref n) => FunctionParameter::ArgsArray(n.node.clone()), - ParameterCompiled::KWArgs(ref n) => { - FunctionParameter::KWArgsDict(n.node.clone()) - } - }) - } - let f = Def::new( - context.env.assert_module_env().env.name(), - FunctionSignature::new(p, 0), - stmt.clone(), - context.map.clone(), - context.env.assert_module_env().env.clone(), - ); - t( - context - .env - .set_global(stmt.slot, &stmt.name.node, f.clone().into()), - &stmt.name, - )?; - Ok(f.into()) - } - StatementCompiled::Load(ref name, ref v) => { - let loadenv = context - .env - .assert_module_env() - .loader - .load(name, context.type_values)?; - loadenv.freeze(); - for &(ref new_name, ref orig_name) in v.iter() { - t( - context.env.assert_module_env().env.import_symbol( - &loadenv, - &orig_name.node, - &new_name.node, - ), - &new_name.span.merge(orig_name.span), - )? 
- } - Ok(Value::new(NoneType::None)) - } - } -} - -fn eval_block( - block: &BlockCompiled, - context: &mut EvaluationContext, -) -> EvalResult { - let mut r = Value::new(NoneType::None); - for stmt in &block.0 { - r = eval_stmt(stmt, context)?; - } - Ok(r) -} - -fn eval_module( - module: &Module, - env: &mut Environment, - type_values: &TypeValues, - map: Arc>, - loader: &dyn FileLoader, -) -> EvalResult { - let mut call_stack = CallStack::default(); - let mut context = EvaluationContext { - env: EvaluationContextEnvironmentModule { - env: env.clone(), - globals: IndexedGlobals::new(&module.globals, env.clone()), - loader, - }, - type_values, - call_stack: &mut call_stack, - map, - }; - eval_block(&module.block, &mut context) -} - -/// Evaluate a content provided by a custom Lexer, mutate the environment accordingly and return -/// the evaluated value. -/// -/// # Arguments -/// -/// * map: the codemap object used for diagnostics -/// * filename: the name of the file being evaluated, for diagnostics -/// * content: the content to evaluate, for diagnostics -/// * dialect: starlark syntax dialect -/// * lexer: the custom lexer to use -/// * env: the environment to mutate during the evaluation -/// * file_loader: the [`FileLoader`] to react to `load()` statements. -pub fn eval_lexer, T2: LexerIntoIter>( - map: &Arc>, - filename: &str, - content: &str, - dialect: Dialect, - lexer: T2, - env: &mut Environment, - type_values: &TypeValues, - file_loader: &dyn FileLoader, -) -> Result { - match eval_module( - &parse_lexer(map, filename, content, dialect, lexer)?, - env, - type_values, - map.clone(), - file_loader, - ) { - Ok(v) => Ok(v), - Err(p) => Err(p.into()), - } -} - -/// Evaluate a string content, mutate the environment accordingly and return the evaluated value. -/// -/// # Arguments -/// -/// * map: the codemap object used for diagnostics -/// * path: the name of the file being evaluated, for diagnostics -/// * content: the content to evaluate -/// * build: set to true if you want to evaluate a BUILD file or false to evaluate a .bzl file. -/// More information about the difference can be found in [this module's -/// documentation](index.html#build_file). -/// * env: the environment to mutate during the evaluation -/// * file_loader: the [`FileLoader`] to react to `load()` statements. -pub fn eval( - map: &Arc>, - path: &str, - content: &str, - build: Dialect, - env: &mut Environment, - type_values: &TypeValues, - file_loader: &dyn FileLoader, -) -> Result { - match eval_module( - &parse(map, path, content, build)?, - env, - type_values, - map.clone(), - file_loader, - ) { - Ok(v) => Ok(v), - Err(p) => Err(p.into()), - } -} - -/// Evaluate a file, mutate the environment accordingly and return the evaluated value. -/// -/// # Arguments -/// -/// * map: the codemap object used for diagnostics -/// * path: the file to parse and evaluate -/// * build: set to true if you want to evaluate a BUILD file or false to evaluate a .bzl file. -/// More information about the difference can be found in [this module's -/// documentation](index.html#build_file). -/// * env: the environment to mutate during the evaluation -/// * file_loader: the [`FileLoader`] to react to `load()` statements. 
-pub fn eval_file( - map: &Arc>, - path: &str, - build: Dialect, - env: &mut Environment, - type_values: &TypeValues, - file_loader: &dyn FileLoader, -) -> Result { - match eval_module( - &parse_file(map, path, build)?, - env, - type_values, - map.clone(), - file_loader, - ) { - Ok(v) => Ok(v), - Err(p) => Err(p.into()), - } -} - -pub mod interactive; -pub mod noload; -pub mod simple; - -pub mod call_stack; - -#[cfg(test)] -mod tests; - -pub(crate) mod compiler; -pub(crate) mod compr; -pub(crate) mod def; -pub(crate) mod expr; -pub(crate) mod globals; -pub(crate) mod locals; -pub mod module; -pub mod stmt; diff --git a/starlark/src/eval/module.rs b/starlark/src/eval/module.rs deleted file mode 100644 index 873d1984..00000000 --- a/starlark/src/eval/module.rs +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Starlark module (`.bzl` or `BUILD` file parsed and post-processed) - -use crate::eval::globals::Globals; -use crate::eval::stmt::BlockCompiled; -use crate::syntax::ast::AstStatement; -use crate::syntax::ast::Statement; -use crate::syntax::dialect::Dialect; -use codemap_diagnostic::Diagnostic; - -/// Starlark module (`.bzl` or `BUILD` file parsed and post-processed) -#[derive(Debug, Clone)] -pub struct Module { - /// Index of global variables used in this scope - /// (but not in child scopes). - pub(crate) globals: Globals, - /// Code - pub(crate) block: BlockCompiled, -} - -impl Module { - pub(crate) fn compile(stmt: AstStatement, _dialect: Dialect) -> Result { - let mut globals = Globals::default(); - Statement::validate_break_continue(&stmt)?; - Statement::validate_augmented_assignment_in_module(&stmt)?; - let block = BlockCompiled::compile_global(stmt, &mut globals)?; - Ok(Module { globals, block }) - } -} diff --git a/starlark/src/eval/noload.rs b/starlark/src/eval/noload.rs deleted file mode 100644 index dea1e7a9..00000000 --- a/starlark/src/eval/noload.rs +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Define simpler version of the evaluation function, -//! which does not support `load(...)` statement. 
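For context on the evaluation entry points being removed here, the following is a minimal sketch of driving the no-`load()` evaluator from Rust. It mirrors the `noload::eval` signature below and the call pattern in `eval/tests.rs` further down in this diff; the file name and the evaluated snippet are illustrative assumptions, and `global_environment()` is the stdlib constructor referenced in `lib.rs`.

```rust
// Sketch only: mirrors the `starlark::eval::noload::eval` API as it appears in
// this diff; the file name and source snippet are illustrative assumptions.
use std::sync::{Arc, Mutex};

use codemap::CodeMap;
use starlark::eval::noload;
use starlark::stdlib::global_environment;
use starlark::syntax::dialect::Dialect;

fn main() {
    // Global environment with the default stdlib (True, False, None, ...).
    let (mut env, type_values) = global_environment();
    let map = Arc::new(Mutex::new(CodeMap::new()));

    // Evaluate a snippet; any load() statement would fail via NoLoadFileLoader.
    let value = noload::eval(
        &map,
        "example.bzl",               // name used for diagnostics only
        "[x * 2 for x in range(5)]", // the last statement's value is returned
        Dialect::Bzl,
        &mut env,
        &type_values,
    )
    .unwrap();

    println!("{}", value.to_str()); // prints: [0, 2, 4, 6, 8]
}
```

The general-purpose `eval` and `eval_file` functions above take the same arguments plus a `&dyn FileLoader` used to resolve `load()` statements.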
- -use crate::environment::{Environment, TypeValues, LOAD_NOT_SUPPORTED_ERROR_CODE}; -use crate::eval::{EvalException, FileLoader}; -use crate::syntax::dialect::Dialect; -use crate::values::Value; -use codemap::CodeMap; -use codemap_diagnostic::{Diagnostic, Level}; -use std::sync::{Arc, Mutex}; - -/// File loader which returns error unconditionally. -pub struct NoLoadFileLoader; - -impl FileLoader for NoLoadFileLoader { - fn load(&self, _path: &str, _: &TypeValues) -> Result { - Err(EvalException::DiagnosedError(Diagnostic { - level: Level::Error, - message: "ErrorFileLoader does not support loading".to_owned(), - code: Some(LOAD_NOT_SUPPORTED_ERROR_CODE.to_owned()), - spans: Vec::new(), - })) - } -} - -/// Evaluate a string content, mutate the environment accordingly and return the evaluated value. -/// -/// # Arguments -/// -/// __This version uses the [`NoLoadFileLoader`] implementation for -/// the file loader__ -/// -/// * map: the codemap object used for diagnostics -/// * path: the name of the file being evaluated, for diagnostics -/// * content: the content to evaluate -/// * dialect: Starlark language dialect -/// * env: the environment to mutate during the evaluation -/// * global: the environment used to resolve type values -pub fn eval( - map: &Arc>, - path: &str, - content: &str, - dialect: Dialect, - env: &mut Environment, - type_values: &TypeValues, -) -> Result { - super::eval( - map, - path, - content, - dialect, - env, - type_values, - &NoLoadFileLoader, - ) -} diff --git a/starlark/src/eval/simple.rs b/starlark/src/eval/simple.rs deleted file mode 100644 index 291724d1..00000000 --- a/starlark/src/eval/simple.rs +++ /dev/null @@ -1,130 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Define simpler version of the evaluation function -use super::Dialect; -use super::{EvalException, FileLoader}; -use crate::environment::{Environment, TypeValues}; -use crate::values::*; -use codemap::CodeMap; -use codemap_diagnostic::Diagnostic; -use std::collections::HashMap; -use std::sync::{Arc, Mutex}; - -/// A simple FileLoader that load file from disk and cache the result in a hashmap. 
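Before the cached `SimpleFileLoader` that follows, it may help to see the bare `FileLoader` contract in isolation. This is a hedged sketch of an in-memory loader in the style of `NoLoadFileLoader` above and the test loaders in `eval/tests.rs`; the struct name, the hard-coded module source, and the path handling are assumptions.

```rust
// Sketch only: an in-memory FileLoader modeled on the loaders in this diff;
// InMemoryLoader and its canned source are made up for illustration.
use std::sync::{Arc, Mutex};

use codemap::CodeMap;
use starlark::environment::{Environment, TypeValues};
use starlark::eval::{noload, EvalException, FileLoader};
use starlark::syntax::dialect::Dialect;

struct InMemoryLoader;

impl FileLoader for InMemoryLoader {
    fn load(&self, path: &str, type_values: &TypeValues) -> Result<Environment, EvalException> {
        // Resolve every load() to the same canned module for this sketch.
        let source = "def greeting(name): return 'hello ' + name";
        let mut env = Environment::new(path);
        noload::eval(
            &Arc::new(Mutex::new(CodeMap::new())),
            path,
            source,
            Dialect::Bzl,
            &mut env,
            type_values,
        )
        .map_err(EvalException::DiagnosedError)?;
        env.freeze(); // loaded environments are frozen before their symbols are imported
        Ok(env)
    }
}
```

The `eval::simple::eval` and `eval::simple::eval_file` wrappers below perform the equivalent wiring automatically by constructing a `SimpleFileLoader` from the codemap and a parent environment.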
-#[derive(Clone)] -pub struct SimpleFileLoader { - map: Arc>>, - parent_env: Environment, - codemap: Arc>, -} - -impl SimpleFileLoader { - pub fn new(map: &Arc>, parent_env: Environment) -> SimpleFileLoader { - SimpleFileLoader { - map: Arc::new(Mutex::new(HashMap::new())), - parent_env, - codemap: map.clone(), - } - } -} - -impl FileLoader for SimpleFileLoader { - fn load(&self, path: &str, type_values: &TypeValues) -> Result { - { - let lock = self.map.lock().unwrap(); - if lock.contains_key(path) { - return Ok(lock.get(path).unwrap().clone()); - } - } // Release the lock - let mut env = self.parent_env.child(path); - if let Err(d) = super::eval_file( - &self.codemap, - path, - Dialect::Bzl, - &mut env, - type_values, - self, - ) { - return Err(EvalException::DiagnosedError(d)); - } - env.freeze(); - self.map - .lock() - .unwrap() - .insert(path.to_owned(), env.clone()); - Ok(env) - } -} - -/// Evaluate a string content, mutate the environment accordingly and return the evaluated value. -/// -/// # Arguments -/// -/// __This version uses the [`SimpleFileLoader`] implementation for -/// the file loader__ -/// -/// * map: the codemap object used for diagnostics -/// * path: the name of the file being evaluated, for diagnostics -/// * content: the content to evaluate -/// * dialect: Starlark language dialect -/// * env: the environment to mutate during the evaluation -pub fn eval( - map: &Arc>, - path: &str, - content: &str, - dialect: Dialect, - env: &mut Environment, - type_values: &TypeValues, - file_loader_env: Environment, -) -> Result { - super::eval( - map, - path, - content, - dialect, - env, - type_values, - &SimpleFileLoader::new(map, file_loader_env), - ) -} - -/// Evaluate a file, mutate the environment accordingly and return the evaluated value. -/// -/// __This version uses the [`SimpleFileLoader`] implementation for -/// the file loader__ -/// -/// # Arguments -/// -/// * map: the codemap object used for diagnostics -/// * path: the file to parse and evaluate -/// * build: set to true if you want to evaluate a BUILD file or false to evaluate a .bzl file -/// * env: the environment to mutate during the evaluation -pub fn eval_file( - map: &Arc>, - path: &str, - build: Dialect, - env: &mut Environment, - type_values: &TypeValues, - file_loader_env: Environment, -) -> Result { - super::eval_file( - map, - path, - build, - env, - type_values, - &SimpleFileLoader::new(map, file_loader_env), - ) -} diff --git a/starlark/src/eval/stmt.rs b/starlark/src/eval/stmt.rs deleted file mode 100644 index 0f1f7016..00000000 --- a/starlark/src/eval/stmt.rs +++ /dev/null @@ -1,230 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! 
Interpreter-ready statement - -use crate::eval::compiler::GlobalCompiler; -use crate::eval::compiler::LocalCompiler; -use crate::eval::def::DefCompiled; -use crate::eval::expr::AssignTargetExprCompiled; -use crate::eval::expr::AstAssignTargetExprCompiled; -use crate::eval::expr::AstAugmentedAssignTargetExprCompiled; -use crate::eval::expr::AstExprCompiled; -use crate::eval::expr::AugmentedAssignTargetExprCompiled; -use crate::eval::expr::ExprCompiled; -use crate::eval::globals::Globals; -use crate::syntax::ast::AstStatement; -use crate::syntax::ast::AstString; -use crate::syntax::ast::AugmentedAssignOp; -use crate::syntax::ast::Statement; -use crate::values::frozen::FrozenValue; -use crate::values::inspect::Inspectable; -use crate::values::none::NoneType; -use crate::values::Value; -use codemap::Spanned; -use codemap_diagnostic::Diagnostic; - -#[doc(hidden)] -pub(crate) type AstStatementCompiled = Spanned; - -/// Interperter-ready version of [`Statement`](crate::syntax::ast::Statement) -#[derive(Debug, Clone)] -pub(crate) enum StatementCompiled { - Break, - Continue, - Return(AstExprCompiled), - Expression(AstExprCompiled), - Assign(AstAssignTargetExprCompiled, AstExprCompiled), - AugmentedAssign( - AstAugmentedAssignTargetExprCompiled, - AugmentedAssignOp, - AstExprCompiled, - ), - IfElse(AstExprCompiled, BlockCompiled, BlockCompiled), - For(AstAssignTargetExprCompiled, AstExprCompiled, BlockCompiled), - Def(DefCompiled), - Load(AstString, Vec<(AstString, AstString)>), -} - -#[derive(Debug, Clone)] -pub(crate) struct BlockCompiled(pub(crate) Vec); - -impl BlockCompiled { - fn compile_local_stmts( - stmts: Vec, - compiler: &mut LocalCompiler, - ) -> Result { - let mut r = Vec::new(); - for stmt in stmts { - r.extend(Self::compile_local(stmt, compiler)?.0); - } - Ok(BlockCompiled(r)) - } - - pub(crate) fn compile_local( - stmt: AstStatement, - compiler: &mut LocalCompiler, - ) -> Result { - Ok(BlockCompiled(vec![Spanned { - span: stmt.span, - node: match stmt.node { - Statement::Def(..) => unreachable!(), - Statement::For(var, over, body) => { - let over = ExprCompiled::compile(over, compiler)?; - StatementCompiled::For( - AssignTargetExprCompiled::compile(var, compiler)?, - over, - BlockCompiled::compile_local(body, compiler)?, - ) - } - Statement::Return(Some(expr)) => { - StatementCompiled::Return(ExprCompiled::compile(expr, compiler)?) - } - Statement::Return(None) => StatementCompiled::Return(Box::new(Spanned { - span: stmt.span, - node: ExprCompiled::Value(FrozenValue::from(NoneType::None)), - })), - Statement::If(cond, then_block) => StatementCompiled::IfElse( - ExprCompiled::compile(cond, compiler)?, - BlockCompiled::compile_local(then_block, compiler)?, - BlockCompiled(Vec::new()), - ), - Statement::IfElse(cond, then_block, else_block) => StatementCompiled::IfElse( - ExprCompiled::compile(cond, compiler)?, - BlockCompiled::compile_local(then_block, compiler)?, - BlockCompiled::compile_local(else_block, compiler)?, - ), - Statement::Statements(stmts) => { - return BlockCompiled::compile_local_stmts(stmts, compiler) - } - Statement::Expression(e) => { - StatementCompiled::Expression(ExprCompiled::compile(e, compiler)?) 
- } - Statement::Assign(left, right) => StatementCompiled::Assign( - AssignTargetExprCompiled::compile(left, compiler)?, - ExprCompiled::compile(right, compiler)?, - ), - Statement::AugmentedAssign(left, op, right) => StatementCompiled::AugmentedAssign( - AugmentedAssignTargetExprCompiled::compile_impl(left, compiler)?, - op, - ExprCompiled::compile(right, compiler)?, - ), - Statement::Load(module, args) => StatementCompiled::Load(module, args), - Statement::Pass => return Ok(BlockCompiled(Vec::new())), - Statement::Break => StatementCompiled::Break, - Statement::Continue => StatementCompiled::Continue, - }, - }])) - } - - fn compile_global_stmts( - stmts: Vec, - globals: &mut Globals, - ) -> Result { - let mut r = Vec::new(); - for stmt in stmts { - r.extend(Self::compile_global(stmt, globals)?.0); - } - Ok(BlockCompiled(r)) - } - - pub(crate) fn compile_global( - stmt: AstStatement, - globals: &mut Globals, - ) -> Result { - Ok(BlockCompiled(vec![Spanned { - span: stmt.span, - node: match stmt.node { - Statement::Def(name, params, suite) => { - let slot = globals.register_global(&name.node); - StatementCompiled::Def(DefCompiled::new(name, slot, params, suite)?) - } - Statement::For(var, over, body) => StatementCompiled::For( - AssignTargetExprCompiled::compile(var, &mut GlobalCompiler::new(globals))?, - ExprCompiled::compile_global(over, globals)?, - BlockCompiled::compile_global(body, globals)?, - ), - Statement::If(cond, then_block) => StatementCompiled::IfElse( - ExprCompiled::compile_global(cond, globals)?, - BlockCompiled::compile_global(then_block, globals)?, - BlockCompiled(Vec::new()), - ), - Statement::IfElse(cond, then_block, else_block) => StatementCompiled::IfElse( - ExprCompiled::compile_global(cond, globals)?, - BlockCompiled::compile_global(then_block, globals)?, - BlockCompiled::compile_global(else_block, globals)?, - ), - Statement::Statements(stmts) => { - return BlockCompiled::compile_global_stmts(stmts, globals) - } - Statement::Expression(expr) => { - StatementCompiled::Expression(ExprCompiled::compile_global(expr, globals)?) - } - Statement::Return(Some(expr)) => { - StatementCompiled::Return(ExprCompiled::compile_global(expr, globals)?) 
- } - Statement::Return(None) => StatementCompiled::Return(Box::new(Spanned { - span: stmt.span, - node: ExprCompiled::Value(FrozenValue::from(NoneType::None)), - })), - Statement::Assign(target, source) => StatementCompiled::Assign( - AssignTargetExprCompiled::compile(target, &mut GlobalCompiler::new(globals))?, - ExprCompiled::compile_global(source, globals)?, - ), - Statement::AugmentedAssign(target, op, source) => { - StatementCompiled::AugmentedAssign( - AugmentedAssignTargetExprCompiled::compile_impl( - target, - &mut GlobalCompiler::new(globals), - )?, - op, - ExprCompiled::compile_global(source, globals)?, - ) - } - Statement::Load(path, map) => StatementCompiled::Load(path, map), - Statement::Pass => return Ok(BlockCompiled(Vec::new())), - Statement::Break => StatementCompiled::Break, - Statement::Continue => StatementCompiled::Continue, - }, - }])) - } -} - -impl Inspectable for BlockCompiled { - fn inspect(&self) -> Value { - self.0.inspect() - } -} - -impl Inspectable for StatementCompiled { - fn inspect(&self) -> Value { - let (name, param): (&str, Value) = match self { - StatementCompiled::Break => ("break", Value::from(NoneType::None)), - StatementCompiled::Continue => ("continue", Value::from(NoneType::None)), - StatementCompiled::Return(e) => ("return", e.inspect()), - StatementCompiled::Expression(e) => ("expression", e.inspect()), - StatementCompiled::Assign(t, e) => ("assign", (t, e).inspect()), - StatementCompiled::AugmentedAssign(t, op, e) => { - ("augmented_assign", (t, format!("{:?}", op), e).inspect()) - } - StatementCompiled::IfElse(cond, then_block, else_block) => { - ("if_else", (cond, then_block, else_block).inspect()) - } - StatementCompiled::For(var, over, block) => ("for", (var, over, block).inspect()), - StatementCompiled::Def(def) => ("def", def.name.inspect()), - StatementCompiled::Load(what, bindings) => ("load", (what, bindings).inspect()), - }; - Value::from((Value::from(name), param)) - } -} diff --git a/starlark/src/eval/tests.rs b/starlark/src/eval/tests.rs deleted file mode 100644 index 5c33d392..00000000 --- a/starlark/src/eval/tests.rs +++ /dev/null @@ -1,250 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
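The lowering rules above have directly observable behaviour: a bare `return` compiles to returning `None` (`Statement::Return(None)`), and `pass` compiles to an empty block (`Statement::Pass`). A small sketch using `starlark::stdlib::starlark_default`, in the style of the stdlib doc tests elsewhere in this diff; the snippet itself is an illustrative assumption.

```rust
// Sketch only: uses starlark::stdlib::starlark_default as the stdlib doc tests do.
use starlark::stdlib::starlark_default;

fn main() {
    // A function whose body is `pass` and a bare `return` both evaluate to None,
    // matching the Statement::Pass and Statement::Return(None) lowering above.
    let program = "\
def nothing(): pass
def bare_return(): return
nothing() == None and bare_return() == None
";
    assert!(starlark_default(program).unwrap());
}
```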
- -use crate::environment::Environment; -use crate::environment::TypeValues; -use crate::eval::eval; -use crate::eval::EvalException; -use crate::eval::FileLoader; -use crate::eval::{noload, RECURSION_ERROR_CODE}; -use crate::syntax::dialect::Dialect; -use crate::testutil::starlark_no_diagnostic; -use crate::values::Value; -use codemap::CodeMap; -use std::sync::{Arc, Mutex}; - -#[test] -fn arithmetic_test() { - starlark_ok!("(1 + 2 == 3)"); - starlark_ok!("(1 * 2 == 2)"); - starlark_ok!("(-1 * 2 == -2)"); - starlark_ok!("(5 // 2 == 2)"); - starlark_ok!("(5 % 2 == 1)"); -} - -#[test] -fn alias_test() { - starlark_ok!( - r#" -a = [1, 2, 3] -b = a -a[2] = 0 -a == [1, 2, 0] and b == [1, 2, 0] -"# - ) -} - -#[test] -fn recursive_list() { - starlark_fail!( - r#" -cyclic = [1, 2, 3] -cyclic[1] = cyclic -"# - ) -} - -#[test] -fn funcall_test() { - const F: &str = " -def f1(): - return 1 - -def f2(a): return a - -def f3(a, b, c): - return a + b + c - -def f4(a, *args): - r = a - for i in args: - r += i - return r - -def f5(a, **kwargs): return kwargs - -def rec1(): rec1() - -def rec2(): rec3() -def rec3(): rec4() -def rec4(): rec5() -def rec5(): rec6() -def rec6(): rec2() -"; - starlark_ok!(F, "(f1() == 1)"); - starlark_ok!(F, "(f2(2) == 2)"); - starlark_ok!(F, "(f3(1, 2, 3) == 6)"); - starlark_ok!(F, "(f4(1, 2, 3) == 6)"); - starlark_ok!(F, "(f5(2) == {})"); - starlark_ok!(F, "(f5(a=2) == {})"); - starlark_ok!(F, "(f5(1, b=2) == {'b': 2})"); - starlark_fail!(F, "rec1()", RECURSION_ERROR_CODE); - starlark_fail!(F, "rec2()", RECURSION_ERROR_CODE); - // multiple argument with the same name should not be allowed - starlark_fail!("def f(a, a=2): pass"); - // Invalid order of parameter - starlark_fail!("def f(a, *args, b): pass"); - starlark_fail!("def f(a, *args, b=1): pass"); - starlark_fail!("def f(a, b=1, *args, c=1): pass"); - starlark_fail!("def f(a, **kwargs, b=1): pass"); - starlark_fail!("def f(a, b=1, **kwargs, c=1): pass"); - starlark_fail!("def f(a, **kwargs, *args): pass"); -} - -#[test] -fn sets_disabled() { - let (mut env, type_values) = crate::stdlib::global_environment(); - let err = starlark_no_diagnostic(&mut env, "s = {1, 2, 3}", &type_values).unwrap_err(); - assert_eq!( - err.message, - "Type `set` is not supported. 
Perhaps you need to enable some crate feature?".to_string() - ); - assert_eq!(err.level, codemap_diagnostic::Level::Error); - assert_eq!( - err.code, - Some(crate::values::error::NOT_SUPPORTED_ERROR_CODE.to_string()) - ); -} - -#[test] -fn sets() { - fn env_with_set() -> (Environment, TypeValues) { - let (mut env, mut type_values) = crate::stdlib::global_environment(); - crate::linked_hash_set::global(&mut env, &mut type_values); - (env, type_values) - } - - fn starlark_ok_with_global_env(snippet: &str) { - let (mut env, type_values) = env_with_set(); - assert!(starlark_no_diagnostic(&mut env, snippet, &type_values,).unwrap()); - } - - starlark_ok_with_global_env( - "s1 = {1, 2, 3, 1} ; s2 = set([1, 2, 3]) ; len(s1) == 3 and s1 == s2", - ); - starlark_ok_with_global_env("list(set([1, 2, 3, 1])) == [1, 2, 3]"); - starlark_ok_with_global_env("list(set()) == []"); - starlark_ok_with_global_env("not set()"); - - let (parent_env, type_values) = env_with_set(); - assert!(starlark_no_diagnostic( - &mut parent_env.child("child"), - "len({1, 2}) == 2", - &type_values, - ) - .unwrap()); -} - -#[test] -fn test_context_captured() { - #[derive(Clone)] - struct TestContextCapturedFileLoader {} - - impl FileLoader for TestContextCapturedFileLoader { - fn load(&self, path: &str, type_values: &TypeValues) -> Result { - assert_eq!("f.bzl", path); - let mut env = Environment::new("new"); - // Check that `x` is captured with the function - let f_bzl = r#" -x = 17 -def f(): return x -"#; - noload::eval( - &Arc::new(Mutex::new(CodeMap::new())), - path, - f_bzl, - Dialect::Bzl, - &mut env, - type_values, - ) - .unwrap(); - env.freeze(); - Ok(env) - } - } - - let mut env = Environment::new("z"); - // Import `f` but do not import `x` - let program = "load('f.bzl', 'f')\nf()"; - assert_eq!( - Value::new(17), - eval( - &Arc::new(Mutex::new(CodeMap::new())), - "outer.build", - program, - Dialect::Build, - &mut env, - &TypeValues::default(), - &TestContextCapturedFileLoader {} - ) - .unwrap() - ); -} - -#[test] -fn test_type_values_are_imported_from_caller() { - use crate::starlark_fun; - use crate::starlark_module; - use crate::starlark_parse_param_type; - use crate::starlark_signature; - use crate::starlark_signature_extraction; - use crate::starlark_signatures; - - starlark_module! { string_truncate => - string.truncate(this: String, len: usize) { - // This works properly only for ASCII, but that enough for a test - this.truncate(len); - Ok(Value::new(this)) - } - } - - struct MyFileLoader {} - - impl FileLoader for MyFileLoader { - fn load(&self, path: &str, type_values: &TypeValues) -> Result { - assert_eq!("utils.bzl", path); - - let mut env = Environment::new("utils.bzl"); - noload::eval( - &Arc::new(Mutex::new(CodeMap::new())), - "utils.bzl", - "def truncate_strings(strings, len): return [s.truncate(len) for s in strings]", - Dialect::Bzl, - &mut env, - type_values, - )?; - Ok(env) - } - } - - let mut env = Environment::new("my.bzl"); - - let mut type_values = TypeValues::default(); - string_truncate(&mut Environment::new("ignore"), &mut type_values); - - // Note `string.truncate` is not available in either `utils.bzl` or `my.bzl`, - // but this code works. 
- let result = eval( - &Arc::new(Mutex::new(CodeMap::new())), - "my.bzl", - "load('utils.bzl', 'truncate_strings'); truncate_strings(['abc', 'de'], 2)", - Dialect::Bzl, - &mut env, - &type_values, - &MyFileLoader {}, - ) - .unwrap(); - - assert_eq!("[\"ab\", \"de\"]", result.to_str()); -} diff --git a/starlark/src/lib.rs b/starlark/src/lib.rs deleted file mode 100644 index fdbbe929..00000000 --- a/starlark/src/lib.rs +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! A Starlark interpreter library in rust. -//! -//! Starlark, formerly codenamed Skylark, is a non-Turing complete language based on Python that -//! was made for the [Bazel build system](https://bazel.build) to define compilation plugin. -//! -//! Starlark has at least 3 implementations: a [Java one for Bazel]( -//! https://github.com/bazelbuild/bazel/tree/master/src/main/java/com/google/devtools/skylark), -//! a [go one](https://github.com/google/skylark) and this one. -//! -//! This interpreter was made using the [specification from the go version]( -//! https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md) -//! and the Python 3 documentation when things were unclear. -//! -//! This interpreter does not support most of the go extensions (e.g. bitwise operator or -//! floating point). It does not include the `set()` type either (the Java implementation use a -//! custom type, `depset`, instead). It uses signed 64-bit integer. -//! -//! # Usage -//! -//! The library can be used to define a dialect of Starlark (e.g. for a build system). -//! -//! The methods in the [eval](eval) modules can be used to evaluate Starlark code: -//! * General purpose [eval](eval::eval) and [eval_file](eval::eval_file) function evaluate -//! Starlark code and return the result of the last statement. Those are generic purpose -//! function to be used when rewiring load statements. -//! * A file loader that simply load relative path to the program is provided by the -//! [eval::simple] module. This module also contains version of [eval](eval::simple::eval) and -//! [eval_file](eval::simple::eval_file) that use this file loader. -//! * Interactive versions of those function are provided in the [eval::interactive] module. -//! Those function are printing the result / diagnostic to the stdout / stderr instead of -//! returning an output. -//! -//! # Defining a Starlark dialect -//! -//! To specify a new Starlark dialect, the global [Environment](environment::Environment) can be -//! edited, adding functions or constants. The [starlark_module!](starlark_module) macro let you -//! define new function with limited boilerplate. -//! -//! Those added function or macros can however return their own type, all of them should implement -//! the [TypedValue](values::TypedValue) trait. See the documentation of the [values](values) -//! module. -//! -//! # Content of the default global environment -//! -//! 
The default global environment is returned by the -//! [stdlib::global_environment] function and add the `True`, -//! `False` and `None` constants, as well as the functions in the [stdlib] module. -//! -//! # Provided types -//! -//! The [values](values) module provide the following types: -//! -//! * integer (signed 64bit), bool, and NoneType, -//! * [string](values::string), -//! * [dictionary](values::dict), -//! * [list](values::list), -//! * [tuple](values::tuple), and -//! * [function](values::function). - -#![deny(broken_intra_doc_links)] - -#[cfg(test)] -#[macro_use] -pub(crate) mod testutil; - -pub mod environment; -#[doc(hidden)] -pub mod syntax; -#[macro_use] -pub mod values; -#[macro_use] -pub mod eval; -#[macro_use] -pub mod stdlib; -pub mod linked_hash_set; diff --git a/starlark/src/linked_hash_set/mod.rs b/starlark/src/linked_hash_set/mod.rs deleted file mode 100644 index bbc81894..00000000 --- a/starlark/src/linked_hash_set/mod.rs +++ /dev/null @@ -1,11 +0,0 @@ -use crate::environment::{Environment, TypeValues}; - -pub(crate) mod set_impl; -mod stdlib; -pub(crate) mod value; - -/// Include `set` constructor and set functions in environment. -pub fn global(env: &mut Environment, type_values: &mut TypeValues) { - self::stdlib::global(env, type_values); - env.enable_set_literals(); -} diff --git a/starlark/src/linked_hash_set/set_impl.rs b/starlark/src/linked_hash_set/set_impl.rs deleted file mode 100644 index 13f91677..00000000 --- a/starlark/src/linked_hash_set/set_impl.rs +++ /dev/null @@ -1,124 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Simple implementation of `LinkedHashSet`. - -use linked_hash_map::{Entry, LinkedHashMap}; -use std::hash::Hash; - -/// `LinkedHashSet` is a tiny wrapper around `LinkedHashMap`. -/// -/// Using `LinkedHashMap` directly to avoid adding extra dependency. 
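`linked_hash_set::global`, deleted just above, is the opt-in switch for the non-standard `set` type and `{...}` literals. A minimal sketch of enabling it and evaluating a set literal, following the paths used by `eval/tests.rs` later in this diff; the file name and the evaluated snippet are illustrative assumptions.

```rust
// Sketch only: enables the optional set extension removed in this diff and
// evaluates a set literal; call paths follow eval/tests.rs.
use std::sync::{Arc, Mutex};

use codemap::CodeMap;
use starlark::eval::noload;
use starlark::stdlib::global_environment;
use starlark::syntax::dialect::Dialect;

fn main() {
    let (mut env, mut type_values) = global_environment();
    // Registers set(), the set methods, and enables `{1, 2, 3}` literals.
    starlark::linked_hash_set::global(&mut env, &mut type_values);

    let result = noload::eval(
        &Arc::new(Mutex::new(CodeMap::new())),
        "sets.bzl",
        "s = {1, 2, 3, 1} ; len(s) == 3 and s == set([1, 2, 3])",
        Dialect::Bzl,
        &mut env,
        &type_values,
    )
    .unwrap();

    assert_eq!("True", result.to_str());
}
```

Without this call, a set literal fails with the "Type `set` is not supported" diagnostic exercised by the `sets_disabled` test above.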
-#[derive(PartialEq, Eq, Debug, Clone)] -pub(crate) struct LinkedHashSet { - map: LinkedHashMap, -} - -impl Default for LinkedHashSet { - fn default() -> Self { - LinkedHashSet::new() - } -} - -impl LinkedHashSet { - pub fn new() -> Self { - LinkedHashSet { - map: LinkedHashMap::new(), - } - } - - pub fn _with_capacity(capacity: usize) -> Self { - LinkedHashSet { - map: LinkedHashMap::with_capacity(capacity), - } - } - - pub fn len(&self) -> usize { - self.map.len() - } - - pub fn is_empty(&self) -> bool { - self.map.is_empty() - } - - pub fn clear(&mut self) { - self.map.clear() - } - - pub fn iter(&self) -> impl Iterator { - self.map.keys() - } - - pub fn contains(&self, value: &K) -> bool { - self.map.get(value).is_some() - } - - pub fn insert(&mut self, value: K) { - self.map.insert(value, ()); - } - - pub fn insert_if_absent(&mut self, value: K) { - if let Entry::Vacant(e) = self.map.entry(value) { - e.insert(()); - } - } - - pub fn remove(&mut self, value: &K) -> bool { - self.map.remove(value).is_some() - } - - /// Items in both sets - pub fn intersection<'a>(&'a self, other: &'a LinkedHashSet) -> impl Iterator { - let (a, b) = if self.len() <= other.len() { - (self, other) - } else { - (other, self) - }; - a.iter().filter(move |k| b.contains(k)) - } - - /// Items which are in `self`, but not in `other`. - pub fn difference<'a>(&'a self, other: &'a LinkedHashSet) -> impl Iterator { - self.iter().filter(move |k| !other.contains(k)) - } - - /// Items which are in `self` or in `other` but not in `both` - pub fn symmetric_difference<'a>( - &'a self, - other: &'a LinkedHashSet, - ) -> impl Iterator { - self.difference(other).chain(other.difference(self)) - } - - pub fn is_subset(&self, other: &LinkedHashSet) -> bool { - self.len() <= other.len() && self.iter().all(|k| other.contains(k)) - } - - pub fn pop_front(&mut self) -> Option { - self.map.pop_front().map(|(k, ())| k) - } - - pub fn pop_back(&mut self) -> Option { - self.map.pop_back().map(|(k, ())| k) - } -} - -impl<'a, K: Hash + Eq> IntoIterator for &'a LinkedHashSet { - type Item = &'a K; - type IntoIter = linked_hash_map::Keys<'a, K, ()>; - - fn into_iter(self) -> Self::IntoIter { - self.map.keys() - } -} diff --git a/starlark/src/linked_hash_set/stdlib.rs b/starlark/src/linked_hash_set/stdlib.rs deleted file mode 100644 index d63e12a3..00000000 --- a/starlark/src/linked_hash_set/stdlib.rs +++ /dev/null @@ -1,607 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Methods for the `set` type. - -use crate::values::error::*; -use crate::values::none::NoneType; -use crate::values::*; - -use crate::linked_hash_set::value::Set; - -// Errors -- UF = User Failure -- Failure that should be expected by the user (e.g. from a fail()). -pub const SET_REMOVE_ELEMENT_NOT_FOUND_ERROR_CODE: &str = "UF30"; - -macro_rules! ok { - ($e:expr) => { - return Ok(Value::from($e)); - }; -} - -starlark_module! {global => - /// set: construct a set. 
- /// - /// `set(x)` returns a new set containing the elements of the - /// iterable sequence x. - /// - /// With no argument, `set()` returns a new empty set. - set(?a, /) { - let mut s = Set::default(); - if let Some(a) = a { - for x in &a.iter()? { - s.insert_if_absent(x)?; - } - } - ok!(s) - } - - /// set.add: append an element to a set. - /// - /// `S.add(x)` adds `x` to the set S, and returns `None`. - /// - /// `add` fails if the set is frozen or has active iterators. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = set() - /// # ( - /// x.add(1) == None - /// # and - /// x.add(2) == None - /// # and - /// x.add(3) == None - /// # and - /// x.add(1) == None - /// # and - /// x == set([1, 2, 3]) - /// # )"#).unwrap()); - /// ``` - set.add(this, el, /) { - let mut this = this.downcast_mut::()?.unwrap(); - this.insert_if_absent(el)?; - Ok(Value::new(NoneType::None)) - } - - /// set.clear: clear a set - /// - /// `S.clear()` removes all the elements of the set S and returns `None`. - /// - /// It fails if the set is frozen or if there are active iterators. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = set([1, 2, 3]) - /// # ( - /// x.clear() == None - /// # and - /// x == set() - /// # )"#).unwrap()); - /// ``` - set.clear(this) { - let mut this = this.downcast_mut::()?.unwrap(); - this.clear(); - Ok(Value::new(NoneType::None)) - } - - /// set.copy: return a set containing all of the elements of this set, in the same order. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = set([1, 2, 3]) - /// y = x.copy() - /// x.add(4) - /// y.add(5) - /// # ( - /// x == set([1, 2, 3, 4]) - /// # and - /// y == set([1, 2, 3, 5]) - /// # )"#).unwrap()); - /// ``` - set.copy(this) { - let this = this.downcast_ref::().unwrap(); - ok!(this.copy()) - } - - /// set.difference: return a set containing all of the elements of this set, without any - /// elements present in any of the passed sets. - /// - /// `S.difference(x, y)` returns `S - x - y`. - /// - /// `difference` fails if its argument(s) are not iterable. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = set([1, 2, 3, 4]) - /// y = [1] - /// z = set([2, 3]) - /// ( - /// x.difference(y, z) == set([4]) - /// # and - /// x.difference() == x - /// # and - /// x == set([1, 2, 3, 4]) - /// # and - /// y == [1] - /// # and - /// z == set([2, 3]) - /// # )"#).unwrap()); - /// ``` - set.difference(this, *others) { - let mut ret = Set::default(); - for el in &this.iter()? { - let mut is_in_any_other = false; - for other in &others { - if other.contains(&el)? { - is_in_any_other = true; - break; - } - } - if !is_in_any_other { - ret.insert_if_absent(el)?; - } - } - ok!(ret) - } - - /// set.difference_update: remove all elements of another iterable from this set. - /// - /// `S.difference_update(x)` removes all values in x from S. - /// - /// `difference_update` fails if its argument is not iterable. 
- /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = set([1, 2, 3, 4]) - /// x.difference_update(set([1])) - /// x.difference_update([2, 3]) - /// x == set([4]) - /// # "#).unwrap()); - /// ``` - set.difference_update(this, other, /) { - let mut this = this.downcast_mut::()?.unwrap(); - let previous_length = this.len() as usize; - let mut values = Vec::with_capacity(previous_length); - for el in this.get_content() { - if !other.contains(el.get_value())? { - values.push(el.clone()); - } - } - this.clear(); - for value in values.into_iter() { - this.insert(value.into())?; - } - Ok(Value::new(NoneType::None)) - } - - /// set.discard: remove a value from a set if it is present. - /// - /// `S.discard(x)` removes the the value `x` from the set S if it is present, and returns `None`. - /// - /// `discard` fails if the set is frozen, or has active iterators. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = set([1, 2, 3]) - /// # ( - /// x.discard(2) == None - /// # and - /// x.discard(4) == None - /// # and - /// x == set([1, 3]) - /// # )"#).unwrap()); - /// ``` - set.discard(this, needle, /) { - let mut this = this.downcast_mut::()?.unwrap(); - this.remove(&needle)?; - Ok(Value::new(NoneType::None)) - } - - /// set.intersection: return a set containing all of the elements of this set which are also - /// present in all of the passed iterables. - /// - /// `intersection` fails if its argument(s) are not iterable. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = set([1, 2, 3, 4]) - /// y = [1, 2] - /// z = set([2, 3]) - /// ( - /// x.intersection(y, z) == set([2]) - /// # and - /// x.intersection() == x - /// # and - /// x.intersection().clear() == None - /// # and - /// x == set([1, 2, 3, 4]) - /// # and - /// y == [1, 2] - /// # and - /// z == set([2, 3]) - /// # )"#).unwrap()); - /// ``` - set.intersection(this, *others) { - let mut ret = Set::default(); - for el in &this.iter()? { - let mut is_in_every_other = true; - for other in &others { - if !other.contains(&el)? { - is_in_every_other = false; - break; - } - } - if is_in_every_other { - ret.insert_if_absent(el)?; - } - } - ok!(ret) - } - - /// set.intersection_update: remove all elements from this set which are not in the other - /// iterable. - /// - /// `S.intersection_update(x)` removes all values not in x from S. - /// - /// `intersection_update` fails if its argument is not iterable. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = set([1, 2, 3, 4]) - /// x.intersection_update(set([1, 2])) - /// x.intersection_update([2, 3]) - /// x == set([2]) - /// # "#).unwrap()); - /// ``` - set.intersection_update(this, other, /) { - let mut this = this.downcast_mut::()?.unwrap(); - let previous_length = this.len(); - let mut values = Vec::with_capacity(previous_length); - for el in this.get_content() { - if other.contains(el.get_value())? { - values.push(el.clone()); - } - } - this.clear(); - for value in values.into_iter() { - this.insert(value.into())?; - } - Ok(Value::new(NoneType::None)) - } - - /// set.isdisjoint: return whether a set has no intersection with another set. - /// - /// `isdisjoint` fails if its argument is not a set. 
- /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = set([1, 2, 3, 4]) - /// ( - /// x.isdisjoint(set()) == True - /// # and - /// x.isdisjoint(set([5])) == True - /// # and - /// x.isdisjoint(set([1])) == False - /// # and - /// x.isdisjoint(set([1, 5])) == False - /// # )"#).unwrap()); - /// ``` - set.isdisjoint(this, other, /) { - ok!(Set::compare(&this, &other, &|s1, s2| Ok(s1.intersection(s2).next().is_none()))?) - } - - /// set.issubset: return whether another set contains this set. - /// - /// `issubset` fails if its argument is not a set. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = set([1, 2, 3, 4]) - /// ( - /// x.issubset(set()) == False - /// # and - /// x.issubset(set([1, 2, 3])) == False - /// # and - /// x.issubset(set([4, 3, 2, 1])) == True - /// # and - /// x.issubset(set([1, 2, 3, 4, 5])) == True - /// # )"#).unwrap()); - /// ``` - set.issubset(this, other, /) { - ok!(Set::compare(&this, &other, &|this, other| Ok(this.is_subset(other)))?) - } - - /// set.issubset: return whether this set contains another set. - /// - /// `issuperset` fails if its argument is not a set. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = set([1, 2, 3, 4]) - /// ( - /// x.issuperset(set()) == True - /// # and - /// x.issuperset(set([1, 2, 3])) == True - /// # and - /// x.issuperset(set([4, 3, 2, 1])) == True - /// # and - /// x.issuperset(set([1, 2, 3, 4, 5])) == False - /// # )"#).unwrap()); - /// ``` - set.issuperset(this, other, /) { - ok!(Set::compare(&this, &other, &|this, other| Ok(other.is_subset(this)))?) - } - - /// set.pop: removes and returns the last element of a set. - /// - /// `S.pop([index])` removes and returns the last element of the set S, or, - /// if the optional index is provided, at that index. - /// - /// `pop` fails if the index is negative or not less than the length of - /// the set, of if the set is frozen or has active iterators. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = set([1, 2, 3, 4]) - /// # ( - /// x.pop(1) == 2 - /// # and - /// x.pop() == 4 - /// # and - /// x.pop(0) == 1 - /// # and - /// x == set([3]) - /// # )"#).unwrap()); - /// ``` - set.pop(this, index = NoneType::None, /) { - let mut this = this.downcast_mut::()?.unwrap(); - let length = this.len() as i64; - let index = if index.get_type() == "NoneType" { - length - 1 - } else { - index.to_int()? - }; - if index < 0 || index >= length { - return Err(ValueError::IndexOutOfBound(index)); - } - let index = index as usize; - let ret = if index == (length - 1) as usize { - this.pop_back() - } else if index == 0 { - this.pop_front() - } else { - let ret = this.get_content().iter().nth(index).cloned(); - let values: Vec<_> = this.get_content().iter().take(index).chain(this.get_content().iter().skip(index + 1)).cloned().collect(); - this.clear(); - for value in values { - this.insert(value.into())?; - } - ret.map(Into::into) - }; - Ok(ret.unwrap()) - } - - /// set.remove: remove a value from a set - /// - /// `S.remove(x)` removes the the value `x` from the set S, and returns `None`. - /// - /// `remove` fails if the set does not contain `x`, is frozen, or has active iterators. 
- /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = set([1, 2, 3]) - /// # ( - /// x.remove(2) == None - /// # and - /// x == set([1, 3]) - /// # )"#).unwrap()); - /// ``` - /// - /// A subsequent call to `x.remove(2)` would yield an error because the element won't be - /// found. - set.remove(this, needle, /) { - let mut this = this.downcast_mut::()?.unwrap(); - let did_remove = this.remove(&needle)?; - if did_remove { - Ok(Value::new(NoneType::None)) - } else { - starlark_err!( - SET_REMOVE_ELEMENT_NOT_FOUND_ERROR_CODE, - format!("Element '{}' not found in '{}'", needle, this.to_str()), - "not found".to_owned() - ) - } - } - - /// set.symmetric_difference: return a set containing the elements present in exactly one of - /// this and another set. - /// - /// `symmetric_difference` fails if its argument is not a set. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = set([1, 2, 3, 4]) - /// y = set([0, 1, 2]) - /// z = set([5]) - /// ( - /// x.symmetric_difference(y) == set([3, 4, 0]) - /// # and - /// y.symmetric_difference(x) == set([0, 3, 4]) - /// # and - /// x.symmetric_difference(z) == set([1, 2, 3, 4, 5]) - /// # )"#).unwrap()); - /// ``` - set.symmetric_difference(this, other, /) { - Set::compare(&this, &other, &|s1, s2| { - Ok(Set::from(s1.symmetric_difference(s2).cloned().collect()).unwrap()) - }) - } - - /// set.symmetric_difference_update: update this set to contain the symmetric difference of - /// this and another set. - /// - /// `symmetric_difference_update` fails if its argument is not a set. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x1 = set([1, 2, 3, 4]) - /// x2 = set([1, 2, 3, 4]) - /// y = set([0, 1, 2]) - /// z = set([5]) - /// ( - /// x1.symmetric_difference_update(y) == None - /// # and - /// x1 == set([3, 4, 0]) - /// # and - /// y == set([0, 1, 2]) - /// # and - /// y.symmetric_difference_update(x2) == None - /// # and - /// y == set([0, 3, 4]) - /// # and - /// x2 == set([1, 2, 3, 4]) - /// # and - /// x2.symmetric_difference_update(z) == None - /// # and - /// x2 == set([1, 2, 3, 4, 5]) - /// # and - /// z == set([5]) - /// # )"#).unwrap()); - /// ``` - set.symmetric_difference_update(this, other, /) { - let symmetric_difference = Set::compare(&this, &other, &|s1, s2| { - Ok(Set::from(s1.symmetric_difference(s2).cloned().collect()).unwrap()) - })?; - let mut this = this.downcast_mut::()?.unwrap(); - this.clear(); - for item in &symmetric_difference.iter()? { - this.insert(item)?; - } - Ok(Value::new(NoneType::None)) - } - - /// set.union: return a set containing all of the elements of this set, then all of the extra - /// elements of the other iterables. - /// - /// `S.union(x, y)` returns a set of the union of `S` and `x` and `y` - /// (which must be iterables)'s elements. - /// - /// `union` fails if its arguments are not iterable. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = set([1, 2, 3]) - /// y = set([2, 4, 5]) - /// z = [5, 6] - /// ( - /// x.union(y, z) == set([1, 2, 3, 4, 5, 6]) - /// # and - /// x == set([1, 2, 3]) - /// # and - /// y == set([2, 4, 5]) - /// # and - /// z == [5, 6] - /// # )"#).unwrap()); - /// ``` - set.union(this, *others) { - let mut ret = Set::default(); - for el in &this.iter()? 
{ - ret.insert_if_absent(el)?; - } - for other in others { - for el in &other.iter()? { - ret.insert_if_absent(el)?; - } - } - ok!(ret) - } - - /// set.update: update a set to also contain another iterable's content. - /// - /// `S.update(x)` adds the elements of `x`, which must be iterable, to - /// the set S, and returns `None`. - /// - /// `update` fails if `x` is not iterable, or if the set S is frozen or has active iterators. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = set() - /// # ( - /// x.update([1, 2, 3], set(["foo"])) == None - /// # and - /// x.update(["bar"]) == None - /// # and - /// x == set([1, 2, 3, "foo", "bar"]) - /// # )"#).unwrap()); - /// ``` - set.update(this, *others) { - let mut this = this.downcast_mut::()?.unwrap(); - for other in others { - for el in &other.iter()? { - this.insert_if_absent(el)?; - } - } - Ok(Value::new(NoneType::None)) - } -} diff --git a/starlark/src/linked_hash_set/value.rs b/starlark/src/linked_hash_set/value.rs deleted file mode 100644 index 15c0216c..00000000 --- a/starlark/src/linked_hash_set/value.rs +++ /dev/null @@ -1,316 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Define the set type of Starlark -use crate::linked_hash_set::set_impl::LinkedHashSet; -use crate::values::error::ValueError; -use crate::values::hashed_value::HashedValue; -use crate::values::iter::TypedIterable; -use crate::values::slice_indices::convert_slice_indices; -use crate::values::*; -use std::fmt; -use std::fmt::Write as _; -use std::num::Wrapping; - -#[derive(Default, Clone)] -pub(crate) struct Set { - content: LinkedHashSet, -} - -impl Set { - pub fn _new() -> ValueOther { - ValueOther::default() - } - - pub fn from>(values: Vec) -> Result { - let mut result = Self::default(); - for v in values.into_iter() { - result.content.insert_if_absent(HashedValue::new(v.into())?); - } - Ok(Value::new(result)) - } - - pub fn insert_if_absent(&mut self, v: Value) -> Result<(), ValueError> { - let v = v.clone_for_container(self)?; - self.content.insert_if_absent(HashedValue::new(v.clone())?); - Ok(()) - } - - pub fn compare( - v1: &Value, - v2: &Value, - f: &dyn Fn( - &LinkedHashSet, - &LinkedHashSet, - ) -> Result, - ) -> Result { - match (v1.downcast_ref::(), v2.downcast_ref::()) { - (Some(v1), Some(v2)) => f(&v1.content, &v2.content), - _ => Err(ValueError::IncorrectParameterType), - } - } - - /// Get a reference to underlying set. - /// - /// Must not expose `content` directly, because `Set` must hold - /// certain invariants like no cyclic references. 
- pub(crate) fn get_content(&self) -> &LinkedHashSet { - &self.content - } - - pub fn clear(&mut self) { - self.content.clear(); - } - - pub fn copy(&self) -> Set { - Set { - content: self.content.clone(), - } - } - - pub fn remove(&mut self, needle: &Value) -> Result { - let needle = HashedValue::new(needle.clone())?; - Ok(self.content.remove(&needle)) - } - - pub fn insert(&mut self, value: Value) -> Result<(), ValueError> { - let value = value.clone_for_container(self)?; - let value = HashedValue::new(value)?; - self.content.insert(value); - Ok(()) - } - - pub fn _insert_hashed(&mut self, v: HashedValue) { - self.content.insert(v); - } - - pub fn _is_empty(&self) -> bool { - self.content.is_empty() - } - - pub fn pop_front(&mut self) -> Option { - self.content.pop_front().map(HashedValue::into) - } - - pub fn pop_back(&mut self) -> Option { - self.content.pop_back().map(HashedValue::into) - } - - pub fn len(&self) -> usize { - self.content.len() - } -} - -impl From for Value { - fn from(set: Set) -> Self { - Value::new(set) - } -} - -impl TypedValue for Set { - type Holder = Mutable; - - fn values_for_descendant_check_and_freeze<'a>( - &'a self, - ) -> Box + 'a> { - Box::new(self.content.iter().map(|v| v.get_value().clone())) - } - - /// Returns a string representation for the set - /// - /// # Examples: - /// ``` - /// # use starlark::values::*; - /// # use starlark::values::list::List; - /// assert_eq!("[1, 2, 3]", Value::from(vec![1, 2, 3]).to_str()); - /// assert_eq!("[1, [2, 3]]", - /// Value::from(vec![Value::from(1), Value::from(vec![2, 3])]).to_str()); - /// assert_eq!("[1]", Value::from(vec![1]).to_str()); - /// assert_eq!("[]", Value::from(Vec::::new()).to_str()); - /// ``` - fn to_repr_impl(&self, buf: &mut String) -> fmt::Result { - write!(buf, "{{")?; - for (i, v) in self.content.iter().enumerate() { - if i != 0 { - write!(buf, ", ")?; - } - v.get_value().to_repr_impl(buf)?; - } - write!(buf, "}}")?; - Ok(()) - } - - const TYPE: &'static str = "set"; - fn to_bool(&self) -> bool { - !self.content.is_empty() - } - - fn equals(&self, other: &Set) -> Result { - if self.content.len() != other.content.len() { - return Ok(false); - } - - for a in &self.content { - if !other.content.contains(a) { - return Ok(false); - } - } - - Ok(true) - } - - fn at(&self, index: Value) -> ValueResult { - let i = index.convert_index(self.length()?)? as usize; - let to_skip = if i == 0 { 0 } else { i - 1 }; - Ok(self - .content - .iter() - .nth(to_skip) - .unwrap() - .get_value() - .clone()) - } - - fn length(&self) -> Result { - Ok(self.content.len() as i64) - } - - fn contains(&self, other: &Value) -> Result { - Ok(self.content.contains(&HashedValue::new(other.clone())?)) - } - - fn slice( - &self, - start: Option, - stop: Option, - stride: Option, - ) -> ValueResult { - let (start, stop, stride) = convert_slice_indices(self.length()?, start, stop, stride)?; - Ok(Value::from(tuple::slice_vector( - start, - stop, - stride, - self.content.iter().map(HashedValue::get_value), - ))) - } - - fn iter(&self) -> Result<&dyn TypedIterable, ValueError> { - Ok(self) - } - - /// Concatenate `other` to the current value. - /// - /// `other` has to be a set. 
- /// - /// # Example - /// - /// ```rust - /// # use starlark::values::*; - /// # use starlark::values::list::List; - /// # assert!( - /// // {1, 2, 3} + {2, 3, 4} == {1, 2, 3, 4} - /// Value::from(vec![1,2,3]).add(Value::from(vec![2,3])).unwrap() - /// == Value::from(vec![1, 2, 3, 2, 3]) - /// # ); - /// ``` - fn add(&self, other: &Set) -> Result { - let mut result = Set { - content: LinkedHashSet::new(), - }; - for x in &self.content { - result.content.insert(x.clone()); - } - for x in &other.content { - result.content.insert_if_absent(x.clone()); - } - Ok(result) - } - - fn get_hash(&self) -> Result { - Ok(self - .content - .iter() - .map(HashedValue::get_hash) - .map(Wrapping) - .fold(Wrapping(0_u64), |acc, v| acc + v) - .0) - } -} - -impl TypedIterable for Set { - fn to_iter<'a>(&'a self) -> Box + 'a> { - Box::new(self.content.iter().map(|v| v.get_value().clone())) - } - - fn to_vec(&self) -> Vec { - self.content.iter().map(|v| v.get_value().clone()).collect() - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_to_str() { - assert_eq!("{1, 2, 3}", Set::from(vec![1, 2, 3]).unwrap().to_str()); - assert_eq!( - "{1, {2, 3}}", - Set::from(vec![Value::from(1), Set::from(vec![2, 3]).unwrap()]) - .unwrap() - .to_str() - ); - assert_eq!("{1}", Set::from(vec![1]).unwrap().to_str()); - assert_eq!("{}", Set::from(Vec::::new()).unwrap().to_str()); - } - - #[test] - fn equality_ignores_order() { - assert_eq!( - Set::from(vec![1, 2, 3]).unwrap(), - Set::from(vec![3, 2, 1]).unwrap(), - ); - } - - #[test] - fn test_value_alias() { - let v1 = Set::from(vec![1, 2]).unwrap(); - let v2 = v1.clone(); - v2.downcast_mut::() - .unwrap() - .unwrap() - .insert_if_absent(Value::from(3)) - .unwrap(); - assert_eq!(v2.to_str(), "{1, 2, 3}"); - assert_eq!(v1.to_str(), "{1, 2, 3}"); - } - - #[test] - fn test_is_descendant() { - let v1 = Set::from(vec![1, 2, 3]).unwrap(); - let v2 = Set::from(vec![Value::new(1), Value::new(2), v1.clone()]).unwrap(); - let v3 = Set::from(vec![Value::new(1), Value::new(2), v2.clone()]).unwrap(); - assert!(v3.is_descendant_value(&v2)); - assert!(v3.is_descendant_value(&v1)); - assert!(v3.is_descendant_value(&v3)); - - assert!(v2.is_descendant_value(&v1)); - assert!(v2.is_descendant_value(&v2)); - assert!(!v2.is_descendant_value(&v3)); - - assert!(v1.is_descendant_value(&v1)); - assert!(!v1.is_descendant_value(&v2)); - assert!(!v1.is_descendant_value(&v3)); - } -} diff --git a/starlark/src/stdlib/dict.rs b/starlark/src/stdlib/dict.rs deleted file mode 100644 index a9ac239d..00000000 --- a/starlark/src/stdlib/dict.rs +++ /dev/null @@ -1,426 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Methods for the `dict` type. - -use crate::values::dict::Dictionary; -use crate::values::error::*; -use crate::values::none::NoneType; -use crate::values::*; - -pub const DICT_KEY_NOT_FOUND_ERROR_CODE: &str = "UF20"; -pub const POP_ON_EMPTY_DICT_ERROR_CODE: &str = "UF21"; - -macro_rules! 
ok { - ($e:expr) => { - return Ok(Value::from($e)); - }; -} - -starlark_module! {global => - /// [dict.clear]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#dict·clear - /// ): clear a dictionary - /// - /// `D.clear()` removes all the entries of dictionary D and returns `None`. - /// It fails if the dictionary is frozen or if there are active iterators. - /// - /// - /// `dict·clear` is not provided by the Java implementation. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = {"one": 1, "two": 2} - /// x.clear() # now x == {} - /// # (x == {})"#).unwrap()); - /// ``` - dict.clear(this) { - let mut this = this.downcast_mut::()?.unwrap(); - this.clear(); - Ok(Value::new(NoneType::None)) - } - - /// [dict.get]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#dict·get - /// ): return an element from the dictionary. - /// - /// `D.get(key[, default])` returns the dictionary value corresponding to the given key. - /// If the dictionary contains no such value, `get` returns `None`, or the - /// value of the optional `default` parameter if present. - /// - /// `get` fails if `key` is unhashable. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = {"one": 1, "two": 2} - /// # ( - /// x.get("one") == 1 - /// # and - /// x.get("three") == None - /// # and - /// x.get("three", 0) == 0 - /// # )"#).unwrap()); - /// ``` - dict.get(this, key, default = NoneType::None, /) { - match this.at(key) { - Err(ValueError::KeyNotFound(..)) => Ok(default), - x => x - } - } - - /// [dict.items]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#dict·items - /// ): get list of (key, value) pairs. - /// - /// `D.items()` returns a new list of key/value pairs, one per element in - /// dictionary D, in the same order as they would be returned by a `for` loop. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = {"one": 1, "two": 2} - /// # ( - /// x.items() == [("one", 1), ("two", 2)] - /// # )"#).unwrap()); - /// ``` - dict.items(this) { - let this = this.downcast_ref::().unwrap(); - ok!(this.items()) - } - - /// [dict.keys]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#dict·keys - /// ): get the list of keys of the dictionary. - /// - /// `D.keys()` returns a new list containing the keys of dictionary D, in the - /// same order as they would be returned by a `for` loop. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = {"one": 1, "two": 2} - /// # ( - /// x.keys() == ["one", "two"] - /// # )"#).unwrap()); - /// ``` - dict.keys(this) { - let this = this.downcast_ref::().unwrap(); - Ok(Value::from(this.keys())) - } - - /// [dict.pop]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#dict·pop - /// ): return an element and remove it from a dictionary. - /// - /// `D.pop(key[, default])` returns the value corresponding to the specified - /// key, and removes it from the dictionary. If the dictionary contains no - /// such value, and the optional `default` parameter is present, `pop` - /// returns that value; otherwise, it fails. 
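The `ok!` macro defined at the top of this module just converts an expression into the return value and exits early. A generic sketch of the same convenience (the names here are illustrative, not part of the crate):

```rust
// Early-return helper: convert the expression into the Ok type and return,
// like the `ok!` macro above does with the crate's Value.
macro_rules! ok {
    ($e:expr) => {
        return Ok($e.into());
    };
}

fn describe(n: i64) -> Result<String, ()> {
    if n == 0 {
        ok!("zero");
    }
    ok!(format!("non-zero: {}", n));
}

fn main() {
    assert_eq!(describe(0), Ok("zero".to_string()));
    assert_eq!(describe(7), Ok("non-zero: 7".to_string()));
}
```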
- /// - /// `pop` fails if `key` is unhashable, or the dictionary is frozen or has active iterators. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = {"one": 1, "two": 2} - /// # ( - /// x.pop("one") == 1 - /// # and - /// x == {"two": 2} - /// # and - /// x.pop("three", 0) == 0 - /// # )"#).unwrap()); - /// ``` - /// - /// Failure: - /// - /// ```python - /// x.pop("four") # error: missing key - /// ``` - dict.pop(this, key, default = NoneType::None, /) { - let mut this = this.downcast_mut::()?.unwrap(); - match this.remove(&key)? { - Some(x) => Ok(x), - None => if default.get_type() == "NoneType" { - let key_error = format!("Key '{}' not found in '{}'", key.to_repr(), this.to_repr()); - starlark_err!( - DICT_KEY_NOT_FOUND_ERROR_CODE, - key_error, - "not found".to_owned() - ); - } else { - Ok(default.clone()) - } - } - } - - /// [dict.popitem]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#dict·popitem - /// ): returns and removes the first key/value pair of a dictionary. - /// - /// `D.popitem()` returns the first key/value pair, removing it from the dictionary. - /// - /// `popitem` fails if the dictionary is empty, frozen, or has active iterators. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = {"one": 1, "two": 2} - /// # ( - /// x.popitem() == ("one", 1) - /// # and - /// x.popitem() == ("two", 2) - /// # )"#).unwrap()); - /// ``` - /// - /// Failure: - /// - /// ```python - /// x.popitem() # error: empty dict - /// ``` - dict.popitem(this) { - let mut this = this.downcast_mut::()?.unwrap(); - match this.pop_front() { - Some(x) => ok!(x), - None => starlark_err!( - POP_ON_EMPTY_DICT_ERROR_CODE, - "Cannot .popitem() on an empty dictionary".to_owned(), - "empty dictionary".to_owned() - ) - } - } - - /// [dict.setdefault]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#dict·setdefault - /// ): get a value from a dictionary, setting it to a new value if not present. - /// - /// `D.setdefault(key[, default])` returns the dictionary value corresponding to the given key. - /// If the dictionary contains no such value, `setdefault`, like `get`, - /// returns `None` or the value of the optional `default` parameter if - /// present; `setdefault` additionally inserts the new key/value entry into the dictionary. - /// - /// `setdefault` fails if the key is unhashable or if the dictionary is frozen. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = {"one": 1, "two": 2} - /// # ( - /// x.setdefault("one") == 1 - /// # and - /// x.setdefault("three", 0) == 0 - /// # and - /// x == {"one": 1, "two": 2, "three": 0} - /// # and - /// x.setdefault("four") == None - /// # and - /// x == {"one": 1, "two": 2, "three": 0, "four": None} - /// # )"#).unwrap()); - /// ``` - dict.setdefault(this, key, default = NoneType::None, /) { - let mut this = this.downcast_mut::()?.unwrap(); - if let Some(r) = this.get(&key)? { - return Ok(r.clone()) - } - this.insert(key, default.clone())?; - Ok(default) - } - - /// [dict.update]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#dict·update - /// ): update values in the dictionary. 
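`setdefault`, documented just above, is the classic get-or-insert operation. A standalone sketch with `std::collections::HashMap` and plain key/value types instead of the interpreter's `Value`:

```rust
use std::collections::HashMap;

// Return the value for `key`; if absent, insert `default` and return it,
// mirroring dict.setdefault above.
fn setdefault(map: &mut HashMap<String, i64>, key: &str, default: i64) -> i64 {
    *map.entry(key.to_string()).or_insert(default)
}

fn main() {
    let mut m = HashMap::new();
    m.insert("one".to_string(), 1);
    assert_eq!(setdefault(&mut m, "one", 99), 1);  // existing value is kept
    assert_eq!(setdefault(&mut m, "three", 0), 0); // inserted and returned
    assert_eq!(m["three"], 0);
}
```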
- /// - /// `D.update([pairs][, name=value[, ...])` makes a sequence of key/value - /// insertions into dictionary D, then returns `None.` - /// - /// If the positional argument `pairs` is present, it must be `None`, - /// another `dict`, or some other iterable. - /// If it is another `dict`, then its key/value pairs are inserted into D. - /// If it is an iterable, it must provide a sequence of pairs (or other iterables of length 2), - /// each of which is treated as a key/value pair to be inserted into D. - /// - /// For each `name=value` argument present, the name is converted to a - /// string and used as the key for an insertion into D, with its corresponding - /// value being `value`. - /// - /// `update` fails if the dictionary is frozen. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = {} - /// x.update([("a", 1), ("b", 2)], c=3) - /// x.update({"d": 4}) - /// x.update(e=5) - /// # ( - /// x == {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5} - /// # )"#).unwrap()); - /// ``` - dict.update(this, ?pairs, /, **kwargs) { - if let Some(pairs) = pairs { - match pairs.get_type() { - "list" => for v in &pairs.iter()? { - if v.length()? != 2 { - starlark_err!( - INCORRECT_PARAMETER_TYPE_ERROR_CODE, - concat!( - "dict.update expect a list of pairsor a dictionary as first ", - "argument, got a list of non-pairs." - ).to_owned(), - "list of non-pairs".to_owned() - ) - } - this.set_at(v.at(Value::new(0))?, v.at(Value::new(1))?)?; - }, - "dict" => for k in &pairs.iter()? { - this.set_at(k.clone(), pairs.at(k)?)? - }, - x => starlark_err!( - INCORRECT_PARAMETER_TYPE_ERROR_CODE, - format!( - concat!( - "dict.update expect a list or a dictionary as first argument, ", - "got a value of type {}." - ), - x - ), - format!("type {} while expected list or dict", x) - ) - } - } - - for (k, v) in kwargs { - this.set_at(k.into(), v)?; - } - Ok(Value::new(NoneType::None)) - } - - /// [dict.values]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#dict·values - /// ): get the list of values of the dictionary. - /// - /// `D.values()` returns a new list containing the dictionary's values, in the - /// same order as they would be returned by a `for` loop over the - /// dictionary. 
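One detail of `update` worth spelling out: the implementation above inserts the positional pairs (or dict) first and the keyword arguments last, so keyword arguments win on conflicting keys. A small sketch over ordinary maps (names illustrative only):

```rust
use std::collections::HashMap;

// Insert the positional pairs, then the keyword pairs, so that later
// insertions overwrite earlier ones, as in dict.update above.
fn update(target: &mut HashMap<String, i64>, pairs: &[(&str, i64)], kwargs: &[(&str, i64)]) {
    for &(k, v) in pairs.iter().chain(kwargs.iter()) {
        target.insert(k.to_string(), v);
    }
}

fn main() {
    let mut d = HashMap::new();
    update(&mut d, &[("a", 1), ("b", 2)], &[("b", 20), ("c", 3)]);
    assert_eq!((d["a"], d["b"], d["c"]), (1, 20, 3)); // kwargs applied last
}
```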
- /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = {"one": 1, "two": 2} - /// # ( - /// x.values() == [1, 2] - /// # )"#).unwrap()); - /// ``` - dict.values(this) { - let this = this.downcast_ref::().unwrap(); - ok!(this.values()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_clear() { - starlark_ok!(r#"x = {"one": 1, "two": 2}; x.clear(); (x == {})"#); - } - - #[test] - fn test_get() { - starlark_ok!(r#"x = {"one": 1, "two": 2}; (x.get("one") == 1)"#); - starlark_ok!(r#"x = {"one": 1, "two": 2}; (x.get("three") == None)"#); - starlark_ok!(r#"x = {"one": 1, "two": 2}; (x.get("three", 0) == 0)"#); - } - - #[test] - fn test_items() { - starlark_ok!(r#"x = {"one": 1, "two": 2}; (x.items() == [("one", 1), ("two", 2)])"#); - } - - #[test] - fn test_keys() { - starlark_ok!(r#"x = {"one": 1, "two": 2}; (x.keys() == ["one", "two"])"#); - } - - #[test] - fn test_pop() { - starlark_ok!( - r#"x = {"one": 1, "two": 2}; ( - x.pop("one") == 1 and x == {"two": 2} and x.pop("three", 0) == 0)"# - ); - starlark_fail!( - r#"x = {"one": 1}; x.pop("four")"#, - DICT_KEY_NOT_FOUND_ERROR_CODE - ); - } - - #[test] - fn test_popitem() { - starlark_ok!( - r#"x = {"one": 1, "two": 2}; ( - x.popitem() == ("one", 1) and x.popitem() == ("two", 2))"# - ); - starlark_fail!(r#"x = {}; x.popitem()"#, POP_ON_EMPTY_DICT_ERROR_CODE); - } - - #[test] - fn test_setdefault() { - starlark_ok!( - r#"x = {"one": 1, "two": 2}; ( - x.setdefault("one") == 1 and - x.setdefault("three", 0) == 0 and - x == {"one": 1, "two": 2, "three": 0} and - x.setdefault("four") == None and - x == {"one": 1, "two": 2, "three": 0, "four": None })"# - ); - } - - #[test] - fn test_update() { - starlark_ok!( - r#" -x = {} -x.update([("a", 1), ("b", 2)], c=3) -x.update({"d": 4}) -x.update(e=5) -(x == {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5})"# - ); - } - - #[test] - fn test_values() { - starlark_ok!(r#"x = {"one": 1, "two": 2}; (x.values() == [1, 2])"#); - } -} diff --git a/starlark/src/stdlib/inspect.rs b/starlark/src/stdlib/inspect.rs deleted file mode 100644 index db6f7d5d..00000000 --- a/starlark/src/stdlib/inspect.rs +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Implementation of `inspect` builtin. - -use crate::values::Value; - -starlark_module! { global => - /// Return some internals about the value. - /// - /// This function is to be used for debugging only, it's format is not specified, - /// and may change any time. 
- /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(" - /// a = [] - /// 'List' in inspect(a).rust_type_name - /// # ").unwrap()); - /// ``` - inspect(value, /) { - Ok(Value::new(value.inspect())) - } -} - -#[cfg(test)] -mod test { - use crate::eval::noload; - use crate::stdlib::global_environment_for_repl_and_tests; - use crate::syntax::dialect::Dialect; - use crate::values::Immutable; - use crate::values::TypedValue; - use crate::values::Value; - use std::iter; - - #[test] - fn inspect() { - struct TestInspectable {} - - impl TypedValue for TestInspectable { - type Holder = Immutable; - const TYPE: &'static str = "test_inspectable"; - - fn values_for_descendant_check_and_freeze<'a>( - &'a self, - ) -> Box> { - Box::new(iter::empty()) - } - - fn inspect_custom(&self) -> Value { - Value::from("test test") - } - } - - let (mut env, type_values) = global_environment_for_repl_and_tests(); - env.set("ti", Value::new(TestInspectable {})).unwrap(); - let custom = noload::eval( - &Default::default(), - "test.sky", - "inspect(ti).custom", - Dialect::Bzl, - &mut env, - &type_values, - ) - .unwrap(); - assert_eq!( - "test test", - custom.downcast_ref::().unwrap().as_str() - ); - } -} diff --git a/starlark/src/stdlib/list.rs b/starlark/src/stdlib/list.rs deleted file mode 100644 index c9c6912d..00000000 --- a/starlark/src/stdlib/list.rs +++ /dev/null @@ -1,306 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Methods for the `list` type. - -use crate::values::list::List; -use crate::values::none::NoneType; -use crate::values::*; - -// Errors -- UF = User Failure -- Failure that should be expected by the user (e.g. from a fail()). -pub const LIST_INDEX_FAILED_ERROR_CODE: &str = "UF10"; -pub const LIST_REMOVE_ELEMENT_NOT_FOUND_ERROR_CODE: &str = "UF11"; - -starlark_module! {global => - /// [list.append]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#list·append - /// ): append an element to a list. - /// - /// `L.append(x)` appends `x` to the list L, and returns `None`. - /// - /// `append` fails if the list is frozen or has active iterators. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = [] - /// # ( - /// x.append(1) == None - /// # and - /// x.append(2) == None - /// # and - /// x.append(3) == None - /// # and - /// x == [1, 2, 3] - /// # )"#).unwrap()); - /// ``` - list.append(this, el, /) { - let mut this = this.downcast_mut::()?.unwrap(); - this.push(el)?; - Ok(Value::new(NoneType::None)) - } - - /// [list.clear]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#list·clear - /// ): clear a list - /// - /// `L.clear()` removes all the elements of the list L and returns `None`. - /// It fails if the list is frozen or if there are active iterators. 
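All of the doc examples in these modules go through `starlark::stdlib::starlark_default`, which evaluates a snippet in the default environment and returns a boolean result. The same helper is handy outside doc tests; a small hedged example, reusing behaviour documented above:

```rust
// Evaluate a Starlark snippet with the default environment, as the doc
// examples above do, and assert that the final expression is truthy.
use starlark::stdlib::starlark_default;

fn main() {
    assert!(starlark_default(r#"x = {"one": 1}; x.setdefault("two", 2) == 2"#).unwrap());
}
```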
- /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = [1, 2, 3] - /// x.clear() - /// # ( - /// x == [] - /// # )"#).unwrap()); - /// ``` - list.clear(this) { - let mut this = this.downcast_mut::()?.unwrap(); - this.clear(); - Ok(Value::new(NoneType::None)) - } - - /// [list.extend]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#list·extend - /// ): extend a list with another iterable's content. - /// - /// `L.extend(x)` appends the elements of `x`, which must be iterable, to - /// the list L, and returns `None`. - /// - /// `extend` fails if `x` is not iterable, or if the list L is frozen or has active iterators. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = [] - /// # ( - /// x.extend([1, 2, 3]) == None - /// # and - /// x.extend(["foo"]) == None - /// # and - /// x == [1, 2, 3, "foo"] - /// # )"#).unwrap()); - /// ``` - list.extend(this, other, /) { - let mut this = this.downcast_mut::()?.unwrap(); - this.extend(other)?; - Ok(Value::new(NoneType::None)) - } - - /// [list.index]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#list·index - /// ): get the index of an element in the list. - /// - /// `L.index(x[, start[, end]])` finds `x` within the list L and returns its index. - /// - /// The optional `start` and `end` parameters restrict the portion of - /// list L that is inspected. If provided and not `None`, they must be list - /// indices of type `int`. If an index is negative, `len(L)` is effectively - /// added to it, then if the index is outside the range `[0:len(L)]`, the - /// nearest value within that range is used; see [Indexing](#indexing). - /// - /// `index` fails if `x` is not found in L, or if `start` or `end` - /// is not a valid index (`int` or `None`). - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = ["b", "a", "n", "a", "n", "a"] - /// # ( - /// x.index("a") == 1 # bAnana - /// # and - /// x.index("a", 2) == 3 # banAna - /// # and - /// x.index("a", -2) == 5 # bananA - /// # )"#).unwrap()); - /// ``` - list.index(this, needle, start = 0, end = NoneType::None, /) { - convert_indices!(this, start, end); - let it = this.iter()?; - let mut it = it.iter().skip(start).take(end - start); - if let Some(offset) = it.position(|x| x == needle) { - Ok(Value::new((offset + start) as i64)) - } else { - starlark_err!( - LIST_INDEX_FAILED_ERROR_CODE, - format!("Element '{}' not found in '{}'", needle, this), - "not found".to_owned() - ); - } - } - - /// [list.insert]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#list·insert - /// ): insert an element in a list. - /// - /// `L.insert(i, x)` inserts the value `x` in the list L at index `i`, moving - /// higher-numbered elements along by one. It returns `None`. - /// - /// As usual, the index `i` must be an `int`. If its value is negative, - /// the length of the list is added, then its value is clamped to the - /// nearest value in the range `[0:len(L)]` to yield the effective index. - /// - /// `insert` fails if the list is frozen or has active iterators. 
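The `skip`/`take`/`position` dance in `index` above, restricted to `[start, end)` and then shifted back to an absolute index, works the same way on any slice; a detached sketch:

```rust
// Find `needle` in items[start..end] and return its absolute index,
// mirroring the search in list.index above.
fn index_in_range<T: PartialEq>(items: &[T], needle: &T, start: usize, end: usize) -> Option<usize> {
    items
        .iter()
        .skip(start)
        .take(end.saturating_sub(start))
        .position(|x| x == needle)
        .map(|offset| offset + start)
}

fn main() {
    let banana = ["b", "a", "n", "a", "n", "a"];
    assert_eq!(index_in_range(&banana, &"a", 0, 6), Some(1)); // bAnana
    assert_eq!(index_in_range(&banana, &"a", 2, 6), Some(3)); // banAna
    assert_eq!(index_in_range(&banana, &"z", 0, 6), None);
}
```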
- /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = ["b", "c", "e"] - /// x.insert(0, "a") - /// x.insert(-1, "d") - /// # ( - /// x == ["a", "b", "c", "d", "e"] - /// # )"#).unwrap()); - /// ``` - list.insert(this, index, el, /) { - let mut this = this.downcast_mut::()?.unwrap(); - convert_indices!(this, index); - this.insert(index, el)?; - Ok(Value::new(NoneType::None)) - } - - /// [list.pop]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#list·pop - /// ): removes and returns the last element of a list. - /// - /// `L.pop([index])` removes and returns the last element of the list L, or, - /// if the optional index is provided, at that index. - /// - /// `pop` fails if the index is negative or not less than the length of - /// the list, of if the list is frozen or has active iterators. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = [1, 2, 3] - /// # ( - /// x.pop() == 3 - /// # and - /// x.pop() == 2 - /// # and - /// x == [1] - /// # )"#).unwrap()); - /// ``` - list.pop(this, ?index, /) { - let mut this = this.downcast_mut::()?.unwrap(); - let index = match index { - Some(index) => index.to_int()?, - None => this.length()? - 1, - }; - Ok(this.pop(index)?) - } - - /// [list.remove]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#list·remove - /// ): remove a value from a list - /// - /// `L.remove(x)` removes the first occurrence of the value `x` from the list L, and returns `None`. - /// - /// `remove` fails if the list does not contain `x`, is frozen, or has active iterators. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// x = [1, 2, 3, 2] - /// x.remove(2) - /// # t = ( - /// x == [1, 3, 2] - /// # ) - /// x.remove(2) - /// # (t and ( - /// x == [1, 3] - /// # ))"#).unwrap()); - /// ``` - /// - /// A subsequence call to `x.remove(2)` would yield an error because the element won't be - /// found. 
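`pop` above falls back to the last index when none is supplied; the same behaviour on a plain `Vec`, with the error reporting of the real implementation reduced to `Option` for brevity:

```rust
// Remove and return the element at `index`, defaulting to the last one,
// as list.pop above does.
fn pop(v: &mut Vec<i64>, index: Option<usize>) -> Option<i64> {
    if v.is_empty() {
        return None;
    }
    let i = index.unwrap_or(v.len() - 1);
    if i < v.len() {
        Some(v.remove(i))
    } else {
        None
    }
}

fn main() {
    let mut v = vec![1, 2, 3];
    assert_eq!(pop(&mut v, None), Some(3));
    assert_eq!(pop(&mut v, Some(0)), Some(1));
    assert_eq!(v, vec![2]);
}
```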
- /// ``` - list.remove(this, needle, /) { - let mut this = this.downcast_mut::()?.unwrap(); - this.remove(needle)?; - Ok(Value::new(NoneType::None)) - } -} - -#[cfg(test)] -mod tests { - use super::LIST_REMOVE_ELEMENT_NOT_FOUND_ERROR_CODE; - - #[test] - fn test_append() { - starlark_ok!(r#"x = []; x.append(1); x.append(2); x.append(3); (x == [1, 2, 3])"#); - } - - #[test] - fn test_clear() { - starlark_ok!(r#"x = [1, 2, 3]; x.clear(); (x == [])"#); - } - - #[test] - fn test_extend() { - starlark_ok!(r#"x = []; x.extend([1, 2, 3]); x.extend(["foo"]); (x == [1, 2, 3, "foo"])"#); - } - - #[test] - fn test_index() { - starlark_ok!( - r#"x = ["b", "a", "n", "a", "n", "a"]; ( - x.index("a") == 1 and x.index("a", 2) == 3 and x.index("a", -2) == 5)"# - ); - } - - #[test] - fn test_insert() { - starlark_ok!( - r#"x = ["b", "c", "e"]; x.insert(0, "a"); x.insert(-1, "d"); ( - x == ["a", "b", "c", "d", "e"])"# - ); - } - - #[test] - fn test_pop() { - starlark_ok!(r#"x = [1, 2, 3]; x.pop() == 3"#); - starlark_ok!(r#"x = [1, 2, 3]; (x.pop() == 3 and x.pop() == 2 and x == [1])"#); - } - - #[test] - fn test_remove() { - starlark_ok!( - r#"x = [1, 2, 3, 2] -x.remove(2); t1 = x == [1, 3, 2] -x.remove(2); t2 = x == [1, 3] -(t1 and t2)"# - ); - starlark_fail!( - r#"x = [1, 2, 3, 2]; x.remove(2); x.remove(2); x.remove(2)"#, - LIST_REMOVE_ELEMENT_NOT_FOUND_ERROR_CODE - ); - } -} diff --git a/starlark/src/stdlib/macros/mod.rs b/starlark/src/stdlib/macros/mod.rs deleted file mode 100644 index 0a0cdb10..00000000 --- a/starlark/src/stdlib/macros/mod.rs +++ /dev/null @@ -1,482 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Define the `starlark_module!` macro to reduce written boilerplate when adding -//! native functions to starlark. - -use crate::environment::TypeValues; -use crate::eval::call_stack::CallStack; -use crate::values::function::ParameterParser; - -pub mod param; -pub mod signature; - -#[doc(hidden)] -#[macro_export] -macro_rules! starlark_signature { - ($signature:ident) => {}; - ($signature:ident / $(,$($rest:tt)+)?) => { - $signature.push_slash(); - $( starlark_signature!($signature $($rest)+) )? - }; - ($signature:ident call_stack $e:ident $(,$($rest:tt)+)?) => { - $( starlark_signature!($signature $($rest)+) )?; - }; - ($signature:ident env $e:ident $(,$($rest:tt)+)?) => { - $( starlark_signature!($signature $($rest)+) )?; - }; - ($signature:ident * $t:ident $(: $pt:ty)? $(,$($rest:tt)+)?) => { - $signature.push_args(stringify!($t)); - $( starlark_signature!($signature $($rest)+) )? - }; - ($signature:ident ** $t:ident $(: $pt:ty)? $(,$($rest:tt)+)?) => { - $signature.push_kwargs(stringify!($t)); - $( starlark_signature!($signature $($rest)+) )? - }; - - // handle params without default value (both named and unnamed) - ($signature:ident $t:ident $(: $pt:ty)? $(,$($rest:tt)+)?) => { - $signature.push_normal(stringify!($t)); - $( starlark_signature!($signature $($rest)+) )? - }; - ($signature:ident ? $t:ident $(: $pt:ty)? 
$(,$($rest:tt)+)?) => { - $signature.push_optional(stringify!($t)); - $( starlark_signature!($signature $($rest)+) )? - }; - - // handle params with default value (both named and unnamed) - ($signature:ident $t:ident : $pt:ty = $e:expr $(,$($rest:tt)+)?) => { - // explicitly specify parameter type to: - // * verify that default value is convertible to required type - // * help type inference find type parameters - $signature.push_with_default_value::( - stringify!($t), - $e, - ); - $( starlark_signature!($signature $($rest)+) )? - }; - ($signature:ident $t:ident = $e:expr $(,$($rest:tt)+)?) => { - $signature.push_with_default_value( - stringify!($t), - $e, - ); - $( starlark_signature!($signature $($rest)+) )? - }; -} - -#[doc(hidden)] -#[macro_export] -macro_rules! starlark_parse_param_type { - (1 : $pt:ty) => { $pt }; - (? : $pt:ty) => { $pt }; - (* : $pt:ty) => { $pt }; - (** : $pt:ty) => { $pt }; - (1) => { - $crate::values::Value - }; - (?) => { - ::std::option::Option<$crate::values::Value> - }; - (*) => { - ::std::vec::Vec<$crate::values::Value> - }; - (**) => { - ::linked_hash_map::LinkedHashMap<$crate::values::string::rc::RcString, $crate::values::Value> - }; -} - -/// Structure used to simplify passing several arguments through -/// `starlark_signature_extraction` macro. -#[doc(hidden)] -pub struct SignatureExtractionContext<'a> { - pub call_stack: &'a mut CallStack, - pub env: &'a TypeValues, - pub args: ParameterParser<'a>, -} - -#[doc(hidden)] -#[macro_export] -macro_rules! starlark_signature_extraction { - ($ctx:ident) => {}; - ($ctx:ident / $(,$($rest:tt)+)?) => { - $( starlark_signature_extraction!($ctx $($rest)+) )? - }; - ($ctx:ident call_stack $e:ident $(,$($rest:tt)+)?) => { - let $e = $ctx.call_stack; - $( starlark_signature_extraction!($ctx $($rest)+) )?; - }; - ($ctx:ident env $e:ident $(,$($rest:tt)+)?) => { - let $e = $ctx.env; - $( starlark_signature_extraction!($ctx $($rest)+) )?; - }; - ($ctx:ident * $t:ident $(: $pt:ty)? $(,$($rest:tt)+)?) => { - #[allow(unused_mut)] - let mut $t: starlark_parse_param_type!(* $(: $pt)?) = - $ctx.args.next_arg()?.into_args_array(stringify!($t))?; - $( starlark_signature_extraction!($ctx $($rest)+) )? - }; - ($ctx:ident ** $t:ident $(: $pt:ty)? $(,$($rest:tt)+)?) => { - #[allow(unused_mut)] - let mut $t: starlark_parse_param_type!(** $(: $pt)?) = - $ctx.args.next_arg()?.into_kw_args_dict(stringify!($t))?; - $( starlark_signature_extraction!($ctx $($rest)+) )? - }; - - ($ctx:ident ? $t:ident $(: $pt:ty)? $(,$($rest:tt)+)?) => { - #[allow(unused_mut)] - let mut $t: starlark_parse_param_type!(? $(: $pt)?) = - $ctx.args.next_arg()?.into_optional(stringify!($t))?; - $( starlark_signature_extraction!($ctx $($rest)+) )? - }; - ($ctx:ident $t:ident $(: $pt:ty)? $(= $e:expr)? $(,$($rest:tt)+)?) => { - #[allow(unused_mut)] - let mut $t: starlark_parse_param_type!(1 $(: $pt)?) = - $ctx.args.next_arg()?.into_normal(stringify!($t))?; - $( starlark_signature_extraction!($ctx $($rest)+) )? - }; -} - -#[doc(hidden)] -#[macro_export] -macro_rules! starlark_fun { - ($(#[$attr:meta])* $fn:ident ( $($signature:tt)* ) { $($content:tt)* } $($($rest:tt)+)?) 
=> { - $(#[$attr])* - fn $fn( - call_stack: &mut $crate::eval::call_stack::CallStack, - env: &$crate::environment::TypeValues, - args: $crate::values::function::ParameterParser, - ) -> $crate::values::ValueResult { - let mut ctx = $crate::stdlib::macros::SignatureExtractionContext { - call_stack, - env, - args, - }; - starlark_signature_extraction!(ctx $($signature)*); - ctx.args.check_no_more_args()?; - $($content)* - } - $(starlark_fun! { - $($rest)+ - })? - }; - ($(#[$attr:meta])* $ty:ident . $fn:ident ( $($signature:tt)* ) { $($content:tt)* } - $($($rest:tt)+)?) => { - $(#[$attr])* - fn $fn( - call_stack: &mut $crate::eval::call_stack::CallStack, - env: &$crate::environment::TypeValues, - args: $crate::values::function::ParameterParser, - ) -> $crate::values::ValueResult { - let mut ctx = $crate::stdlib::macros::SignatureExtractionContext { - call_stack, - env, - args, - }; - starlark_signature_extraction!(ctx $($signature)*); - ctx.args.check_no_more_args()?; - $($content)* - } - $(starlark_fun! { - $($rest)+ - })? - }; -} - -#[doc(hidden)] -#[macro_export] -macro_rules! starlark_signatures { - ($env:expr, $type_values:expr, $(#[$attr:meta])* $name:ident ( $($signature:tt)* ) { $($content:tt)* } - $($($rest:tt)+)?) => { - { - let name = stringify!($name).trim_matches('_'); - #[allow(unused_mut)] - let mut signature = $crate::stdlib::macros::signature::SignatureBuilder::default(); - starlark_signature!(signature $($signature)*); - $env.set(name, $crate::values::function::NativeFunction::new(name.into(), $name, signature.build())).unwrap(); - } - $(starlark_signatures!{ $env, $type_values, - $($rest)+ - })? - }; - ($env:expr, $type_values:expr, $(#[$attr:meta])* $ty:ident . $name:ident ( $($signature:tt)* ) { $($content:tt)* } - $($($rest:tt)+)?) => { - { - let name = stringify!($name).trim_matches('_'); - let mut signature = $crate::stdlib::macros::signature::SignatureBuilder::default(); - starlark_signature!(signature $($signature)*); - $type_values.add_type_value(stringify!($ty), name, - $crate::values::function::NativeFunction::new(name.into(), $name, signature.build())); - } - $(starlark_signatures!{ $env, $type_values, - $($rest)+ - })? - } -} - -/// Declare a starlark module that store one or several function -/// -/// To declare a module with name `name`, the macro would be called: -/// -/// ```rust,ignore -/// starlark_module!{ name => -/// // Starlark function definition goes there -/// } -/// ``` -/// -/// For instance, the following example would declare two functions `str`, `my_fun` and `dbg` in a -/// module named `my_starlark_module`: -/// -/// ```rust -/// # #[macro_use] extern crate starlark; -/// # use starlark::values::*; -/// # use starlark::values::none::NoneType; -/// # use starlark::environment::Environment; -/// # use starlark::environment::TypeValues; -/// starlark_module!{ my_starlark_module => -/// // Declare a 'str' function (_ are trimmed away and just here to avoid collision with -/// // reserved keyword) -/// // `a` argument will be binded to a `a` Rust value, the `/` marks all preceding arguments -/// // as positional only. -/// __str__(a, /) { -/// Ok(Value::new(a.to_str().to_owned())) -/// } -/// -/// // Declare a function my_fun that takes one positional parameter 'a', a named and -/// // positional parameter 'b', a args array 'args' and a keyword dictionary `kwargs` -/// my_fun(a, /, b, c = 1, *args, **kwargs) { -/// // ... -/// # Ok(Value::new(true)) -/// } -/// -/// // Functions can optionally specify parameter types after colon. 
-/// // Parameter can be any type which implements `TryParamConvertFromValue`. -/// // When parameter type is not specified, it is defaulted to `Value` -/// // for regular parameters, `Vec` for `*args` -/// // and `LinkedHashMap` for `**kwargs`. -/// sqr(x: i64) { -/// Ok(Value::new(x * x)) -/// } -/// -/// // It is also possible to capture the call stack with `call_stack name`. -/// // It is an opaque object which can only be used to call other functions: -/// call(call_stack cs, env e, value) { -/// value.call(cs, e, Default::default(), Default::default(), None, None) -/// } -/// } -/// # -/// # fn main() { -/// # let mut env = Environment::new("test"); -/// # let mut type_values = TypeValues::default(); -/// # my_starlark_module(&mut env, &mut type_values); -/// # assert_eq!(env.get("str").unwrap().get_type(), "function"); -/// # assert_eq!(env.get("my_fun").unwrap().get_type(), "function"); -/// # assert_eq!(env.get("sqr").unwrap().get_type(), "function"); -/// # } -/// ``` -/// -/// The module would declare a function `my_starlark_module` that can be called to add the -/// corresponding functions to an environment. -/// -/// ``` -/// # #[macro_use] extern crate starlark; -/// # use starlark::values::*; -/// # use starlark::environment::Environment; -/// # use starlark::environment::TypeValues; -/// # starlark_module!{ my_starlark_module => -/// # __str__(a, /) { Ok(Value::new(a.to_str().to_owned())) } -/// # my_fun(a, /, b, c = 1, *args, **kwargs) { Ok(Value::new(true)) } -/// # } -/// # fn main() { -/// # let mut env = Environment::new("test"); -/// # let mut type_values = TypeValues::default(); -/// # my_starlark_module(&mut env, &mut type_values); -/// # assert_eq!(env.get("str").unwrap().get_type(), "function"); -/// # assert_eq!(env.get("my_fun").unwrap().get_type(), "function"); -/// # } -/// ``` -/// -/// Additionally function might be declared for a type by prefixing them by `type.`, e.g the -/// definition of a `hello` function for the `string` type would look like: -/// -/// ```rust -/// # #[macro_use] extern crate starlark; -/// # use starlark::values::*; -/// # use starlark::environment::Environment; -/// # use starlark::environment::TypeValues; -/// starlark_module!{ my_starlark_module => -/// // The first argument is always self in that module but we use "this" because "self" is a -/// // a rust keyword. -/// string.hello(this) { -/// Ok(Value::new( -/// format!("Hello, {}", this.to_str()) -/// )) -/// } -/// } -/// # -/// # fn main() { -/// # let mut env = Environment::new("test"); -/// # let mut type_values = TypeValues::default(); -/// # my_starlark_module(&mut env, &mut type_values); -/// # assert_eq!(type_values.get_type_value(&Value::from(""), "hello").unwrap().get_type(), "function"); -/// # } -/// ``` -#[macro_export] -macro_rules! starlark_module { - ($name:ident => $($t:tt)*) => ( - starlark_fun!{ - $($t)* - } - - #[doc(hidden)] - pub fn $name(env: &mut $crate::environment::Environment, type_values: &mut $crate::environment::TypeValues) { - starlark_signatures!{ env, type_values, - $($t)* - } - let _ = (env, type_values); - } - ) -} - -/// Shortcut for returning an error from the code, message and label. -/// -/// # Parameters: -/// -/// * $code is a short code to uniquely identify the error. -/// * $message is the long explanation for the user of the error. -/// * $label is a a short description of the error to be put next to the code. -#[macro_export] -macro_rules! 
starlark_err { - ($code:expr, $message:expr, $label:expr) => { - return Err($crate::values::error::RuntimeError { - code: $code, - message: $message, - label: $label, - } - .into()); - }; -} - -/// A shortcut to assert the type of a value -/// -/// # Parameters: -/// -/// * $e the value to check type for. -/// * $fn the function name (&'static str). -/// * $ty the expected type (ident) -#[macro_export] -macro_rules! check_type { - ($e:ident, $fn:expr, $ty:ident) => { - if $e.get_type() != stringify!($ty) { - starlark_err!( - INCORRECT_PARAMETER_TYPE_ERROR_CODE, - format!( - concat!( - $fn, - "() expect a ", - stringify!($ty), - " as first parameter while got a value of type {}." - ), - $e.get_type() - ), - format!( - concat!("type {} while expected ", stringify!($ty)), - $e.get_type() - ) - ) - } - }; -} - -/// Convert 2 indices according to Starlark indices convertion for function like .index. -/// -/// # Parameters: -/// -/// * $this: the identifier of self object -/// * $start: the variable denoting the start index -/// * $end: the variable denoting the end index (optional) -#[macro_export] -macro_rules! convert_indices { - ($this:ident, $start:ident, $end:ident) => { - let len = $this.length()?; - let $end = if $end.get_type() == "NoneType" { - len - } else { - $end.to_int()? - }; - let $start = if $start.get_type() == "NoneType" { - 0 - } else { - $start.to_int()? - }; - let $end = if $end < 0 { $end + len } else { $end }; - let $start = if $start < 0 { $start + len } else { $start }; - let $end = if $end < 0 { - 0 - } else { - if $end > len { - len as usize - } else { - $end as usize - } - }; - let $start = if $start < 0 { - 0 - } else { - if $start > len { - len as usize - } else { - $start as usize - } - }; - }; - ($this:ident, $start:ident) => { - let len = $this.length()?; - let $start = if $start.get_type() == "NoneType" { - 0 - } else { - $start.to_int()? - }; - let $start = if $start < 0 { $start + len } else { $start }; - let $start = if $start < 0 { - 0 - } else { - if $start > len { - len as usize - } else { - $start as usize - } - }; - }; -} - -#[cfg(test)] -mod tests { - use crate::environment::Environment; - use crate::environment::TypeValues; - use crate::values::none::NoneType; - use crate::values::Value; - - #[test] - fn no_arg() { - starlark_module! { global => - nop() { - Ok(Value::new(NoneType::None)) - } - } - - let mut env = Environment::new("root"); - global(&mut env, &mut TypeValues::default()); - env.get("nop").unwrap(); - } -} diff --git a/starlark/src/stdlib/macros/param.rs b/starlark/src/stdlib/macros/param.rs deleted file mode 100644 index 32b0cfeb..00000000 --- a/starlark/src/stdlib/macros/param.rs +++ /dev/null @@ -1,167 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Parameter conversion utilities for `starlark_module!` macros. 
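`convert_indices!` above normalises Starlark-style indices: a missing value gets a default, a negative value has `len` added, and the result is clamped into `[0, len]`. The same arithmetic as a plain function (with `Option` standing in for the `NoneType` check) makes the macro easier to follow:

```rust
// Normalise an optional, possibly negative index against a length,
// mirroring the clamping done by convert_indices! above.
fn convert_index(index: Option<i64>, default: i64, len: i64) -> usize {
    let i = index.unwrap_or(default);
    let i = if i < 0 { i + len } else { i };
    i.clamp(0, len) as usize
}

fn main() {
    let len = 6;
    assert_eq!(convert_index(Some(-2), 0, len), 4);  // -2 counts from the end
    assert_eq!(convert_index(Some(100), 0, len), 6); // clamped to len
    assert_eq!(convert_index(None, len, len), 6);    // a missing `end` means len
}
```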
- -use crate::values::dict::Dictionary; -use crate::values::error::ValueError; -use crate::values::{TypedValue, Value}; -use linked_hash_map::LinkedHashMap; -use std::convert::TryInto; -use std::hash::Hash; - -/// Types implementing this type may appear in function parameter types -/// in `starlark_module!` macro function signatures. -pub trait TryParamConvertFromValue: Sized { - fn try_from(source: Value) -> Result; -} - -impl TryParamConvertFromValue for Vec { - fn try_from(source: Value) -> Result { - let mut r = Vec::new(); - for item in &source.iter()? { - r.push(T::try_from(item)?); - } - Ok(r) - } -} - -impl TryParamConvertFromValue - for LinkedHashMap -{ - fn try_from(source: Value) -> Result { - match source.downcast_ref::() { - Some(dict) => { - let mut r = LinkedHashMap::new(); - for (k, v) in dict.get_content() { - r.insert(K::try_from(k.get_value().clone())?, V::try_from(v.clone())?); - } - Ok(r) - } - None => Err(ValueError::IncorrectParameterType), - } - } -} - -impl TryParamConvertFromValue for Value { - fn try_from(source: Value) -> Result { - Ok(source) - } -} - -impl TryParamConvertFromValue for T { - fn try_from(source: Value) -> Result { - match source.downcast_ref::() { - Some(t) => Ok(t.clone()), - None => Err(ValueError::IncorrectParameterType), - } - } -} - -impl TryParamConvertFromValue for i32 { - fn try_from(source: Value) -> Result { - let source = i64::try_from(source)?; - source - .try_into() - .map_err(|_| ValueError::IncorrectParameterType) - } -} - -impl TryParamConvertFromValue for u32 { - fn try_from(source: Value) -> Result { - let source = i64::try_from(source)?; - source - .try_into() - .map_err(|_| ValueError::IncorrectParameterType) - } -} - -impl TryParamConvertFromValue for u64 { - fn try_from(source: Value) -> Result { - let source = i64::try_from(source)?; - source - .try_into() - .map_err(|_| ValueError::IncorrectParameterType) - } -} - -impl TryParamConvertFromValue for usize { - fn try_from(source: Value) -> Result { - let source = i64::try_from(source)?; - source - .try_into() - .map_err(|_| ValueError::IncorrectParameterType) - } -} - -/// Starlark `None` or another value. -pub enum EitherValueOrNone { - None, - NotNone(T), -} - -impl TryParamConvertFromValue for EitherValueOrNone { - fn try_from(source: Value) -> Result { - if source.get_type() == "NoneType" { - Ok(EitherValueOrNone::None) - } else { - Ok(EitherValueOrNone::NotNone(T::try_from(source)?)) - } - } -} - -#[cfg(test)] -mod test { - use crate::starlark_fun; - use crate::starlark_module; - use crate::starlark_parse_param_type; - use crate::starlark_signature; - use crate::starlark_signature_extraction; - use crate::starlark_signatures; - - use crate::eval::noload::eval; - use crate::stdlib::global_environment; - use crate::syntax::dialect::Dialect; - use crate::values::Value; - use codemap::CodeMap; - use std::sync::{Arc, Mutex}; - - starlark_module! 
{ global => - cc_binary(name: String, srcs: Vec = Vec::new()) { - // real implementation may write it to a global variable - Ok(Value::new(format!("{:?} {:?}", name, srcs))) - } - } - - #[test] - fn test_simple() { - let (mut env, mut type_values) = global_environment(); - global(&mut env, &mut type_values); - env.freeze(); - - let mut child = env.child("my"); - - let r = eval( - &Arc::new(Mutex::new(CodeMap::new())), - "test_simple.star", - "cc_binary(name='star', srcs=['a.cc', 'b.cc'])", - Dialect::Build, - &mut child, - &type_values, - ) - .unwrap(); - - assert_eq!(r#""star" ["a.cc", "b.cc"]"#, r.to_str()); - } -} diff --git a/starlark/src/stdlib/macros/signature.rs b/starlark/src/stdlib/macros/signature.rs deleted file mode 100644 index 7105d288..00000000 --- a/starlark/src/stdlib/macros/signature.rs +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#![doc(hidden)] - -//! Signature construction utilities used in macros. - -use crate::values::function::FunctionParameter; -use crate::values::function::FunctionSignature; -use crate::values::Value; - -/// Signature builder utility used in macros. Do not use directly. -#[derive(Default)] -pub struct SignatureBuilder { - params: Vec, - positional_count: Option, -} - -impl SignatureBuilder { - pub fn push_normal(&mut self, name: &str) { - self.params.push(FunctionParameter::Normal(name.into())); - } - - pub fn push_optional(&mut self, name: &str) { - self.params.push(FunctionParameter::Optional(name.into())); - } - - pub fn push_with_default_value>(&mut self, name: &str, default_value: V) { - self.params.push(FunctionParameter::WithDefaultValue( - name.into(), - default_value.into(), - )); - } - - pub fn push_kwargs(&mut self, name: &str) { - self.params.push(FunctionParameter::KWArgsDict(name.into())); - } - - pub fn push_args(&mut self, name: &str) { - self.params.push(FunctionParameter::ArgsArray(name.into())); - } - pub fn push_slash(&mut self) { - assert!(self.positional_count.is_none()); - self.positional_count = Some(self.params.len()); - } - - pub fn build(self) -> FunctionSignature { - FunctionSignature::new(self.params, self.positional_count.unwrap_or(0)) - } -} diff --git a/starlark/src/stdlib/mod.rs b/starlark/src/stdlib/mod.rs deleted file mode 100644 index 20011830..00000000 --- a/starlark/src/stdlib/mod.rs +++ /dev/null @@ -1,1217 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -//! A module with the standard function and constants that are by default in all dialect of Starlark -use codemap::CodeMap; -use codemap_diagnostic::{ColorConfig, Diagnostic, Emitter}; -use linked_hash_map::LinkedHashMap; -use std; -use std::cmp::Ordering; -use std::num::NonZeroI64; -use std::sync; - -use crate::environment::{Environment, TypeValues}; -use crate::eval::noload::eval; -use crate::linked_hash_set; -use crate::syntax::dialect::Dialect; -use crate::values::dict::Dictionary; -use crate::values::function::WrappedMethod; -use crate::values::none::NoneType; -use crate::values::range::Range; -use crate::values::*; - -// Errors -- CR = Critical Runtime -const CHR_NOT_UTF8_CODEPOINT_ERROR_CODE: &str = "CR00"; -const DICT_ITERABLE_NOT_PAIRS_ERROR_CODE: &str = "CR01"; -const INT_CONVERSION_FAILED_ERROR_CODE: &str = "CR03"; -const ORD_EXPECT_ONE_CHAR_ERROR_CODE: &str = "CR04"; -const EMPTY_ITERABLE_ERROR_CODE: &str = "CR05"; -const NUL_RANGE_STEP_ERROR_CODE: &str = "CR06"; -const USER_FAILURE_ERROR_CODE: &str = "CR99"; - -#[macro_use] -pub mod macros; -pub mod dict; -mod inspect; -pub mod list; -pub mod string; -pub mod structs; - -starlark_module! {global_functions => - /// fail: fail the execution - /// - /// Examples: - /// ```python - /// fail("this is an error") # Will fail with "this is an error" - /// ``` - fail(call_stack st, msg) { - starlark_err!( - USER_FAILURE_ERROR_CODE, - format!( - "fail(): {}{}", - msg.to_str(), - st.print_with_newline_before(), - ), - msg.to_str() - ) - } - - /// [any]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#any - /// ): returns true if any value in the iterable object have a truth value of true. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default("( - /// any([0, True]) == True - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// any([0, 1]) == True - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// any([0, 1, True]) == True - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// any([0, 0]) == False - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// any([0, False]) == False - /// # )").unwrap()); - /// ``` - any(x, /) { - for i in &x.iter()? { - if i.to_bool() { - return Ok(Value::new(true)); - } - } - Ok(Value::new(false)) - } - - /// [all]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#all - /// ): returns true if all values in the iterable object have a truth value of true. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default("( - /// all([1, True]) == True - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// all([1, 1]) == True - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// all([0, 1, True]) == False - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// all([0, 0]) == False - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// all([0, False]) == False - /// # )").unwrap()); - /// ``` - all(x, /) { - for i in &x.iter()? { - if !i.to_bool() { - return Ok(Value::new(false)); - } - } - Ok(Value::new(true)) - } - - /// [bool]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#bool - /// ): returns the truth value of any starlark value. 
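`fail()` above reports through the same `RuntimeError { code, message, label }` shape that `starlark_err!` constructs. A standalone mirror of that shape (my own struct for illustration, not the crate's type, and with the call-stack printing omitted):

```rust
// The error shape built by starlark_err! above: a short error code,
// a full user-facing message, and a short label for diagnostics.
#[derive(Debug, PartialEq)]
struct RuntimeError {
    code: &'static str,
    message: String,
    label: String,
}

fn fail(msg: &str) -> Result<(), RuntimeError> {
    Err(RuntimeError {
        code: "CR99", // USER_FAILURE_ERROR_CODE above
        message: format!("fail(): {}", msg),
        label: msg.to_owned(),
    })
}

fn main() {
    let err = fail("this is an error").unwrap_err();
    assert_eq!(err.code, "CR99");
    assert_eq!(err.message, "fail(): this is an error");
}
```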
- /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default("( - /// bool([]) == False - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// bool(True) == True - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// bool(False) == False - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// bool(None) == False - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// bool(bool) == True - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// bool(1) == True - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// bool(0) == False - /// # )").unwrap()); - /// ``` - bool(x = false, /) { - Ok(Value::new(x.to_bool())) - } - - /// [chr]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#bool - /// ): returns a string encoding a codepoint. - /// - /// `chr(i)` returns a returns a string that encodes the single Unicode code point whose value is - /// specified by the integer `i`. `chr` fails unless `0 ≤ i ≤ 0x10FFFF`. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default("( - /// chr(65) == 'A' - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// chr(1049) == 'Й' - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// chr(0x1F63F) == '😿' - /// # )").unwrap()); - /// ``` - chr(i, /) { - let cp = i.to_int()? as u32; - match std::char::from_u32(cp) { - Some(x) => Ok(Value::new(x.to_string())), - None => starlark_err!(CHR_NOT_UTF8_CODEPOINT_ERROR_CODE, - format!( - "chr() parameter value is 0x{:x} which is not a valid UTF-8 codepoint", - cp - ), - "Parameter to chr() is not a valid UTF-8 codepoint".to_owned() - ), - } - } - - - /// [dict]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#dict - /// ): creates a dictionary. - /// - /// `dict` creates a dictionary. It accepts up to one positional argument, which is interpreted - /// as an iterable of two-element sequences (pairs), each specifying a key/value pair in the - /// resulting dictionary. - /// - /// `dict` also accepts any number of keyword arguments, each of which specifies a key/value - /// pair in the resulting dictionary; each keyword is treated as a string. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default("( - /// dict() == {} - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// dict([(1, 2), (3, 4)]) == {1: 2, 3: 4} - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// dict([(1, 2), ['a', 'b']]) == {1: 2, 'a': 'b'} - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// # dict(one=1, two=2) == {'one': 1, 'two': 2} - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// dict([(1, 2)], x=3) == {1: 2, 'x': 3} - /// # )").unwrap()); - /// ``` - dict(?a, /, **kwargs) { - let map = Dictionary::new(); - if let Some(a) = a { - match a.get_type() { - "dict" => { - for k in &a.iter()? { - let v = a.at(k.clone())?; - map.borrow_mut().set_at(k, v)?; - } - }, - _ => { - for el in &a.iter()? 
{ - match el.iter() { - Ok(it) => { - let mut it = it.iter(); - let first = it.next(); - let second = it.next(); - if first.is_none() || second.is_none() || it.next().is_some() { - starlark_err!( - DICT_ITERABLE_NOT_PAIRS_ERROR_CODE, - format!( - "Found a non-pair element in the positional argument of dict(): {}", - el.to_repr(), - ), - "Non-pair element in first argument".to_owned() - ); - } - map.borrow_mut().set_at(first.unwrap(), second.unwrap())?; - } - Err(..) => - starlark_err!( - DICT_ITERABLE_NOT_PAIRS_ERROR_CODE, - format!( - "Found a non-pair element in the positional argument of dict(): {}", - el.to_repr(), - ), - "Non-pair element in first argument".to_owned() - ), - } - } - } - } - } - for (k, v) in kwargs { - map.borrow_mut().set_at(k.into(), v)?; - } - Ok(map.into()) - } - - /// [dir]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#dir - /// ): list attributes of a value. - /// - /// `dir(x)` returns a list of the names of the attributes (fields and methods) of its operand. - /// The attributes of a value `x` are the names `f` such that `x.f` is a valid expression. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(stringify!(( - /// "capitalize" in dir("abc") - /// # ))).unwrap()); - /// ``` - dir(env env, x, /) { - let mut result = env.list_type_value(&x); - if let Ok(v) = x.dir_attr() { - result.extend(v); - } - result.sort(); - Ok(Value::from(result)) - } - - - /// [enumerate]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#enumerate - /// ): return a list of (index, element) from an iterable. - /// - /// `enumerate(x)` returns a list of `(index, value)` pairs, each containing successive values of - /// the iterable sequence and the index of the value within the sequence. - /// - /// The optional second parameter, `start`, specifies an integer value to add to each index. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// enumerate(["zero", "one", "two"]) == [(0, "zero"), (1, "one"), (2, "two")] - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// enumerate(["one", "two"], 1) == [(1, "one"), (2, "two")] - /// # )"#).unwrap()); - /// ``` - enumerate(it, offset: i64 = 0, /) { - let v : Vec = - it - .iter()? - .iter() - .enumerate() - .map(|(k, v)| Value::from((Value::new(k as i64 + offset), v))) - .collect(); - Ok(Value::from(v)) - } - - /// [getattr]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#getattr - /// ): returns the value of an attribute - /// - /// `getattr(x, name)` returns the value of the attribute (field or method) of x named `name`. - /// It is a dynamic error if x has no such attribute. - /// - /// `getattr(x, "f")` is equivalent to `x.f`. 
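`dict()` above rejects any element of the positional iterable that is not exactly a pair. Detached from the interpreter types, that check is just pattern matching on length; a sketch:

```rust
use std::collections::HashMap;

// Build a map from an iterable of "pairs", erroring on anything that is not
// exactly two elements long, as dict() above does.
fn dict_from_pairs(pairs: &[Vec<i64>]) -> Result<HashMap<i64, i64>, String> {
    let mut map = HashMap::new();
    for el in pairs {
        match el.as_slice() {
            [k, v] => {
                map.insert(*k, *v);
            }
            _ => return Err(format!("non-pair element in dict(): {:?}", el)),
        }
    }
    Ok(map)
}

fn main() {
    let map = dict_from_pairs(&[vec![1, 2], vec![3, 4]]).unwrap();
    assert_eq!(map[&1], 2);
    assert!(dict_from_pairs(&[vec![1, 2, 3]]).is_err());
}
```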
- /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#" - /// getattr("banana", "split")("a") == ["b", "n", "n", ""] # equivalent to "banana".split("a") - /// # "#).unwrap()); - /// ``` - getattr(env env, a, attr: String, default = NoneType::None, /) { - match a.get_attr(&attr) { - Ok(v) => Ok(v), - x => match env.get_type_value(&a, &attr) { - Some(v) => if v.get_type() == "function" { - // Insert self so the method see the object it is acting on - Ok(WrappedMethod::new(a.clone(), v)) - } else { - Ok(v) - } - None => if default.get_type() == "NoneType" { x } else { Ok(default) } - } - } - } - - /// [hasattr]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#hasattr - /// ): test if an object has an attribute - /// - /// `hasattr(x, name)` reports whether x has an attribute (field or method) named `name`. - hasattr(env env, a, attr: String, /) { - Ok(Value::new( - match env.get_type_value(&a, &attr) { - Some(..) => true, - None => match a.has_attr(&attr) { - Ok(v) => v, - _ => false, - } - } - )) - } - - /// [hash]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#hash - /// ): returns the hash number of a value. - /// - /// `hash(x)`` returns an integer hash value for x such that `x == y` implies - /// `hash(x) == hash(y)``. - /// - /// `hash` fails if x, or any value upon which its hash depends, is unhashable. - hash(a, /) { - Ok(Value::new(a.get_hash()? as i64)) - } - - /// [int]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#int - /// ): convert a value to integer. - /// - /// `int(x[, base])` interprets its argument as an integer. - /// - /// If x is an `int`, the result is x. - /// If x is a `float`, the result is the integer value nearest to x, - /// truncating towards zero; it is an error if x is not finite (`NaN`, - /// `+Inf`, `-Inf`). - /// If x is a `bool`, the result is 0 for `False` or 1 for `True`. - /// - /// If x is a string, it is interpreted like a string literal; - /// an optional base prefix (`0`, `0b`, `0B`, `0x`, `0X`) determines which base to use. - /// The string may specify an arbitrarily large integer, - /// whereas true integer literals are restricted to 64 bits. - /// If a non-zero `base` argument is provided, the string is interpreted - /// in that base and no base prefix is permitted; the base argument may - /// specified by name. - /// - /// `int()` with no arguments returns 0. 
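The string branch of `int()` described above strips an optional sign, infers the base from a `0b`/`0o`/`0x` prefix when no explicit base is given, and hands the digits to radix parsing. A simplified standalone sketch (the 2..=36 base check and the prefix handling for explicit bases are left out):

```rust
// Parse a Starlark-style integer literal: optional sign, then an optional
// 0b/0o/0x prefix that selects the base when `base` is 0.
fn parse_int(s: &str, base: u32) -> Result<i64, String> {
    let (sign, rest) = match s.strip_prefix('-') {
        Some(r) => (-1, r),
        None => (1, s.strip_prefix('+').unwrap_or(s)),
    };
    let (base, digits) = if base == 0 {
        match rest.get(..2) {
            Some("0b") | Some("0B") => (2, &rest[2..]),
            Some("0o") | Some("0O") => (8, &rest[2..]),
            Some("0x") | Some("0X") => (16, &rest[2..]),
            _ => (10, rest),
        }
    } else {
        (base, rest)
    };
    i64::from_str_radix(digits, base)
        .map(|n| sign * n)
        .map_err(|e| format!("{} is not a valid number in base {}: {}", s, base, e))
}

fn main() {
    assert_eq!(parse_int("0x10", 0), Ok(16));
    assert_eq!(parse_int("-0b101", 0), Ok(-5));
    assert_eq!(parse_int("ff", 16), Ok(255));
}
```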
- int(a, /, ?base) { - if a.get_type() == "string" { - let s = a.to_str(); - let base = match base { - Some(base) => base.to_int()?, - None => 0, - }; - if base == 1 || base < 0 || base > 36 { - starlark_err!( - INT_CONVERSION_FAILED_ERROR_CODE, - format!( - "{} is not a valid base, int() base must be >= 2 and <= 36", - base, - ), - format!("Invalid base {}", base) - ) - } - let (sign, s) = { - match s.chars().next() { - Some('+') => (1, s.get(1..).unwrap().to_string()), - Some('-') => (-1, s.get(1..).unwrap().to_string()), - _ => (1, s) - } - }; - let base = if base == 0 { - match s.clone().get(0..2) { - Some("0b") | Some("0B") => 2, - Some("0o") | Some("0O") => 8, - Some("0x") | Some("0X") => 16, - _ => 10 - } - } else { base as u32 }; - let s = match base { - 16 => if s.starts_with("0x") || s.starts_with("0X") { - s.get(2..).unwrap().to_string() - } else { s }, - 8 => if s.starts_with("0o") || s.starts_with("0O") { - s.get(2..).unwrap().to_string() - } else { - s - }, - 2 => if s.starts_with("0b") || s.starts_with("0B") { - s.get(2..).unwrap().to_string() - } else { s }, - _ => s - }; - match i64::from_str_radix(&s, base) { - Ok(i) => Ok(Value::new(sign * i)), - Err(x) => starlark_err!( - INT_CONVERSION_FAILED_ERROR_CODE, - format!( - "{} is not a valid number in base {}: {}", - a.to_repr(), - base, - x, - ), - format!("Not a base {} integer", base) - ), - } - } else { - match base { - Some(base) => { - starlark_err!( - INT_CONVERSION_FAILED_ERROR_CODE, - "int() cannot convert non-string with explicit base".to_owned(), - format!("Explict base '{}' provided with non-string", base.to_repr()) - ) - } - None => Ok(Value::new(a.to_int()?)), - } - } - } - - /// [len]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#len - /// ): get the length of a sequence - /// - /// `len(x)` returns the number of elements in its argument. - /// - /// It is a dynamic error if its argument is not a sequence. - len(a, /) { - Ok(Value::new(a.length()?)) - } - - /// [list]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#list - /// ): construct a list. - /// - /// `list(x)` returns a new list containing the elements of the - /// iterable sequence x. - /// - /// With no argument, `list()` returns a new empty list. - list(?a, /) { - if let Some(a) = a { - Ok(Value::from(a.to_vec()?)) - } else { - Ok(Value::from(Vec::::new())) - } - } - - /// [max]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#max - /// ): returns the maximum of a sequence. - /// - /// `max(x)` returns the greatest element in the iterable sequence x. - /// - /// It is an error if any element does not support ordered comparison, - /// or if the sequence is empty. - /// - /// The optional named parameter `key` specifies a function to be applied - /// to each element prior to comparison. 
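When `key` is given, the `max`/`min` implementations below call it once per element and compare the cached key values instead of re-invoking the callable in every comparison. A stripped-down sketch of that pattern over a plain slice (the names here are illustrative, not the interpreter's API):

```rust
/// Return the element whose key is greatest, calling `key` once per element.
fn max_by_key_cached<T: Clone, K: PartialOrd>(items: &[T], key: impl Fn(&T) -> K) -> Option<T> {
    let mut it = items.iter();
    let first = it.next()?;
    let mut best = first.clone();
    let mut best_key = key(first); // cache the key of the current maximum
    for item in it {
        let k = key(item);
        if k > best_key {
            best = item.clone();
            best_key = k;
        }
    }
    Some(best)
}

fn main() {
    let words = ["two".to_string(), "three".to_string(), "four".to_string()];
    // Longest word, mirroring `max("two", "three", "four", key=len)`.
    assert_eq!(max_by_key_cached(&words, |s| s.len()), Some("three".to_string()));

    // An empty input yields None, mirroring the "empty iterable" error above.
    let empty: Vec<String> = Vec::new();
    assert_eq!(max_by_key_cached(&empty, |s: &String| s.len()), None);
}
```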
- /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// max([3, 1, 4, 1, 5, 9]) == 9 - /// # and - /// max("two", "three", "four") == "two" # the lexicographically greatest - /// # and - /// max("two", "three", "four", key=len) == "three" # the longest - /// # )"#).unwrap()); - /// ``` - max(call_stack cs, env e, *args, ?key) { - let args = if args.len() == 1 { - args.swap_remove(0) - } else { - Value::from(args) - }; - let it = args.iter()?; - let mut it = it.iter(); - let mut max = match it.next() { - Some(x) => x, - None => starlark_err!( - EMPTY_ITERABLE_ERROR_CODE, - "Argument is an empty iterable, max() expect a non empty iterable".to_owned(), - "Empty".to_owned() - ) - }; - match key { - None => { - for i in it { - if max.compare(&i)? == Ordering::Less { - max = i; - } - } - } - Some(key) => { - let mut cached = key.call(cs, e, vec![max.clone()], LinkedHashMap::new(), None, None)?; - for i in it { - let keyi = key.call(cs, e, vec![i.clone()], LinkedHashMap::new(), None, None)?; - if cached.compare(&keyi)? == Ordering::Less { - max = i; - cached = keyi; - } - } - } - }; - Ok(max) - } - - /// [min]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#min - /// ): returns the minimum of a sequence. - /// - /// `min(x)` returns the least element in the iterable sequence x. - /// - /// It is an error if any element does not support ordered comparison, - /// or if the sequence is empty. - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// min([3, 1, 4, 1, 5, 9]) == 1 - /// # and - /// min("two", "three", "four") == "four" # the lexicographically least - /// # and - /// min("two", "three", "four", key=len) == "two" # the shortest - /// # )"#).unwrap()); - /// ``` - min(call_stack cs, env e, *args, ?key) { - let args = if args.len() == 1 { - args.swap_remove(0) - } else { - Value::from(args) - }; - let it = args.iter()?; - let mut it = it.iter(); - let mut min = match it.next() { - Some(x) => x, - None => starlark_err!( - EMPTY_ITERABLE_ERROR_CODE, - "Argument is an empty iterable, min() expect a non empty iterable".to_owned(), - "Empty".to_owned() - ) - }; - match key { - None => { - for i in it { - if min.compare(&i)? == Ordering::Greater { - min = i; - } - } - } - Some(key) => { - let mut cached = key.call(cs, e, vec![min.clone()], LinkedHashMap::new(), None, None)?; - for i in it { - let keyi = key.call(cs, e, vec![i.clone()], LinkedHashMap::new(), None, None)?; - if cached.compare(&keyi)? == Ordering::Greater { - min = i; - cached = keyi; - } - } - } - }; - Ok(min) - } - - /// [ord]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.mdord - /// ): returns the codepoint of a character - /// - /// `ord(s)` returns the integer value of the sole Unicode code point encoded by the string `s`. - /// - /// If `s` does not encode exactly one Unicode code point, `ord` fails. - /// Each invalid code within the string is treated as if it encodes the - /// Unicode replacement character, U+FFFD. - /// - /// Example: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// ord("A") == 65 - /// # and - /// ord("Й") == 1049 - /// # and - /// ord("😿") == 0x1F63F - /// # and - /// ord("Й") == 1049 - /// # )"#).unwrap()); - /// ``` - ord(a, /) { - if a.get_type() != "string" || a.length()? 
!= 1 { - starlark_err!( - ORD_EXPECT_ONE_CHAR_ERROR_CODE, - format!( - "ord(): {} is not a one character string", - a.to_repr(), - ), - "Not a one character string".to_owned() - ) - } else { - Ok(Value::new(i64::from(u32::from(a.to_string().chars().next().unwrap())))) - } - } - - /// [range]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#range - /// ): return a range of integers - /// - /// `range` returns a tuple of integers defined by the specified interval and stride. - /// - /// ```python - /// range(stop) # equivalent to range(0, stop) - /// range(start, stop) # equivalent to range(start, stop, 1) - /// range(start, stop, step) - /// ``` - /// - /// `range` requires between one and three integer arguments. - /// With one argument, `range(stop)` returns the ascending sequence of non-negative integers - /// less than `stop`. - /// With two arguments, `range(start, stop)` returns only integers not less than `start`. - /// - /// With three arguments, `range(start, stop, step)` returns integers - /// formed by successively adding `step` to `start` until the value meets or passes `stop`. - /// A call to `range` fails if the value of `step` is zero. - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// list(range(10)) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] - /// # and - /// list(range(3, 10)) == [3, 4, 5, 6, 7, 8, 9] - /// # and - /// list(range(3, 10, 2)) == [3, 5, 7, 9] - /// # and - /// list(range(10, 3, -2)) == [10, 8, 6, 4] - /// # )"#).unwrap()); - /// ``` - range(a1: i64, ?a2: Option, ?a3: Option, /) { - let start = match a2 { - None => 0, - Some(_) => a1, - }; - let stop = match a2 { - None => a1.to_int()?, - Some(a2) => a2, - }; - let step = match a3 { - None => 1, - Some(a3) => a3, - }; - let step = match NonZeroI64::new(step) { - Some(step) => step, - None => { - starlark_err!( - NUL_RANGE_STEP_ERROR_CODE, - "Third argument of range (step) cannot be null".to_owned(), - "Null range step".to_owned() - ) - } - }; - Ok(Value::new(Range::new(start, stop, step))) - } - - /// [repr]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#repr - /// ): formats its argument as a string. - /// - /// All strings in the result are double-quoted. - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// repr(1) == '1' - /// # and - /// repr("x") == "\"x\"" - /// # and - /// repr([1, "x"]) == "[1, \"x\"]" - /// # )"#).unwrap()); - /// ``` - repr(a, /) { - Ok(Value::new(a.to_repr())) - } - - /// [reversed]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#reversed - /// ): reverse a sequence - /// - /// `reversed(x)` returns a new list containing the elements of the iterable sequence x in - /// reverse order. 
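For the `range` builtin defined above: it returns a lazy `Range` value rather than a materialised list, conceptually yielding `start`, `start + step`, … until the value meets or passes `stop`, for either sign of `step`. A small illustrative generator, not the interpreter's `Range` type:

```rust
/// Produce the integers a Starlark `range(start, stop, step)` would yield.
/// `step` must be non-zero, matching the error the builtin raises.
fn range_values(start: i64, stop: i64, step: i64) -> Vec<i64> {
    assert!(step != 0, "range step cannot be zero");
    let mut out = Vec::new();
    let mut cur = start;
    // Ascending ranges stop once `cur` meets or passes `stop`;
    // descending ranges stop once `cur` drops to or below `stop`.
    while (step > 0 && cur < stop) || (step < 0 && cur > stop) {
        out.push(cur);
        cur += step;
    }
    out
}

fn main() {
    assert_eq!(range_values(0, 10, 3), vec![0, 3, 6, 9]);
    assert_eq!(range_values(10, 3, -2), vec![10, 8, 6, 4]);
    assert_eq!(range_values(3, 3, 1), Vec::<i64>::new());
}
```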
- /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// reversed(range(5)) == [4, 3, 2, 1, 0] - /// # and - /// reversed("stressed".split_codepoints()) == ["d", "e", "s", "s", "e", "r", "t", "s"] - /// # and - /// reversed({"one": 1, "two": 2}.keys()) == ["two", "one"] - /// # )"#).unwrap()); - /// ``` - reversed(a, /) { - let v: Vec = a.to_vec()?; - let v: Vec = v.into_iter().rev().collect(); - Ok(Value::from(v)) - } - - /// [sorted]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#sorted - /// ): sort a sequence - /// - /// `sorted(x)` returns a new list containing the elements of the iterable sequence x, - /// in sorted order. The sort algorithm is stable. - /// - /// The optional named parameter `reverse`, if true, causes `sorted` to - /// return results in reverse sorted order. - /// - /// The optional named parameter `key` specifies a function of one - /// argument to apply to obtain the value's sort key. - /// The default behavior is the identity function. - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// sorted([3, 1, 4, 1, 5, 9]) == [1, 1, 3, 4, 5, 9] - /// # and - /// sorted([3, 1, 4, 1, 5, 9], reverse=True) == [9, 5, 4, 3, 1, 1] - /// # and - /// - /// sorted(["two", "three", "four"], key=len) == ["two", "four", "three"] # shortest to longest - /// # and - /// sorted(["two", "three", "four"], key=len, reverse=True) == ["three", "four", "two"] # longest to shortest - /// # )"#).unwrap()); - /// ``` - sorted(call_stack cs, env e, x, /, ?key, reverse = false) { - let it = x.iter()?; - let x = it.iter(); - let mut it = match key { - None => { - x.map(|x| (x.clone(), x)).collect() - } - Some(key) => { - let mut v = Vec::new(); - for el in x { - v.push(( - el.clone(), - key.call(cs, e, vec![el], LinkedHashMap::new(), None, None)? - )); - } - v - } - }; - - let mut compare_ok = Ok(()); - - let reverse = reverse.to_bool(); - it.sort_by( - |x : &(Value, Value), y : &(Value, Value)| { - let ord_or_err = if reverse { - x.1.compare(&y.1).map(Ordering::reverse) - } else { - x.1.compare(&y.1) - }; - match ord_or_err { - Ok(r) => r, - Err(e) => { - compare_ok = Err(e); - Ordering::Equal // does not matter - } - } - } - ); - - compare_ok?; - - let result : Vec = it.into_iter().map(|x| x.0).collect(); - Ok(Value::from(result)) - } - - /// [str]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#str - /// ): formats its argument as a string. - /// - /// If x is a string, the result is x (without quotation). - /// All other strings, such as elements of a list of strings, are double-quoted. - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// str(1) == '1' - /// # and - /// str("x") == 'x' - /// # and - /// str([1, "x"]) == "[1, \"x\"]" - /// # )"#).unwrap()); - /// ``` - _str(a, /) { - Ok(Value::new(a.to_str())) - } - - /// [tuple]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#tuple - /// ): returns a tuple containing the elements of the iterable x. - /// - /// With no arguments, `tuple()` returns the empty tuple. 
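One detail of the `sorted` implementation above: `sort_by` expects an infallible comparator, but comparing two Starlark values can fail, so the comparison error is stashed in a local `Result`, `Ordering::Equal` is returned to let the sort finish, and the error is propagated afterwards. A reduced sketch of that pattern, where `fallible_cmp` is only a stand-in for `Value::compare`:

```rust
use std::cmp::Ordering;

/// Pretend comparison that can fail, standing in for `Value::compare`.
fn fallible_cmp(a: &i64, b: &i64) -> Result<Ordering, String> {
    Ok(a.cmp(b)) // a real implementation could return Err for incomparable values
}

/// Sort with a fallible comparator: record the first error, finish the sort,
/// then report the error to the caller.
fn try_sort(mut v: Vec<i64>) -> Result<Vec<i64>, String> {
    let mut first_err: Result<(), String> = Ok(());
    v.sort_by(|a, b| match fallible_cmp(a, b) {
        Ok(ord) => ord,
        Err(e) => {
            if first_err.is_ok() {
                first_err = Err(e);
            }
            Ordering::Equal // the ordering no longer matters once we have an error
        }
    });
    first_err?; // propagate any comparison failure
    Ok(v)
}

fn main() {
    assert_eq!(try_sort(vec![3, 1, 2]).unwrap(), vec![1, 2, 3]);
}
```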
- tuple(?a, /) { - if let Some(a) = a { - Ok(Value::new(tuple::Tuple::new(a.to_vec()?))) - } else { - Ok(Value::new(tuple::Tuple::new(Vec::new()))) - } - } - - /// [type]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#type - /// ): returns a string describing the type of its operand. - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// type(None) == "NoneType" - /// # and - /// type(0) == "int" - /// # )"#).unwrap()); - /// ``` - _type(a, /) { - Ok(Value::new(a.get_type().to_owned())) - } - - /// [zip]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#zip - /// ): zip several iterables together - /// - /// `zip()` returns a new list of n-tuples formed from corresponding - /// elements of each of the n iterable sequences provided as arguments to - /// `zip`. That is, the first tuple contains the first element of each of - /// the sequences, the second element contains the second element of each - /// of the sequences, and so on. The result list is only as long as the - /// shortest of the input sequences. - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// zip() == [] - /// # and - /// zip(range(5)) == [(0,), (1,), (2,), (3,), (4,)] - /// # and - /// zip(range(5), "abc".split_codepoints()) == [(0, "a"), (1, "b"), (2, "c")] - /// # )"#).unwrap()); - /// ``` - zip(*args) { - let mut v = Vec::new(); - - for arg in args { - let first = v.is_empty(); - let mut idx = 0; - for e in &arg.iter()? { - if first { - v.push(Value::from((e.clone(),))); - idx += 1; - } else if idx < v.len() { - v[idx] = v[idx].add(Value::from((e.clone(),)))?; - idx += 1; - } - } - v.truncate(idx); - } - Ok(Value::from(v)) - } -} - -/// Return the default global environment, it is not yet frozen so that a caller can refine it. -/// -/// For example `stdlib::global_environment().freeze().child("test")` create a child environment -/// of this global environment that have been frozen. -pub fn global_environment() -> (Environment, TypeValues) { - let mut env = Environment::new("global"); - let mut type_values = TypeValues::default(); - env.set("None", Value::new(NoneType::None)).unwrap(); - env.set("True", Value::new(true)).unwrap(); - env.set("False", Value::new(false)).unwrap(); - global_functions(&mut env, &mut type_values); - string::global(&mut env, &mut type_values); - list::global(&mut env, &mut type_values); - dict::global(&mut env, &mut type_values); - (env, type_values) -} - -/// Default global environment with added non-standard `struct` and `set` extensions. -pub fn global_environment_with_extensions() -> (Environment, TypeValues) { - let (mut env, mut type_values) = global_environment(); - structs::global(&mut env, &mut type_values); - linked_hash_set::global(&mut env, &mut type_values); - (env, type_values) -} - -/// Default global environment with functions usable in test and REPL. -pub fn global_environment_for_repl_and_tests() -> (Environment, TypeValues) { - let (mut env, mut type_values) = global_environment_with_extensions(); - // TODO: do not add to global context - inspect::global(&mut env, &mut type_values); - (env, type_values) -} - -/// Execute a starlark snippet with the default environment for test and return the truth value -/// of the last statement. Used for tests and documentation tests. 
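As the `global_environment` documentation above notes, the returned environment is still mutable so an embedder can refine it before freezing it and working in a child scope. A hedged usage sketch, assuming the crate's public paths match its doc examples (`starlark::stdlib`, `starlark::values::Value`); `GREETING` and `"embedder"` are placeholders chosen for illustration:

```rust
use starlark::stdlib::global_environment_with_extensions;
use starlark::values::Value;

fn main() {
    let (mut global, _type_values) = global_environment_with_extensions();
    // The global environment is still mutable, so the embedder can refine it...
    global.set("GREETING", Value::from("hello")).unwrap();
    // ...before freezing it and evaluating user code in a child scope,
    // following the `global_environment().freeze().child(...)` pattern above.
    let _scope = global.freeze().child("embedder");
}
```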
-#[doc(hidden)] -pub fn starlark_default(snippet: &str) -> Result { - let map = sync::Arc::new(sync::Mutex::new(CodeMap::new())); - let (env, type_values) = global_environment_for_repl_and_tests(); - let mut test_env = env.freeze().child("test"); - match eval( - &map, - "", - snippet, - Dialect::Bzl, - &mut test_env, - &type_values, - ) { - Ok(v) => Ok(v.to_bool()), - Err(d) => { - Emitter::stderr(ColorConfig::Always, Some(&map.lock().unwrap())).emit(&[d.clone()]); - Err(d) - } - } -} - -#[cfg(test)] -pub mod tests { - use super::global_environment; - use super::Dialect; - use crate::eval::noload::eval; - use codemap::CodeMap; - use codemap_diagnostic::Diagnostic; - use std::sync; - - pub fn starlark_default_fail(snippet: &str) -> Result { - let map = sync::Arc::new(sync::Mutex::new(CodeMap::new())); - let (env, type_values) = global_environment(); - let mut env = env.freeze().child("test"); - match eval( - &map, - "", - snippet, - Dialect::Bzl, - &mut env, - &type_values, - ) { - Ok(v) => Ok(v.to_bool()), - Err(d) => Err(d), - } - } - - #[test] - fn test_const() { - starlark_ok!("(not None)"); - starlark_ok!("(not False)"); - starlark_ok!("True"); - } - - #[test] - fn test_any() { - starlark_ok!("any([0, True])"); - starlark_ok!("any([0, 1])"); - starlark_ok!("any([0, 1, True])"); - starlark_ok!("(not any([0, 0]))"); - starlark_ok!("(not any([0, False]))"); - } - - #[test] - fn test_all() { - starlark_ok!("all([True, True])"); - starlark_ok!("all([True, 1])"); - starlark_ok!("all([True, 1, True])"); - starlark_ok!("(not all([True, 0]))"); - starlark_ok!("(not all([1, False]))"); - } - - #[test] - fn test_bool() { - // bool - starlark_ok!("bool(True)"); - starlark_ok!("(not bool(False))"); - // NoneType - starlark_ok!("(not bool(None))"); - // int - starlark_ok!("bool(1)"); - starlark_ok!("(not bool(0))"); - // dict - starlark_ok!("(not bool({}))"); - starlark_ok!("bool({1:2})"); - // tuple - starlark_ok!("(not bool(()))"); - starlark_ok!("bool((1,))"); - // list - starlark_ok!("(not bool([]))"); - starlark_ok!("bool([1])"); - // string - starlark_ok!("(not bool(''))"); - starlark_ok!("bool('1')"); - // function - starlark_ok!("bool(bool)"); - } - - #[test] - fn test_chr() { - starlark_ok!("(chr(65) == 'A')"); - starlark_ok!("(chr(1049) == 'Й')"); - starlark_ok!("(chr(0x1F63F) == '😿')"); - starlark_fail!("chr(0x110000)", super::CHR_NOT_UTF8_CODEPOINT_ERROR_CODE); - } - - #[test] - fn test_ord() { - starlark_ok!("(65 == ord('A'))"); - starlark_ok!("(1049 == ord('Й'))"); - starlark_ok!("(0x1F63F == ord('😿'))"); - } - - #[test] - fn test_dict() { - starlark_ok!("(dict() == {})"); - starlark_ok!("(dict([(1, 2), (3, 4)]) == {1: 2, 3: 4})"); - starlark_ok!("(dict([(1, 2), ['a', 'b']]) == {1: 2, 'a': 'b'})"); - starlark_ok!("(dict(one=1, two=2) == {'one': 1, 'two': 2})"); - starlark_ok!("(dict([(1, 2)], x=3) == {1: 2, 'x': 3})"); - } - - #[test] - fn test_enumerate() { - starlark_ok!( - "(enumerate(['zero', 'one', 'two']) == [(0, 'zero'), (1, 'one'), (2, 'two')])" - ); - starlark_ok!("(enumerate(['one', 'two'], 1) == [(1, 'one'), (2, 'two')])"); - } - - #[test] - fn test_hash() { - starlark_ok!("(hash(1) == 1)"); - starlark_ok!("(hash(2) == 2)"); - } - - #[test] - fn test_int() { - starlark_ok!("(int(1) == 1)"); - starlark_ok!("(int(False) == 0)"); - starlark_ok!("(int(True) == 1)"); - starlark_ok!("(int('1') == 1)"); - starlark_ok!("(int('16') == 16)"); - starlark_ok!("(int('16', 10) == 16)"); - starlark_ok!("(int('16', 8) == 14)"); - starlark_ok!("(int('16', 16) == 22)"); - } - - #[test] - fn 
test_len() { - starlark_ok!("(len(()) == 0)"); - starlark_ok!("(len({}) == 0)"); - starlark_ok!("(len([]) == 0)"); - starlark_ok!("(len([1]) == 1)"); - starlark_ok!("(len([1,2]) == 2)"); - starlark_ok!("(len({'16': 10}) == 1)"); - } - - #[test] - fn test_list() { - starlark_ok!("(list() == [])"); - starlark_ok!("(list((1,2,3)) == [1, 2, 3])"); - } - - #[test] - fn test_repr() { - starlark_ok!("(repr(1) == '1')"); - starlark_ok!("(repr('x') == '\"x\"')"); - starlark_ok!("(repr([1, 'x']) == '[1, \"x\"]')"); - } - - #[test] - fn test_str() { - starlark_ok!("(str(1) == '1')"); - starlark_ok!("(str('x') == 'x')"); - starlark_ok!("(str([1, 'x']) == '[1, \"x\"]')"); - } - - #[test] - fn test_tuple() { - starlark_ok!("(tuple() == ())"); - starlark_ok!("(tuple([1,2,3]) == (1, 2, 3))"); - } - - #[test] - fn test_type() { - starlark_ok!("(type(()) == 'tuple')"); - starlark_ok!("(type(1) == 'int')"); - starlark_ok!("(type('string') == 'string')"); - starlark_ok!("(type(None) == 'NoneType')"); - } - - #[test] - fn test_min() { - starlark_ok!("(min([3, 1, 4, 1, 5, 9]) == 1)"); - starlark_ok!("(min('two', 'three', 'four') == 'four')"); - starlark_ok!("(min('two', 'three', 'four', key=len) == 'two')"); - } - - #[test] - fn test_max() { - starlark_ok!("(max([3, 1, 4, 1, 5, 9]) == 9)"); - starlark_ok!("(max('two', 'three', 'four') == 'two')"); - starlark_ok!("(max('two', 'three', 'four', key=len) == 'three')"); - } - - #[test] - fn test_reversed() { - starlark_ok!("(reversed(['a', 'b', 'c']) == ['c', 'b', 'a'])"); - starlark_ok!("(reversed(range(5)) == [4, 3, 2, 1, 0])"); - starlark_ok!("(reversed({'one': 1, 'two': 2}.keys()) == ['two', 'one'])"); - } - - #[test] - fn test_range() { - starlark_ok!("(list(range(10)) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9])"); - starlark_ok!("(list(range(3, 10)) == [3, 4, 5, 6, 7, 8, 9])"); - starlark_ok!("(list(range(3, 10, 2)) == [3, 5, 7, 9])"); - starlark_ok!("(list(range(10, 3, -2)) == [10, 8, 6, 4])"); - } - - #[test] - fn test_sorted() { - starlark_ok!("(sorted([3, 1, 4, 1, 5, 9]) == [1, 1, 3, 4, 5, 9])"); - starlark_ok!("(sorted([3, 1, 4, 1, 5, 9], reverse=True) == [9, 5, 4, 3, 1, 1])"); - - starlark_ok!("(sorted(['two', 'three', 'four'], key=len) == ['two', 'four', 'three'])"); - starlark_ok!( - "(sorted(['two', 'three', 'four'], key=len, reverse=True) == ['three', 'four', 'two'])" - ); - } - - #[test] - fn test_zip() { - starlark_ok!("(zip() == [])"); - starlark_ok!("(zip(range(5)) == [(0,), (1,), (2,), (3,), (4,)])"); - starlark_ok!("(zip(range(5), 'abc'.split_codepoints()) == [(0, 'a'), (1, 'b'), (2, 'c')])"); - } -} diff --git a/starlark/src/stdlib/string.rs b/starlark/src/stdlib/string.rs deleted file mode 100644 index ef0cbb66..00000000 --- a/starlark/src/stdlib/string.rs +++ /dev/null @@ -1,1659 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Methods for the `string` type. 
- -use crate::values::error::*; -use crate::values::none::NoneType; -use crate::values::*; -use std::convert::TryFrom; -use std::str::FromStr; - -// Errors -- UF = User Failure -- Failure that should be expected by the user (e.g. from a fail()). -pub const SUBSTRING_INDEX_FAILED_ERROR_CODE: &str = "UF00"; -pub const FORMAT_STRING_UNMATCHED_BRACKET_ERROR_CODE: &str = "UF01"; -pub const FORMAT_STRING_ORDER_INDEX_MIX_ERROR_CODE: &str = "UF02"; -pub const FORMAT_STRING_INVALID_SPECIFIER_ERROR_CODE: &str = "UF03"; -pub const FORMAT_STRING_INVALID_CHARACTER_ERROR_CODE: &str = "UF04"; - -macro_rules! ok { - ($e:expr) => { - return Ok(Value::from($e)); - }; -} - -macro_rules! check_string { - ($e:ident, $fn:ident) => { - check_type!($e, concat!("string.", stringify!($fn)), string) - }; -} - -fn format_capture>( - capture: &str, - it: &mut T, - captured_by_index: &mut bool, - captured_by_order: &mut bool, - args: &Value, - kwargs: &Value, -) -> Result { - let (n, conv) = { - if let Some(x) = capture.find('!') { - (capture.get(1..x).unwrap(), capture.get(x + 1..).unwrap()) - } else { - (capture.get(1..).unwrap(), "s") - } - }; - let conv_s = |x: Value| x.to_str(); - let conv_r = |x: Value| x.to_repr(); - let conv: &dyn Fn(Value) -> String = match conv { - "s" => &conv_s, - "r" => &conv_r, - c => starlark_err!( - FORMAT_STRING_INVALID_SPECIFIER_ERROR_CODE, - format!( - concat!( - "'{}' is not a valid format string specifier, only ", - "'s' and 'r' are valid specifiers", - ), - c - ), - "Invalid format string specifier".to_owned() - ), - }; - if n.is_empty() { - if *captured_by_index { - starlark_err!( - FORMAT_STRING_ORDER_INDEX_MIX_ERROR_CODE, - concat!( - "Cannot mix manual field specification and ", - "automatic field numbering in format string" - ) - .to_owned(), - "Mixed manual and automatic field numbering".to_owned() - ) - } else { - *captured_by_order = true; - if let Some(x) = it.next() { - return Ok(conv(x)); - } else { - starlark_err!( - OUT_OF_BOUND_ERROR_CODE, - "Not enough parameters in format string".to_owned(), - "Not enough parameters".to_owned() - ) - } - } - } else if n.chars().all(|c| c.is_ascii_digit()) { - if *captured_by_order { - starlark_err!( - FORMAT_STRING_ORDER_INDEX_MIX_ERROR_CODE, - concat!( - "Cannot mix manual field specification and ", - "automatic field numbering in format string" - ) - .to_owned(), - "Mixed manual and automatic field numbering".to_owned() - ) - } else { - *captured_by_index = true; - Ok(conv(args.at(Value::from(i64::from_str(n).unwrap()))?)) - } - } else { - if let Some(x) = n.chars().find(|c| match c { - '.' | ',' | '[' | ']' => true, - _ => false, - }) { - starlark_err!( - FORMAT_STRING_INVALID_CHARACTER_ERROR_CODE, - format!("Invalid character '{}' inside replacement field", x), - format!("Invalid character '{}'", x) - ) - } - Ok(conv(kwargs.at(Value::from(n))?)) - } -} - -// This does not exists in rust, split would cut the string incorrectly and split_whitespace -// cannot take a n parameter. 
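To make the comment above concrete: `split_whitespace` collapses runs of blanks but offers no piece limit, while `splitn` honours a limit but treats every single blank as a separator. A quick demonstration of the gap the two helpers below fill:

```rust
fn main() {
    let s = "a  b c"; // "a", two spaces, "b", one space, "c"

    // `split_whitespace` collapses runs of blanks but cannot stop after n pieces:
    assert_eq!(s.split_whitespace().collect::<Vec<_>>(), vec!["a", "b", "c"]);

    // `splitn` honours a limit but treats each blank as its own separator,
    // so the run of two spaces yields an empty piece:
    assert_eq!(s.splitn(3, ' ').collect::<Vec<_>>(), vec!["a", "", "b c"]);

    // The helpers below combine both behaviours: collapse whitespace *and*
    // respect the piece limit, which is what Starlark's `split(None, maxsplit)`
    // and `rsplit(None, maxsplit)` need.
}
```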
-fn splitn_whitespace(s: &str, maxsplit: usize) -> Vec { - let mut v = Vec::new(); - let mut cur = String::new(); - let mut split = 1; - let mut eat_ws = true; - for c in s.chars() { - if split >= maxsplit && !eat_ws { - cur.push(c) - } else if c.is_whitespace() { - if !cur.is_empty() { - v.push(cur); - cur = String::new(); - split += 1; - eat_ws = true; - } - } else { - eat_ws = false; - cur.push(c) - } - } - if !cur.is_empty() { - v.push(cur) - } - v -} - -fn rsplitn_whitespace(s: &str, maxsplit: usize) -> Vec { - let mut v = Vec::new(); - let mut cur = String::new(); - let mut split = 1; - let mut eat_ws = true; - for c in s.chars().rev() { - if split >= maxsplit && !eat_ws { - cur.push(c) - } else if c.is_whitespace() { - if !cur.is_empty() { - v.push(cur.chars().rev().collect()); - cur = String::new(); - split += 1; - eat_ws = true; - } - } else { - eat_ws = false; - cur.push(c) - } - } - if !cur.is_empty() { - v.push(cur.chars().rev().collect()); - } - v.reverse(); - v -} - -starlark_module! {global => - /// [string.elems]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·elems - /// ): returns an iterable of the bytes values of a string. - /// - /// `S.elems()` returns an iterable value containing the - /// sequence of numeric bytes values in the string S. - /// - /// To materialize the entire sequence of bytes, apply `list(...)` to the result. - /// - /// Example: - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// list("Hello, 世界".elems()) == [ - /// 72, 101, 108, 108, 111, 44, 32, 228, 184, 150, 231, 149, 140] - /// # )"#).unwrap()); - /// ``` - string.elems(this) { - // Note that we return a list here... Which is not equivalent to the go implementation. - ok!(this.to_str().into_bytes()) - } - - /// [string.capitalize]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·capitalize - /// ): returns a copy of string, with each first letter of a word in upper case. - /// - /// `S.capitalize()` returns a copy of string S with all Unicode letters - /// that begin words changed to their title case. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "hello, world!".capitalize() == "Hello, World!" - /// # )"#).unwrap()); - /// ``` - string.capitalize(this: String) { - let mut last_space = true; - let mut result = String::new(); - for c in this.chars() { - if !c.is_alphanumeric() { - last_space = true; - result.push(c); - } else { - if last_space { - for c1 in c.to_uppercase() { - result.push(c1); - } - } else { - result.push(c); - } - last_space = false; - } - } - ok!(result) - } - - /// [string.codepoints]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·codepoints - /// ): returns an iterable of the unicode codepoint of a string. - /// - /// `S.codepoints()` returns an iterable value containing the - /// sequence of integer Unicode code points encoded by the string S. - /// Each invalid code within the string is treated as if it encodes the - /// Unicode replacement character, U+FFFD. - /// - /// By returning an iterable, not a list, the cost of decoding the string - /// is deferred until actually needed; apply `list(...)` to the result to - /// materialize the entire sequence. 
- /// - /// Example: - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// list("Hello, 世界".codepoints()) == [72, 101, 108, 108, 111, 44, 32, 19990, 30028] - /// # )"#).unwrap()); - /// ``` - string.codepoints(this: String) { - // Note that we return a list here... Which is not equivalent to the go implementation. - let v : Vec = this.chars().map(|x| i64::from(u32::from(x))).collect(); - ok!(v) - } - - /// [string.count]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·count - /// ): count the number of occurrences of a string in another string. - /// - /// `S.count(sub[, start[, end]])` returns the number of occcurences of - /// `sub` within the string S, or, if the optional substring indices - /// `start` and `end` are provided, within the designated substring of S. - /// They are interpreted according to Skylark's [indexing conventions]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#indexing). - /// - /// This implementation does not count occurence of `sub` in the string `S` - /// that overlap other occurence of S (which can happen if some suffix of S is a prefix of S). - /// For instance, `"abababa".count("aba")` returns 2 for `[aba]a[aba]`, not counting the middle - /// occurence: `ab[aba]ba` (this is following Python behavior). - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "hello, world!".count("o") == 2 - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "abababa".count("aba") == 2 - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "hello, world!".count("o", 7, 12) == 1 # in "world" - /// # )"#).unwrap()); - /// ``` - string.count(this: String, needle: String, start = 0, end = NoneType::None, /) { - convert_indices!(this, start, end); - let n = needle.as_str(); - let mut counter = 0 as i64; - let mut s = this.as_str().get(start..end).unwrap(); - loop { - if let Some(offset) = s.find(n) { - counter += 1; - s = s.get(offset + n.len()..).unwrap_or(""); - } else { - ok!(counter) - } - } - } - - /// [string.endswith]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·endswith - /// ): determine if a string ends with a given suffix. - /// - /// `S.endswith(suffix)` reports whether the string S has the specified suffix. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "filename.sky".endswith(".sky") == True - /// # )"#).unwrap()); - /// ``` - string.endswith(this: String, suffix: String, /) { - ok!(this.ends_with(suffix.as_str())) - } - - /// [string.find]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·find - /// ): find a substring in a string. - /// - /// `S.find(sub[, start[, end]])` returns the index of the first - /// occurrence of the substring `sub` within S. - /// - /// If either or both of `start` or `end` are specified, - /// they specify a subrange of S to which the search should be restricted. - /// They are interpreted according to Skylark's [indexing conventions](#indexing). - /// - /// If no occurrence is found, `found` returns -1. 
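As the `count` documentation above notes, occurrences are counted without overlap: after a match, the scan resumes past the end of that match. The same loop in isolation, independent of the interpreter's `Value` machinery:

```rust
/// Count non-overlapping occurrences of `needle` in `haystack`.
fn count_non_overlapping(haystack: &str, needle: &str) -> usize {
    assert!(!needle.is_empty(), "an empty needle would loop forever");
    let mut count = 0;
    let mut rest = haystack;
    while let Some(pos) = rest.find(needle) {
        count += 1;
        // Resume the search *after* the match, so occurrences never overlap.
        rest = &rest[pos + needle.len()..];
    }
    count
}

fn main() {
    assert_eq!(count_non_overlapping("hello, world!", "o"), 2);
    // Matches at positions 0 and 4 ([aba]b[aba]); the overlapping middle "aba" is skipped.
    assert_eq!(count_non_overlapping("abababa", "aba"), 2);
}
```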
- /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "bonbon".find("on") == 1 - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "bonbon".find("on", 2) == 4 - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "bonbon".find("on", 2, 5) == -1 - /// # )"#).unwrap()); - /// ``` - string.find(this: String, needle: String, start = 0, end = NoneType::None, /) { - convert_indices!(this, start, end); - let needle = needle.to_str(); - if let Some(substring) = this.as_str().get(start..end) { - if let Some(offset) = substring.find(needle.as_str()) { - ok!((offset + start) as i64); - } - } - ok!(-1); - } - - /// [string.format]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·format - /// ): format a string. - /// - /// `S.format(*args, **kwargs)` returns a version of the format string S - /// in which bracketed portions `{...}` are replaced - /// by arguments from `args` and `kwargs`. - /// - /// Within the format string, a pair of braces `{{` or `}}` is treated as - /// a literal open or close brace. - /// Each unpaired open brace must be matched by a close brace `}`. - /// The optional text between corresponding open and close braces - /// specifies which argument to use and how to format it, and consists of - /// three components, all optional: - /// a field name, a conversion preceded by '`!`', and a format specifier - /// preceded by '`:`'. - /// - /// ```text - /// {field} - /// {field:spec} - /// {field!conv} - /// {field!conv:spec} - /// ``` - /// - /// The *field name* may be either a decimal number or a keyword. - /// A number is interpreted as the index of a positional argument; - /// a keyword specifies the value of a keyword argument. - /// If all the numeric field names form the sequence 0, 1, 2, and so on, - /// they may be omitted and those values will be implied; however, - /// the explicit and implicit forms may not be mixed. - /// - /// The *conversion* specifies how to convert an argument value `x` to a - /// string. It may be either `!r`, which converts the value using - /// `repr(x)`, or `!s`, which converts the value using `str(x)` and is - /// the default. - /// - /// The *format specifier*, after a colon, specifies field width, - /// alignment, padding, and numeric precision. - /// Currently it must be empty, but it is reserved for future use. - /// - /// Examples: - /// - /// ```rust - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "a{x}b{y}c{}".format(1, x=2, y=3) == "a2b3c1" - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "a{}b{}c".format(1, 2) == "a1b2c" - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "({1}, {0})".format("zero", "one") == "(one, zero)" - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "Is {0!r} {0!s}?".format("heterological") == "Is \"heterological\" heterological?" 
- /// # )"#).unwrap()); - /// ``` - string.format(this: String, *args, **kwargs) { - let mut it = args.iter().cloned(); - let mut captured_by_index = false; - let mut captured_by_order = false; - let mut result = String::new(); - let mut capture = String::new(); - for c in this.chars() { - match (c, capture.as_str()) { - ('{', "") | ('}', "") => capture.push(c), - (.., "") => result.push(c), - ('{', "{") => { - result.push('{'); - capture.clear(); - }, - ('{', "}") => starlark_err!( - FORMAT_STRING_UNMATCHED_BRACKET_ERROR_CODE, - "Standalone '}' in format string".to_owned(), - "standalone '}'".to_owned() - ), - ('{', ..) => starlark_err!( - FORMAT_STRING_UNMATCHED_BRACKET_ERROR_CODE, - "Unmatched '{' in format string".to_owned(), - "unmatched '{'".to_owned() - ), - ('}', "}") => { - result.push('}'); - capture.clear(); - }, - ('}', ..) => { - result += &format_capture( - &capture, - &mut it, - &mut captured_by_index, - &mut captured_by_order, - &Value::from(args.clone()), - &Value::try_from(kwargs.clone()).unwrap())?; - capture.clear(); - }, - (.., "}") => starlark_err!( - FORMAT_STRING_UNMATCHED_BRACKET_ERROR_CODE, - "Standalone '}' in format string".to_owned(), - "standalone '}'".to_owned() - ), - _ => capture.push(c) - } - } - match capture.as_str() { - "}" => starlark_err!( - FORMAT_STRING_UNMATCHED_BRACKET_ERROR_CODE, - "Standalone '}' in format string".to_owned(), - "standalone '}'".to_owned() - ), - "" => ok!(result), - _ => starlark_err!( - FORMAT_STRING_UNMATCHED_BRACKET_ERROR_CODE, - "Unmatched '{' in format string".to_owned(), - "unmatched '{'".to_owned() - ), - } - - } - - /// [string.index]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·index - /// ): search a substring inside a string, failing on not found. - /// - /// `S.index(sub[, start[, end]])` returns the index of the first - /// occurrence of the substring `sub` within S, like `S.find`, except - /// that if the substring is not found, the operation fails. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "bonbon".index("on") == 1 - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "bonbon".index("on", 2) == 4 - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "bonbon".index("on", 2, 5) # error: substring not found (in "nbo") - /// # )"#).is_err()); - /// ``` - string.index(this: String, needle: String, start = 0, end = NoneType::None, /) { - convert_indices!(this, start, end); - if let Some(substring) = this.as_str().get(start..end) { - if let Some(offset) = substring.find(needle.as_str()) { - ok!((offset + start) as i64); - } - } - starlark_err!( - SUBSTRING_INDEX_FAILED_ERROR_CODE, - format!("Substring '{}' not found in '{}'", needle, this), - "substring not found".to_owned() - ); - } - - /// [string.isalnum]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·isalnum - /// ): test if a string is composed only of letters and digits. - /// - /// `S.isalnum()` reports whether the string S is non-empty and consists only - /// Unicode letters and digits. 
- /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "base64".isalnum() == True - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "Catch-22".isalnum() == False - /// # )"#).unwrap()); - /// ``` - string.isalnum(this: String) { - if this.is_empty() { - ok!(false); - } - for c in this.chars() { - if !c.is_alphanumeric() { - ok!(false); - } - } - ok!(true); - } - - /// [string.isalpha]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·isalpha - /// ): test if a string is composed only of letters. - /// - /// `S.isalpha()` reports whether the string S is non-empty and consists only of Unicode letters. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "ABC".isalpha() == True - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "Catch-22".isalpha() == False - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "".isalpha() == False - /// # )"#).unwrap()); - /// ``` - string.isalpha(this: String) { - if this.is_empty() { - ok!(false); - } - for c in this.chars() { - if !c.is_alphabetic() { - ok!(false); - } - } - ok!(true); - } - - /// [string.isdigit]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·isdigit - /// ): test if a string is composed only of digits. - /// - /// `S.isdigit()` reports whether the string S is non-empty and consists only of Unicode digits. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "123".isdigit() == True - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "Catch-22".isdigit() == False - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "".isdigit() == False - /// # )"#).unwrap()); - /// ``` - string.isdigit(this: String) { - if this.is_empty() { - ok!(false); - } - for c in this.chars() { - if !c.is_numeric() { - ok!(false); - } - } - ok!(true); - } - - /// [string.islower]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·islower - /// ): test if all letters of a string are lowercase. - /// - /// `S.islower()` reports whether the string S contains at least one cased Unicode - /// letter, and all such letters are lowercase. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "hello, world".islower() == True - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "Catch-22".islower() == False - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "123".islower() == False - /// # )"#).unwrap()); - /// ``` - string.islower(this: String) { - let mut result = false; - for c in this.chars() { - if c.is_uppercase() { - ok!(false); - } else if c.is_lowercase() { - result = true; - } - } - ok!(result); - } - - /// [string.isspace]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·isspace - /// ): test if all characters of a string are whitespaces. - /// - /// `S.isspace()` reports whether the string S is non-empty and consists only of Unicode spaces. 
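The cased-letter predicates (`islower` above, `isupper` just below) share one subtlety: at least one cased letter must be present, and no letter of the opposite case may appear, which is why `"123".islower()` is `False`. A small sketch of the `islower` variant of that check:

```rust
/// `islower`-style check: at least one cased character, and none uppercase.
fn is_lower(s: &str) -> bool {
    let mut saw_cased = false;
    for c in s.chars() {
        if c.is_uppercase() {
            return false;
        }
        if c.is_lowercase() {
            saw_cased = true;
        }
    }
    saw_cased
}

fn main() {
    assert!(is_lower("hello, world"));
    assert!(!is_lower("Catch-22"));
    assert!(!is_lower("123")); // no cased letters at all
}
```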
- /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// " ".isspace() == True - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "\r\t\n".isspace() == True - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "".isspace() == False - /// # )"#).unwrap()); - /// ``` - string.isspace(this: String) { - if this.is_empty() { - ok!(false); - } - for c in this.chars() { - if !c.is_whitespace() { - ok!(false); - } - } - ok!(true); - } - - /// [string.istitle]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·istitle - /// ): test if the string is title cased. - /// - /// `S.istitle()` reports whether the string S contains at least one cased Unicode - /// letter, and all such letters that begin a word are in title case. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "Hello, World!".istitle() == True - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "Catch-22".istitle() == True - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "HAL-9000".istitle() == False - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "123".istitle() == False - /// # )"#).unwrap()); - /// ``` - string.istitle(this: String) { - let mut last_space = true; - let mut result = false; - - for c in this.chars() { - if !c.is_alphabetic() { - last_space = true; - } else { - if last_space { - if c.is_lowercase() { - ok!(false); - } - } else if c.is_uppercase() { - ok!(false); - } - if c.is_alphabetic() { - result = true; - } - last_space = false; - } - } - ok!(result); - } - - /// [string.isupper]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·isupper - /// ): test if all letters of a string are uppercase. - /// - /// `S.isupper()` reports whether the string S contains at least one cased Unicode - /// letter, and all such letters are uppercase. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "HAL-9000".isupper() == True - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "Catch-22".isupper() == False - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "123".isupper() == False - /// # )"#).unwrap()); - /// ``` - string.isupper(this: String) { - let mut result = false; - for c in this.chars() { - if c.is_lowercase() { - ok!(false); - } else if c.is_uppercase() { - result = true; - } - } - ok!(result); - } - - /// [string.join]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·join - /// ): join elements with a separator. - /// - /// `S.join(iterable)` returns the string formed by concatenating each - /// element of its argument, with a copy of the string S between - /// successive elements. The argument must be an iterable whose elements - /// are strings. 
- /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// ", ".join(["one", "two", "three"]) == "one, two, three" - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "a".join("ctmrn".split_codepoints()) == "catamaran" - /// # )"#).unwrap()); - /// ``` - string.join(this: String, to_join, /) { - let mut r = String::new(); - let to_join_iter = to_join.iter()?; - for (index, item) in to_join_iter.iter().enumerate() { - if index != 0 { - r.push_str(&this); - } - check_string!(item, join); - let item = item.downcast_ref::().unwrap(); - r.push_str(&*item); - } - ok!(r) - } - - /// [string.lower]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·lower - /// ): test if all letters of a string are lowercased. - /// - /// `S.lower()` returns a copy of the string S with letters converted to lowercase. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "Hello, World!".lower() == "hello, world!" - /// # )"#).unwrap()); - /// ``` - string.lower(this: String) { - ok!(this.to_lowercase()) - } - - /// [string.lstrip]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·lstrip - /// ): trim leading whitespaces. - /// - /// `S.lstrip()` returns a copy of the string S with leading whitespace removed. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// " hello ".lstrip() == "hello " - /// # )"#).unwrap()); - /// ``` - string.lstrip(this: String) { - ok!(this.trim_start()) - } - - /// [string.partition]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·partition - /// ): partition a string in 3 components - /// - /// `S.partition(x = " ")` splits string S into three parts and returns them as - /// a tuple: the portion before the first occurrence of string `x`, `x` itself, - /// and the portion following it. - /// If S does not contain `x`, `partition` returns `(S, "", "")`. - /// - /// `partition` fails if `x` is not a string, or is the empty string. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "one/two/three".partition("/") == ("one", "/", "two/three") - /// # )"#).unwrap()); - /// ``` - string.partition(this: String, needle = " ", /) { - check_string!(needle, partition); - let needle = needle.to_str(); - if needle.is_empty() { - starlark_err!( - INCORRECT_PARAMETER_TYPE_ERROR_CODE, - "Empty separator cannot be used for partitioning".to_owned(), - "Empty separtor".to_owned() - ) - } - if let Some(offset) = this.find(needle.as_str()) { - let offset2 = offset + needle.len(); - ok!(( - this.as_str().get(..offset).unwrap(), - needle, - this.as_str().get(offset2..).unwrap() - )) - } else { - ok!((this, "", "")) - } - } - - /// [string.replace]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·replace - /// ): replace all occurences of a subtring. - /// - /// `S.replace(old, new[, count])` returns a copy of string S with all - /// occurrences of substring `old` replaced by `new`. If the optional - /// argument `count`, which must be an `int`, is non-negative, it - /// specifies a maximum number of occurrences to replace. 
- /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "banana".replace("a", "o") == "bonono" - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "banana".replace("a", "o", 2) == "bonona" - /// # )"#).unwrap()); - /// ``` - string.replace(this: String, old: String, new: String, ?count: Option, /) { - ok!( - match count { - None => this.replace(old.as_str(), new.as_str()), - Some(count) => this.replacen(old.as_str(), new.as_str(), count as usize), - } - ) - } - - /// [string.rfind]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·rfind - /// ): find the last index of a substring. - /// - /// `S.rfind(sub[, start[, end]])` returns the index of the substring `sub` within - /// S, like `S.find`, except that `rfind` returns the index of the substring's - /// _last_ occurrence. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "bonbon".rfind("on") == 4 - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "bonbon".rfind("on", None, 5) == 1 - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "bonbon".rfind("on", 2, 5) == -1 - /// # )"#).unwrap()); - /// ``` - string.rfind(this: String, needle: String, start = 0, end = NoneType::None, /) { - convert_indices!(this, start, end); - if let Some(substring) = this.as_str().get(start..end) { - if let Some(offset) = substring.rfind(needle.as_str()) { - ok!((offset + start) as i64); - } - } - ok!(-1); - } - - /// [string.rindex]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·rindex - /// ): find the last index of a substring, failing on not found. - /// - /// `S.rindex(sub[, start[, end]])` returns the index of the substring `sub` within - /// S, like `S.index`, except that `rindex` returns the index of the substring's - /// _last_ occurrence. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "bonbon".rindex("on") == 4 - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "bonbon".rindex("on", None, 5) == 1 # in "bonbo" - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "bonbon".rindex("on", 2, 5) # error: substring not found (in "nbo") - /// # )"#).is_err()); - /// ``` - string.rindex(this: String, needle: String, start = 0, end = NoneType::None, /) { - convert_indices!(this, start, end); - if let Some(substring) = this.get(start..end) { - if let Some(offset) = substring.rfind(needle.as_str()) { - ok!((offset + start) as i64); - } - } - starlark_err!( - SUBSTRING_INDEX_FAILED_ERROR_CODE, - format!("Substring '{}' not found in '{}'", needle, this), - "substring not found".to_owned() - ); - } - - /// [string.rpartition]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·rpartition - /// ): partition a string in 3 elements. - /// - /// `S.rpartition([x = ' '])` is like `partition`, but splits `S` at the last occurrence of `x`. 
- /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "one/two/three".rpartition("/") == ("one/two", "/", "three") - /// # )"#).unwrap()); - /// ``` - string.rpartition(this: String, needle: String = " ".to_owned(), /) { - if needle.is_empty() { - starlark_err!( - INCORRECT_PARAMETER_TYPE_ERROR_CODE, - "Empty separator cannot be used for partitioning".to_owned(), - "Empty separtor".to_owned() - ) - } - let this = this.to_str(); - if let Some(offset) = this.rfind(needle.as_str()) { - let offset2 = offset + needle.len(); - ok!(( - this.as_str().get(..offset).unwrap(), - needle, - this.as_str().get(offset2..).unwrap() - )) - } else { - ok!(("", "", this)) - } - } - - /// [string.rsplit]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·rsplit - /// ): splits a string into substrings. - /// - /// `S.rsplit([sep[, maxsplit]])` splits a string into substrings like `S.split`, - /// except that when a maximum number of splits is specified, `rsplit` chooses the - /// rightmost splits. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "banana".rsplit("n") == ["ba", "a", "a"] - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "banana".rsplit("n", 1) == ["bana", "a"] - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "one two three".rsplit(None, 1) == ["one two", "three"] - /// # )"#).unwrap()); - /// ``` - string.rsplit(this: String, sep = NoneType::None, maxsplit = NoneType::None, /) { - let maxsplit = if maxsplit.get_type() == "NoneType" { - None - } else { - let v = maxsplit.to_int()?; - if v < 0 { - None - } else { - Some((v + 1) as usize) - } - }; - if sep.get_type() == "NoneType" { - if maxsplit.is_none() { - let v : Vec<&str> = this.split_whitespace().collect(); - ok!(v) - } else { - ok!(rsplitn_whitespace(&this, maxsplit.unwrap())) - } - } else { - check_string!(sep, split); - let sep = sep.to_str(); - let mut v : Vec<&str> = if maxsplit.is_none() { - this.rsplit(sep.as_str()).collect() - } else { - this.rsplitn(maxsplit.unwrap(), sep.as_str()).collect() - }; - v.reverse(); - ok!(v) - }; - } - - /// [string.rstrip]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·rstrip - /// ): trim trailing whitespace. - /// - /// `S.rstrip()` returns a copy of the string S with trailing whitespace removed. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// " hello ".rstrip() == " hello" - /// # )"#).unwrap()); - /// ``` - string.rstrip(this: String) { - ok!(this.to_str().trim_end()) - } - - /// [string.split]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·split - /// ): split a string in substrings. - /// - /// `S.split([sep [, maxsplit]])` returns the list of substrings of S, - /// splitting at occurrences of the delimiter string `sep`. - /// - /// Consecutive occurrences of `sep` are considered to delimit empty - /// strings, so `'food'.split('o')` returns `['f', '', 'd']`. - /// Splitting an empty string with a specified separator returns `['']`. - /// If `sep` is the empty string, `split` fails. 
- /// - /// If `sep` is not specified or is `None`, `split` uses a different - /// algorithm: it removes all leading spaces from S - /// (or trailing spaces in the case of `rsplit`), - /// then splits the string around each consecutive non-empty sequence of - /// Unicode white space characters. - /// - /// If S consists only of white space, `split` returns the empty list. - /// - /// If `maxsplit` is given and non-negative, it specifies a maximum number of splits. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "one two three".split() == ["one", "two", "three"] - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "one two three".split(" ") == ["one", "two", "", "three"] - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "one two three".split(None, 1) == ["one", "two three"] - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "banana".split("n") == ["ba", "a", "a"] - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "banana".split("n", 1) == ["ba", "ana"] - /// # )"#).unwrap()); - /// ``` - string.split(this: String, sep = NoneType::None, maxsplit = NoneType::None, /) { - let this = this.to_str(); - let maxsplit = if maxsplit.get_type() == "NoneType" { - None - } else { - let v = maxsplit.to_int()?; - if v < 0 { - None - } else { - Some((v + 1) as usize) - } - }; - let v : Vec<&str> = - if sep.get_type() == "NoneType" { - if maxsplit.is_none() { - this.split_whitespace().collect() - } else { - ok!(splitn_whitespace(&this, maxsplit.unwrap())) - } - } else { - check_string!(sep, split); - let sep = sep.to_str(); - if maxsplit.is_none() { - this.split(sep.as_str()).collect() - } else { - this.splitn(maxsplit.unwrap(), sep.as_str()).collect() - } - }; - ok!(v) - } - - /// [string.split_codepoints]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·split_codepoints - /// ): split a string into characters. - /// - /// `S.split_codepoints()` returns an iterable value containing the sequence of - /// substrings of S that each encode a single Unicode code point. - /// Each invalid code within the string is treated as if it encodes the - /// Unicode replacement character, U+FFFD. - /// - /// By returning an iterable, not a list, the cost of decoding the string - /// is deferred until actually needed; apply `list(...)` to the result to - /// materialize the entire sequence. - /// - /// Example: - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// list("Hello, 世界".split_codepoints()) == ["H", "e", "l", "l", "o", ",", " ", "世", "界"] - /// # )"#).unwrap()); - /// ``` - string.split_codepoints(this: String) { - let v : Vec = this.to_str().chars().map(|x| x.to_string()).collect(); - ok!(v) - } - - /// [string.splitlines]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·splitlines - /// ): return the list of lines of a string. - /// - /// `S.splitlines([keepends])` returns a list whose elements are the - /// successive lines of S, that is, the strings formed by splitting S at - /// line terminators ('\n', '\r' or '\r\n'). - /// - /// The optional argument, `keepends`, is interpreted as a Boolean. - /// If true, line terminators are preserved in the result, though - /// the final element does not necessarily end with a line terminator. 
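A detail of the `split`/`rsplit` implementations above: Starlark's `maxsplit` counts splits, while Rust's `splitn`/`rsplitn` count resulting pieces, hence the `(v + 1)` conversion before calling them; and `rsplitn` yields pieces right to left, hence the final `reverse()`. A short demonstration of both points:

```rust
fn main() {
    // Starlark: "banana".split("n", 1) == ["ba", "ana"]  -- 1 split, 2 pieces.
    let pieces: Vec<&str> = "banana".splitn(1 + 1, 'n').collect();
    assert_eq!(pieces, vec!["ba", "ana"]);

    // Starlark: "banana".rsplit("n", 1) == ["bana", "a"] -- rightmost split;
    // rsplitn yields pieces right-to-left, so the result is reversed afterwards.
    let mut rpieces: Vec<&str> = "banana".rsplitn(1 + 1, 'n').collect();
    rpieces.reverse();
    assert_eq!(rpieces, vec!["bana", "a"]);
}
```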
- /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "one\n\ntwo".splitlines() == ["one", "", "two"] - /// # )"#).unwrap()); - /// # assert!(starlark_default(r#"( - /// "one\n\ntwo".splitlines(True) == ["one\n", "\n", "two"] - /// # )"#).unwrap()); - /// ``` - string.splitlines(this: String, keepends = false, /) { - check_type!(keepends, "string.splitlines", bool); - let this = this.to_str(); - let mut s = this.as_str(); - let keepends = keepends.to_bool(); - let mut lines = Vec::new(); - loop { - if let Some(x) = s.find(|x| x == '\n' || x == '\r') { - let y = x; - let x = match s.get(y..y+2) { - Some("\r\n") => y + 2, - _ => y + 1 - }; - if keepends { - lines.push(s.get(..x).unwrap()) - } else { - lines.push(s.get(..y).unwrap()) - } - if x == s.len() { - ok!(lines); - } - s = s.get(x..).unwrap(); - } else { - if !s.is_empty() { - lines.push(s); - } - ok!(lines); - } - } - } - - /// [string.startswith]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·startswith - /// ): test wether a string starts with a given prefix. - /// - /// `S.startswith(suffix)` reports whether the string S has the specified prefix. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "filename.sky".startswith("filename") == True - /// # )"#).unwrap()); - /// ``` - string.startswith(this: String, prefix, /) { - check_string!(prefix, startswith); - ok!(this.to_str().starts_with(prefix.to_str().as_str())) - } - - /// [string.strip]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·strip - /// ): trim leading and trailing whitespaces. - /// - /// `S.strip()` returns a copy of the string S with leading and trailing whitespace removed. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// " hello ".strip() == "hello" - /// # )"#).unwrap()); - /// ``` - string.strip(this: String) { - ok!(this.trim()) - } - - /// [string.title]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·title - /// ): convert a string to title case. - /// - /// `S.lower()` returns a copy of the string S with letters converted to titlecase. - /// - /// Letters are converted to uppercase at the start of words, lowercase elsewhere. - /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "hElLo, WoRlD!".title() == "Hello, World!" - /// # )"#).unwrap()); - /// ``` - string.title(this: String) { - let mut last_space = true; - let mut result = String::new(); - for c in this.to_str().chars() { - if !c.is_alphabetic() { - last_space = true; - for c1 in c.to_lowercase() { - result.push(c1); - } - } else { - if last_space { - for c1 in c.to_uppercase() { - result.push(c1); - } - } else { - for c1 in c.to_lowercase() { - result.push(c1); - } - } - last_space = false; - } - } - ok!(result); - } - - /// [string.upper]( - /// https://github.com/google/skylark/blob/3705afa472e466b8b061cce44b47c9ddc6db696d/doc/spec.md#string·upper - /// ): convert a string to all uppercase. - /// - /// `S.lower()` returns a copy of the string S with letters converted to lowercase. 
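The `title()` loop above applies the rule stated in its doc comment: a letter is uppercased when it starts an alphabetic run and lowercased otherwise, with any non-alphabetic character acting as a word boundary. A compact standalone restatement of that logic (a sketch, not the crate's code):

```
// Sketch only: titlecase = uppercase at the start of each alphabetic run,
// lowercase elsewhere; non-letters pass through and reset the word boundary.
fn title_case(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    let mut at_word_start = true;
    for c in s.chars() {
        if c.is_alphabetic() {
            if at_word_start {
                out.extend(c.to_uppercase());
            } else {
                out.extend(c.to_lowercase());
            }
            at_word_start = false;
        } else {
            out.push(c);
            at_word_start = true;
        }
    }
    out
}

fn main() {
    assert_eq!(title_case("hElLo, WoRlD!"), "Hello, World!");
    assert_eq!(title_case("hal-9000"), "Hal-9000");
}
```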
- /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default(r#"( - /// "Hello, World!".upper() == "HELLO, WORLD!" - /// # )"#).unwrap()); - /// ``` - string.upper(this: String) { - ok!(this.to_str().to_uppercase()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::values::dict; - - #[test] - fn test_format_capture() { - let args = Value::from(vec!["1", "2", "3"]); - let mut kwargs: Value = dict::Dictionary::new().into(); - let it = args.iter().unwrap(); - let mut it = it.iter(); - let mut captured_by_index = false; - let mut captured_by_order = false; - - kwargs.set_at(Value::from("a"), Value::from("x")).unwrap(); - kwargs.set_at(Value::from("b"), Value::from("y")).unwrap(); - kwargs.set_at(Value::from("c"), Value::from("z")).unwrap(); - assert_eq!( - format_capture( - "{", - &mut it, - &mut captured_by_index, - &mut captured_by_order, - &args, - &kwargs, - ) - .unwrap(), - "1" - ); - assert_eq!( - format_capture( - "{!s", - &mut it, - &mut captured_by_index, - &mut captured_by_order, - &args, - &kwargs, - ) - .unwrap(), - "2" - ); - assert_eq!( - format_capture( - "{!r", - &mut it, - &mut captured_by_index, - &mut captured_by_order, - &args, - &kwargs, - ) - .unwrap(), - "\"3\"" - ); - assert_eq!( - format_capture( - "{a!r", - &mut it, - &mut captured_by_index, - &mut captured_by_order, - &args, - &kwargs, - ) - .unwrap(), - "\"x\"" - ); - assert_eq!( - format_capture( - "{a!s", - &mut it, - &mut captured_by_index, - &mut captured_by_order, - &args, - &kwargs, - ) - .unwrap(), - "x" - ); - assert!(format_capture( - "{1", - &mut it, - &mut captured_by_index, - &mut captured_by_order, - &args, - &kwargs, - ) - .is_err()); - captured_by_order = false; - let it = args.iter().unwrap(); - let mut it = it.iter(); - assert_eq!( - format_capture( - "{1", - &mut it, - &mut captured_by_index, - &mut captured_by_order, - &args, - &kwargs, - ) - .unwrap(), - "2" - ); - assert!(format_capture( - "{", - &mut it, - &mut captured_by_index, - &mut captured_by_order, - &args, - &kwargs, - ) - .is_err()); - } - - #[test] - fn test_elems() { - starlark_ok!( - r#"(list("Hello, 世界".elems()) == [ - 72, 101, 108, 108, 111, 44, 32, 228, 184, 150, 231, 149, 140])"# - ); - } - - #[test] - fn test_capitalize() { - starlark_ok!(r#"("hello, world!".capitalize() == "Hello, World!")"#); - } - - #[test] - fn test_codepoints() { - starlark_ok!( - r#"(list("Hello, 世界".codepoints()) == [ - 72, 101, 108, 108, 111, 44, 32, 19990, 30028])"# - ); - } - - #[test] - fn test_count() { - starlark_ok!(r#"("hello, world!".count("o") == 2)"#); - starlark_ok!(r#"("abababa".count("aba") == 2)"#); - starlark_ok!(r#"("hello, world!".count("o", 7, 12) == 1)"#); - } - - #[test] - fn test_endswith() { - starlark_ok!(r#"("filename.sky".endswith(".sky") == True)"#); - } - - #[test] - fn test_find() { - starlark_ok!(r#"("bonbon".find("on") == 1)"#); - starlark_ok!(r#"("bonbon".find("on", 2) == 4)"#); - starlark_ok!(r#"("bonbon".find("on", 2, 5) == -1)"#); - } - - #[test] - fn test_format() { - starlark_ok!(r#"("a{x}b{y}c{}".format(1, x=2, y=3) == "a2b3c1")"#); - starlark_ok!(r#"("a{}b{}c".format(1, 2) == "a1b2c")"#); - starlark_ok!(r#"("({1}, {0})".format("zero", "one") == "(one, zero)")"#); - starlark_ok!( - r#"("Is {0!r} {0!s}?".format('heterological') == - "Is \"heterological\" heterological?")"# - ); - } - - #[test] - fn test_index() { - starlark_ok!(r#"("bonbon".index("on") == 1)"#); - starlark_ok!(r#"("bonbon".index("on", 2) == 4)"#); - starlark_fail!( - 
r#""bonbon".index("on", 2, 5)"#, - SUBSTRING_INDEX_FAILED_ERROR_CODE - ); - } - - #[test] - fn test_isalnum() { - starlark_ok!(r#"("base64".isalnum() == True)"#); - starlark_ok!(r#"("Catch-22".isalnum() == False)"#); - } - - #[test] - fn test_isalpha() { - starlark_ok!(r#"("ABC".isalpha() == True)"#); - starlark_ok!(r#"("Catch-22".isalpha() == False)"#); - starlark_ok!(r#"("".isalpha() == False)"#); - } - - #[test] - fn test_isdigit() { - starlark_ok!(r#"("123".isdigit() == True)"#); - starlark_ok!(r#"("Catch-22".isdigit() == False)"#); - starlark_ok!(r#"("".isdigit() == False)"#); - } - - #[test] - fn test_islower() { - starlark_ok!(r#"("hello, world".islower() == True)"#); - starlark_ok!(r#"("Catch-22".islower() == False)"#); - starlark_ok!(r#"("123".islower() == False)"#); - } - - #[test] - fn test_isspace() { - starlark_ok!(r#"(" ".isspace() == True)"#); - starlark_ok!(r#"("\r\t\n".isspace() == True)"#); - starlark_ok!(r#"("".isspace() == False)"#); - } - - #[test] - fn test_istitle() { - starlark_ok!(r#"("Hello, World!".istitle() == True)"#); - starlark_ok!(r#"("Catch-22".istitle() == True)"#); - starlark_ok!(r#"("HAL-9000".istitle() == False)"#); - starlark_ok!(r#"("123".istitle() == False)"#); - } - - #[test] - fn test_isupper() { - starlark_ok!(r#"("HAL-9000".isupper() == True)"#); - starlark_ok!(r#"("Catch-22".isupper() == False)"#); - starlark_ok!(r#"("123".isupper() == False)"#); - } - - #[test] - fn test_join() { - starlark_ok!(r#"(", ".join(["one", "two", "three"]) == "one, two, three")"#); - starlark_ok!(r#"("a".join("ctmrn".split_codepoints()) == "catamaran")"#); - } - - #[test] - fn test_lower() { - starlark_ok!(r#"("Hello, World!".lower() == "hello, world!")"#); - } - - #[test] - fn test_lstrip() { - starlark_ok!(r#"(" hello ".lstrip() == "hello ")"#); - } - - #[test] - fn test_partition() { - starlark_ok!(r#"("one/two/three".partition("/") == ("one", "/", "two/three"))"#); - } - - #[test] - fn test_replace() { - starlark_ok!(r#"("banana".replace("a", "o") == "bonono")"#); - starlark_ok!(r#"("banana".replace("a", "o", 2) == "bonona")"#); - } - - #[test] - fn test_rfind() { - starlark_ok!(r#"("bonbon".rfind("on") == 4)"#); - starlark_ok!(r#"("bonbon".rfind("on", None, 5) == 1)"#); - starlark_ok!(r#"("bonbon".rfind("on", 2, 5) == -1)"#); - } - - #[test] - fn test_rindex() { - starlark_ok!(r#"("bonbon".rindex("on") == 4)"#); - starlark_ok!(r#"("bonbon".rindex("on", None, 5) == 1)"#); - starlark_fail!( - r#""bonbon".rindex("on", 2, 5)"#, - SUBSTRING_INDEX_FAILED_ERROR_CODE - ); - } - - #[test] - fn test_rpartition() { - starlark_ok!(r#"("one/two/three".rpartition("/") == ("one/two", "/", "three"))"#); - } - - #[test] - fn test_rsplit() { - starlark_ok!(r#"("banana".rsplit("n") == ["ba", "a", "a"])"#); - starlark_ok!(r#"("banana".rsplit("n", 1) == ["bana", "a"])"#); - starlark_ok!(r#"("one two three".rsplit(None, 1) == ["one two", "three"])"#); - } - - #[test] - fn test_rstrip() { - starlark_ok!(r#"(" hello ".rstrip() == " hello")"#); - } - - #[test] - fn test_split() { - starlark_ok!(r#"("one two three".split() == ["one", "two", "three"])"#); - starlark_ok!(r#"("one two three".split(" ") == ["one", "two", "", "three"])"#); - starlark_ok!(r#"("one two three".split(None, 1) == ["one", "two three"])"#); - starlark_ok!(r#"("banana".split("n") == ["ba", "a", "a"])"#); - starlark_ok!(r#"("banana".split("n", 1) == ["ba", "ana"])"#); - } - - #[test] - fn test_split_codepoints() { - starlark_ok!( - r#"(list('Hello, 世界'.split_codepoints()) == ['H', 'e', 'l', 'l', 'o', ',', ' ', '世', 
'界'])"# - ); - } - - #[test] - fn test_splitlines() { - starlark_ok!(r#"("one\n\ntwo".splitlines() == ["one", "", "two"])"#); - starlark_ok!(r#"("one\n\ntwo".splitlines(True) == ["one\n", "\n", "two"])"#); - starlark_ok!(r#"("a\nb".splitlines() == ["a", "b"])"#); - } - - #[test] - fn test_startswith() { - starlark_ok!(r#"("filename.sky".startswith("filename") == True)"#); - } - - #[test] - fn test_strip() { - starlark_ok!(r#"(" hello ".strip() == "hello")"#); - } - - #[test] - fn test_title() { - starlark_ok!(r#"("hElLo, WoRlD!".title() == "Hello, World!")"#); - } - - #[test] - fn test_upper() { - starlark_ok!(r#"("Hello, World!".upper() == "HELLO, WORLD!")"#); - } -} diff --git a/starlark/src/stdlib/structs.rs b/starlark/src/stdlib/structs.rs deleted file mode 100644 index acffbb61..00000000 --- a/starlark/src/stdlib/structs.rs +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Implementation of `struct` function. - -use crate::values::error::UnsupportedOperation; -use crate::values::error::ValueError; -use crate::values::string::rc::RcString; -use crate::values::*; -use linked_hash_map::LinkedHashMap; -use std::fmt; -use std::fmt::Write as _; - -/// `struct()` implementation. -pub struct StarlarkStruct { - fields: LinkedHashMap, -} - -impl StarlarkStruct { - pub(crate) fn new(fields: LinkedHashMap) -> StarlarkStruct { - StarlarkStruct { fields } - } -} - -impl TypedValue for StarlarkStruct { - type Holder = Immutable; - - fn values_for_descendant_check_and_freeze<'a>( - &'a self, - ) -> Box + 'a> { - Box::new(self.fields.values().cloned()) - } - - fn to_repr_impl(&self, buf: &mut String) -> fmt::Result { - write!(buf, "struct(")?; - for (i, (name, value)) in self.fields.iter().enumerate() { - if i != 0 { - write!(buf, ", ")?; - } - write!(buf, "{}=", name)?; - value.to_repr_impl(buf)?; - } - write!(buf, ")")?; - Ok(()) - } - - const TYPE: &'static str = "struct"; - - fn equals(&self, other: &StarlarkStruct) -> Result { - if self.fields.len() != other.fields.len() { - return Ok(false); - } - - for (field, a) in &self.fields { - match other.fields.get(field) { - None => return Ok(false), - Some(b) => { - if !a.equals(b)? { - return Ok(false); - } - } - } - } - - Ok(true) - } - - fn get_attr(&self, attribute: &str) -> Result { - match self.fields.get(attribute) { - Some(v) => Ok(v.clone()), - None => Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::GetAttr(attribute.to_owned()), - left: self.to_repr(), - right: None, - }), - } - } - - fn has_attr(&self, attribute: &str) -> Result { - Ok(self.fields.contains_key(attribute)) - } - - fn dir_attr(&self) -> Result, ValueError> { - Ok(self.fields.keys().cloned().collect()) - } -} - -starlark_module! { global => - /// Creates a struct. - /// - /// `struct` creates a struct. It accepts keyword arguments, keys become struct field names, - /// and values become field values. 
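The `struct()` built-in described above stores its keyword arguments as an insertion-ordered field map: attribute access is a lookup by field name, and `dir()` reports the field names. A standalone sketch of that shape (simplified: values are plain integers, and `FieldStruct` and its methods are invented names for illustration, not crate APIs):

```
// Sketch only: an immutable, insertion-ordered field map, as used by struct().
struct FieldStruct {
    fields: Vec<(String, i64)>, // insertion order preserved
}

impl FieldStruct {
    fn get_attr(&self, name: &str) -> Option<i64> {
        self.fields
            .iter()
            .find(|(k, _)| k.as_str() == name)
            .map(|(_, v)| *v)
    }

    fn dir_attr(&self) -> Vec<&str> {
        self.fields.iter().map(|(k, _)| k.as_str()).collect()
    }
}

fn main() {
    let s = FieldStruct {
        fields: vec![("host".to_owned(), 1), ("port".to_owned(), 80)],
    };
    assert_eq!(s.get_attr("port"), Some(80));
    assert_eq!(s.get_attr("missing"), None);
    assert_eq!(s.dir_attr(), vec!["host", "port"]);
}
```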
- /// - /// Examples: - /// - /// ``` - /// # use starlark::stdlib::starlark_default; - /// # assert!(starlark_default("( - /// struct(host='localhost', port=80).port == 80 - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// dir(struct(host='localhost', port=80)) == ['host', 'port'] - /// # )").unwrap()); - /// # assert!(starlark_default("( - /// dir(struct()) == [] - /// # )").unwrap()); - /// ``` - struct_(**kwargs) { - Ok(Value::new(StarlarkStruct::new(kwargs))) - } -} diff --git a/starlark/src/syntax/ast.rs b/starlark/src/syntax/ast.rs deleted file mode 100644 index ee53ac26..00000000 --- a/starlark/src/syntax/ast.rs +++ /dev/null @@ -1,987 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! AST for parsed starlark files. - -use super::lexer; -use crate::eval::locals::LocalsBuilder; -use crate::syntax::fmt::comma_separated_fmt; -use crate::syntax::fmt::fmt_string_literal; -use crate::syntax::fmt::indent; -use crate::values::string::rc::RcString; -use codemap::{Span, Spanned}; -use codemap_diagnostic::{Diagnostic, Level, SpanLabel, SpanStyle}; -use lalrpop_util; -use std::collections::HashSet; -use std::fmt; -use std::fmt::{Display, Formatter}; - -// Boxed types used for storing information from the parsing will be used especially for the -// location of the AST item -#[doc(hidden)] -pub(crate) type AstExpr = Box>; -#[doc(hidden)] -pub type AstAugmentedAssignTargetExpr = Spanned; -#[doc(hidden)] -pub type AstAssignTargetExpr = Spanned; -#[doc(hidden)] -pub type AstArgument = Spanned; -#[doc(hidden)] -pub type AstString = Spanned; -#[doc(hidden)] -pub type AstParameter = Spanned; -#[doc(hidden)] -pub type AstClause = Spanned; -#[doc(hidden)] -pub type AstInt = Spanned; -#[doc(hidden)] -pub type AstStatement = Box>; - -// Critical Semantic -const POSITIONAL_ARGUMENT_AFTER_NON_POSITIONAL_ERROR_CODE: &str = "CS00"; -const NAMED_ARGUMENT_AFTER_KWARGS_DICT_ERROR_CODE: &str = "CS01"; -const ARGS_ARRAY_AFTER_ANOTHER_ARGS_OR_KWARGS_ERROR_CODE: &str = "CS02"; -const MULTIPLE_KWARGS_DICT_IN_ARGS_ERROR_CODE: &str = "CS03"; -const POSITIONAL_PARAMETER_AFTER_NON_POSITIONAL_ERROR_CODE: &str = "CS04"; -const DEFAULT_PARAM_AFTER_ARGS_OR_KWARGS_ERROR_CODE: &str = "CS05"; -const ARGS_AFTER_ARGS_OR_KWARGS_ERROR_CODE: &str = "CS06"; -const MULTIPLE_KWARGS_DICTS_IN_PARAMS_ERROR_CODE: &str = "CS07"; -const DUPLICATED_PARAM_NAME_ERROR_CODE: &str = "CS08"; -const BREAK_OR_CONTINUE_OUTSIDE_OF_LOOP_ERROR_CODE: &str = "CS09"; -const INCORRECT_AUGMENTED_ASSIGNMENT_TARGET_ERROR_CODE: &str = "CS10"; -const INCORRECT_ASSIGNMENT_TARGET_ERROR_CODE: &str = "CS11"; -const AUGMENTED_ASSIGN_IN_MOD: &str = "CS12"; - -#[doc(hidden)] -pub trait ToAst { - fn to_ast(self, span: Span) -> T; -} - -macro_rules! 
to_ast_trait { - ($t1:ty, $t2:ty, $t3:ident) => { - impl ToAst<$t2> for $t1 { - fn to_ast(self, span: Span) -> $t2 { - $t3::new(Spanned { span, node: self }) - } - } - }; - ($t1:ty, $t2:ty) => { - impl ToAst<$t2> for $t1 { - fn to_ast(self, span: Span) -> $t2 { - Spanned { span, node: self } - } - } - }; -} - -to_ast_trait!(i64, AstInt); - -impl ToAst for String { - fn to_ast(self, span: Span) -> Spanned { - Spanned { - span, - node: RcString::from(self), - } - } -} - -#[doc(hidden)] -#[derive(Debug, Clone)] -pub enum Argument { - Positional(AstExpr), - Named(AstString, AstExpr), - ArgsArray(AstExpr), - KWArgsDict(AstExpr), -} -to_ast_trait!(Argument, AstArgument); - -#[doc(hidden)] -#[derive(Debug, Clone)] -pub enum Parameter { - Normal(AstString), - WithDefaultValue(AstString, AstExpr), - Args(AstString), - KWArgs(AstString), -} -to_ast_trait!(Parameter, AstParameter); - -impl Parameter { - pub(crate) fn name(&self) -> RcString { - match self { - Parameter::Normal(n) => n.node.clone(), - Parameter::WithDefaultValue(n, ..) => n.node.clone(), - Parameter::Args(n) => n.node.clone(), - Parameter::KWArgs(n) => n.node.clone(), - } - } -} - -#[doc(hidden)] -#[derive(Debug, Clone)] -pub enum Expr { - Tuple(Vec), - Dot(AstExpr, AstString), - Call( - AstExpr, - Vec, - Vec<(AstString, AstExpr)>, - Option, - Option, - ), - ArrayIndirection(AstExpr, AstExpr), - Slice(AstExpr, Option, Option, Option), - Identifier(AstString), - IntLiteral(AstInt), - StringLiteral(AstString), - Not(AstExpr), - And(AstExpr, AstExpr), - Or(AstExpr, AstExpr), - BinOp(BinOp, AstExpr, AstExpr), - UnOp(UnOp, AstExpr), - If(AstExpr, AstExpr, AstExpr), // Order: condition, v1, v2 <=> v1 if condition else v2 - List(Vec), - Set(Vec), - Dict(Vec<(AstExpr, AstExpr)>), - ListComprehension(AstExpr, Vec), - SetComprehension(AstExpr, Vec), - DictComprehension((AstExpr, AstExpr), Vec), -} -to_ast_trait!(Expr, AstExpr, Box); - -/// `x` in `x = a` -#[doc(hidden)] -#[derive(Debug, Clone)] -pub enum AssignTargetExpr { - Identifier(AstString), - Dot(AstExpr, AstString), - ArrayIndirection(AstExpr, AstExpr), - Subtargets(Vec), -} -to_ast_trait!(AssignTargetExpr, AstAssignTargetExpr); - -/// `x` in `x += a` -#[doc(hidden)] -#[derive(Debug, Clone)] -pub enum AugmentedAssignTargetExpr { - Identifier(AstString), - Dot(AstExpr, AstString), - ArrayIndirection(AstExpr, AstExpr), -} -to_ast_trait!(AugmentedAssignTargetExpr, AstAugmentedAssignTargetExpr); - -impl Expr { - pub fn check_call( - f: AstExpr, - args: Vec, - ) -> Result> { - let mut pos_args = Vec::new(); - let mut named_args = Vec::new(); - let mut args_array = None; - let mut kwargs_dict = None; - let mut stage = 0; - for arg in args { - match arg.node { - Argument::Positional(s) => { - if stage > 0 { - return Err(lalrpop_util::ParseError::User { - error: lexer::LexerError::WrappedError { - span: arg.span, - code: POSITIONAL_ARGUMENT_AFTER_NON_POSITIONAL_ERROR_CODE, - label: "positional argument after non positional", - }, - }); - } else { - pos_args.push(s); - } - } - Argument::Named(n, v) => { - if stage > 2 { - return Err(lalrpop_util::ParseError::User { - error: lexer::LexerError::WrappedError { - span: arg.span, - code: NAMED_ARGUMENT_AFTER_KWARGS_DICT_ERROR_CODE, - label: "named argument after kwargs dictionary", - }, - }); - } else { - if stage == 0 { - stage = 1; - } - named_args.push((n, v)); - } - } - Argument::ArgsArray(v) => { - if stage > 1 { - return Err(lalrpop_util::ParseError::User { - error: lexer::LexerError::WrappedError { - span: arg.span, - code: 
ARGS_ARRAY_AFTER_ANOTHER_ARGS_OR_KWARGS_ERROR_CODE, - label: "Args array after another args or kwargs", - }, - }); - } else { - stage = 2; - args_array = Some(v); - } - } - Argument::KWArgsDict(d) => { - if stage == 3 { - return Err(lalrpop_util::ParseError::User { - error: lexer::LexerError::WrappedError { - span: arg.span, - code: MULTIPLE_KWARGS_DICT_IN_ARGS_ERROR_CODE, - label: "Multiple kwargs dictionary in arguments", - }, - }); - } else { - stage = 3; - kwargs_dict = Some(d); - } - } - } - } - Ok(Expr::Call(f, pos_args, named_args, args_array, kwargs_dict)) - } - - pub(crate) fn collect_locals(expr: &AstExpr, locals_builder: &mut LocalsBuilder) { - match expr.node { - Expr::Tuple(ref exprs) | Expr::List(ref exprs) | Expr::Set(ref exprs) => { - for expr in exprs { - Expr::collect_locals(expr, locals_builder); - } - } - Expr::Dict(ref pairs) => { - for pair in pairs { - Expr::collect_locals(&pair.0, locals_builder); - Expr::collect_locals(&pair.1, locals_builder); - } - } - Expr::Dot(ref object, ref _field) => { - Expr::collect_locals(object, locals_builder); - } - Expr::ArrayIndirection(ref array, ref index) => { - Expr::collect_locals(array, locals_builder); - Expr::collect_locals(index, locals_builder); - } - Expr::Call(ref func, ref args, ref named, ref star, ref star_star) => { - Expr::collect_locals(func, locals_builder); - for arg in args { - Expr::collect_locals(arg, locals_builder); - } - for arg in named { - Expr::collect_locals(&arg.1, locals_builder); - } - if let Some(star) = star { - Expr::collect_locals(star, locals_builder); - } - if let Some(star_star) = star_star { - Expr::collect_locals(star_star, locals_builder); - } - } - Expr::Slice(ref array, ref a, ref b, ref c) => { - Expr::collect_locals(array, locals_builder); - if let Some(ref a) = a { - Expr::collect_locals(a, locals_builder); - } - if let Some(ref b) = b { - Expr::collect_locals(b, locals_builder); - } - if let Some(ref c) = c { - Expr::collect_locals(c, locals_builder); - } - } - Expr::Identifier(..) | Expr::IntLiteral(..) | Expr::StringLiteral(..) => {} - Expr::Not(ref expr) | Expr::UnOp(_, ref expr) => { - Expr::collect_locals(expr, locals_builder); - } - Expr::BinOp(_, ref lhs, ref rhs) - | Expr::And(ref lhs, ref rhs) - | Expr::Or(ref lhs, ref rhs) => { - Expr::collect_locals(lhs, locals_builder); - Expr::collect_locals(rhs, locals_builder); - } - Expr::If(ref cond, ref then_expr, ref else_expr) => { - Expr::collect_locals(cond, locals_builder); - Expr::collect_locals(then_expr, locals_builder); - Expr::collect_locals(else_expr, locals_builder); - } - Expr::ListComprehension(ref expr, ref clauses) - | Expr::SetComprehension(ref expr, ref clauses) => { - Self::collect_locals_from_compr_clauses(&[expr], clauses, locals_builder); - } - Expr::DictComprehension((ref k, ref v), ref clauses) => { - Self::collect_locals_from_compr_clauses(&[k, v], clauses, locals_builder); - } - } - } - - fn collect_locals_from_compr_clauses( - exprs: &[&AstExpr], - clauses: &[AstClause], - locals_builder: &mut LocalsBuilder, - ) { - match clauses.split_first() { - Some((clause, rem)) => { - match clause.node { - Clause::If(ref expr) => { - Expr::collect_locals(expr, locals_builder); - } - Clause::For(ref target, ref over) => { - Expr::collect_locals(over, locals_builder); - locals_builder.push_scope(); - AssignTargetExpr::collect_locals_from_assign_expr(target, locals_builder); - } - } - Self::collect_locals_from_compr_clauses(exprs, rem, locals_builder); - match clause.node { - Clause::If(..) => {} - Clause::For(..) 
=> { - locals_builder.pop_scope(); - } - } - } - None => { - for expr in exprs { - Expr::collect_locals(expr, locals_builder); - } - } - } - } -} - -impl AssignTargetExpr { - // Performing this transformation in Rust code rather than in grammar - // to deal with ambiguous grammar. - pub(crate) fn from_expr( - expr: AstExpr, - ) -> Result> - { - Ok(Spanned { - span: expr.span, - node: match expr.node { - Expr::Identifier(ident) => AssignTargetExpr::Identifier(ident), - Expr::ArrayIndirection(array, index) => { - AssignTargetExpr::ArrayIndirection(array, index) - } - Expr::Dot(object, field) => AssignTargetExpr::Dot(object, field), - Expr::List(subtargets) | Expr::Tuple(subtargets) => AssignTargetExpr::Subtargets( - subtargets - .into_iter() - .map(AssignTargetExpr::from_expr) - .collect::>()?, - ), - _ => { - return Err(lalrpop_util::ParseError::User { - error: lexer::LexerError::WrappedError { - span: expr.span, - code: INCORRECT_ASSIGNMENT_TARGET_ERROR_CODE, - label: "incorrect assignment target", - }, - }) - } - }, - }) - } - - pub(crate) fn collect_locals_from_assign_expr( - expr: &AstAssignTargetExpr, - locals_builder: &mut LocalsBuilder, - ) { - match expr.node { - AssignTargetExpr::Identifier(ref ident) => { - locals_builder.register_local(ident.node.clone()); - } - AssignTargetExpr::Subtargets(ref subtargets) => { - for s in subtargets { - AssignTargetExpr::collect_locals_from_assign_expr(s, locals_builder); - } - } - _ => {} - } - } -} - -impl AugmentedAssignTargetExpr { - // Performing this transformation in Rust code rather than in grammar - // to deal with ambiguous grammar. - pub(crate) fn from_expr( - expr: AstExpr, - ) -> Result< - AstAugmentedAssignTargetExpr, - lalrpop_util::ParseError, - > { - Ok(Spanned { - span: expr.span, - node: match expr.node { - Expr::Identifier(ident) => AugmentedAssignTargetExpr::Identifier(ident), - Expr::ArrayIndirection(array, index) => { - AugmentedAssignTargetExpr::ArrayIndirection(array, index) - } - Expr::Dot(object, field) => AugmentedAssignTargetExpr::Dot(object, field), - _ => { - return Err(lalrpop_util::ParseError::User { - error: lexer::LexerError::WrappedError { - span: expr.span, - code: INCORRECT_AUGMENTED_ASSIGNMENT_TARGET_ERROR_CODE, - label: "incorrect augmented assignment target", - }, - }) - } - }, - }) - } - - pub(crate) fn collect_locals_from_assign_expr( - expr: &AstAugmentedAssignTargetExpr, - locals_builder: &mut LocalsBuilder, - ) { - match expr.node { - AugmentedAssignTargetExpr::Identifier(ref ident) => { - locals_builder.register_local(ident.node.clone()); - } - _ => {} - } - } -} - -#[doc(hidden)] -#[derive(Debug, Clone)] -pub enum Clause { - For(AstAssignTargetExpr, AstExpr), - If(AstExpr), -} -to_ast_trait!(Clause, AstClause); - -#[doc(hidden)] -#[derive(Debug, Clone, Copy)] -pub enum BinOp { - EqualsTo, - Different, - LowerThan, - GreaterThan, - LowerOrEqual, - GreaterOrEqual, - In, - NotIn, - Substraction, - Addition, - Multiplication, - Percent, - Division, - FloorDivision, - Pipe, -} - -#[doc(hidden)] -#[derive(Debug, Clone, Copy)] -pub enum UnOp { - Plus, - Minus, -} - -impl fmt::Display for UnOp { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - UnOp::Plus => write!(f, "+"), - UnOp::Minus => write!(f, "-"), - } - } -} - -#[doc(hidden)] -#[derive(Debug, Clone, Copy)] -pub enum AugmentedAssignOp { - Increment, - Decrement, - Multiplier, - Divider, - FloorDivider, - Percent, -} - -#[doc(hidden)] -#[derive(Debug, Clone)] -pub enum Statement { - Break, - Continue, - Pass, - 
Return(Option), - Expression(AstExpr), - Assign(AstAssignTargetExpr, AstExpr), - AugmentedAssign(AstAugmentedAssignTargetExpr, AugmentedAssignOp, AstExpr), - Statements(Vec), - If(AstExpr, AstStatement), - IfElse(AstExpr, AstStatement, AstStatement), - For(AstAssignTargetExpr, AstExpr, AstStatement), - Def(AstString, Vec, AstStatement), - Load(AstString, Vec<(AstString, AstString)>), -} -to_ast_trait!(Statement, AstStatement, Box); - -macro_rules! test_param_name { - ($argset:ident, $n:ident, $arg:ident) => {{ - if $argset.contains(&$n.node) { - return Err(lalrpop_util::ParseError::User { - error: lexer::LexerError::WrappedError { - span: $arg.span, - code: DUPLICATED_PARAM_NAME_ERROR_CODE, - label: "duplicated parameter name", - }, - }); - } - $argset.insert($n.node.clone()); - }}; -} - -impl Statement { - pub fn check_def( - name: AstString, - parameters: Vec, - stmts: AstStatement, - ) -> Result> { - { - let mut stage = 0; - let mut argset = HashSet::new(); - for arg in parameters.iter() { - match arg.node { - Parameter::Normal(ref n) => { - if stage > 0 { - return Err(lalrpop_util::ParseError::User { - error: lexer::LexerError::WrappedError { - span: arg.span, - code: POSITIONAL_PARAMETER_AFTER_NON_POSITIONAL_ERROR_CODE, - label: "positional parameter after non positional", - }, - }); - } - test_param_name!(argset, n, arg); - } - Parameter::WithDefaultValue(ref n, ..) => { - if stage > 1 { - return Err(lalrpop_util::ParseError::User { - error: lexer::LexerError::WrappedError { - span: arg.span, - code: DEFAULT_PARAM_AFTER_ARGS_OR_KWARGS_ERROR_CODE, - label: - "Default parameter after args array or kwargs dictionary", - }, - }); - } else if stage == 0 { - stage = 1; - } - test_param_name!(argset, n, arg); - } - Parameter::Args(ref n) => { - if stage > 1 { - return Err(lalrpop_util::ParseError::User { - error: lexer::LexerError::WrappedError { - span: arg.span, - code: ARGS_AFTER_ARGS_OR_KWARGS_ERROR_CODE, - label: "Args parameter after another args or kwargs parameter", - }, - }); - } else { - stage = 2; - } - test_param_name!(argset, n, arg); - } - Parameter::KWArgs(ref n) => { - if stage == 3 { - return Err(lalrpop_util::ParseError::User { - error: lexer::LexerError::WrappedError { - span: arg.span, - code: MULTIPLE_KWARGS_DICTS_IN_PARAMS_ERROR_CODE, - label: "Multiple kwargs dictionary in parameters", - }, - }); - } else { - stage = 3; - } - test_param_name!(argset, n, arg); - } - } - } - } - Ok(Statement::Def(name, parameters, stmts)) - } - - /// Validate `break` and `continue` is only used inside loops - pub(crate) fn validate_break_continue(stmt: &AstStatement) -> Result<(), Diagnostic> { - match stmt.node { - Statement::Break | Statement::Continue => { - let kw = if let Statement::Break = stmt.node { - "break" - } else { - "continue" - }; - Err(Diagnostic { - level: Level::Error, - message: format!("{} cannot be used outside of loop", kw), - code: Some(BREAK_OR_CONTINUE_OUTSIDE_OF_LOOP_ERROR_CODE.to_owned()), - spans: vec![SpanLabel { - span: stmt.span, - label: None, - style: SpanStyle::Primary, - }], - }) - } - Statement::Def(.., ref stmt) => Statement::validate_break_continue(stmt), - Statement::If(.., ref then_block) => Statement::validate_break_continue(then_block), - Statement::IfElse(.., ref then_block, ref else_block) => { - Statement::validate_break_continue(then_block)?; - Statement::validate_break_continue(else_block)?; - Ok(()) - } - Statement::Statements(ref stmts) => { - for stmt in stmts { - Statement::validate_break_continue(stmt)?; - } - Ok(()) - } - 
Statement::For(..) => { - // No need to check loop body, because `break` and `continue` - // are valid anywhere in loop body. - Ok(()) - } - Statement::Return(..) - | Statement::Expression(..) - | Statement::Pass - | Statement::Assign(..) - | Statement::AugmentedAssign(..) - | Statement::Load(..) => { - // These statements do not contain nested statements - Ok(()) - } - } - } - - pub(crate) fn validate_augmented_assignment_in_module( - stmt: &AstStatement, - ) -> Result<(), Diagnostic> { - match &stmt.node { - Statement::Break - | Statement::Continue - | Statement::Pass - | Statement::Return(..) - | Statement::Expression(..) - | Statement::Assign(..) - | Statement::Def(..) - | Statement::Load(..) => Ok(()), - Statement::AugmentedAssign(target, _, _) => match &target.node { - AugmentedAssignTargetExpr::Identifier(ident) => { - return Err(Diagnostic { - level: Level::Error, - message: format!( - "Augmented assignment is a binding \ - and not allowed on a global variable" - ), - code: Some(AUGMENTED_ASSIGN_IN_MOD.to_owned()), - spans: vec![SpanLabel { - span: ident.span, - label: Some(format!("global variable")), - style: SpanStyle::Primary, - }], - }); - } - _ => Ok(()), - }, - Statement::Statements(stmts) => { - for stmt in stmts { - Self::validate_augmented_assignment_in_module(stmt)?; - } - Ok(()) - } - // Although top-level if and for are not allowed, - // it's better to safer against possible future extensions - Statement::If(_, then_block) => { - Self::validate_augmented_assignment_in_module(then_block)?; - Ok(()) - } - Statement::IfElse(_, then_block, else_block) => { - Self::validate_augmented_assignment_in_module(then_block)?; - Self::validate_augmented_assignment_in_module(else_block)?; - Ok(()) - } - Statement::For(_, _, body) => Self::validate_augmented_assignment_in_module(body), - } - } -} - -impl Display for BinOp { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - match *self { - BinOp::EqualsTo => f.write_str("=="), - BinOp::Different => f.write_str("!="), - BinOp::LowerThan => f.write_str("<"), - BinOp::GreaterThan => f.write_str(">"), - BinOp::LowerOrEqual => f.write_str("<="), - BinOp::GreaterOrEqual => f.write_str(">="), - BinOp::In => f.write_str("in"), - BinOp::NotIn => f.write_str("not in"), - BinOp::Substraction => f.write_str("-"), - BinOp::Addition => f.write_str("+"), - BinOp::Multiplication => f.write_str("*"), - BinOp::Percent => f.write_str("%"), - BinOp::Division => f.write_str("/"), - BinOp::FloorDivision => f.write_str("//"), - BinOp::Pipe => f.write_str("|"), - } - } -} - -impl Display for AugmentedAssignOp { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - match *self { - AugmentedAssignOp::Increment => f.write_str(" += "), - AugmentedAssignOp::Decrement => f.write_str(" += "), - AugmentedAssignOp::Multiplier => f.write_str(" *= "), - AugmentedAssignOp::Divider => f.write_str(" /= "), - AugmentedAssignOp::FloorDivider => f.write_str(" //= "), - AugmentedAssignOp::Percent => f.write_str(" %= "), - } - } -} - -impl Display for Expr { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - match *self { - Expr::Tuple(ref e) => { - f.write_str("(")?; - comma_separated_fmt(f, e, |x, f| write!(f, "{}", &x.node), true)?; - f.write_str(")") - } - Expr::Dot(ref e, ref s) => write!(f, "{}.{}", e.node, s.node), - Expr::Call(ref e, ref pos, ref named, ref args, ref kwargs) => { - write!(f, "{}(", e.node)?; - let mut first = true; - for a in pos { - if !first { - f.write_str(", ")?; - } - first = false; - a.node.fmt(f)?; - } - for &(ref k, ref v) in 
named { - if !first { - f.write_str(", ")?; - } - first = false; - write!(f, "{} = {}", k.node, v.node)?; - } - if let Some(ref x) = args { - if !first { - f.write_str(", ")?; - } - first = false; - write!(f, "*{}", x.node)?; - } - if let Some(ref x) = kwargs { - if !first { - f.write_str(", ")?; - } - write!(f, "**{}", x.node)?; - } - f.write_str(")") - } - Expr::ArrayIndirection(ref e, ref i) => write!(f, "{}[{}]", e.node, i.node), - Expr::Slice(ref e, ref i1, ref i2, ref i3) => { - write!(f, "{}[]", e.node)?; - if let Some(ref x) = i1 { - write!(f, "{}:", x.node)? - } else { - f.write_str(":")? - } - if let Some(ref x) = i2 { - x.node.fmt(f)? - } - if let Some(ref x) = i3 { - write!(f, ":{}", x.node)? - } - Ok(()) - } - Expr::Identifier(ref s) => s.node.fmt(f), - Expr::IntLiteral(ref i) => i.node.fmt(f), - Expr::Not(ref e) => write!(f, "(not {})", e.node), - Expr::UnOp(op, ref e) => write!(f, "{}{}", op, e.node), - Expr::And(ref l, ref r) => write!(f, "({} and {})", l.node, r.node), - Expr::Or(ref l, ref r) => write!(f, "({} or {})", l.node, r.node), - Expr::BinOp(ref op, ref l, ref r) => write!(f, "({} {} {})", l.node, op, r.node), - Expr::If(ref cond, ref v1, ref v2) => { - write!(f, "({} if {} else {})", v1.node, cond.node, v2.node) - } - Expr::List(ref v) => { - f.write_str("[")?; - comma_separated_fmt(f, v, |x, f| write!(f, "{}", &x.node), false)?; - f.write_str("]") - } - Expr::Set(ref v) => { - f.write_str("{")?; - comma_separated_fmt(f, v, |x, f| write!(f, "{}", &x.node), false)?; - f.write_str("}") - } - Expr::Dict(ref v) => { - f.write_str("{")?; - comma_separated_fmt(f, v, |x, f| write!(f, "{}: {}", x.0.node, x.1.node), false)?; - f.write_str("}") - } - Expr::ListComprehension(ref e, ref v) => { - write!(f, "[{}", e.node)?; - comma_separated_fmt(f, v, |x, f| write!(f, "{}", &x.node), false)?; - f.write_str("]") - } - Expr::SetComprehension(ref e, ref v) => { - write!(f, "{{{}", e.node)?; - comma_separated_fmt(f, v, |x, f| write!(f, "{}", &x.node), false)?; - f.write_str("}}") - } - Expr::DictComprehension((ref k, ref v), ref c) => { - write!(f, "{{{}: {}", k.node, v.node)?; - comma_separated_fmt(f, c, |x, f| write!(f, "{}", &x.node), false)?; - f.write_str("}}") - } - Expr::StringLiteral(ref s) => fmt_string_literal(f, s.node.as_str()), - } - } -} - -impl Display for AugmentedAssignTargetExpr { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - match self { - AugmentedAssignTargetExpr::Dot(object, field) => { - write!(f, "{}.{}", object.node, field.node) - } - AugmentedAssignTargetExpr::ArrayIndirection(array, index) => { - write!(f, "{}[{}]", array.node, index.node) - } - AugmentedAssignTargetExpr::Identifier(s) => s.node.fmt(f), - } - } -} - -impl Display for AssignTargetExpr { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - match self { - AssignTargetExpr::Dot(object, field) => write!(f, "{}.{}", object.node, field.node), - AssignTargetExpr::ArrayIndirection(array, index) => { - write!(f, "{}[{}]", array.node, index.node) - } - AssignTargetExpr::Identifier(s) => s.node.fmt(f), - AssignTargetExpr::Subtargets(subtargets) => { - write!(f, "[")?; - for (i, s) in subtargets.iter().enumerate() { - if i != 0 { - write!(f, ", ")?; - s.node.fmt(f)?; - } - } - write!(f, "]")?; - Ok(()) - } - } - } -} - -impl Display for Argument { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - match *self { - Argument::Positional(ref s) => s.node.fmt(f), - Argument::Named(ref s, ref e) => write!(f, "{} = {}", s.node, e.node), - Argument::ArgsArray(ref s) => write!(f, 
"*{}", s.node), - Argument::KWArgsDict(ref s) => write!(f, "**{}", s.node), - } - } -} - -impl Display for Parameter { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - match *self { - Parameter::Normal(ref s) => s.node.fmt(f), - Parameter::WithDefaultValue(ref s, ref e) => write!(f, "{} = {}", s.node, e.node), - Parameter::Args(ref s) => write!(f, "*{}", s.node), - Parameter::KWArgs(ref s) => write!(f, "**{}", s.node), - } - } -} - -impl Display for Clause { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - match *self { - Clause::For(ref t, ref e) => write!(f, " for {} in {}", t.node, e.node), - Clause::If(ref t) => write!(f, " if {}", t.node), - } - } -} - -impl Statement { - fn fmt_with_tab(&self, f: &mut dyn fmt::Write, tab: &str) -> fmt::Result { - match *self { - Statement::Break => writeln!(f, "{}break", tab), - Statement::Continue => writeln!(f, "{}continue", tab), - Statement::Pass => writeln!(f, "{}pass", tab), - Statement::Return(Some(ref e)) => writeln!(f, "{}return {}", tab, e.node), - Statement::Return(None) => writeln!(f, "{}return", tab), - Statement::Expression(ref e) => writeln!(f, "{}{}", tab, e.node), - Statement::Assign(ref l, ref r) => writeln!(f, "{}{} = {}", tab, l.node, r.node), - Statement::AugmentedAssign(ref l, ref op, ref r) => { - writeln!(f, "{}{}{}{}", tab, l.node, op, r.node) - } - Statement::Statements(ref v) => { - for s in v { - s.node.fmt_with_tab(f, tab.clone())?; - } - Ok(()) - } - Statement::If(ref cond, ref suite) => { - writeln!(f, "{}if {}:", tab, cond.node)?; - suite.node.fmt_with_tab(f, &indent(tab)) - } - Statement::IfElse(ref cond, ref suite1, ref suite2) => { - writeln!(f, "{}if {}:", tab, cond.node)?; - suite1.node.fmt_with_tab(f, &indent(tab))?; - writeln!(f, "{}else:", tab)?; - suite2.node.fmt_with_tab(f, &indent(tab)) - } - Statement::For(ref bind, ref coll, ref suite) => { - writeln!(f, "{}for {} in {}:", tab, bind.node, coll.node)?; - suite.node.fmt_with_tab(f, &indent(tab)) - } - Statement::Def(ref name, ref params, ref suite) => { - write!(f, "{}def {}(", tab, name.node)?; - comma_separated_fmt(f, params, |x, f| write!(f, "{}", &x.node), false)?; - f.write_str("):\n")?; - suite.node.fmt_with_tab(f, &indent(tab)) - } - Statement::Load(ref filename, ref v) => { - write!(f, "{}load(", tab)?; - fmt_string_literal(f, filename.node.as_str())?; - comma_separated_fmt( - f, - v, - |x, f| { - write!(f, "{} = ", x.0.node)?; - fmt_string_literal(f, x.1.node.as_str()) - }, - false, - )?; - f.write_str(")\n") - } - } - } -} - -impl Display for Statement { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - self.fmt_with_tab(f, "") - } -} diff --git a/starlark/src/syntax/dialect.rs b/starlark/src/syntax/dialect.rs deleted file mode 100644 index c0769ce6..00000000 --- a/starlark/src/syntax/dialect.rs +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/// Starlark language dialect. 
-#[derive(Copy, Debug, Clone)] -pub enum Dialect { - // Build file dialect which is used to interpret Bazel's BUILD files - Build, - // Full Starlark language that is available in Bazel's .bzl files - Bzl, -} diff --git a/starlark/src/syntax/errors.rs b/starlark/src/syntax/errors.rs deleted file mode 100644 index de9210b0..00000000 --- a/starlark/src/syntax/errors.rs +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Syntax error trait definition -//! -//! This module define the SyntaxError trait that all error that we pass around implements. It -//! identify all error by a unique code to identify easily an error even if changing the error -//! message. -//! -//! An error code is of the form AB00 where A is the error code level and B a module specific -//! prefix. 00 is the error number itself. -//! -//! # Error code levels: -//! -//! * __C__ -> _Critical / Fatal_: stopped the parsing / evaluation -//! * __E__ -> _Error_: was able to recover from the error but the state is incorrect. (e.g. a -//! token was ignored during parsing). Prevent next phase from being run. -//! * __W__ -> _Warning_: a warning for the user, this did not cause any error but are bad -//! patterns that can lead to dubious behaviors. -//! * __N__ -> _Notice_: notice of harmless improvement (e.g. dead code). -//! -//! # Modules prefix: -//! -//! * __L__ -> Lexer -//! * __P__ -> Parsing error -//! * __S__ -> Syntaxic error -//! * __E__ -> Evaluation - -use codemap::Span; -use codemap_diagnostic::Diagnostic; - -/// The trait that all syntax error / error linked to a location in the code must implement. -pub(crate) trait SyntaxError { - /// Convert the error to a codemap diagnostic. - /// - /// To build this diagnostic, the method needs the file span corresponding to the parsed - /// file. - fn to_diagnostic(self, file_span: Span) -> Diagnostic; -} diff --git a/starlark/src/syntax/fmt.rs b/starlark/src/syntax/fmt.rs deleted file mode 100644 index cf33bf30..00000000 --- a/starlark/src/syntax/fmt.rs +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! AST printing utilities. 
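The error-code convention documented in `errors.rs` above — "AB00", i.e. a level letter, a module-prefix letter, then a two-digit number — can be made concrete with a small standalone parser. This is a sketch only; `ErrorCode` and `parse_code` are invented names, not crate APIs:

```
// Sketch only: decompose an error code such as "CS08" (Critical, Syntax, 08).
#[derive(Debug, PartialEq)]
struct ErrorCode {
    level: char,  // C (critical), E (error), W (warning) or N (notice)
    module: char, // L (lexer), P (parser), S (syntax) or E (evaluation)
    number: u8,
}

fn parse_code(code: &str) -> Option<ErrorCode> {
    let mut chars = code.chars();
    let level = chars.next().filter(|c| "CEWN".contains(*c))?;
    let module = chars.next().filter(|c| "LPSE".contains(*c))?;
    let number: u8 = chars.as_str().parse().ok()?;
    Some(ErrorCode { level, module, number })
}

fn main() {
    // "CS08" is the duplicated-parameter-name code defined in ast.rs above.
    assert_eq!(
        parse_code("CS08"),
        Some(ErrorCode { level: 'C', module: 'S', number: 8 })
    );
    assert!(parse_code("XY99").is_none());
}
```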
- -use std::fmt; - -const TAB: &str = " "; - -/// Increment indent -pub(crate) fn indent(indent: &str) -> String { - format!("{}{}", TAB, indent) -} - -pub(crate) fn fmt_string_literal(f: &mut dyn fmt::Write, s: &str) -> fmt::Result { - f.write_str("\"")?; - for c in s.chars() { - match c { - '\n' => f.write_str("\\n")?, - '\t' => f.write_str("\\t")?, - '\r' => f.write_str("\\r")?, - '\0' => f.write_str("\\0")?, - '"' => f.write_str("\\\"")?, - '\\' => f.write_str("\\\\")?, - x => f.write_str(&x.to_string())?, - } - } - f.write_str("\"") -} - -pub(crate) fn comma_separated_fmt( - f: &mut dyn fmt::Write, - v: &[I], - converter: F, - for_tuple: bool, -) -> fmt::Result -where - F: Fn(&I, &mut dyn fmt::Write) -> fmt::Result, -{ - for (i, e) in v.iter().enumerate() { - f.write_str(if i == 0 { "" } else { ", " })?; - converter(e, f)?; - } - if v.len() == 1 && for_tuple { - f.write_str(",")?; - } - Ok(()) -} diff --git a/starlark/src/syntax/grammar.lalrpop b/starlark/src/syntax/grammar.lalrpop deleted file mode 100644 index 1baf0637..00000000 --- a/starlark/src/syntax/grammar.lalrpop +++ /dev/null @@ -1,416 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -use codemap::Span; -use super::lexer; -use super::ast::*; - -grammar<'input>(text: &'input str, file_span: Span); - -#[inline] -ASTS: AstStatement = - => e.to_ast(file_span.subspan(l, r)); - -#[inline] -ASTE: AstExpr = - => e.to_ast(file_span.subspan(l, r)); - -#[inline] -ASTP: AstParameter = - => e.to_ast(file_span.subspan(l, r)); - -#[inline] -ASTA: AstArgument = - => e.to_ast(file_span.subspan(l, r)); - -#[inline] -ASTC: AstClause = - => e.to_ast(file_span.subspan(l, r)); - -#[inline] -integer: AstInt = - => e.to_ast(file_span.subspan(l, r)); - -#[inline] -string: AstString = - => e.to_ast(file_span.subspan(l, r)); - -#[inline] -identifier: AstString = - => e.to_ast(file_span.subspan(l, r)); - -COMMA: Vec = - ",")*> - => v0.into_iter().chain(e1).collect(); - -pub Starlark: AstStatement = ASTS; -starlark_: Statement = "\n"* <( "\n"*)*> - => Statement::Statements(<>); - -pub BuildFile: AstStatement = ASTS; -build_file_: Statement = "\n"* <( "\n"+)*> - => Statement::Statements(<>); - -TopStmt: AstStatement = { DefStmt, SimpleStmt }; -BuildTopStmt: AstStatement = { AssignStmt, AugmentedAssignStmt, ExprStmt, LoadStmt }; - -DefStmt: AstStatement = ASTS; -DefStmt_: Statement = - "def" "(" > ")" ":" - =>? 
Statement::check_def(<>); - -Parameter: AstParameter = ASTP; -Parameter_: Parameter = { - "=" => Parameter::WithDefaultValue(<>), - => Parameter::Normal(<>), - "*" => Parameter::Args(<>), - "**" => Parameter::KWArgs(<>), -}; - -Suite: AstStatement = { - SimpleStmt, - "\n"+ "INDENT" "\n"* "\n"*)+> "DEDENT" - => Statement::Statements(v).to_ast(file_span.subspan(l, r)) -}; - -Stmt: AstStatement = { IfStmt, ForStmt, SimpleStmt }; - -IfBody: AstStatement = ASTS; -IfBody_: Statement = ":" => { - match el { - None => Statement::If(c, s), - Some(e) => Statement::IfElse(c, s, e) - } -}; - -IfStmt: AstStatement = "if" ; -ElseStmt: AstStatement = { - "elif" , - "else" ":" -}; - -ForStmt: AstStatement = ASTS; -ForStmt_: Statement = "for" "in" ":" - =>? Ok(Statement::For(AssignTargetExpr::from_expr(e)?, c, s)); - -SimpleStmt: AstStatement = - )*> ";"? "\n" => { - if v.is_empty() { - e - } else { - Statement::Statements( - vec![e].into_iter().chain(v.into_iter()).collect()) - .to_ast(file_span.subspan(l, r)) - } - }; - -SmallStmt: AstStatement = { - "return" - => Statement::Return(e).to_ast(file_span.subspan(l, r)), - <@L> "break" <@R> - => Statement::Break.to_ast(file_span.subspan(<>)), - <@L> "continue" <@R> - => Statement::Continue.to_ast(file_span.subspan(<>)), - <@L> "pass" <@R> - => Statement::Pass.to_ast(file_span.subspan(<>)), - AssignStmt, - AugmentedAssignStmt, - ExprStmt -}; - -AssignStmt: AstStatement = ASTS; -AssignStmt_: Statement = "=" - =>? Ok(Statement::Assign(AssignTargetExpr::from_expr(t)?, e)); - -AugmentedAssignOp: AugmentedAssignOp = { - "+=" => AugmentedAssignOp::Increment, - "-=" => AugmentedAssignOp::Decrement, - "*=" => AugmentedAssignOp::Multiplier, - "/=" => AugmentedAssignOp::Divider, - "//=" => AugmentedAssignOp::FloorDivider, - "%=" => AugmentedAssignOp::Percent, -}; - -AugmentedAssignStmt: AstStatement = ASTS; -AugmentedAssignStmt_: Statement = - =>? Ok(Statement::AugmentedAssign(AugmentedAssignTargetExpr::from_expr(lhs)?, op, rhs)); - -// In python ExprStmt is an AssignStmt ( -// https://docs.python.org/3/reference/grammar.html). This ExprStmt is -// according to the spec provided on https://github.com/google/skylark. It -// enable parsing docstring and method calls. -ExprStmt: AstStatement = ASTS; -ExprStmt_: Statement = => Statement::Expression(<>); - -LoadStmt: AstStatement = ASTS; -LoadStmt_: Statement = "load" "(" <("," )+> ","? ")" - => Statement::Load(<>); - -LoadStmtBindingName: AstString = "="; - -LoadStmtSyms: (AstString, AstString) = - => (id.unwrap_or(n.clone()), n); - -// Expression -L: AstExpr = ",")*> - => { - if f.is_some() || !v.is_empty() { - Expr::Tuple(v.into_iter().chain(vec![e].into_iter()).collect()) - .to_ast(file_span.subspan(l, r)) - } else { - e - } - }; - -ExprList: AstExpr = L; - -TestList: AstExpr = L; - -PipedExpr: AstExpr = { - "|" - => Expr::BinOp(BinOp::Pipe, e1, e2).to_ast(file_span.subspan(l, r)), - ArithExpr -}; - -PrimaryExpr: AstExpr = { - "." - => Expr::Dot(e, i).to_ast(file_span.subspan(l, r)), - "(" > ")" - =>? Ok(Expr::check_call(e, a)?.to_ast(file_span.subspan(l, r))), - "[" ":" )?> "]" - => { - Expr::Slice(e, i1, i2, i3.unwrap_or(None)) - .to_ast(file_span.subspan(l, r)) - }, - "[" "]" - => Expr::ArrayIndirection(e, i).to_ast(file_span.subspan(l, r)), - Operand -}; - -OptionalSlice: AstExpr = ":" ; - -// Note that the order of arguments (args, named, *args, **kwargs) is enforced -// at the syntax evaluation, not by the Grammar. 
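As the comment above notes, the positional / named / `*args` / `**kwargs` ordering is not encoded in the grammar but checked after parsing (see `Expr::check_call` earlier in `ast.rs`, which advances through numbered stages and reports a CS0x code on violation). A standalone sketch of that staged check — names are illustrative, and it is reduced to the ordering and duplicate-star rules:

```
// Sketch only: argument stages must be non-decreasing, and *args / **kwargs
// may each appear at most once.
#[derive(Clone, Copy, PartialEq, PartialOrd, Debug)]
enum Stage {
    Positional,
    Named,
    ArgsArray,
    KwargsDict,
}

fn check_order(args: &[Stage]) -> Result<(), String> {
    let mut current = Stage::Positional;
    for (i, &stage) in args.iter().enumerate() {
        let duplicate_star =
            stage == current && (stage == Stage::ArgsArray || stage == Stage::KwargsDict);
        if stage < current || duplicate_star {
            return Err(format!("argument {} is out of order or duplicated", i));
        }
        current = stage;
    }
    Ok(())
}

fn main() {
    use Stage::*;
    assert!(check_order(&[Positional, Positional, Named, ArgsArray, KwargsDict]).is_ok());
    assert!(check_order(&[Named, Positional]).is_err()); // positional after named
    assert!(check_order(&[KwargsDict, KwargsDict]).is_err()); // duplicate **kwargs
}
```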
-Argument: AstArgument = ASTA; -Argument_: Argument = { - => Argument::Positional(<>), - "=" => Argument::Named(<>), - "*" => Argument::ArgsArray(<>), - "**" => Argument::KWArgsDict(<>) -}; - -Operand: AstExpr = { - - => Expr::Identifier(i).to_ast(file_span.subspan(l, r)), - - => Expr::IntLiteral(i).to_ast(file_span.subspan(l, r)), - - => Expr::StringLiteral(s).to_ast(file_span.subspan(l, r)), - "[" > "]" - => Expr::List(e).to_ast(file_span.subspan(l, r)), - ListComp, - "{" > "}" - => Expr::Dict(e).to_ast(file_span.subspan(l, r)), - "{" "}" => Expr::Set(vec![e]).to_ast(file_span.subspan(l, r)), - SetComp, - // Must contain at least one element - {} is an empty dict not set. - "{" "," > "}" - => { - let mut es = es; - es.insert(0, e1); - Expr::Set(es).to_ast(file_span.subspan(l, r)) - }, - DictComp, - "(" ")" - => match e { - Some(t) => t, - None => Expr::Tuple(vec![]).to_ast(file_span.subspan(l, r)) - } -}; - -DictEntry: (AstExpr, AstExpr) = ":" => (<>); - -ListComp: AstExpr = ASTE; -ListComp_: Expr = "[" "]" - => Expr::ListComprehension(<>); - -SetComp: AstExpr = ASTE; -SetComp_: Expr = "{" "}" - => Expr::SetComprehension(<>); - -DictComp: AstExpr = ASTE; -DictComp_: Expr = "{" "}" - => Expr::DictComprehension(<>); - -CompClause: Vec = - => vec![e].into_iter().chain(v.into_iter()).collect(); - -CompIter = {ForInClause, IfClause}; - -ForInClause: AstClause = ASTC; -ForInClause_: Clause = "for" "in" - =>? Ok(Clause::For(AssignTargetExpr::from_expr(var)?, iter)); -IfClause: AstClause = ASTC; -IfClause_: Clause = "if" - => Clause::If(<>); - -// Base expression. Priorities are taken from Python 3 grammar. -Test = IfTest; - -IfTest: AstExpr = { - "if" "else" - => Expr::If(t, e1, e2).to_ast(file_span.subspan(l, r)), - OrTest -}; - -// Binary operators -OrTest: AstExpr = { - "or" - => Expr::Or(e1, e2).to_ast(file_span.subspan(l, r)), - AndTest, -}; - -AndTest: AstExpr = { - "and" - => Expr::And(e1, e2).to_ast(file_span.subspan(l, r)), - NotTest, -}; - -NotTest: AstExpr = { - "not" - => Expr::Not(e).to_ast(file_span.subspan(l, r)), - CompTest, -}; - -CompTest: AstExpr = { - "==" - => Expr::BinOp(BinOp::EqualsTo, e1, e2).to_ast(file_span.subspan(l, r)), - "!=" - => Expr::BinOp(BinOp::Different, e1, e2).to_ast(file_span.subspan(l, r)), - "<" - => Expr::BinOp(BinOp::LowerThan, e1, e2).to_ast(file_span.subspan(l, r)), - ">" - => Expr::BinOp(BinOp::GreaterThan, e1, e2).to_ast(file_span.subspan(l, r)), - "<=" - => Expr::BinOp(BinOp::LowerOrEqual, e1, e2).to_ast(file_span.subspan(l, r)), - ">=" - => Expr::BinOp(BinOp::GreaterOrEqual, e1, e2) - .to_ast(file_span.subspan(l, r)), - "in" - => Expr::BinOp(BinOp::In, e1, e2).to_ast(file_span.subspan(l, r)), - "not in" - => Expr::BinOp(BinOp::NotIn, e1, e2).to_ast(file_span.subspan(l, r)), - Expr -}; - -Expr: AstExpr = { - "|" - => Expr::BinOp(BinOp::Pipe, e1, e2).to_ast(file_span.subspan(l, r)), - ArithExpr, -}; - -ArithExpr: AstExpr = { - "+" - => Expr::BinOp(BinOp::Addition, e1, e2).to_ast(file_span.subspan(l, r)), - "-" - => Expr::BinOp(BinOp::Substraction, e1, e2).to_ast(file_span.subspan(l, r)), - ProductExpr, -}; - -ProductExpr: AstExpr = { - "*" - => Expr::BinOp(BinOp::Multiplication, e1, e2) - .to_ast(file_span.subspan(l, r)), - "%" - => Expr::BinOp(BinOp::Percent, e1, e2).to_ast(file_span.subspan(l, r)), - "/" - => Expr::BinOp(BinOp::Division, e1, e2).to_ast(file_span.subspan(l, r)), - "//" - => Expr::BinOp(BinOp::FloorDivision, e1, e2).to_ast(file_span.subspan(l, r)), - FactorExpr -}; - -FactorExpr: AstExpr = { - "+" - => Expr::UnOp(UnOp::Plus, 
e).to_ast(file_span.subspan(l, r)), - "-" - => Expr::UnOp(UnOp::Minus, e).to_ast(file_span.subspan(l, r)), - PrimaryExpr -}; - -extern { - type Location = u64; - type Error = lexer::LexerError; - - enum lexer::Token { - "INDENT" => lexer::Token::Indent, - "DEDENT" => lexer::Token::Dedent, - "\n" => lexer::Token::Newline, - // Keywords - "and" => lexer::Token::And, - "else" => lexer::Token::Else, - "load" => lexer::Token::Load, - "break" => lexer::Token::Break, - "for" => lexer::Token::For, - "not" => lexer::Token::Not, - "not in" => lexer::Token::NotIn, - "continue" => lexer::Token::Continue, - "if" => lexer::Token::If, - "or" => lexer::Token::Or, - "def" => lexer::Token::Def, - "in" => lexer::Token::In, - "pass" => lexer::Token::Pass, - "elif" => lexer::Token::Elif, - "return" => lexer::Token::Return, - // Symbols - "," => lexer::Token::Comma, - ";" => lexer::Token::Semicolon, - ":" => lexer::Token::Colon, - "+=" => lexer::Token::PlusEqual, - "-=" => lexer::Token::MinusEqual, - "*=" => lexer::Token::StarEqual, - "/=" => lexer::Token::SlashEqual, - "//=" => lexer::Token::DoubleSlashEqual, - "%=" => lexer::Token::PercentEqual, - "==" => lexer::Token::DoubleEqual, - "!=" => lexer::Token::BangEqual, - "<=" => lexer::Token::LowerEqual, - ">=" => lexer::Token::GreaterEqual, - "**" => lexer::Token::Doublestar, - "=" => lexer::Token::Equal, - "<" => lexer::Token::LowerThan, - ">" => lexer::Token::GreaterThan, - "-" => lexer::Token::Minus, - "+" => lexer::Token::Plus, - "*" => lexer::Token::Star, - "%" => lexer::Token::Percent, - "/" => lexer::Token::Slash, - "//" => lexer::Token::DoubleSlash, - "." => lexer::Token::Dot, - "|" => lexer::Token::Pipe, - // Brackets - "[" => lexer::Token::OpeningBracket, - "{" => lexer::Token::OpeningCurlyBracket, - "(" => lexer::Token::OpeningParenthesis, - "]" => lexer::Token::ClosingBracket, - "}" => lexer::Token::ClosingCurlyBracket, - ")" => lexer::Token::ClosingParenthesis, - - - "RESERVED" => lexer::Token::Reserved(), - "IDENTIFIER" => lexer::Token::Identifier(), - "INTEGER" => lexer::Token::IntegerLiteral(), - "STRING" => lexer::Token::StringLiteral() - } -} diff --git a/starlark/src/syntax/grammar_tests.rs b/starlark/src/syntax/grammar_tests.rs deleted file mode 100644 index 39573e03..00000000 --- a/starlark/src/syntax/grammar_tests.rs +++ /dev/null @@ -1,237 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#![cfg(test)] - -use crate::syntax::ast::Statement; -use crate::syntax::dialect::Dialect; -use crate::syntax::errors::SyntaxError; -use crate::syntax::grammar::StarlarkParser; -use crate::syntax::parser::parse_file; -use codemap; -use codemap_diagnostic; -use std::fs; -use std::path::PathBuf; -use std::sync::{Arc, Mutex}; - -macro_rules! 
unwrap_parse { - ($e: expr) => {{ - let lexer = super::lexer::Lexer::new($e); - let mut codemap = codemap::CodeMap::new(); - let filespan = codemap.add_file("".to_owned(), $e.to_string()).span; - match StarlarkParser::new().parse($e, filespan, lexer) { - Ok(x) => match x.node { - Statement::Statements(bv) => format!("{}", Statement::Statements(bv)), - y => panic!("Expected statements, got {:?}", y), - }, - Err(e) => { - let codemap = Arc::new(Mutex::new(codemap)); - let d = [e.to_diagnostic(filespan)]; - assert_diagnostics!(d, codemap); - panic!("Got errors!"); - } - } - }}; -} - -#[test] -fn test_empty() { - assert!(unwrap_parse!("\n").is_empty()); -} - -#[test] -fn test_top_level_comment() { - assert!(unwrap_parse!("# Test").is_empty()); -} - -#[test] -fn test_top_level_load() { - assert!(!unwrap_parse!("\nload(\"//top/level/load.bzl\", \"top-level\")\n").is_empty()); - assert!(!unwrap_parse!("\nload(\"//top/level/load.bzl\", \"top-level\")").is_empty()); - assert!( - !unwrap_parse!("\nload(\n \"//top/level/load.bzl\",\n \"top-level\",\n)\n").is_empty() - ); -} - -#[test] -fn test_top_level_assignation() { - assert!(!unwrap_parse!("\n_ASSIGNATION = 'top-level'\n").is_empty()); -} - -#[test] -fn test_top_level_docstring() { - assert!(!unwrap_parse!("\n\"\"\"Top-level docstring\"\"\"\n").is_empty()); -} - -#[test] -fn test_top_level_def() { - assert_eq!( - unwrap_parse!("def toto():\n pass\n"), - "def toto():\n pass\n" - ); - // no new line at end of file - assert_eq!( - unwrap_parse!("def toto():\n pass"), - "def toto():\n pass\n" - ); - assert_eq!( - unwrap_parse!("def toto():\n pass\ndef titi(): return 1"), - "def toto():\n pass\ndef titi():\n return 1\n" - ); - assert_eq!( - unwrap_parse!("def toto():\n pass\n\ndef titi(): return 1"), - "def toto():\n pass\ndef titi():\n return 1\n" - ); - assert_eq!(unwrap_parse!("def t():\n\n pass"), "def t():\n pass\n"); -} - -#[test] -fn test_top_level_def_with_docstring() { - assert_eq!( - unwrap_parse!( - "\"\"\"Top-level docstring\"\"\" - -def toto(): - pass -" - ), - "\"Top-level docstring\"\ndef toto():\n pass\n" - ); -} - -#[test] -fn test_ifelse() { - assert_eq!( - unwrap_parse!("def d():\n if True:\n a\n else:\n b"), - "def d():\n if True:\n a\n else:\n b\n" - ); -} - -#[test] -fn test_kwargs_passing() { - assert_eq!( - unwrap_parse!("f(x, *a, **b); f(x, *a, **{a:b}); f(x, *[a], **b)"), - "f(x, *a, **b)\nf(x, *a, **{a: b})\nf(x, *[a], **b)\n" - ); -} - -#[test] -fn test_unary_op() { - assert_eq!(unwrap_parse!("a = -1"), "a = -1\n"); - assert_eq!(unwrap_parse!("a = +1"), "a = +1\n"); - assert_eq!(unwrap_parse!("a = -a"), "a = -a\n"); - assert_eq!(unwrap_parse!("a = +a"), "a = +a\n"); -} - -#[test] -fn test_tuples() { - assert_eq!(unwrap_parse!("a = (-1)"), "a = -1\n"); // Not a tuple - assert_eq!(unwrap_parse!("a = (+1,)"), "a = (+1,)\n"); // But this is one - assert_eq!(unwrap_parse!("a = ()"), "a = ()\n"); -} - -#[test] -fn test_return() { - assert_eq!( - unwrap_parse!("def fn(): return 1"), - "def fn():\n return 1\n" - ); - assert_eq!( - unwrap_parse!("def fn(): return a()"), - "def fn():\n return a()\n" - ); - assert_eq!(unwrap_parse!("def fn(): return"), "def fn():\n return\n"); -} - -// Regression test for https://github.com/google/starlark-rust/issues/44. -#[test] -fn test_optional_whitespace() { - assert_eq!(unwrap_parse!("6 or()"), "(6 or ())\n"); - assert_eq!(unwrap_parse!("6or()"), "(6 or ())\n"); -} - -// Regression test for https://github.com/google/starlark-rust/issues/56. 
-#[test] -fn test_optional_whitespace_after_0() { - assert_eq!(unwrap_parse!("0in[1,2,3]"), "(0 in [1, 2, 3])\n"); -} - -#[test] -fn test_fncall_span() { - let content = r#"def fn(a): - fail(a) - -fn(1) - -fail(2) -"#; - let lexer = super::lexer::Lexer::new(content); - let mut codemap = codemap::CodeMap::new(); - let filespan = codemap - .add_file("".to_owned(), content.to_string()) - .span; - match StarlarkParser::new().parse(content, filespan, lexer) { - Ok(x) => match x.node { - Statement::Statements(bv) => { - let lines: Vec = bv - .iter() - .map(|x| codemap.look_up_pos(x.span.low()).position.line) - .collect(); - assert_eq!(lines, vec![0, 3, 5]) - } - y => panic!("Expected statements, got {:?}", y), - }, - Err(e) => { - let codemap = Arc::new(Mutex::new(codemap)); - let d = [e.to_diagnostic(filespan)]; - assert_diagnostics!(d, codemap); - panic!("Got errors!"); - } - } -} - -#[test] -fn augmented_assignment_incorrect_target() { - let program = "[] += 1"; - let lexer = super::lexer::Lexer::new(program); - let mut codemap = codemap::CodeMap::new(); - let filespan = codemap - .add_file("".to_owned(), program.to_owned()) - .span; - match StarlarkParser::new().parse(program, filespan, lexer) { - Ok(..) => panic!("expecting error"), - Err(e) => { - assert!(format!("{:?}", e).contains("incorrect augmented assignment target")); - } - }; -} - -#[test] -fn smoke_test() { - let map = Arc::new(Mutex::new(codemap::CodeMap::new())); - let mut diagnostics = Vec::new(); - let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR")); - d.push("src/syntax/testcases"); - let paths = fs::read_dir(d.as_path()).unwrap(); - for p in paths { - let path_entry = p.unwrap().path(); - let path = path_entry.to_str().unwrap(); - if path.ends_with(".bzl") { - if let Result::Err(err) = parse_file(&map, path, Dialect::Bzl) { - diagnostics.push(err); - } - } - } - assert_diagnostics!(diagnostics, map); -} diff --git a/starlark/src/syntax/lexer.rs b/starlark/src/syntax/lexer.rs deleted file mode 100644 index 1906fb8b..00000000 --- a/starlark/src/syntax/lexer.rs +++ /dev/null @@ -1,1532 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -use super::errors::SyntaxError; -use codemap::Span; -use codemap_diagnostic::{Diagnostic, Level, SpanLabel, SpanStyle}; -use std::char; -use std::collections::linked_list::IntoIter; -use std::collections::LinkedList; -use std::fmt; - -// TODO: move that code in some common error code list? 
-// CL prefix = Critical Lexing -const LEX_ERROR_CODE: &str = "CL00"; -const INDENT_ERROR_CODE: &str = "CL01"; -const UNFINISHED_STRING_LITERAL_CODE: &str = "CL02"; -const INVALID_ESCAPE_SEQUENCE_CODE: &str = "CL03"; - -/// Errors that can be generated during lexing -#[doc(hidden)] -#[derive(Debug, Clone, PartialEq, Copy)] -pub enum LexerError { - Indentation(u64, u64), - InvalidCharacter(u64), - UnfinishedStringLiteral(u64, u64), - InvalidEscapeSequence(u64, u64), - WrappedError { - span: Span, - code: &'static str, - label: &'static str, - }, -} - -impl SyntaxError for LexerError { - /// Convert the error to a codemap diagnostic. - /// - /// To build this diagnostic, the method needs the file span corresponding to the parsed - /// file. - fn to_diagnostic(self, file_span: Span) -> Diagnostic { - let sl = SpanLabel { - span: match self { - LexerError::Indentation(x, y) - | LexerError::UnfinishedStringLiteral(x, y) - | LexerError::InvalidEscapeSequence(x, y) => file_span.subspan(x, y), - LexerError::InvalidCharacter(x) => file_span.subspan(x, x), - LexerError::WrappedError { span, .. } => span, - }, - style: SpanStyle::Primary, - label: Some( - match self { - LexerError::Indentation(..) => "Incorrect indentation", - LexerError::InvalidCharacter(..) => "Character not valid at present location", - LexerError::UnfinishedStringLiteral(..) => "Unfinished string literal", - LexerError::InvalidEscapeSequence(..) => "Invalid string escape sequence", - LexerError::WrappedError { label, .. } => label, - } - .to_owned(), - ), - }; - Diagnostic { - level: Level::Error, - message: "Parse error".to_owned(), - code: Some( - match self { - LexerError::Indentation(..) => INDENT_ERROR_CODE, - LexerError::InvalidCharacter(..) => LEX_ERROR_CODE, - LexerError::UnfinishedStringLiteral(..) => UNFINISHED_STRING_LITERAL_CODE, - LexerError::InvalidEscapeSequence(..) => INVALID_ESCAPE_SEQUENCE_CODE, - LexerError::WrappedError { code, .. } => code, - } - .to_owned(), - ), - spans: vec![sl], - } - } -} - -/// All token that can be generated by the lexer -#[doc(hidden)] -#[derive(Debug, Clone, PartialEq)] -pub enum Token { - // Indentation block & meaningfull spaces - Indent, // New indentation block - Dedent, // Leaving an indentation block - Newline, // Newline outside a string - // Keywords - And, // "and" keyword - Else, // "else" keyword - Load, // "load" keyword - Break, // "break" keyword - For, // "for" keyword - Not, // "not" keyword - NotIn, // "not in" keyword (taken as keyword) - Continue, // "continue" keyword - If, // "if" keyword - Or, // "or" keyword - Def, // "def" keyword - In, // "in" keyword - Pass, // "pass" keyword - Elif, // "elif" keyword - Return, // "return" keyword - // Symbols - Comma, // ',' - Semicolon, // ';' - Colon, // ':' - PlusEqual, // '+=' - MinusEqual, // '-=' - StarEqual, // '*=' - SlashEqual, // '/=' - DoubleSlashEqual, // '//=' - PercentEqual, // '%=' - DoubleEqual, // '==' - BangEqual, // '!=' - LowerEqual, // '<=' - GreaterEqual, // '>=' - Doublestar, // '**' - Equal, // '=' - LowerThan, // '<' - GreaterThan, // '>' - Minus, // '-' - Plus, // '+' - Star, // '*' - Percent, // '%' - Slash, // '/' - DoubleSlash, // '//' - Dot, // '.' 
- Pipe, // '|' - // Brackets - OpeningBracket, // '[' - OpeningCurlyBracket, // '{' - OpeningParenthesis, // '(' - ClosingBracket, // ']' - ClosingCurlyBracket, // '}' - ClosingParenthesis, // ')' - - Reserved(String), // One of the reserved keywords - Identifier(String), // An identifier - IntegerLiteral(i64), // An integer literal (123, 0x1, 0b1011, 0755, ...) - StringLiteral(String), // A string literal -} - -impl fmt::Display for Token { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Token::Indent => write!(f, "new indentation block"), - Token::Dedent => write!(f, "end of indentation block"), - Token::Newline => write!(f, "new line"), - Token::And => write!(f, "keyword 'and'"), - Token::Else => write!(f, "keyword 'else'"), - Token::Load => write!(f, "keyword 'load'"), - Token::Break => write!(f, "keyword 'break'"), - Token::For => write!(f, "keyword 'for'"), - Token::Not => write!(f, "keyword 'not'"), - Token::NotIn => write!(f, "keyword 'not in'"), - Token::Continue => write!(f, "keyword 'continue'"), - Token::If => write!(f, "keyword 'if'"), - Token::Or => write!(f, "keyword 'or'"), - Token::Def => write!(f, "keyword 'def'"), - Token::In => write!(f, "keyword 'in'"), - Token::Pass => write!(f, "keyword 'pass'"), - Token::Elif => write!(f, "keyword 'elif'"), - Token::Return => write!(f, "keyword 'return'"), - Token::Comma => write!(f, "symbol ','"), - Token::Semicolon => write!(f, "symbol ';'"), - Token::Colon => write!(f, "symbol ':'"), - Token::PlusEqual => write!(f, "symbol '+='"), - Token::MinusEqual => write!(f, "symbol '-='"), - Token::StarEqual => write!(f, "symbol '*='"), - Token::SlashEqual => write!(f, "symbol '/='"), - Token::DoubleSlashEqual => write!(f, "symbol '//='"), - Token::PercentEqual => write!(f, "symbol '%='"), - Token::DoubleEqual => write!(f, "symbol '=='"), - Token::BangEqual => write!(f, "symbol '!='"), - Token::LowerEqual => write!(f, "symbol '<='"), - Token::GreaterEqual => write!(f, "symbol '>='"), - Token::Doublestar => write!(f, "symbol '**'"), - Token::Equal => write!(f, "symbol '='"), - Token::LowerThan => write!(f, "symbol '<'"), - Token::GreaterThan => write!(f, "symbol '>'"), - Token::Minus => write!(f, "symbol '-'"), - Token::Plus => write!(f, "symbol '+'"), - Token::Star => write!(f, "symbol '*'"), - Token::Percent => write!(f, "symbol '%'"), - Token::Slash => write!(f, "symbol '/'"), - Token::DoubleSlash => write!(f, "symbol '//'"), - Token::Dot => write!(f, "symbol '.'"), - Token::Pipe => write!(f, "symbol '|'"), - Token::OpeningBracket => write!(f, "symbol '['"), - Token::OpeningCurlyBracket => write!(f, "symbol '{{'"), - Token::OpeningParenthesis => write!(f, "symbol '('"), - Token::ClosingBracket => write!(f, "symbol ']'"), - Token::ClosingCurlyBracket => write!(f, "symbol '}}'"), - Token::ClosingParenthesis => write!(f, "symbol ')'"), - Token::Reserved(ref s) => write!(f, "reserved keyword '{}'", s), - Token::Identifier(ref s) => write!(f, "identifier '{}'", s), - Token::IntegerLiteral(ref i) => write!(f, "integer literal '{}'", i), - Token::StringLiteral(ref s) => write!(f, "string literal '{}'", s), - } - } -} - -#[doc(hidden)] -pub type LexerItem = Result<(u64, Token, u64), LexerError>; -#[doc(hidden)] -pub trait LexerIntoIter>: - IntoIterator -{ -} -impl, T2: IntoIterator> - LexerIntoIter for T2 -{ -} - -/// An iterator over a string slice that convert it to a list of token, i.e. the lexer. 
-#[derive(Debug)] -#[doc(hidden)] -pub struct Lexer { - input: String, - /// Byte offset of the next char in `input` - pos_bytes: usize, - offset: u64, - process_end_of_file: bool, - last_new_line: bool, - last_pos: u64, - last_next: Option<(u64, char)>, - indentation_stack: LinkedList, - parentheses: i32, - backlog: LinkedList, -} - -/// An iterator that buffer a Lexer in order to wait for end of block / parentheses. -/// Two consecutive new lines are considered also the end of input to buffer. -#[doc(hidden)] -pub struct BufferedLexer { - backlog: LinkedList, - lexer: Lexer, - last_colon: bool, -} - -impl BufferedLexer { - pub fn new(input: &str) -> Self { - let mut r = BufferedLexer { - backlog: LinkedList::new(), - lexer: Lexer::new(input), - last_colon: false, - }; - r.lexer.process_eof(false); - r.consume(); - r - } - - fn consume(&mut self) { - loop { - match self.lexer.next() { - Some(Ok((i, Token::Colon, j))) => { - self.last_colon = true; - self.backlog.push_back(Ok((i, Token::Colon, j))); - } - Some(Ok((i, Token::Newline, j))) => { - self.backlog.push_back(Ok((i, Token::Newline, j))); - } - Some(x) => { - self.last_colon = false; - self.backlog.push_back(x); - } - None => return, - } - } - } - - pub fn need_more(&self) -> bool { - self.last_colon || !self.lexer.indentation_stack.is_empty() || self.lexer.parentheses > 0 - } - - pub fn input(&mut self, input: &str) { - if input.is_empty() || (input.len() == 1 && Lexer::is_nl(input.chars().next().unwrap())) { - self.lexer.process_eof(true); - } - self.lexer.replace_input(input); - self.consume(); - } -} - -impl IntoIterator for BufferedLexer { - type Item = LexerItem; - type IntoIter = IntoIter; - - fn into_iter(self) -> Self::IntoIter { - self.backlog.into_iter() - } -} - -impl Lexer { - /// Create a new lexer from a string slice - pub fn new(input: &str) -> Self { - let input = input.to_owned(); - Lexer { - input, - pos_bytes: 0, - offset: 0, - process_end_of_file: true, - last_new_line: true, - last_pos: 0, - last_next: None, - indentation_stack: LinkedList::new(), - parentheses: 0, - backlog: LinkedList::new(), - } - } - - /// Mark this Lexer to process or not the end of iterator as end of file - fn process_eof(&mut self, process: bool) { - self.process_end_of_file = process - } - - /// Replace the input by a new one, useful in interactive mode. 
- fn replace_input(&mut self, input: &str) { - self.offset = if let Some((p, _)) = self.peek() { - p - } else if let Some((i, c)) = self.last_next { - i + (c.len_utf8() as u64) - } else { - self.last_pos - }; - assert!(self.offset >= self.last_pos); - self.input = input.to_owned(); - self.pos_bytes = 0; - } - - /// Enqueue a - fn is_nl(c: char) -> bool { - match c { - '\n' | '\r' | '\u{2028}' | '\u{2029}' => true, - _ => false, - } - } - - fn peek(&mut self) -> Option<(u64, char)> { - match self.input[self.pos_bytes..].chars().next() { - Some(c) => Some((self.pos_bytes as u64 + self.offset, c)), - None => None, - } - } - - fn pop(&mut self) -> Option<(u64, char)> { - let mut char_indices = self.input[self.pos_bytes..].char_indices(); - self.last_next = match char_indices.next() { - Some((_, c)) => { - let pos = self.pos_bytes; - self.pos_bytes = match char_indices.next() { - Some((len, _)) => self.pos_bytes + len, - None => self.input.len(), - }; - self.last_new_line = Lexer::is_nl(c); - Some((pos as u64 + self.offset, c)) - } - None => { - self.last_new_line = false; - None - } - }; - self.last_next - } - - fn terminate(&mut self) { - self.pos_bytes = self.input.len(); - self.indentation_stack.clear(); - self.parentheses = 0; - } - - fn next_char(&mut self) -> char { - self.pop().unwrap_or((0, '\0')).1 - } - - fn peek_char(&mut self) -> char { - self.peek().unwrap_or((0, '\0')).1 - } - - fn return_none(&mut self) -> Option<::Item> { - // Emit a newline and N DEDENT at EOF - let p = self.end_pos(); - if !self.last_new_line { - self.last_new_line = true; - Some(Ok((p.1, Token::Newline, p.1))) - } else if self.ihead() > 0 && self.process_end_of_file { - self.indentation_stack.pop_front(); - Some(Ok((p.1, Token::Dedent, p.1))) - } else { - None - } - } - - fn ihead(&self) -> u32 { - if self.indentation_stack.is_empty() { - 0 - } else { - *self.indentation_stack.front().unwrap() - } - } - - fn begin(&mut self) { - if let Some((i, ..)) = self.peek() { - self.last_pos = i; - } - } - - fn end_pos(&mut self) -> (u64, u64) { - if let Some((end, ..)) = self.peek() { - (self.last_pos, end) - } else if let Some((i, c)) = self.last_next { - (self.last_pos, i + (c.len_utf8() as u64)) - } else { - (self.last_pos, self.last_pos) - } - } - - fn end(&mut self, res: Token) -> Option<::Item> { - let p = self.end_pos(); - assert!(p.0 <= p.1, "{} > {}", p.0, p.1); - Some(Ok((p.0, res, p.1))) - } - - fn consume(&mut self, res: Token) -> Option<::Item> { - self.pop(); - self.end(res) - } - - fn invalid(&mut self) -> Option<::Item> { - let p = self.end_pos(); - Some(Err(LexerError::InvalidCharacter(p.1))) - } - - fn internal_next(&mut self) -> Option<::Item> { - if !self.backlog.is_empty() { - return self.backlog.pop_front(); - } - if self.peek().is_none() { - return self.return_none(); - } - let r = self.consume_token(); - if let Some(Err(_)) = r { - // In case of errors, consume the whole input so we stop on next call - self.terminate(); - } else if r.is_none() { - return self.return_none(); - } - r - } -} - -impl Iterator for Lexer { - type Item = LexerItem; - - #[cfg(feature = "trace")] - fn next(&mut self) -> Option { - let r = self.internal_next(); - println!("[TOKEN] {:?}", r); - r - } - - #[cfg(not(feature = "trace"))] - fn next(&mut self) -> Option { - self.internal_next() - } -} - -// Consumers to actually consume token -impl Lexer { - fn token_from_identifier(identifier: &str) -> Token { - match identifier { - "and" => Token::And, - "else" => Token::Else, - "load" => Token::Load, - "break" => 
Token::Break, - "for" => Token::For, - "not" => Token::Not, - "continue" => Token::Continue, - "if" => Token::If, - "or" => Token::Or, - "def" => Token::Def, - "in" => Token::In, - "pass" => Token::Pass, - "elif" => Token::Elif, - "return" => Token::Return, - "as" | "import" | "assert" | "is" | "class" | "nonlocal" | "del" | "raise" - | "except" | "try" | "finally" | "while" | "from" | "with" | "global" | "yield" => { - Token::Reserved(identifier.to_owned()) - } - _ => Token::Identifier(identifier.to_owned()), - } - } - - fn skip_comment(&mut self) { - assert_eq!(self.next_char(), '#'); - loop { - match self.peek_char() { - '\n' | '\r' | '\u{2028}' | '\u{2029}' | '\0' => return, - _ => { - self.pop(); - } - } - } - } - - fn skip_spaces(&mut self, newline: bool) -> Option<::Item> { - loop { - match self.peek_char() { - '\n' | '\r' | '\u{2028}' | '\u{2029}' => { - if newline { - self.pop(); - } else { - return None; - } - } - '\\' => { - self.pop(); - if self.peek_char() != '\n' { - return self.invalid(); - } else { - self.pop(); - } - } - '\t' | ' ' => { - self.pop(); - } - '#' => self.skip_comment(), - _ => return None, - }; - } - } - - fn consume_spaces(&mut self) -> u32 { - let mut result = 0; - loop { - match self.peek_char() { - '\t' => result += 8 - (result % 8), - ' ' => result += 1, - _ => return result, - }; - self.pop(); - } - } - - fn consume_indentation(&mut self) -> Option<::Item> { - loop { - self.begin(); - let spaces = self.consume_spaces(); - let p = self.peek_char(); - if Lexer::is_nl(p) { - // ignore because it is an empty line, but still return new line - return None; - } else if p == '#' { - // Ignore the comment and start again - self.skip_comment(); - self.consume_nl(); - continue; - } else if spaces > self.ihead() { - self.indentation_stack.push_front(spaces); - return self.end(Token::Indent); - } else if spaces == self.ihead() { - return None; - } else { - let mut step = 0; - while spaces < self.ihead() { - self.indentation_stack.pop_front(); - step += 1; - } - if spaces == self.ihead() { - let r = self.end(Token::Dedent); - while step > 1 { - self.backlog.push_front(r.clone().unwrap()); - step -= 1; - } - return r; - } else { - let p = self.end_pos(); - return Some(Err(LexerError::Indentation(p.0, p.1))); - } - } - } - } - - fn consume_nl(&mut self) -> Option<::Item> { - self.begin(); - match (self.next_char(), self.peek_char()) { - ('\n', '\r') | ('\r', '\n') => self.consume(Token::Newline), - _ => self.end(Token::Newline), - } - } - - fn consume_identifier_queue(&mut self, head: &str) -> Option<::Item> { - let mut result = head.to_owned(); - while self.peek_char().is_alphabetic() - || self.peek_char().is_digit(10) - || self.peek_char() == '_' - { - result.push(self.next_char()); - } - assert!(!result.is_empty()); - let r = self.end(Self::token_from_identifier(&result)); - match r { - Some(Ok((b, Token::Not, ..))) => { - // Special handling of "not in" - self.consume_spaces(); - if self.peek_char() == 'i' { - match self.consume_identifier() { - Some(Ok((.., Token::In, e))) => Some(Ok((b, Token::NotIn, e))), - Some(next_id) => { - self.backlog.push_front(next_id); - r - } - None => r, // This should never happen but it is safe to just return r. 
- } - } else { - r - } - } - _ => r, - } - } - - fn consume_identifier(&mut self) -> Option<::Item> { - self.begin(); - assert!(!self.peek_char().is_digit(10)); - self.consume_identifier_queue("") - } - - fn consume_int_r(&mut self, radix: u32) -> Result { - let mut number = String::new(); - while self.peek_char().is_digit(radix) { - number.push(self.next_char()); - } - let val = i64::from_str_radix(&number, radix); - if val.is_err() { - Err(()) - } else { - Ok(val.unwrap()) - } - } - - fn consume_int_radix(&mut self, radix: u32) -> Option<::Item> { - let val = self.consume_int_r(radix); - if val.is_err() { - self.invalid() - } else { - self.end(Token::IntegerLiteral(val.unwrap())) - } - } - - fn consume_int(&mut self) -> Option<::Item> { - self.begin(); - let cur = self.peek_char(); - if cur == '0' { - self.pop(); - let cur = self.peek_char(); - match cur { - 'o' | 'O' => { - self.pop(); - self.consume_int_radix(8) - } - '0'..='7' => self.consume_int_radix(8), - 'x' | 'X' => { - self.pop(); - self.consume_int_radix(16) - } - 'b' | 'B' => { - self.pop(); - self.consume_int_radix(2) - } - c if !c.is_numeric() => self.end(Token::IntegerLiteral(0)), - _ => self.invalid(), - } - } else { - self.consume_int_radix(10) - } - } - - fn consume_escape_sequence(&mut self, triple: bool) -> Result, LexerError> { - if let Some((pos, c)) = self.pop() { - assert_eq!(c, '\\'); - if let Some((pos2, c2)) = self.peek() { - match c2 { - 'n' => { - self.pop(); - Ok(Some('\n')) - } - 'r' => { - self.pop(); - Ok(Some('\r')) - } - 't' => { - self.pop(); - Ok(Some('\t')) - } - '0' => { - self.pop(); - if self.peek_char().is_digit(8) { - if let Ok(r) = self.consume_int_r(8) { - Ok(Some(char::from_u32(r as u32).unwrap())) - } else { - let p = self.end_pos(); - Err(LexerError::InvalidEscapeSequence(pos, p.1)) - } - } else { - Ok(Some('\0')) - } - } - 'x' => { - self.pop(); - if let Ok(r) = self.consume_int_r(16) { - Ok(Some(char::from_u32(r as u32).unwrap())) - } else { - let p = self.end_pos(); - Err(LexerError::InvalidEscapeSequence(pos, p.1)) - } - } - '1'..='9' => { - self.pop(); - Err(LexerError::InvalidEscapeSequence(pos, pos2 + 1)) - } - '\n' => { - self.pop(); - if triple { - Ok(None) - } else { - Err(LexerError::InvalidEscapeSequence(pos, pos2 + 1)) - } - } - 'u' => { - self.pop(); - let c = self.next_char(); - if c != '{' { - let p = self.end_pos(); - Err(LexerError::InvalidEscapeSequence(pos, p.1)) - } else if let Ok(r) = self.consume_int_r(16) { - let c = self.next_char(); - if c != '}' { - let p = self.end_pos(); - Err(LexerError::InvalidEscapeSequence(pos, p.1)) - } else { - Ok(Some(char::from_u32(r as u32).unwrap())) - } - } else { - let p = self.end_pos(); - Err(LexerError::InvalidEscapeSequence(pos, p.1)) - } - } - '"' | '\'' | '\\' => { - self.pop(); - Ok(Some(c2)) - } - _ => Ok(Some('\\')), - } - } else { - Err(LexerError::InvalidEscapeSequence(pos, pos + 1)) - } - } else { - panic!("This is a bug"); - } - } - - fn consume_string(&mut self, raw: bool) -> Option<::Item> { - self.begin(); - let mut res = String::new(); - let quote = self.next_char(); - let mut triple = false; - if self.peek_char() == quote { - self.next_char(); - if self.peek_char() == quote { - self.next_char(); - triple = true; - } else { - return self.end(Token::StringLiteral(res)); - } - } - loop { - match self.peek_char() { - '\\' => { - if raw { - self.pop(); - if self.peek_char() == quote { - self.pop(); - res.push(quote); - } else { - res.push('\\'); - } - } else { - match self.consume_escape_sequence(triple) { - Ok(Some(x)) 
=> res.push(x), - Ok(None) => {} - Err(c) => return Some(Result::Err(c)), - } - } - } - '\n' | '\r' | '\u{2028}' | '\u{2029}' => { - if triple { - res.push(self.next_char()); - } else { - let p = self.end_pos(); - return Some(Err(LexerError::UnfinishedStringLiteral(p.0, p.1))); - } - } - '\0' => { - let p = self.end_pos(); - return Some(Err(LexerError::UnfinishedStringLiteral(p.0, p.1))); - } - x if x == quote => { - self.pop(); - if triple { - let n = self.next_char(); - if n == quote { - if self.next_char() == quote { - break; - } else { - res.push(quote); - res.push(quote); - } - } else { - res.push(quote); - res.push(n); - } - } else { - break; - } - } - x => { - self.pop(); - res.push(x); - } - } - } - self.end(Token::StringLiteral(res)) - } - - fn consume_token(&mut self) -> Option<::Item> { - if self.last_new_line && self.parentheses == 0 { - if let Some(r) = self.consume_indentation() { - return Some(r); - } - } else { - let skip_newline = self.parentheses > 0; - if let Some(x) = self.skip_spaces(skip_newline) { - return Some(x); - } - } - self.begin(); - match self.peek_char() { - '\0' => None, - '\n' | '\r' | '\u{2028}' | '\u{2029}' => self.consume_nl(), - '\'' | '"' => self.consume_string(false), - 'r' => { - self.pop(); - let p = self.peek_char(); - if p == '\'' || p == '"' { - self.consume_string(true) - } else { - self.consume_identifier_queue("r") - } - } - '0'..='9' => self.consume_int(), - '_' => self.consume_identifier(), - c if c.is_alphabetic() => self.consume_identifier(), - ',' => self.consume(Token::Comma), - ';' => self.consume(Token::Semicolon), - ':' => self.consume(Token::Colon), - '+' => { - self.pop(); - if self.peek_char() == '=' { - self.consume(Token::PlusEqual) - } else { - self.end(Token::Plus) - } - } - '-' => { - self.pop(); - if self.peek_char() == '=' { - self.consume(Token::MinusEqual) - } else { - self.end(Token::Minus) - } - } - '*' => { - self.pop(); - match self.peek_char() { - '=' => self.consume(Token::StarEqual), - '*' => self.consume(Token::Doublestar), - _ => self.end(Token::Star), - } - } - '/' => { - self.pop(); - if self.peek_char() == '=' { - self.consume(Token::SlashEqual) - } else if self.peek_char() == '/' { - self.pop(); - if self.peek_char() == '=' { - self.consume(Token::DoubleSlashEqual) - } else { - self.end(Token::DoubleSlash) - } - } else { - self.end(Token::Slash) - } - } - '%' => { - self.pop(); - if self.peek_char() == '=' { - self.consume(Token::PercentEqual) - } else { - self.end(Token::Percent) - } - } - '=' => { - self.pop(); - if self.peek_char() == '=' { - self.consume(Token::DoubleEqual) - } else { - self.end(Token::Equal) - } - } - '!' => { - self.pop(); - if self.peek_char() == '=' { - self.consume(Token::BangEqual) - } else { - self.invalid() - } - } - '<' => { - self.pop(); - if self.peek_char() == '=' { - self.consume(Token::LowerEqual) - } else { - self.end(Token::LowerThan) - } - } - '>' => { - self.pop(); - if self.peek_char() == '=' { - self.consume(Token::GreaterEqual) - } else { - self.end(Token::GreaterThan) - } - } - '|' => self.consume(Token::Pipe), - '.' 
=> self.consume(Token::Dot), - '[' => { - self.parentheses += 1; - self.consume(Token::OpeningBracket) - } - ']' => { - self.parentheses -= 1; - self.consume(Token::ClosingBracket) - } - '(' => { - self.parentheses += 1; - self.consume(Token::OpeningParenthesis) - } - ')' => { - self.parentheses -= 1; - self.consume(Token::ClosingParenthesis) - } - '{' => { - self.parentheses += 1; - self.consume(Token::OpeningCurlyBracket) - } - '}' => { - self.parentheses -= 1; - self.consume(Token::ClosingCurlyBracket) - } - _ => self.invalid(), - } - } -} - -#[cfg(test)] -mod tests { - use super::Token; - use crate::syntax::errors::SyntaxError; - use codemap; - use codemap_diagnostic; - use std::fs; - use std::fs::File; - use std::io::Read; - use std::path::PathBuf; - use std::sync::{Arc, Mutex}; - - fn collect_result_buffered(s: Vec<&'static str>) -> Vec { - let codemap = Arc::new(Mutex::new(codemap::CodeMap::new())); - let mut diagnostics = Vec::new(); - let mut result = Vec::new(); - let content = s.iter().fold("".to_string(), |a, it| a + it); - let file_span = { - codemap - .lock() - .unwrap() - .add_file("".to_owned(), content) - .span - }; - let mut lexer = super::BufferedLexer::new(s[0]); - for v in s.iter().skip(1) { - assert!(lexer.need_more(), "Should need more before '{}'", v,); - - lexer.input(&v) - } - assert!(!lexer.need_more()); - let mut pos = 0; - for x in lexer.into_iter() { - if x.is_err() { - diagnostics.push(x.err().unwrap().to_diagnostic(file_span)); - } else { - let (i, t, j) = x.unwrap(); - let span_incorrect = format!("Span of {:?} incorrect", t); - assert!(pos <= i, "{}: {} > {}", span_incorrect, pos, i); - result.push(t); - assert!(i <= j, "{}: {} > {}", span_incorrect, i, j); - pos = j; - } - } - assert_diagnostics!(diagnostics, codemap); - result - } - - fn collect_result(s: &'static str) -> Vec { - let codemap = Arc::new(Mutex::new(codemap::CodeMap::new())); - let mut diagnostics = Vec::new(); - let mut result = Vec::new(); - let file_span = { - codemap - .lock() - .unwrap() - .add_file("".to_owned(), s.to_owned()) - .span - }; - let mut pos = 0; - super::Lexer::new(s).for_each(|x| { - if x.is_err() { - diagnostics.push(x.err().unwrap().to_diagnostic(file_span)); - } else { - let (i, t, j) = x.unwrap(); - let span_incorrect = format!("Span of {:?} incorrect", t); - assert!(pos <= i, "{}: {} > {}", span_incorrect, pos, i); - result.push(t); - assert!(i <= j, "{}: {} > {}", span_incorrect, i, j); - pos = j; - } - }); - assert_diagnostics!(diagnostics, codemap); - result - } - - #[test] - fn test_int_lit() { - let get_result = |s: &'static str| -> Vec { - collect_result(s) - .iter() - .filter_map(|v| match v { - Token::IntegerLiteral(r) => Some(*r), - Token::Newline => None, - _ => panic!("{:?} is not a integer literal", v), - }) - .collect() - }; - assert_eq!(vec![0, 123], get_result("0 123")); - assert_eq!(vec![0x7f, 0x7f], get_result("0x7F 0x7f")); - assert_eq!(vec![0b1011, 0b1011], get_result("0B1011 0b1011")); - assert_eq!(vec![0o755, 0o755, 0o755], get_result("0o755 0O755 0755")); - } - - #[test] - fn test_indentation() { - let r = collect_result( - " -+ - - - / - * - = - % - . 
-+= -", - ); - assert_eq!( - &[ - Token::Newline, - Token::Plus, - Token::Newline, - Token::Indent, - Token::Minus, - Token::Newline, - Token::Indent, - Token::Slash, - Token::Newline, - Token::Star, - Token::Newline, - Token::Dedent, - Token::Equal, - Token::Newline, - Token::Indent, - Token::Percent, - Token::Newline, - Token::Indent, - Token::Dot, - Token::Newline, - Token::Dedent, - Token::Dedent, - Token::Dedent, - Token::PlusEqual, - Token::Newline, - ], - &r[..] - ); - } - - #[test] - fn test_symbols() { - let r = collect_result( - ", ; : += -= *= /= //= %= == != <= >= ** = < > - + * % / // . { } [ ] ( ) |", - ); - assert_eq!( - &[ - Token::Comma, - Token::Semicolon, - Token::Colon, - Token::PlusEqual, - Token::MinusEqual, - Token::StarEqual, - Token::SlashEqual, - Token::DoubleSlashEqual, - Token::PercentEqual, - Token::DoubleEqual, - Token::BangEqual, - Token::LowerEqual, - Token::GreaterEqual, - Token::Doublestar, - Token::Equal, - Token::LowerThan, - Token::GreaterThan, - Token::Minus, - Token::Plus, - Token::Star, - Token::Percent, - Token::Slash, - Token::DoubleSlash, - Token::Dot, - Token::OpeningCurlyBracket, - Token::ClosingCurlyBracket, - Token::OpeningBracket, - Token::ClosingBracket, - Token::OpeningParenthesis, - Token::ClosingParenthesis, - Token::Pipe, - Token::Newline, - ], - &r[..] - ); - } - - #[test] - fn test_keywords() { - let r = collect_result( - "and else load break for not not in continue if or def in pass elif return", - ); - assert_eq!( - &[ - Token::And, - Token::Else, - Token::Load, - Token::Break, - Token::For, - Token::Not, - Token::NotIn, - Token::Continue, - Token::If, - Token::Or, - Token::Def, - Token::In, - Token::Pass, - Token::Elif, - Token::Return, - Token::Newline, - ], - &r[..] - ); - } - - // Regression test for https://github.com/google/starlark-rust/issues/44. - #[test] - fn test_number_collated_with_keywords_or_identifier() { - let r = collect_result( - "0in 1and 2else 3load 4break 5for 6not 7not in 8continue 10identifier11", - ); - assert_eq!( - &[ - Token::IntegerLiteral(0), - Token::In, - Token::IntegerLiteral(1), - Token::And, - Token::IntegerLiteral(2), - Token::Else, - Token::IntegerLiteral(3), - Token::Load, - Token::IntegerLiteral(4), - Token::Break, - Token::IntegerLiteral(5), - Token::For, - Token::IntegerLiteral(6), - Token::Not, - Token::IntegerLiteral(7), - Token::NotIn, - Token::IntegerLiteral(8), - Token::Continue, - Token::IntegerLiteral(10), - Token::Identifier("identifier11".to_owned()), - Token::Newline, - ], - &r[..] - ); - } - - #[test] - fn test_reserved() { - let r = collect_result( - "as import assert is class nonlocal del raise except try finally \ - while from with global yield", - ); - assert_eq!( - &[ - Token::Reserved("as".to_owned()), - Token::Reserved("import".to_owned()), - Token::Reserved("assert".to_owned()), - Token::Reserved("is".to_owned()), - Token::Reserved("class".to_owned()), - Token::Reserved("nonlocal".to_owned()), - Token::Reserved("del".to_owned()), - Token::Reserved("raise".to_owned()), - Token::Reserved("except".to_owned()), - Token::Reserved("try".to_owned()), - Token::Reserved("finally".to_owned()), - Token::Reserved("while".to_owned()), - Token::Reserved("from".to_owned()), - Token::Reserved("with".to_owned()), - Token::Reserved("global".to_owned()), - Token::Reserved("yield".to_owned()), - Token::Newline, - ], - &r[..] 
- ); - } - - #[test] - fn test_comment() { - // Comment should be ignored - assert!(collect_result("# a comment\n").is_empty()); - assert!(collect_result(" # a comment\n").is_empty()); - let r = collect_result("a # a comment\n"); - assert_eq!(&[Token::Identifier("a".to_owned()), Token::Newline], &r[..]); - // But it should not eat everything - let r = collect_result("[\n# a comment\n]"); - assert_eq!( - &[Token::OpeningBracket, Token::ClosingBracket, Token::Newline], - &r[..] - ); - } - - #[test] - fn test_identifier() { - let r = collect_result("a identifier CAPS _CAPS _0123"); - assert_eq!( - &[ - Token::Identifier("a".to_owned()), - Token::Identifier("identifier".to_owned()), - Token::Identifier("CAPS".to_owned()), - Token::Identifier("_CAPS".to_owned()), - Token::Identifier("_0123".to_owned()), - Token::Newline, - ], - &r[..] - ); - } - - #[test] - fn test_string_lit() { - let r = collect_result("'123' \"123\" '' \"\" '\\'' \"\\\"\" '\"' \"'\" '\\n' '\\w'"); - assert_eq!( - &[ - Token::StringLiteral("123".to_owned()), - Token::StringLiteral("123".to_owned()), - Token::StringLiteral("".to_owned()), - Token::StringLiteral("".to_owned()), - Token::StringLiteral("'".to_owned()), - Token::StringLiteral("\"".to_owned()), - Token::StringLiteral("\"".to_owned()), - Token::StringLiteral("'".to_owned()), - Token::StringLiteral("\n".to_owned()), - Token::StringLiteral("\\w".to_owned()), - Token::Newline, - ], - &r[..] - ); - - // unfinished string literal - assert_eq!( - super::Lexer::new("'\n'").next().unwrap(), - Err(super::LexerError::UnfinishedStringLiteral(0, 1)) - ); - assert_eq!( - super::Lexer::new("\"\n\"").next().unwrap(), - Err(super::LexerError::UnfinishedStringLiteral(0, 1)) - ); - // Multiline string - let r = - collect_result("'''''' '''\\n''' '''\n''' \"\"\"\"\"\" \"\"\"\\n\"\"\" \"\"\"\n\"\"\""); - assert_eq!( - &[ - Token::StringLiteral("".to_owned()), - Token::StringLiteral("\n".to_owned()), - Token::StringLiteral("\n".to_owned()), - Token::StringLiteral("".to_owned()), - Token::StringLiteral("\n".to_owned()), - Token::StringLiteral("\n".to_owned()), - Token::Newline, - ], - &r[..] - ); - // Raw string - let r = collect_result("r'' r\"\" r'\\'' r\"\\\"\" r'\"' r\"'\" r'\\n'"); - assert_eq!( - &[ - Token::StringLiteral("".to_owned()), - Token::StringLiteral("".to_owned()), - Token::StringLiteral("'".to_owned()), - Token::StringLiteral("\"".to_owned()), - Token::StringLiteral("\"".to_owned()), - Token::StringLiteral("'".to_owned()), - Token::StringLiteral("\\n".to_owned()), - Token::Newline, - ], - &r[..] - ); - - let r = collect_result(r#""""foo"bar""""#); - assert_eq!( - &[Token::StringLiteral("foo\"bar".to_owned()), Token::Newline], - &r[..] - ); - let r = collect_result(r#""""foo'bar""""#); - assert_eq!( - &[Token::StringLiteral("foo\'bar".to_owned()), Token::Newline], - &r[..] - ); - let r = collect_result(r#"'''foo'bar'''"#); - assert_eq!( - &[Token::StringLiteral("foo\'bar".to_owned()), Token::Newline], - &r[..] - ); - let r = collect_result(r#"'''foo\"bar'''"#); - assert_eq!( - &[Token::StringLiteral("foo\"bar".to_owned()), Token::Newline], - &r[..] - ); - } - - #[test] - fn test_simple_example() { - let r = collect_result( - "\"\"\"A docstring.\"\"\" - -def _impl(ctx): - # Print Hello, World! 
- print('Hello, World!') -", - ); - assert_eq!( - &[ - Token::StringLiteral("A docstring.".to_owned()), - Token::Newline, - Token::Newline, - Token::Def, - Token::Identifier("_impl".to_owned()), - Token::OpeningParenthesis, - Token::Identifier("ctx".to_owned()), - Token::ClosingParenthesis, - Token::Colon, - Token::Newline, - Token::Indent, - Token::Identifier("print".to_owned()), - Token::OpeningParenthesis, - Token::StringLiteral("Hello, World!".to_owned()), - Token::ClosingParenthesis, - Token::Newline, - Token::Dedent, - ], - &r[..] - ); - } - - #[test] - fn test_escape_newline() { - let r = collect_result("a \\\nb"); - assert_eq!( - &[ - Token::Identifier("a".to_owned()), - Token::Identifier("b".to_owned()), - Token::Newline, - ], - &r[..] - ); - } - - #[test] - fn test_span() { - let expected = vec![ - (0, Token::Newline, 1), - (1, Token::Def, 4), - (5, Token::Identifier("test".to_owned()), 9), - (9, Token::OpeningParenthesis, 10), - (10, Token::Identifier("a".to_owned()), 11), - (11, Token::ClosingParenthesis, 12), - (12, Token::Colon, 13), - (13, Token::Newline, 14), - (14, Token::Indent, 16), - (16, Token::Identifier("fail".to_owned()), 20), - (20, Token::OpeningParenthesis, 21), - (21, Token::Identifier("a".to_owned()), 22), - (22, Token::ClosingParenthesis, 23), - (23, Token::Newline, 24), - (24, Token::Newline, 25), - (25, Token::Dedent, 25), - (25, Token::Identifier("test".to_owned()), 29), - (29, Token::OpeningParenthesis, 30), - (30, Token::StringLiteral("abc".to_owned()), 35), - (35, Token::ClosingParenthesis, 36), - (36, Token::Newline, 37), - ]; - let actual: Vec<(u64, Token, u64)> = super::Lexer::new( - r#" -def test(a): - fail(a) - -test("abc") -"#, - ) - .map(Result::unwrap) - .collect(); - assert_eq!(expected, actual); - } - - #[test] - fn test_buffered() { - let r = collect_result_buffered(vec!["\"\"\"A docstring.\"\"\"\n"]); - assert_eq!( - &[ - Token::StringLiteral("A docstring.".to_owned()), - Token::Newline, - ], - &r[..] - ); - let r = collect_result_buffered(vec!["\n"]); - assert_eq!(&[Token::Newline], &r[..]); - let r = collect_result_buffered(vec![ - "def _impl(ctx):\n", - " # Print Hello, World!\n", - " print('Hello, World!')\n", - "\n", - ]); - assert_eq!( - &[ - Token::Def, - Token::Identifier("_impl".to_owned()), - Token::OpeningParenthesis, - Token::Identifier("ctx".to_owned()), - Token::ClosingParenthesis, - Token::Colon, - Token::Newline, - Token::Indent, - Token::Identifier("print".to_owned()), - Token::OpeningParenthesis, - Token::StringLiteral("Hello, World!".to_owned()), - Token::ClosingParenthesis, - Token::Newline, - Token::Newline, - Token::Dedent, - ], - &r[..] 
- ); - } - - #[test] - fn smoke_test() { - let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR")); - let codemap = Arc::new(Mutex::new(codemap::CodeMap::new())); - let mut diagnostics = Vec::new(); - - d.push("src/syntax/testcases"); - let paths = fs::read_dir(d.as_path()).unwrap(); - for p in paths { - let entry = p.unwrap(); - let filename = entry.file_name().into_string().unwrap(); - if filename.ends_with(".bzl") { - let mut content = String::new(); - let mut file = File::open(entry.path()).unwrap(); - file.read_to_string(&mut content).unwrap(); - let file_span = { - codemap - .lock() - .unwrap() - .add_file(filename, content.clone()) - .span - }; - super::Lexer::new(&content).for_each(|x| { - if x.is_err() { - diagnostics.push(x.err().unwrap().to_diagnostic(file_span)); - } - }); - } - } - assert_diagnostics!(diagnostics, codemap); - } -} diff --git a/starlark/src/syntax/mod.rs b/starlark/src/syntax/mod.rs deleted file mode 100644 index 377efa46..00000000 --- a/starlark/src/syntax/mod.rs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! The syntax module that handle lexing and parsing - -#[doc(hidden)] -pub mod errors; - -#[cfg(test)] -#[macro_use] -mod testutil; - -#[doc(hidden)] -pub mod ast; -pub mod dialect; -#[doc(hidden)] -pub mod lexer; - -#[allow(unused_parens)] // lalrpop generated code includes unused parens -mod grammar { - include!(concat!(env!("OUT_DIR"), "/syntax/grammar.rs")); -} - -mod grammar_tests; - -#[doc(hidden)] -pub mod parser; - -pub(crate) mod fmt; diff --git a/starlark/src/syntax/parser.rs b/starlark/src/syntax/parser.rs deleted file mode 100644 index 860278d0..00000000 --- a/starlark/src/syntax/parser.rs +++ /dev/null @@ -1,232 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -use super::dialect::Dialect; -use super::errors::SyntaxError; -use super::grammar::{BuildFileParser, StarlarkParser}; -use super::lexer::{Lexer, LexerError, LexerIntoIter, LexerItem, Token}; -use codemap::{CodeMap, Span}; -use codemap_diagnostic::{Diagnostic, Level, SpanLabel, SpanStyle}; -use std::fs::File; -use std::io::prelude::*; -use std::sync::{Arc, Mutex}; - -use crate::eval::module::Module; -use lalrpop_util as lu; - -// TODO: move that code in some common error code list? 
-// CP Prefix = Critical Parsing -const INVALID_TOKEN_ERROR_CODE: &str = "CP00"; -const UNEXPECTED_TOKEN_ERROR_CODE: &str = "CP01"; -const EXTRA_TOKEN_ERROR_CODE: &str = "CP02"; -const RESERVED_KEYWORD_ERROR_CODE: &str = "CP03"; -const IO_ERROR_CODE: &str = "CP04"; - -fn one_of(expected: &[String]) -> String { - let mut result = String::new(); - for (i, e) in expected.iter().enumerate() { - let sep = match i { - 0 => "one of", - _ if i < expected.len() - 1 => ",", - // Last expected message to be written - _ => " or", - }; - result.push_str(&format!("{} {}", sep, e)); - } - result -} - -impl SyntaxError for lu::ParseError { - /// Convert the error to a codemap diagnostic. - /// - /// To build this diagnostic, the method needs the file span corresponding to the parsed - /// file. - fn to_diagnostic(self, file_span: Span) -> Diagnostic { - let (label, message) = match self { - lu::ParseError::InvalidToken { .. } => ( - Some("Invalid token".to_owned()), - "Parse error: invalid token".to_owned(), - ), - lu::ParseError::UnrecognizedToken { - token: (_x, Token::Reserved(ref s), _y), - expected: ref _unused, - } => ( - Some("Reserved keyword".to_owned()), - format!("Parse error: cannot use reserved keyword {}", s), - ), - lu::ParseError::ExtraToken { - token: (_x, Token::Reserved(ref s), _y), - } => ( - Some("Reserved keyword".to_owned()), - format!("Parse error: cannot use reserved keyword {}", s), - ), - lu::ParseError::UnrecognizedToken { - token: (_x, ref t, ..), - ref expected, - } => ( - Some(format!("Expected {}", one_of(expected))), - format!( - "Parse error: unexpected {} here, expected {}", - t, - one_of(expected) - ), - ), - lu::ParseError::ExtraToken { - token: (_x, ref t, ..), - } => ( - Some(format!("Extraneous {}", t)), - format!("Parse error: extraneous token {}", t), - ), - lu::ParseError::UnrecognizedEOF { .. } => { - (None, "Parse error: unexpected end of file".to_owned()) - } - lu::ParseError::User { ref error } => return error.to_diagnostic(file_span), - }; - let sl = SpanLabel { - span: match self { - lu::ParseError::InvalidToken { ref location } => { - file_span.subspan(*location, *location) - } - lu::ParseError::UnrecognizedToken { - token: (x, .., y), .. - } => file_span.subspan(x, y), - lu::ParseError::UnrecognizedEOF { .. } => { - let x = file_span.high() - file_span.low(); - file_span.subspan(x, x) - } - lu::ParseError::ExtraToken { token: (x, .., y) } => file_span.subspan(x, y), - lu::ParseError::User { .. } => unreachable!(), - }, - style: SpanStyle::Primary, - label, - }; - - Diagnostic { - level: Level::Error, - message, - code: Some( - match self { - lu::ParseError::InvalidToken { .. } => INVALID_TOKEN_ERROR_CODE, - lu::ParseError::UnrecognizedToken { - token: (_x, Token::Reserved(..), ..), - .. - } - | lu::ParseError::ExtraToken { - token: (_x, Token::Reserved(..), ..), - } => RESERVED_KEYWORD_ERROR_CODE, - lu::ParseError::UnrecognizedToken { .. } - | lu::ParseError::UnrecognizedEOF { .. } => UNEXPECTED_TOKEN_ERROR_CODE, - lu::ParseError::ExtraToken { .. } => EXTRA_TOKEN_ERROR_CODE, - lu::ParseError::User { .. } => unreachable!(), - } - .to_owned(), - ), - spans: vec![sl], - } - } -} - -macro_rules! iotry { - ($e:expr) => { - match $e { - Ok(val) => val, - Err(err) => { - return Err(Diagnostic { - level: Level::Error, - message: format!("IOError: {}", err), - code: Some(IO_ERROR_CODE.to_owned()), - spans: vec![], - }); - } - } - }; -} - -/// Parse a build file (if build is true) or a starlark file provided as a content using a custom -/// lexer. 
-/// -/// # arguments -/// -/// * codemap: the codemap object used for diagnostics -/// * filename: the name of the file being parsed, for diagnostics -/// * content: the content to parse -/// * dialect: starlark language dialect -/// * lexer: the lexer to use for parsing -#[doc(hidden)] -pub fn parse_lexer, T2: LexerIntoIter>( - map: &Arc>, - filename: &str, - content: &str, - dialect: Dialect, - lexer: T2, -) -> Result { - let filespan = { - map.lock() - .unwrap() - .add_file(filename.to_string(), content.to_string()) - .span - }; - match { - match dialect { - Dialect::Build => BuildFileParser::new().parse(content, filespan, lexer), - Dialect::Bzl => StarlarkParser::new().parse(content, filespan, lexer), - } - } { - Result::Ok(v) => Ok(Module::compile(v, dialect)?), - Result::Err(p) => Result::Err(p.to_diagnostic(filespan)), - } -} - -/// Parse a build file (if build is true) or a starlark file provided as a content. -/// -/// # arguments -/// -/// * codemap: the codemap object used for diagnostics -/// * filename: the name of the file being parsed, for diagnostics -/// * content: the content to parse -/// * dialect: starlark language dialect. -#[doc(hidden)] -pub fn parse( - map: &Arc>, - filename: &str, - content: &str, - dialect: Dialect, -) -> Result { - let content2 = content.to_owned(); - parse_lexer(map, filename, content, dialect, Lexer::new(&content2)) -} - -/// Parse a build file (if build is true) or a starlark file, reading the content from the file -/// system. -/// -/// # arguments -/// -/// * codemap: the codemap object used for diagnostics -/// * path: the path to the file to parse -/// * dialect: starlark language dialect -/// -/// # Note -/// -/// This method unwrap the path to a unicode string, which can panic. -#[doc(hidden)] -pub fn parse_file( - map: &Arc>, - path: &str, - dialect: Dialect, -) -> Result { - let mut content = String::new(); - let mut file = iotry!(File::open(path)); - iotry!(file.read_to_string(&mut content)); - parse(map, path, &content, dialect) -} diff --git a/starlark/src/syntax/testcases/README.md b/starlark/src/syntax/testcases/README.md deleted file mode 100644 index b961b9c7..00000000 --- a/starlark/src/syntax/testcases/README.md +++ /dev/null @@ -1,4 +0,0 @@ -# Test cases - -This is a list of bzl file taken from various Open-source projet to test the Starlark parser -against real world cases. This complete other unit test to test specific behavior of the parser. diff --git a/starlark/src/syntax/testcases/action.bzl b/starlark/src/syntax/testcases/action.bzl deleted file mode 100644 index 846d4c32..00000000 --- a/starlark/src/syntax/testcases/action.bzl +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -def add_go_env(args, stdlib, mode): - args.add([ - "-go", stdlib.go, - "-root_file", stdlib.root_file, - "-goos", stdlib.goos, - "-goarch", stdlib.goarch, - "-cgo=" + ("0" if mode.pure else "1"), - ]) - -def bootstrap_action(ctx, go_toolchain, mode, inputs, outputs, mnemonic, arguments): - stdlib = go_toolchain.stdlib.get(ctx, go_toolchain, mode) - ctx.actions.run_shell( - inputs = inputs + stdlib.files, - outputs = outputs, - mnemonic = mnemonic, - command = "export GOROOT=$(pwd)/{} && {} {}".format(stdlib.root_file.dirname, stdlib.go.path, " ".join(arguments)), - ) diff --git a/starlark/src/syntax/testcases/alias_rules.bzl b/starlark/src/syntax/testcases/alias_rules.bzl deleted file mode 100644 index fa2423ad..00000000 --- a/starlark/src/syntax/testcases/alias_rules.bzl +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Skylark rules that stub out C++-related alias rules.""" -def cc_toolchain_alias(name): - if hasattr(native, "cc_toolchain_alias"): - native.cc_toolchain_alias(name=name) - else: - pass diff --git a/starlark/src/syntax/testcases/android_sdk_repository_template.bzl b/starlark/src/syntax/testcases/android_sdk_repository_template.bzl deleted file mode 100644 index e657eaaf..00000000 --- a/starlark/src/syntax/testcases/android_sdk_repository_template.bzl +++ /dev/null @@ -1,366 +0,0 @@ -"""Template for the build file used in android_sdk_repository.""" -# Copyright 2016 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -def create_config_setting_rules(): - """Create config_setting rules for windows_msvc, windows_msys, windows. - - These represent the matching --host_cpu values. - """ - for suffix in ["", "_msvc", "_msys"]: - name = "windows" + suffix - if not native.existing_rule(name): - native.config_setting( - name = name, - values = {"host_cpu": "x64_" + name}, - ) - -def create_android_sdk_rules( - name, - build_tools_version, - build_tools_directory, - api_levels, - default_api_level): - """Generate android_sdk rules for the API levels in the Android SDK. - - Args: - name: string, the name of the repository being generated. - build_tools_version: string, the version of Android's build tools to use. - build_tools_directory: string, the directory name of the build tools in - sdk's build-tools directory. - api_levels: list of ints, the API levels from which to get android.jar - et al. and create android_sdk rules. 
- default_api_level: int, the API level to alias the default sdk to if - --android_sdk is not specified on the command line. - """ - - create_config_setting_rules() - - windows_only_files = [ - "build-tools/%s/aapt.exe" % build_tools_directory, - "build-tools/%s/aidl.exe" % build_tools_directory, - "build-tools/%s/zipalign.exe" % build_tools_directory, - "platform-tools/adb.exe", - ] + native.glob(["build-tools/%s/aapt2.exe" % build_tools_directory]) - - linux_only_files = [ - "build-tools/%s/aapt" % build_tools_directory, - "build-tools/%s/aidl" % build_tools_directory, - "build-tools/%s/zipalign" % build_tools_directory, - "platform-tools/adb", - ] + native.glob( - ["extras", "build-tools/%s/aapt2" % build_tools_directory], - exclude_directories = 0, - ) - - # This filegroup is used to pass the minimal contents of the SDK to the - # Android integration tests. Note that in order to work on Windows, we cannot - # include directories and must keep the size small. - native.filegroup( - name = "files", - srcs = [ - "build-tools/%s/lib/apksigner.jar" % build_tools_directory, - "build-tools/%s/lib/dx.jar" % build_tools_directory, - "build-tools/%s/mainDexClasses.rules" % build_tools_directory, - "tools/proguard/lib/proguard.jar", - "tools/support/annotations.jar", - ] + [ - "platforms/android-%d/%s" % (api_level, filename) - for api_level in api_levels - for filename in ["android.jar", "framework.aidl"] - ] + select({ - ":windows": windows_only_files, - ":windows_msvc": windows_only_files, - ":windows_msys": windows_only_files, - "//conditions:default": linux_only_files, - }), - ) - - for api_level in api_levels: - if api_level >= 23: - # Android 23 removed most of org.apache.http from android.jar and moved it - # to a separate jar. - native.java_import( - name = "org_apache_http_legacy-%d" % api_level, - jars = ["platforms/android-%d/optional/org.apache.http.legacy.jar" % api_level] - ) - - native.android_sdk( - name = "sdk-%d" % api_level, - build_tools_version = build_tools_version, - proguard = "@bazel_tools//third_party/java/proguard", - aapt = select({ - ":windows": "build-tools/%s/aapt.exe" % build_tools_directory, - ":windows_msvc": "build-tools/%s/aapt.exe" % build_tools_directory, - ":windows_msys": "build-tools/%s/aapt.exe" % build_tools_directory, - "//conditions:default": ":aapt_binary", - }), - aapt2 = select({ - ":windows": "build-tools/%s/aapt2.exe" % build_tools_directory, - ":windows_msvc": "build-tools/%s/aapt2.exe" % build_tools_directory, - ":windows_msys": "build-tools/%s/aapt2.exe" % build_tools_directory, - "//conditions:default": ":aapt2_binary", - }), - dx = ":dx_binary", - main_dex_list_creator = ":main_dex_list_creator", - adb = select({ - ":windows": "platform-tools/adb.exe", - ":windows_msvc": "platform-tools/adb.exe", - ":windows_msys": "platform-tools/adb.exe", - "//conditions:default": "platform-tools/adb", - }), - framework_aidl = "platforms/android-%d/framework.aidl" % api_level, - aidl = select({ - ":windows": "build-tools/%s/aidl.exe" % build_tools_directory, - ":windows_msvc": "build-tools/%s/aidl.exe" % build_tools_directory, - ":windows_msys": "build-tools/%s/aidl.exe" % build_tools_directory, - "//conditions:default": ":aidl_binary", - }), - android_jar = "platforms/android-%d/android.jar" % api_level, - shrinked_android_jar = "platforms/android-%d/android.jar" % api_level, - annotations_jar = "tools/support/annotations.jar", - main_dex_classes = "build-tools/%s/mainDexClasses.rules" % build_tools_directory, - apksigner = ":apksigner", - zipalign = 
select({ - ":windows": "build-tools/%s/zipalign.exe" % build_tools_directory, - ":windows_msvc": "build-tools/%s/zipalign.exe" % build_tools_directory, - ":windows_msys": "build-tools/%s/zipalign.exe" % build_tools_directory, - "//conditions:default": ":zipalign_binary", - }), - ) - - native.alias( - name = "org_apache_http_legacy", - actual = ":org_apache_http_legacy-%d" % default_api_level, - ) - - native.alias( - name = "sdk", - actual = ":sdk-%d" % default_api_level, - ) - - native.java_binary( - name = "apksigner", - main_class = "com.android.apksigner.ApkSignerTool", - runtime_deps = ["build-tools/%s/lib/apksigner.jar" % build_tools_directory], - ) - - native.filegroup( - name = "build_tools_libs", - srcs = native.glob([ - "build-tools/%s/lib/**" % build_tools_directory, - # Build tools version 24.0.0 added a lib64 folder. - "build-tools/%s/lib64/**" % build_tools_directory, - ]) - ) - - for tool in ["aapt", "aapt2", "aidl", "zipalign"]: - native.genrule( - name = tool + "_runner", - outs = [tool + "_runner.sh"], - srcs = [], - cmd = "\n".join([ - "cat > $@ << 'EOF'", - "#!/bin/bash", - "set -eu", - # The tools under build-tools/VERSION require the libraries under - # build-tools/VERSION/lib, so we can't simply depend on them as a - # file like we do with aapt. - # On Windows however we can use these binaries directly because - # there's no runfiles support so Bazel just creates a junction to - # {SDK}/build-tools. - "SDK=$${0}.runfiles/%s" % name, - "exec $${SDK}/build-tools/%s/%s $$*" % (build_tools_directory, tool), - "EOF\n"]), - ) - - native.sh_binary( - name = tool + "_binary", - srcs = [tool + "_runner.sh"], - data = [ - ":build_tools_libs", - "build-tools/%s/%s" % (build_tools_directory, tool) - ], - ) - - native.sh_binary( - name = "fail", - srcs = select({ - ":windows": [":generate_fail_cmd"], - ":windows_msvc": [":generate_fail_cmd"], - ":windows_msys": [":generate_fail_cmd"], - "//conditions:default": [":generate_fail_sh"], - }), - ) - - native.genrule( - name = "generate_fail_sh", - executable = 1, - outs = ["fail.sh"], - cmd = "echo -e '#!/bin/bash\\nexit 1' >> $@; chmod +x $@", - ) - - native.genrule( - name = "generate_fail_cmd", - executable = 1, - outs = ["fail.cmd"], - cmd = "echo @exit /b 1 > $@", - ) - - - native.genrule( - name = "main_dex_list_creator_source", - srcs = [], - outs = ["main_dex_list_creator.sh"], - cmd = "\n".join(["cat > $@ <<'EOF'", - "#!/bin/bash", - "", - "MAIN_DEX_LIST=$$1", - "STRIPPED_JAR=$$2", - "JAR=$$3", - "" + - "DIRNAME=$$(dirname $$0)", - "JAVA_BINARY=TBD/main_dex_list_creator_java", # Proper runfiles path comes here - "$$JAVA_BINARY $$STRIPPED_JAR $$JAR > $$MAIN_DEX_LIST", - "exit $$?", - "", - "EOF\n"]), - ) - - native.sh_binary( - name = "main_dex_list_creator", - srcs = ["main_dex_list_creator.sh"], - data = [":main_dex_list_creator_java"], - ) - - native.java_binary( - name = "main_dex_list_creator_java", - main_class = "com.android.multidex.ClassReferenceListBuilder", - runtime_deps = [":dx_jar_import"], - ) - - native.java_binary( - name = "dx_binary", - main_class = "com.android.dx.command.Main", - runtime_deps = [":dx_jar_import"], - ) - - native.filegroup( - name = "dx_jar", - srcs = ["build-tools/%s/lib/dx.jar" % build_tools_directory], - ) - - native.java_import( - name = "dx_jar_import", - jars = [":dx_jar"], - ) - - -TAGDIR_TO_TAG_MAP = { - "google_apis": "google", - "default": "android", - "android-tv": "tv", - "android-wear": "wear", -} - - -ARCHDIR_TO_ARCH_MAP = { - "x86": "x86", - "armeabi-v7a": "arm", -} - - -def 
create_system_images_filegroups(system_image_dirs): - """Generate filegroups for the system images in the Android SDK. - - Args: - system_image_dirs: list of strings, the directories containing system image - files to be used to create android_device rules. - """ - - # These images will need to be updated as Android releases new system images. - # We are intentionally not adding future releases because there is no - # guarantee that they will work out of the box. Supported system images should - # be added here once they have been confirmed to work with the Bazel Android - # testing infrastructure. - system_images = [(tag, str(api), arch) - for tag in ["android", "google"] - for api in [10] + range(15, 20) + range(21, 27) - for arch in ("x86", "arm")] - tv_images = [("tv", str(api), arch) - for api in range(21, 25) for arch in ("x86", "arm")] - wear_images = [("wear", str(api), "x86") - for api in range(20, 26)] + [("wear", str(api), "arm") - for api in range(24, 26)] - supported_system_images = system_images + tv_images + wear_images - - installed_system_images_dirs = {} - for system_image_dir in system_image_dirs: - apidir, tagdir, archdir = system_image_dir.split("/")[1:] - if "-" not in apidir: - continue - api = apidir.split("-")[1] # "android-24" --> "24", "android-O" --> "O" - if tagdir not in TAGDIR_TO_TAG_MAP: - continue - tag = TAGDIR_TO_TAG_MAP[tagdir] - if archdir not in ARCHDIR_TO_ARCH_MAP: - continue - arch = ARCHDIR_TO_ARCH_MAP[archdir] - if (tag, api, arch) in supported_system_images: - name = "emulator_images_%s_%s_%s" % (tag, api, arch) - installed_system_images_dirs[name] = system_image_dir - else: - # TODO(bazel-team): If the user has an unsupported system image installed, - # should we print a warning? This includes all 64-bit system-images. - pass - - for (tag, api, arch) in supported_system_images: - name = "emulator_images_%s_%s_%s" % (tag, api, arch) - if name in installed_system_images_dirs: - system_image_dir = installed_system_images_dirs[name] - # For supported system images that exist in /sdk/system-images/, we - # create a filegroup with their contents. - native.filegroup( - name = name, - srcs = native.glob([ - "%s/**" % system_image_dir, - ]), - ) - native.filegroup( - name = "%s_qemu2_extra" % name, - srcs = native.glob(["%s/kernel-ranchu" % system_image_dir]), - ) - else: - # For supported system images that are not installed in the SDK, we - # create a "poison pill" genrule to display a helpful error message to - # a user who attempts to run a test against an android_device that - # they don't have the system image for installed. - native.genrule( - name = name, - outs = [ - # Necessary so that the build doesn't fail in analysis because - # android_device expects a file named source.properties. - "poison_pill_for_%s/source.properties" % name, - ], - cmd = """echo \ - This rule requires that the Android SDK used by Bazel has the \ - following system image installed: %s. Please install this system \ - image through the Android SDK Manager and try again. ; \ - exit 1 - """ % name, - ) - native.filegroup( - name = "%s_qemu2_extra" % name, - srcs = [], - ) diff --git a/starlark/src/syntax/testcases/archive.bzl b/starlark/src/syntax/testcases/archive.bzl deleted file mode 100644 index a75ddf4c..00000000 --- a/starlark/src/syntax/testcases/archive.bzl +++ /dev/null @@ -1,109 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load("@io_bazel_rules_go//go/private:common.bzl", - "declare_file", - "split_srcs", - "sets", -) -load("@io_bazel_rules_go//go/private:mode.bzl", - "mode_string", -) -load("@io_bazel_rules_go//go/private:rules/aspect.bzl", - "get_archive", -) -load("@io_bazel_rules_go//go/private:providers.bzl", - "GoArchive", - "GoArchiveData", - "sources", -) - -def emit_archive(ctx, go_toolchain, mode=None, importpath=None, source=None, importable=True): - """See go/toolchains.rst#archive for full documentation.""" - - if not importpath: fail("golib is a required parameter") - if source == None: fail("source is a required parameter") - if mode == None: fail("mode is a required parameter") - - source = sources.filter(ctx, source, mode) - - cover_vars = [] - if ctx.configuration.coverage_enabled: - source, cover_vars = go_toolchain.actions.cover(ctx, go_toolchain, source=source, mode=mode, importpath=importpath) - - flat = sources.flatten(ctx, source) - split = split_srcs(flat.srcs) - lib_name = importpath + ".a" - compilepath = importpath if importable else None - out_lib = declare_file(ctx, path=lib_name, mode=mode) - searchpath = out_lib.path[:-len(lib_name)] - - extra_objects = [] - for src in split.asm: - obj = declare_file(ctx, path=src.basename[:-2], ext=".o", mode=mode) - go_toolchain.actions.asm(ctx, go_toolchain, mode=mode, source=src, hdrs=split.headers, out_obj=obj) - extra_objects.append(obj) - - direct = [get_archive(dep) for dep in flat.deps] - runfiles = flat.runfiles - for a in direct: - runfiles = runfiles.merge(a.runfiles) - if a.mode != mode: fail("Archive mode does not match {} is {} expected {}".format(a.data.importpath, mode_string(a.mode), mode_string(mode))) - - if len(extra_objects) == 0 and flat.cgo_archive == None: - go_toolchain.actions.compile(ctx, - go_toolchain = go_toolchain, - sources = split.go, - importpath = compilepath, - archives = direct, - mode = mode, - out_lib = out_lib, - gc_goopts = flat.gc_goopts, - ) - else: - partial_lib = declare_file(ctx, path="partial", ext=".a", mode=mode) - go_toolchain.actions.compile(ctx, - go_toolchain = go_toolchain, - sources = split.go, - importpath = compilepath, - archives = direct, - mode = mode, - out_lib = partial_lib, - gc_goopts = flat.gc_goopts, - ) - go_toolchain.actions.pack(ctx, - go_toolchain = go_toolchain, - mode = mode, - in_lib = partial_lib, - out_lib = out_lib, - objects = extra_objects, - archive = flat.cgo_archive, - ) - data = GoArchiveData( - file = out_lib, - importpath = importpath, - searchpath = searchpath, - ) - return GoArchive( - mode = mode, - data = data, - go_srcs = split.go, - direct = direct, - searchpaths = sets.union([searchpath], *[a.searchpaths for a in direct]), - libs = sets.union([out_lib], *[a.libs for a in direct]), - cgo_deps = sets.union(flat.cgo_deps, *[a.cgo_deps for a in direct]), - cgo_exports = sets.union(flat.cgo_exports, *[a.cgo_exports for a in direct]), - cover_vars = sets.union(cover_vars, *[a.cover_vars for a in direct]), - 
runfiles = runfiles, - ) diff --git a/starlark/src/syntax/testcases/asciidoc.bzl b/starlark/src/syntax/testcases/asciidoc.bzl deleted file mode 100644 index c39541de..00000000 --- a/starlark/src/syntax/testcases/asciidoc.bzl +++ /dev/null @@ -1,324 +0,0 @@ -def documentation_attributes(): - return [ - "toc", - 'newline="\\n"', - 'asterisk="*"', - 'plus="+"', - 'caret="^"', - 'startsb="["', - 'endsb="]"', - 'tilde="~"', - "last-update-label!", - "source-highlighter=prettify", - "stylesheet=DEFAULT", - "linkcss=true", - "prettifydir=.", - # Just a placeholder, will be filled in asciidoctor java binary: - "revnumber=%s", - ] - -def release_notes_attributes(): - return [ - 'toc', - 'newline="\\n"', - 'asterisk="*"', - 'plus="+"', - 'caret="^"', - 'startsb="["', - 'endsb="]"', - 'tilde="~"', - 'last-update-label!', - 'stylesheet=DEFAULT', - 'linkcss=true', - ] - -def _replace_macros_impl(ctx): - cmd = [ - ctx.file._exe.path, - '--suffix', ctx.attr.suffix, - "-s", ctx.file.src.path, - "-o", ctx.outputs.out.path, - ] - if ctx.attr.searchbox: - cmd.append('--searchbox') - else: - cmd.append('--no-searchbox') - ctx.action( - inputs = [ctx.file._exe, ctx.file.src], - outputs = [ctx.outputs.out], - command = cmd, - progress_message = "Replacing macros in %s" % ctx.file.src.short_path, - ) - -_replace_macros = rule( - attrs = { - "_exe": attr.label( - default = Label("//Documentation:replace_macros.py"), - allow_single_file = True, - ), - "src": attr.label( - mandatory = True, - allow_single_file = [".txt"], - ), - "suffix": attr.string(mandatory = True), - "searchbox": attr.bool(default = True), - "out": attr.output(mandatory = True), - }, - implementation = _replace_macros_impl, -) - -def _generate_asciidoc_args(ctx): - args = [] - if ctx.attr.backend: - args.extend(["-b", ctx.attr.backend]) - revnumber = False - for attribute in ctx.attr.attributes: - if attribute.startswith("revnumber="): - revnumber = True - else: - args.extend(["-a", attribute]) - if revnumber: - args.extend([ - "--revnumber-file", ctx.file.version.path, - ]) - for src in ctx.files.srcs: - args.append(src.path) - return args - -def _invoke_replace_macros(name, src, suffix, searchbox): - fn = src - if fn.startswith(":"): - fn = src[1:] - - _replace_macros( - name = "macros_%s_%s" % (name, fn), - src = src, - out = fn + suffix, - suffix = suffix, - searchbox = searchbox, - ) - - return ":" + fn + suffix, fn.replace(".txt", ".html") - -def _asciidoc_impl(ctx): - args = [ - "--bazel", - "--in-ext", ".txt" + ctx.attr.suffix, - "--out-ext", ".html", - ] - args.extend(_generate_asciidoc_args(ctx)) - ctx.action( - inputs = ctx.files.srcs + [ctx.executable._exe, ctx.file.version], - outputs = ctx.outputs.outs, - executable = ctx.executable._exe, - arguments = args, - progress_message = "Rendering asciidoctor files for %s" % ctx.label.name, - ) - -_asciidoc_attrs = { - "_exe": attr.label( - default = Label("//lib/asciidoctor:asciidoc"), - cfg = "host", - allow_files = True, - executable = True, - ), - "srcs": attr.label_list( - mandatory = True, - allow_files = True, - ), - "version": attr.label( - default = Label("//:version.txt"), - allow_single_file = True, - ), - "suffix": attr.string(mandatory = True), - "backend": attr.string(), - "attributes": attr.string_list(), -} - -_asciidoc = rule( - attrs = _asciidoc_attrs + { - "outs": attr.output_list(mandatory = True), - }, - implementation = _asciidoc_impl, -) - -def _genasciidoc_htmlonly( - name, - srcs = [], - attributes = [], - backend = None, - searchbox = True, - **kwargs): - SUFFIX 
= "." + name + "_macros" - new_srcs = [] - outs = ["asciidoctor.css"] - - for src in srcs: - new_src, html_name = _invoke_replace_macros(name, src, SUFFIX, searchbox) - new_srcs.append(new_src) - outs.append(html_name) - - _asciidoc( - name = name + "_gen", - srcs = new_srcs, - suffix = SUFFIX, - backend = backend, - attributes = attributes, - outs = outs, - ) - - native.filegroup( - name = name, - data = outs, - **kwargs - ) - -def genasciidoc( - name, - srcs = [], - attributes = [], - backend = None, - searchbox = True, - resources = True, - **kwargs): - SUFFIX = "_htmlonly" - - _genasciidoc_htmlonly( - name = name + SUFFIX if resources else name, - srcs = srcs, - attributes = attributes, - backend = backend, - searchbox = searchbox, - **kwargs - ) - - if resources: - htmlonly = ":" + name + SUFFIX - native.filegroup( - name = name, - srcs = [ - htmlonly, - "//Documentation:resources", - ], - **kwargs - ) - -def _asciidoc_html_zip_impl(ctx): - args = [ - "--mktmp", - "-z", ctx.outputs.out.path, - "--in-ext", ".txt" + ctx.attr.suffix, - "--out-ext", ".html", - ] - args.extend(_generate_asciidoc_args(ctx)) - ctx.action( - inputs = ctx.files.srcs + [ctx.executable._exe, ctx.file.version], - outputs = [ctx.outputs.out], - executable = ctx.executable._exe, - arguments = args, - progress_message = "Rendering asciidoctor files for %s" % ctx.label.name, - ) - -_asciidoc_html_zip = rule( - attrs = _asciidoc_attrs, - outputs = { - "out": "%{name}.zip", - }, - implementation = _asciidoc_html_zip_impl, -) - -def _genasciidoc_htmlonly_zip( - name, - srcs = [], - attributes = [], - backend = None, - searchbox = True, - **kwargs): - SUFFIX = "." + name + "_expn" - new_srcs = [] - - for src in srcs: - new_src, _ = _invoke_replace_macros(name, src, SUFFIX, searchbox) - new_srcs.append(new_src) - - _asciidoc_html_zip( - name = name, - srcs = new_srcs, - suffix = SUFFIX, - backend = backend, - attributes = attributes, - ) - -def _asciidoc_zip_impl(ctx): - tmpdir = ctx.outputs.out.path + "_tmpdir" - cmd = [ - "p=$PWD", - "rm -rf %s" % tmpdir, - "mkdir %s" % tmpdir, - "unzip -q %s -d %s/%s/" % (ctx.file.src.path, tmpdir, ctx.attr.directory), - ] - for r in ctx.files.resources: - if r.path == r.short_path: - cmd.append("tar -cf- %s | tar -C %s -xf-" % (r.short_path, tmpdir)) - else: - parent = r.path[:-len(r.short_path)] - cmd.append( - "tar -C %s -cf- %s | tar -C %s -xf-" % (parent, r.short_path, tmpdir)) - cmd.extend([ - "cd %s" % tmpdir, - "zip -qr $p/%s *" % ctx.outputs.out.path, - ]) - ctx.action( - inputs = [ctx.file.src] + ctx.files.resources, - outputs = [ctx.outputs.out], - command = " && ".join(cmd), - progress_message = - "Generating asciidoctor zip file %s" % ctx.outputs.out.short_path, - ) - -_asciidoc_zip = rule( - attrs = { - "src": attr.label( - mandatory = True, - allow_single_file = [".zip"], - ), - "resources": attr.label_list( - mandatory = True, - allow_files = True, - ), - "directory": attr.string(mandatory = True), - }, - outputs = { - "out": "%{name}.zip", - }, - implementation = _asciidoc_zip_impl, -) - -def genasciidoc_zip( - name, - srcs = [], - attributes = [], - directory = None, - backend = None, - searchbox = True, - resources = True, - **kwargs): - SUFFIX = "_htmlonly" - - _genasciidoc_htmlonly_zip( - name = name + SUFFIX if resources else name, - srcs = srcs, - attributes = attributes, - backend = backend, - searchbox = searchbox, - **kwargs - ) - - if resources: - htmlonly = ":" + name + SUFFIX - _asciidoc_zip( - name = name, - src = htmlonly, - resources = 
["//Documentation:resources"], - directory = directory, - ) diff --git a/starlark/src/syntax/testcases/asm.bzl b/starlark/src/syntax/testcases/asm.bzl deleted file mode 100644 index 51083d1e..00000000 --- a/starlark/src/syntax/testcases/asm.bzl +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load("@io_bazel_rules_go//go/private:actions/action.bzl", - "add_go_env", -) -load("@io_bazel_rules_go//go/private:common.bzl", - "to_set", - "sets", -) - -def emit_asm(ctx, go_toolchain, - source = None, - hdrs = [], - out_obj = None, - mode = None): - """See go/toolchains.rst#asm for full documentation.""" - - if source == None: fail("source is a required parameter") - if out_obj == None: fail("out_obj is a required parameter") - if mode == None: fail("mode is a required parameter") - - stdlib = go_toolchain.stdlib.get(ctx, go_toolchain, mode) - includes = to_set([stdlib.root_file.dirname + "/pkg/include"]) - includes = sets.union(includes, [f.dirname for f in hdrs]) - inputs = hdrs + stdlib.files + [source] - - asm_args = ctx.actions.args() - add_go_env(asm_args, stdlib, mode) - asm_args.add(["-o", out_obj, "-trimpath", "."]) - asm_args.add(includes, before_each="-I") - asm_args.add(source.path) - ctx.actions.run( - inputs = inputs, - outputs = [out_obj], - mnemonic = "GoAsmCompile", - executable = go_toolchain.tools.asm, - arguments = [asm_args], - ) diff --git a/starlark/src/syntax/testcases/aspect.bzl b/starlark/src/syntax/testcases/aspect.bzl deleted file mode 100644 index 7d6ac26b..00000000 --- a/starlark/src/syntax/testcases/aspect.bzl +++ /dev/null @@ -1,110 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -load("@io_bazel_rules_go//go/private:common.bzl", - "split_srcs", - "to_set", - "sets", -) -load("@io_bazel_rules_go//go/private:mode.bzl", - "get_mode", - "mode_string", -) -load("@io_bazel_rules_go//go/private:providers.bzl", - "GoLibrary", - "GoSourceList", - "GoArchive", - "GoArchiveData", - "sources", -) -load("@io_bazel_rules_go//go/platform:list.bzl", - "GOOS", - "GOARCH", -) - -GoAspectProviders = provider() - -def get_archive(dep): - if GoAspectProviders in dep: - return dep[GoAspectProviders].archive - return dep[GoArchive] - -def get_source_list(dep): - if GoAspectProviders in dep: - return dep[GoAspectProviders].source - return dep[GoSourceList] - - -def collect_src(ctx, aspect=False, srcs = None, deps=None, want_coverage = None): - rule = ctx.rule if aspect else ctx - if srcs == None: - srcs = rule.files.srcs - if deps == None: - deps = rule.attr.deps - if want_coverage == None: - want_coverage = ctx.coverage_instrumented() and not rule.label.name.endswith("~library~") - return sources.merge([get_source_list(s) for s in rule.attr.embed] + [sources.new( - srcs = srcs, - deps = deps, - gc_goopts = rule.attr.gc_goopts, - runfiles = ctx.runfiles(collect_data = True), - want_coverage = want_coverage, - )]) - -def _go_archive_aspect_impl(target, ctx): - mode = get_mode(ctx, ctx.rule.attr._go_toolchain_flags) - if GoArchive not in target: - if GoSourceList in target and hasattr(ctx.rule.attr, "embed"): - return [GoAspectProviders( - source = collect_src(ctx, aspect=True), - )] - return [] - goarchive = target[GoArchive] - if goarchive.mode == mode: - return [GoAspectProviders( - source = target[GoSourceList], - archive = goarchive, - )] - - source = collect_src(ctx, aspect=True) - for dep in ctx.rule.attr.deps: - a = get_archive(dep) - if a.mode != mode: fail("In aspect on {} found {} is {} expected {}".format(ctx.label, a.data.importpath, mode_string(a.mode), mode_string(mode))) - - go_toolchain = ctx.toolchains["@io_bazel_rules_go//go:toolchain"] - goarchive = go_toolchain.actions.archive(ctx, - go_toolchain = go_toolchain, - mode = mode, - importpath = target[GoLibrary].package.importpath, - source = source, - importable = True, - ) - return [GoAspectProviders( - source = source, - archive = goarchive, - )] - -go_archive_aspect = aspect( - _go_archive_aspect_impl, - attr_aspects = ["deps", "embed"], - attrs = { - "pure": attr.string(values=["on", "off", "auto"]), - "static": attr.string(values=["on", "off", "auto"]), - "msan": attr.string(values=["on", "off", "auto"]), - "race": attr.string(values=["on", "off", "auto"]), - "goos": attr.string(values=GOOS.keys() + ["auto"], default="auto"), - "goarch": attr.string(values=GOARCH.keys() + ["auto"], default="auto"), - }, - toolchains = ["@io_bazel_rules_go//go:toolchain"], -) diff --git a/starlark/src/syntax/testcases/bazel_hash_dict.bzl b/starlark/src/syntax/testcases/bazel_hash_dict.bzl deleted file mode 100644 index 7d15b9b7..00000000 --- a/starlark/src/syntax/testcases/bazel_hash_dict.bzl +++ /dev/null @@ -1,36 +0,0 @@ -# Automatically generated by create_hash_dict '--output=tools/bazel_hash_dict.bzl' - -BAZEL_HASH_DICT = { - '0.5.0': { - 'darwin-x86_64': '5ccdb953dc2b8d81f15ea185f47ecfc7ddc39ad0c6f71e561ec7398377c8cc1a', - 'linux-x86_64': 'd026e581a860f305791f3ba839462ff02b1929858b37d1db2f27af212be73741', - }, - '0.5.1': { - 'darwin-x86_64': '8d92a67a204abdd84376a4265d372e4a9bfc31872e825c028ce261d20bad352a', - 'linux-x86_64': '27bc739082a241e2f7f1a89fbaea3306e3edc40d930472c6789d49dc17fde3d2', - }, - '0.5.2': { - 
'darwin-x86_64': '31b92de24cd251923b09773c4c20bcf2014390d930c6a3b7f043558975743510', - 'linux-x86_64': '9a1b6fff69ba8aff460bd1883dd51702b7ad0e4c979c5dcab75baf65027684ef', - }, - '0.5.3': { - 'darwin-x86_64': '4bbcf198c9daeab8597f748aead68e10bcb3ce720fb8e3d474b2e72825c23fb0', - 'linux-x86_64': '7545e5164450c8777aca07903328c9744f930bcba51f2a10fe54f3d1ece49097', - }, - '0.5.4': { - 'darwin-x86_64': '12140eba1de18ade8863f09aa6365e5f4fc99e9fc94f74b31f7a258239b24515', - 'linux-x86_64': '3491f4fafa5fe45f82896cd0cec7edc94c7e4daa6dee2fd8410c88d13b110f2b', - }, - '0.6.0': { - 'darwin-x86_64': '331fb70586ca7f775ad9f0fa262dcce84252263f8654517ac025003cdc86967e', - 'linux-x86_64': '9e77b400c062ae19c89839ee7cba19eea882ee007122d040520bf6024cdf704c', - }, - '0.6.1': { - 'darwin-x86_64': '56feb62d8fc95e8a4f5fe21d9938d47cdda3a066f87111556a194c5adc571ee8', - 'linux-x86_64': '9846ea8c7a7bf448f9895741dea3b018b3f7846aab4a9cac0c9a3024a68e4d37', - }, - '0.7.0': { - 'darwin-x86_64': '036b8281fb240f5f7beb7f76ca4290ece8177dcae1d891e5b34123b5391f249a', - 'linux-x86_64': '5dfc7bf3737b1b9ade4709f2b7348d71e0e81f44e59f2ddd44bc04879e7ddab0', - }, -} diff --git a/starlark/src/syntax/testcases/bazel_integration_test.bzl b/starlark/src/syntax/testcases/bazel_integration_test.bzl deleted file mode 100644 index 44d9392d..00000000 --- a/starlark/src/syntax/testcases/bazel_integration_test.bzl +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Integration test framework for using Bazel -load("//tools:common.bzl", "BAZEL_VERSIONS") -load("//tools:repositories.bzl", "bazel_binary", "bazel_binaries") -load("//tools:bazel_java_integration_test.bzl", "bazel_java_integration_test", "bazel_java_integration_test_deps") -load("//tools:bazel_py_integration_test.bzl", "bazel_py_integration_test") -load("//go:bazel_integration_test.bzl", "bazel_go_integration_test") diff --git a/starlark/src/syntax/testcases/bazel_java_integration_test.bzl b/starlark/src/syntax/testcases/bazel_java_integration_test.bzl deleted file mode 100644 index 8ae2a10f..00000000 --- a/starlark/src/syntax/testcases/bazel_java_integration_test.bzl +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# Java integration test framework for using Bazel -load(":common.bzl", "BAZEL_VERSIONS") -load(":repositories.bzl", "bazel_binaries") - -def _index(lst, el): - return lst.index(el) if el in lst else -1 - - -def _java_package(): - # Adaptation of the java class finding library from Bazel. - path = native.package_name() - segments = path.split("/") - roots = [segments.index(i) - for i in ["java", "javatests", "src"] - if i in segments] - if not len(roots): - return ".".join(segments) - idx = min(roots) - is_src = segments[idx] == "src" - check_mvn_idx = idx if is_src else -1 - if idx == 0 or is_src: - # Check for a nested root directory. - end_segments = segments[idx + 1:-1] - src_segment = end_segments.index("src") if "src" in end_segments else -1 - if is_src: - end_segments_idx = [end_segments.index(i) - for i in ["java", "javatests", "src"] - if i in end_segments] - if end_segments_idx: - src_segment = min(end_segments_idx) - if src_segment >= 0: - next = end_segments[src_segment+1] - if next in ["com", "org", "net"]: - # Check for common first element of java package, to avoid false - # positives. - idx += src_segment + 1 - elif next in ["main", "test"]: - # Also accept maven style src/(main|test)/(java|resources). - check_mvn_idx = idx + src_segment + 1 - # Check for (main|test)/(java|resources) after /src/. - if check_mvn_idx >= 0 and check_mvn_idx < len(segments) - 2: - if segments[check_mvn_idx + 1] in ["main", "test"] and segments[check_mvn_idx + 2] in ["java", "resources"]: - idx = check_mvn_idx + 2 - if idx < 0: - return ".".join(segments) - return ".".join(segments[idx+1:]) - - -def bazel_java_integration_test(name, srcs=[], deps=None, runtime_deps=[], - jvm_flags=[], test_class=None, - versions=BAZEL_VERSIONS, **kwargs): - """A wrapper around java_test that create several java tests, one per version - of Bazel. - - Args: - versions: list of version of bazel to create a test for. Each test - will be named `/bazel`. - See java_test for the other arguments. - """ - if not test_class: - test_class = "%s.%s" % (_java_package(), name) - add_deps = [ - str(Label("//java/build/bazel/tests/integration")), - "@org_hamcrest_core//jar", - "@org_junit//jar", - ] - if srcs: - deps = (deps or []) + add_deps - else: - runtime_deps = runtime_deps + add_deps - for version in versions: - native.java_test( - name = "%s/bazel%s" % (name, version), - jvm_flags = ["-Dbazel.version=" + version], - srcs = srcs, - test_class = test_class, - deps = deps, - runtime_deps = runtime_deps, - **kwargs) - native.test_suite( - name = name, - tests = [":%s/bazel%s" % (name, version) for version in versions]) - -def bazel_java_integration_test_deps(versions = BAZEL_VERSIONS): - bazel_binaries(versions) - - # TODO(dmarting): Use http_file and relies on a mirror instead of maven_jar - native.maven_jar( - name = "com_google_guava", - artifact = "com.google.guava:guava:jar:21.0", - ) - - native.maven_jar( - name = "org_hamcrest_core", - artifact = "org.hamcrest:hamcrest-core:jar:1.3", - ) - - native.maven_jar( - name = "org_junit", - artifact = "junit:junit:jar:4.11", - ) diff --git a/starlark/src/syntax/testcases/bazel_py_integration_test.bzl b/starlark/src/syntax/testcases/bazel_py_integration_test.bzl deleted file mode 100644 index 6c84bd44..00000000 --- a/starlark/src/syntax/testcases/bazel_py_integration_test.bzl +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Python integration test framework for using Bazel -load(":common.bzl", "BAZEL_VERSIONS") -load(":repositories.bzl", "bazel_binaries") - -def bazel_py_integration_test(name, srcs, main=None, deps=[], versions=BAZEL_VERSIONS, **kwargs): - """A wrapper around py_test that create several python tests, one per version - of Bazel. - - Args: - versions: list of version of bazel to create a test for. Each test - will be named `/bazel`. - See py_test for the other arguments. - """ - if not main and len(srcs) == 1: - main = srcs[0] - for version in versions: - add_deps = [ - str(Label("//bazel_integration_test:python")), - str(Label("//bazel_integration_test:python_version_" + version)), - ] - native.py_test( - name = "%s/bazel%s" % (name, version), - srcs = srcs, - main = main, - deps = deps + add_deps, - **kwargs) - native.test_suite( - name = name, - tests = [":%s/bazel%s" % (name, version) for version in versions]) diff --git a/starlark/src/syntax/testcases/bazel_tests.bzl b/starlark/src/syntax/testcases/bazel_tests.bzl deleted file mode 100644 index 5241c971..00000000 --- a/starlark/src/syntax/testcases/bazel_tests.bzl +++ /dev/null @@ -1,249 +0,0 @@ -load("@io_bazel_rules_go//go/private:go_repository.bzl", "env_execute") -load("@io_bazel_rules_go//go/private:common.bzl", "declare_file") - -# _bazelrc is the bazel.rc file that sets the default options for tests -_bazelrc = """ -build --verbose_failures -build --sandbox_debug -build --test_output=errors -build --spawn_strategy=standalone -build --genrule_strategy=standalone - -test --test_strategy=standalone - -build:isolate -- -build:fetch --fetch=True -""" - -# _basic_workspace is the content appended to all test workspace files -# it contains the calls required to make the go rules work -_basic_workspace = """ -load("@io_bazel_rules_go//go:def.bzl", "go_rules_dependencies", "go_register_toolchains") -load("@io_bazel_rules_go//proto:def.bzl", "proto_register_toolchains") -go_rules_dependencies() -proto_register_toolchains() -""" - -# _bazel_test_script_template is hte template for the bazel invocation script -_bazel_test_script_template = """ -echo running in {work_dir} -unset TEST_TMPDIR -RULES_GO_OUTPUT={output} - -mkdir -p {work_dir} -mkdir -p {cache_dir} -cp -f {workspace} {work_dir}/WORKSPACE -cp -f {build} {work_dir}/BUILD.bazel -cd {work_dir} - -{bazel} --bazelrc {bazelrc} --nomaster_blazerc {command} --experimental_repository_cache={cache_dir} --config {config} {args} {target} >& bazel-output.txt -result=$? - -{check} - -if (( $result != 0 )); then - echo "Bazel output: $( $@" - - native.genrule( - name = name, - srcs = srcs, - outs = [name + ".xml"], - cmd = cmd, - tools = [stamp_tool], - **kwargs) - -def product_build_txt(name, **kwargs): - """Produces a product-build.txt file with the build number. - - Args: - name: name of this target - **kwargs: Any additional arguments to pass to the final target. 
- """ - application_info_jar = "//intellij_platform_sdk:application_info_jar" - application_info_name = "//intellij_platform_sdk:application_info_name" - product_build_txt_tool = "//build_defs:product_build_txt" - - args = [ - "./$(location {product_build_txt_tool})", - "--application_info_jar=$(location {application_info_jar})", - "--application_info_name=$(location {application_info_name})", - ] - cmd = " ".join(args).format( - application_info_jar=application_info_jar, - application_info_name=application_info_name, - product_build_txt_tool=product_build_txt_tool, - ) + "> $@" - native.genrule( - name = name, - srcs = [application_info_jar, application_info_name], - outs = ["product-build.txt"], - cmd = cmd, - tools = [product_build_txt_tool], - **kwargs) - -def api_version_txt(name, **kwargs): - """Produces an api_version.txt file with the api version, including the product code. - - Args: - name: name of this target - **kwargs: Any additional arguments to pass to the final target. - """ - application_info_jar = "//intellij_platform_sdk:application_info_jar" - application_info_name = "//intellij_platform_sdk:application_info_name" - api_version_txt_tool = "//build_defs:api_version_txt" - - args = [ - "./$(location {api_version_txt_tool})", - "--application_info_jar=$(location {application_info_jar})", - "--application_info_name=$(location {application_info_name})", - ] - cmd = " ".join(args).format( - application_info_jar=application_info_jar, - application_info_name=application_info_name, - api_version_txt_tool=api_version_txt_tool, - ) + "> $@" - native.genrule( - name = name, - srcs = [application_info_jar, application_info_name], - outs = [name + ".txt"], - cmd = cmd, - tools = [api_version_txt_tool], - **kwargs) - -def repackaged_jar(name, deps, rules, **kwargs): - """Repackages classes in a jar, to avoid collisions in the classpath. - - Args: - name: the name of this target - deps: The dependencies repackage - rules: the rules to apply in the repackaging - Do not repackage: - - com.google.net.** because that has JNI files which use - FindClass(JNIEnv *, const char *) with hard-coded native string - literals that jarjar doesn't rewrite. - - com.google.errorprone packages (rewriting will throw off blaze build). - **kwargs: Any additional arguments to pass to the final target. 
- """ - java_binary_name = name + "_orig" - out = name + ".jar" - native.java_binary( - name = java_binary_name, - create_executable = 0, - stamp = 0, - runtime_deps = deps) - _repackaged_jar(name, java_binary_name, out, rules, **kwargs) - -def _repackaged_jar(name, src_rule, out, rules, **kwargs): - """Repackages classes in a jar, to avoid collisions in the classpath.""" - repackage_tool = "@jarjar//jar" - deploy_jar = "{src_rule}_deploy.jar".format(src_rule=src_rule) - script_lines = [] - script_lines.append("echo >> /tmp/repackaged_rule.txt") - for rule in rules: - script_lines.append("echo 'rule {rule}' >> /tmp/repackaged_rule.txt;".format(rule=rule)) - script_lines.append(" ".join([ - "$(location {repackage_tool})", - "process /tmp/repackaged_rule.txt", - "$(location {deploy_jar})", - "$@", - ]).format( - repackage_tool = repackage_tool, - deploy_jar = deploy_jar, - )) - genrule_name = name + "_repackaged" - native.genrule( - name = genrule_name, - srcs = [deploy_jar], - outs = [out], - tools = [repackage_tool], - cmd = "\n".join(script_lines), - ) - native.java_import( - name = name, - jars = [out], - **kwargs) - -def beta_gensignature(name, srcs, stable, stable_version, beta_version): - if stable_version == beta_version: - native.alias(name = name, actual = stable) - else: - native.gensignature(name = name, srcs = srcs) diff --git a/starlark/src/syntax/testcases/bundle.bzl b/starlark/src/syntax/testcases/bundle.bzl deleted file mode 100644 index fa11b12f..00000000 --- a/starlark/src/syntax/testcases/bundle.bzl +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Rule for bundling Docker images into a tarball.""" - -load(":label.bzl", _string_to_label="string_to_label") -load(":layers.bzl", - _assemble_image="assemble", - _get_layers="get_from_target", - _incr_load="incremental_load", - _layer_tools="tools") -load(":list.bzl", "reverse") - - -def _docker_bundle_impl(ctx): - """Implementation for the docker_bundle rule.""" - - # Compute the set of layers from the image_targes. - image_target_dict = _string_to_label( - ctx.attr.image_targets, ctx.attr.image_target_strings) - - seen_names = [] - layers = [] - for image in ctx.attr.image_targets: - # TODO(mattmoor): Add support for naked tarballs. - for layer in _get_layers(ctx, image): - if layer["name"].path in seen_names: - continue - seen_names.append(layer["name"].path) - layers.append(layer) - - images = dict() - for unresolved_tag in ctx.attr.images: - # Allow users to put make variables into the tag name. - tag = ctx.expand_make_variables("images", unresolved_tag, {}) - - target = ctx.attr.images[unresolved_tag] - target = image_target_dict[target] - images[tag] = _get_layers(ctx, target)[0] - - _incr_load(ctx, layers, images, ctx.outputs.executable) - - _assemble_image(ctx, reverse(layers), { - # Create a new dictionary with the same keyspace that - # points to the name of the layer. 
- k: images[k]["name"] - for k in images - }, ctx.outputs.out) - - runfiles = ctx.runfiles( - files = ([l["name"] for l in layers] + - [l["id"] for l in layers] + - [l["layer"] for l in layers])) - - return struct(runfiles = runfiles, - files = depset()) - - -docker_bundle_ = rule( - implementation = _docker_bundle_impl, - attrs = { - "images": attr.string_dict(), - # Implicit dependencies. - "image_targets": attr.label_list(allow_files=True), - "image_target_strings": attr.string_list(), - } + _layer_tools, - outputs = { - "out": "%{name}.tar", - }, - executable = True) - - -# Produces a new docker image tarball compatible with 'docker load', which -# contains the N listed 'images', each aliased with their key. -# -# Example: -# docker_bundle( -# name = "foo", -# images = { -# "ubuntu:latest": ":blah", -# "foo.io/bar:canary": "//baz:asdf", -# } -# ) -def docker_bundle(**kwargs): - """Package several docker images into a single tarball. - - Args: - **kwargs: See above. - """ - for reserved in ["image_targets", "image_target_strings"]: - if reserved in kwargs: - fail("reserved for internal use by docker_bundle macro", attr=reserved) - - if "images" in kwargs: - kwargs["image_targets"] = kwargs["images"].values() - kwargs["image_target_strings"] = kwargs["images"].values() - - docker_bundle_(**kwargs) diff --git a/starlark/src/syntax/testcases/bzl.bzl b/starlark/src/syntax/testcases/bzl.bzl deleted file mode 100644 index 1028ac9f..00000000 --- a/starlark/src/syntax/testcases/bzl.bzl +++ /dev/null @@ -1,15 +0,0 @@ -load(":workspace.bzl", _mvn_deps="maven_dependencies") - -def _declare_maven(item): - sha = item.get("sha1") - kwargs = { - "name": item["name"], - "artifact": item["artifact"], - "repository": item["repository"], - } - if sha != None: - kwargs["sha1"] = sha - native.maven_jar(**kwargs) - -def maven_dependencies(): - _mvn_deps(_declare_maven) diff --git a/starlark/src/syntax/testcases/cc.bzl b/starlark/src/syntax/testcases/cc.bzl deleted file mode 100644 index 06a51838..00000000 --- a/starlark/src/syntax/testcases/cc.bzl +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# !!!! THIS IS A GENERATED FILE TO NOT EDIT IT BY HAND !!!! -# -# To regenerate this file, run ./update_deps.sh from the root of the -# git repository. - -DIGESTS = { - # "gcr.io/distroless/cc:debug" circa 2017-10-10 04:09 +0000 - "debug": "sha256:21e38594fa41bf10526184c097f3c32ce10932c0944ac3780534bf2ad2c0d320", - # "gcr.io/distroless/cc:latest" circa 2017-10-10 04:09 +0000 - "latest": "sha256:69efccbe4d1318955a884a7b85c192804d0d5df7da00cc5be193a177f6f5cee1", -} diff --git a/starlark/src/syntax/testcases/cc_configure.bzl b/starlark/src/syntax/testcases/cc_configure.bzl deleted file mode 100644 index 9d6d29fc..00000000 --- a/starlark/src/syntax/testcases/cc_configure.bzl +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright 2016 The Bazel Authors. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Rules for configuring the C++ toolchain (experimental).""" - - -load("@bazel_tools//tools/cpp:windows_cc_configure.bzl", "configure_windows_toolchain") -load("@bazel_tools//tools/cpp:osx_cc_configure.bzl", "configure_osx_toolchain") -load("@bazel_tools//tools/cpp:unix_cc_configure.bzl", "configure_unix_toolchain") -load("@bazel_tools//tools/cpp:lib_cc_configure.bzl", "get_cpu_value") - -def _impl(repository_ctx): - repository_ctx.symlink( - Label("@bazel_tools//tools/cpp:dummy_toolchain.bzl"), "dummy_toolchain.bzl") - cpu_value = get_cpu_value(repository_ctx) - if cpu_value == "freebsd": - # This is defaulting to the static crosstool, we should eventually - # autoconfigure this platform too. Theorically, FreeBSD should be - # straightforward to add but we cannot run it in a docker container so - # skipping until we have proper tests for FreeBSD. - repository_ctx.symlink(Label("@bazel_tools//tools/cpp:CROSSTOOL"), "CROSSTOOL") - repository_ctx.symlink(Label("@bazel_tools//tools/cpp:BUILD.static"), "BUILD") - elif cpu_value == "x64_windows": - configure_windows_toolchain(repository_ctx) - elif cpu_value == "darwin": - configure_osx_toolchain(repository_ctx) - else: - configure_unix_toolchain(repository_ctx, cpu_value) - - -cc_autoconf = repository_rule( - implementation=_impl, - environ = [ - "ABI_LIBC_VERSION", - "ABI_VERSION", - "BAZEL_COMPILER", - "BAZEL_HOST_SYSTEM", - "BAZEL_PYTHON", - "BAZEL_SH", - "BAZEL_TARGET_CPU", - "BAZEL_TARGET_LIBC", - "BAZEL_TARGET_SYSTEM", - "BAZEL_VC", - "BAZEL_VS", - "CC", - "CC_CONFIGURE_DEBUG", - "CC_TOOLCHAIN_NAME", - "CPLUS_INCLUDE_PATH", - "CUDA_COMPUTE_CAPABILITIES", - "CUDA_PATH", - "HOMEBREW_RUBY_PATH", - "NO_WHOLE_ARCHIVE_OPTION", - "USE_DYNAMIC_CRT", - "USE_MSVC_WRAPPER", - "SYSTEMROOT", - "VS90COMNTOOLS", - "VS100COMNTOOLS", - "VS110COMNTOOLS", - "VS120COMNTOOLS", - "VS140COMNTOOLS"]) - - -def cc_configure(): - """A C++ configuration rules that generate the crosstool file.""" - cc_autoconf(name="local_config_cc") - native.bind(name="cc_toolchain", actual="@local_config_cc//:toolchain") diff --git a/starlark/src/syntax/testcases/cgo.bzl b/starlark/src/syntax/testcases/cgo.bzl deleted file mode 100644 index 1d9d3ad7..00000000 --- a/starlark/src/syntax/testcases/cgo.bzl +++ /dev/null @@ -1,380 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -load("@io_bazel_rules_go//go/private:common.bzl", - "declare_file", - "split_srcs", - "join_srcs", - "pkg_dir", - "sets", - "to_set", -) -load("@io_bazel_rules_go//go/private:mode.bzl", - "get_mode", -) -load("@io_bazel_rules_go//go/private:providers.bzl", - "GoLibrary", - "sources", -) -load("@io_bazel_rules_go//go/private:actions/action.bzl", - "add_go_env", -) - -_CgoCodegen = provider() - -def _mangle(src): - src_stem, _, src_ext = src.path.rpartition('.') - mangled_stem = src_stem.replace('/', '_') - return mangled_stem, src_ext - -def _c_filter_options(options, blacklist): - return [opt for opt in options - if not any([opt.startswith(prefix) for prefix in blacklist])] - -def _select_archive(files): - """Selects a single archive from a list of files produced by a - static cc_library. - - In some configurations, cc_library can produce multiple files, and the - order isn't guaranteed, so we can't simply pick the first one. - """ - # list of file extensions in descending order or preference. - exts = [".pic.lo", ".lo", ".a"] - for ext in exts: - for f in files: - if f.basename.endswith(ext): - return f - fail("cc_library did not produce any files") - -def _cgo_codegen_impl(ctx): - go_toolchain = ctx.toolchains["@io_bazel_rules_go//go:toolchain"] - mode = get_mode(ctx, ctx.attr._go_toolchain_flags) - stdlib = go_toolchain.stdlib.get(ctx, go_toolchain, mode) - if not stdlib.cgo_tools: - fail("Go toolchain does not support cgo") - linkopts = ctx.attr.linkopts[:] - copts = stdlib.cgo_tools.c_options + ctx.attr.copts - deps = depset([], order="topological") - cgo_export_h = declare_file(ctx, path="_cgo_export.h") - cgo_export_c = declare_file(ctx, path="_cgo_export.c") - cgo_main = declare_file(ctx, path="_cgo_main.c") - cgo_types = declare_file(ctx, path="_cgo_gotypes.go") - out_dir = cgo_main.dirname - - cc = stdlib.cgo_tools.compiler_executable - args = ctx.actions.args() - add_go_env(args, stdlib, mode) - args.add(["-cc", str(cc), "-objdir", out_dir]) - - c_outs = [cgo_export_h, cgo_export_c] - go_outs = [cgo_types] - - source = split_srcs(ctx.files.srcs) - for src in source.headers: - copts.extend(['-iquote', src.dirname]) - for src in source.go: - mangled_stem, src_ext = _mangle(src) - gen_file = declare_file(ctx, path=mangled_stem + ".cgo1."+src_ext) - gen_c_file = declare_file(ctx, path=mangled_stem + ".cgo2.c") - go_outs.append(gen_file) - c_outs.append(gen_c_file) - args.add(["-src", gen_file.path + "=" + src.path]) - for src in source.asm: - mangled_stem, src_ext = _mangle(src) - gen_file = declare_file(ctx, path=mangled_stem + ".cgo1."+src_ext) - go_outs.append(gen_file) - args.add(["-src", gen_file.path + "=" + src.path]) - for src in source.c: - mangled_stem, src_ext = _mangle(src) - gen_file = declare_file(ctx, path=mangled_stem + ".cgo1."+src_ext) - c_outs.append(gen_file) - args.add(["-src", gen_file.path + "=" + src.path]) - - inputs = sets.union(ctx.files.srcs, go_toolchain.data.crosstool, stdlib.files, - *[d.cc.transitive_headers for d in ctx.attr.deps]) - deps = sets.union(deps, *[d.cc.libs for d in ctx.attr.deps]) - runfiles = ctx.runfiles(collect_data = True) - for d in ctx.attr.deps: - runfiles = runfiles.merge(d.data_runfiles) - copts.extend(['-D' + define for define in d.cc.defines]) - for inc in d.cc.include_directories: - copts.extend(['-I', inc]) - for inc in d.cc.quote_include_directories: - copts.extend(['-iquote', inc]) - for inc in d.cc.system_include_directories: - copts.extend(['-isystem', inc]) - for lib in d.cc.libs: - if lib.basename.startswith('lib') 
and lib.basename.endswith('.so'): - linkopts.extend(['-L', lib.dirname, '-l', lib.basename[3:-3]]) - else: - linkopts.append(lib.path) - linkopts.extend(d.cc.link_flags) - - # The first -- below is to stop the cgo from processing args, the - # second is an actual arg to forward to the underlying go tool - args.add(["--", "--"]) - args.add(copts) - ctx.actions.run( - inputs = inputs, - outputs = c_outs + go_outs + [cgo_main], - mnemonic = "CGoCodeGen", - progress_message = "CGoCodeGen %s" % ctx.label, - executable = go_toolchain.tools.cgo, - arguments = [args], - env = { - "CGO_LDFLAGS": " ".join(linkopts), - }, - ) - - return [ - _CgoCodegen( - go_files = to_set(go_outs), - main_c = to_set([cgo_main]), - deps = deps.to_list(), - exports = [cgo_export_h], - ), - DefaultInfo( - files = depset(), - runfiles = runfiles, - ), - OutputGroupInfo( - go_files = to_set(go_outs), - input_go_files = to_set(source.go + source.asm), - c_files = sets.union(c_outs, source.headers), - main_c = to_set([cgo_main]), - ), - ] - -_cgo_codegen = rule( - _cgo_codegen_impl, - attrs = { - "srcs": attr.label_list(allow_files = True), - "deps": attr.label_list( - allow_files = False, - providers = ["cc"], - ), - "copts": attr.string_list(), - "linkopts": attr.string_list(), - "_go_toolchain_flags": attr.label(default=Label("@io_bazel_rules_go//go/private:go_toolchain_flags")), - }, - toolchains = ["@io_bazel_rules_go//go:toolchain"], -) - -def _cgo_import_impl(ctx): - go_toolchain = ctx.toolchains["@io_bazel_rules_go//go:toolchain"] - mode = get_mode(ctx, ctx.attr._go_toolchain_flags) - stdlib = go_toolchain.stdlib.get(ctx, go_toolchain, mode) - out = declare_file(ctx, ext=".go") - args = ctx.actions.args() - add_go_env(args, stdlib, mode) - args.add([ - "-dynout", out, - "-dynimport", ctx.file.cgo_o, - "-src", ctx.files.sample_go_srcs[0], - ]) - ctx.actions.run( - inputs = [ - ctx.file.cgo_o, - ctx.files.sample_go_srcs[0], - ] + stdlib.files, - outputs = [out], - executable = go_toolchain.tools.cgo, - arguments = [args], - mnemonic = "CGoImportGen", - ) - return struct( - files = depset([out]), - ) - -_cgo_import = rule( - _cgo_import_impl, - attrs = { - "cgo_o": attr.label( - allow_files = True, - single_file = True, - ), - "sample_go_srcs": attr.label_list(allow_files = True), - "_go_toolchain_flags": attr.label(default=Label("@io_bazel_rules_go//go/private:go_toolchain_flags")), - }, - toolchains = ["@io_bazel_rules_go//go:toolchain"], -) -"""Generates symbol-import directives for cgo - -Args: - cgo_o: The loadable object to extract dynamic symbols from. - sample_go_src: A go source which is compiled together with the generated file. - The generated file will have the same Go package name as this file. - out: Destination of the generated codes. -""" - -def _pure(ctx, mode): - return mode.pure - -def _not_pure(ctx, mode): - return not mode.pure - -def _cgo_collect_info_impl(ctx): - codegen = ctx.attr.codegen[_CgoCodegen] - runfiles = ctx.runfiles(collect_data = True) - runfiles = runfiles.merge(ctx.attr.codegen.data_runfiles) - return [ - DefaultInfo(files = depset(), runfiles = runfiles), - sources.merge([ - sources.new( - srcs = ctx.files.gen_go_srcs, - runfiles = runfiles, - cgo_deps = ctx.attr.codegen[_CgoCodegen].deps, - cgo_exports = ctx.attr.codegen[_CgoCodegen].exports, - cgo_archive = _select_archive(ctx.files.lib), - want_coverage = ctx.coverage_instrumented(), #TODO: not all sources? 
- exclude = _pure, - ), - sources.new( - srcs = ctx.files.input_go_srcs, - runfiles = runfiles, - want_coverage = ctx.coverage_instrumented(), #TODO: not all sources? - exclude = _not_pure, - ), - ]), - ] - -_cgo_collect_info = rule( - _cgo_collect_info_impl, - attrs = { - "codegen": attr.label(mandatory = True, providers = [_CgoCodegen]), - "input_go_srcs": attr.label_list(mandatory = True, allow_files = [".go"]), - "gen_go_srcs": attr.label_list(mandatory = True, allow_files = [".go"]), - "lib": attr.label(mandatory = True, providers = ["cc"]), - "_go_toolchain_flags": attr.label(default=Label("@io_bazel_rules_go//go/private:go_toolchain_flags")), - }, -) -"""No-op rule that collects information from _cgo_codegen and cc_library -info into a GoSourceList provider for easy consumption.""" - -def setup_cgo_library(name, srcs, cdeps, copts, clinkopts): - # Apply build constraints to source files (both Go and C) but not to header - # files. Separate filtered Go and C sources. - - # Run cgo on the filtered Go files. This will split them into pure Go files - # and pure C files, plus a few other glue files. - base_dir = pkg_dir( - "external/" + REPOSITORY_NAME[1:] if len(REPOSITORY_NAME) > 1 else "", - PACKAGE_NAME) - copts = copts + ["-I", base_dir] - - cgo_codegen_name = name + ".cgo_codegen" - _cgo_codegen( - name = cgo_codegen_name, - srcs = srcs, - deps = cdeps, - copts = copts, - linkopts = clinkopts, - visibility = ["//visibility:private"], - ) - - select_go_files = name + ".select_go_files" - native.filegroup( - name = select_go_files, - srcs = [cgo_codegen_name], - output_group = "go_files", - visibility = ["//visibility:private"], - ) - - select_input_go_files = name + ".select_input_go_files" - native.filegroup( - name = select_input_go_files, - srcs = [cgo_codegen_name], - output_group = "input_go_files", - visibility = ["//visibility:private"], - ) - - select_c_files = name + ".select_c_files" - native.filegroup( - name = select_c_files, - srcs = [cgo_codegen_name], - output_group = "c_files", - visibility = ["//visibility:private"], - ) - - select_main_c = name + ".select_main_c" - native.filegroup( - name = select_main_c, - srcs = [cgo_codegen_name], - output_group = "main_c", - visibility = ["//visibility:private"], - ) - - # Compile C sources and generated files into a library. This will be linked - # into binaries that depend on this cgo_library. It will also be used - # in _cgo_.o. - platform_copts = select({ - "@io_bazel_rules_go//go/platform:darwin_amd64": [], - "@io_bazel_rules_go//go/platform:windows_amd64": ["-mthreads"], - "//conditions:default": ["-pthread"], - }) - platform_linkopts = platform_copts - - cgo_lib_name = name + ".cgo_c_lib" - native.cc_library( - name = cgo_lib_name, - srcs = [select_c_files], - deps = cdeps, - copts = copts + platform_copts + [ - # The generated thunks often contain unused variables. - "-Wno-unused-variable", - ], - linkopts = clinkopts + platform_linkopts, - linkstatic = 1, - # _cgo_.o needs all symbols because _cgo_import needs to see them. - alwayslink = 1, - visibility = ["//visibility:private"], - ) - - # Create a loadable object with no undefined references. cgo reads this - # when it generates _cgo_import.go. - cgo_o_name = name + "._cgo_.o" - native.cc_binary( - name = cgo_o_name, - srcs = [select_main_c], - deps = cdeps + [cgo_lib_name], - copts = copts, - linkopts = clinkopts, - visibility = ["//visibility:private"], - ) - - # Create a Go file which imports symbols from the C library. 
- cgo_import_name = name + ".cgo_import" - _cgo_import( - name = cgo_import_name, - cgo_o = cgo_o_name, - sample_go_srcs = [select_go_files], - visibility = ["//visibility:private"], - ) - - cgo_embed_name = name + ".cgo_embed" - _cgo_collect_info( - name = cgo_embed_name, - codegen = cgo_codegen_name, - input_go_srcs = [ - select_input_go_files, - ], - gen_go_srcs = [ - select_go_files, - cgo_import_name, - ], - lib = cgo_lib_name, - visibility = ["//visibility:private"], - ) - - return cgo_embed_name diff --git a/starlark/src/syntax/testcases/classpath.bzl b/starlark/src/syntax/testcases/classpath.bzl deleted file mode 100644 index 5b9242e1..00000000 --- a/starlark/src/syntax/testcases/classpath.bzl +++ /dev/null @@ -1,22 +0,0 @@ -def _classpath_collector(ctx): - all = set() - for d in ctx.attr.deps: - if hasattr(d, 'java'): - all += d.java.transitive_runtime_deps - all += d.java.compilation_info.runtime_classpath - elif hasattr(d, 'files'): - all += d.files - - as_strs = [c.path for c in all] - ctx.file_action(output= ctx.outputs.runtime, - content="\n".join(sorted(as_strs))) - -classpath_collector = rule( - attrs = { - "deps": attr.label_list(), - }, - outputs = { - "runtime": "%{name}.runtime_classpath", - }, - implementation = _classpath_collector, -) diff --git a/starlark/src/syntax/testcases/cm.bzl b/starlark/src/syntax/testcases/cm.bzl deleted file mode 100644 index 54d60d51..00000000 --- a/starlark/src/syntax/testcases/cm.bzl +++ /dev/null @@ -1,355 +0,0 @@ -load("//tools/bzl:genrule2.bzl", "genrule2") - -CM_CSS = [ - "lib/codemirror.css", - "addon/dialog/dialog.css", - "addon/merge/merge.css", - "addon/scroll/simplescrollbars.css", - "addon/search/matchesonscrollbar.css", - "addon/lint/lint.css", -] - -CM_JS = [ - "lib/codemirror.js", - "mode/meta.js", - "keymap/emacs.js", - "keymap/sublime.js", - "keymap/vim.js", -] - -CM_ADDONS = [ - "dialog/dialog.js", - "edit/closebrackets.js", - "edit/matchbrackets.js", - "edit/trailingspace.js", - "scroll/annotatescrollbar.js", - "scroll/simplescrollbars.js", - "search/jump-to-line.js", - "search/matchesonscrollbar.js", - "search/searchcursor.js", - "search/search.js", - "selection/mark-selection.js", - "mode/multiplex.js", - "mode/overlay.js", - "mode/simple.js", - "lint/lint.js", -] - -# Available themes must be enumerated here, -# in gerrit-extension-api/src/main/java/com/google/gerrit/extensions/client/Theme.java, -# in gerrit-gwtui/src/main/java/net/codemirror/theme/Themes.java -CM_THEMES = [ - "3024-day", - "3024-night", - "abcdef", - "ambiance", - "base16-dark", - "base16-light", - "bespin", - "blackboard", - "cobalt", - "colorforth", - "dracula", - "duotone-dark", - "duotone-light", - "eclipse", - "elegant", - "erlang-dark", - "hopscotch", - "icecoder", - "isotope", - "lesser-dark", - "liquibyte", - "material", - "mbo", - "mdn-like", - "midnight", - "monokai", - "neat", - "neo", - "night", - "paraiso-dark", - "paraiso-light", - "pastel-on-dark", - "railscasts", - "rubyblue", - "seti", - "solarized", - "the-matrix", - "tomorrow-night-bright", - "tomorrow-night-eighties", - "ttcn", - "twilight", - "vibrant-ink", - "xq-dark", - "xq-light", - "yeti", - "zenburn", -] - -# Available modes must be enumerated here, -# in gerrit-gwtui/src/main/java/net/codemirror/mode/Modes.java, -# gerrit-gwtui/src/main/java/net/codemirror/mode/ModeInfo.java, -# and in CodeMirror's own mode/meta.js script. 
-CM_MODES = [ - "apl", - "asciiarmor", - "asn.1", - "asterisk", - "brainfuck", - "clike", - "clojure", - "cmake", - "cobol", - "coffeescript", - "commonlisp", - "crystal", - "css", - "cypher", - "d", - "dart", - "diff", - "django", - "dockerfile", - "dtd", - "dylan", - "ebnf", - "ecl", - "eiffel", - "elm", - "erlang", - "factor", - "fcl", - "forth", - "fortran", - "gas", - "gfm", - "gherkin", - "go", - "groovy", - "haml", - "handlebars", - "haskell-literate", - "haskell", - "haxe", - "htmlembedded", - "htmlmixed", - "http", - "idl", - "javascript", - "jinja2", - "jsx", - "julia", - "livescript", - "lua", - "markdown", - "mathematica", - "mbox", - "mirc", - "mllike", - "modelica", - "mscgen", - "mumps", - "nginx", - "nsis", - "ntriples", - "octave", - "oz", - "pascal", - "pegjs", - "perl", - "php", - "pig", - "powershell", - "properties", - "protobuf", - "pug", - "puppet", - "python", - "q", - "r", - "rpm", - "rst", - "ruby", - "rust", - "sas", - "sass", - "scheme", - "shell", - "sieve", - "slim", - "smalltalk", - "smarty", - "solr", - "soy", - "sparql", - "spreadsheet", - "sql", - "stex", - "stylus", - "swift", - "tcl", - "textile", - "tiddlywiki", - "tiki", - "toml", - "tornado", - "troff", - "ttcn-cfg", - "ttcn", - "turtle", - "twig", - "vb", - "vbscript", - "velocity", - "verilog", - "vhdl", - "vue", - "webidl", - "xml", - "xquery", - "yacas", - "yaml-frontmatter", - "yaml", - "z80", -] - -CM_VERSION = "5.25.0" - -TOP = "META-INF/resources/webjars/codemirror/%s" % CM_VERSION - -TOP_MINIFIED = "META-INF/resources/webjars/codemirror-minified/%s" % CM_VERSION - -LICENSE = "//lib:LICENSE-codemirror-original" - -LICENSE_MINIFIED = "//lib:LICENSE-codemirror-minified" - -DIFF_MATCH_PATCH_VERSION = "20121119-1" - -DIFF_MATCH_PATCH_TOP = ("META-INF/resources/webjars/google-diff-match-patch/%s" % - DIFF_MATCH_PATCH_VERSION) - -def pkg_cm(): - for archive, suffix, top, license in [ - ('@codemirror_original//jar', '', TOP, LICENSE), - ('@codemirror_minified//jar', '_r', TOP_MINIFIED, LICENSE_MINIFIED) - ]: - # Main JavaScript and addons - genrule2( - name = 'cm' + suffix, - cmd = ' && '.join([ - "echo '/** @license' >$@", - 'unzip -p $(location %s) %s/LICENSE >>$@' % (archive, top), - "echo '*/' >>$@", - ] + - ['unzip -p $(location %s) %s/%s >>$@' % (archive, top, n) for n in CM_JS] + - ['unzip -p $(location %s) %s/addon/%s >>$@' % (archive, top, n) - for n in CM_ADDONS] - ), - tools = [archive], - outs = ['cm%s.js' % suffix], - ) - - # Main CSS - genrule2( - name = 'css' + suffix, - cmd = ' && '.join([ - "echo '/** @license' >$@", - 'unzip -p $(location %s) %s/LICENSE >>$@' % (archive, top), - "echo '*/' >>$@", - ] + - ['unzip -p $(location %s) %s/%s >>$@' % (archive, top, n) - for n in CM_CSS] - ), - tools = [archive], - outs = ['cm%s.css' % suffix], - ) - - # Modes - for n in CM_MODES: - genrule2( - name = 'mode_%s%s' % (n, suffix), - cmd = ' && '.join([ - "echo '/** @license' >$@", - 'unzip -p $(location %s) %s/LICENSE >>$@' % (archive, top), - "echo '*/' >>$@", - 'unzip -p $(location %s) %s/mode/%s/%s.js >>$@' % (archive, top, n, n), - ] - ), - tools = [archive], - outs = ['mode_%s%s.js' % (n, suffix)], - ) - - # Themes - for n in CM_THEMES: - genrule2( - name = 'theme_%s%s' % (n, suffix), - cmd = ' && '.join([ - "echo '/** @license' >$@", - 'unzip -p $(location %s) %s/LICENSE >>$@' % (archive, top), - "echo '*/' >>$@", - 'unzip -p $(location %s) %s/theme/%s.css >>$@' % (archive, top, n) - ] - ), - tools = [archive], - outs = ['theme_%s%s.css' % (n, suffix)], - ) - - # Merge Addon bundled with 
diff-match-patch - genrule2( - name = 'addon_merge_with_diff_match_patch%s' % suffix, - cmd = ' && '.join([ - "echo '/** @license' >$@", - 'unzip -p $(location %s) %s/LICENSE >>$@' % (archive, top), - "echo '*/\n' >>$@", - "echo '// The google-diff-match-patch library is from https://repo1.maven.org/maven2/org/webjars/google-diff-match-patch/%s/google-diff-match-patch-%s.jar\n' >> $@" % (DIFF_MATCH_PATCH_VERSION, DIFF_MATCH_PATCH_VERSION), - "echo '/** @license' >>$@", - "echo 'LICENSE-Apache2.0' >>$@", - "echo '*/' >>$@", - 'unzip -p $(location @diff_match_patch//jar) %s/diff_match_patch.js >>$@' % DIFF_MATCH_PATCH_TOP, - "echo ';' >> $@", - 'unzip -p $(location %s) %s/addon/merge/merge.js >>$@' % (archive, top) - ] - ), - tools = [ - '@diff_match_patch//jar', - # dependency just for license tracking. - ':diff-match-patch', - archive, - "//lib:LICENSE-Apache2.0", - ], - outs = ['addon_merge_with_diff_match_patch%s.js' % suffix], - ) - - # Jar packaging - genrule2( - name = 'jar' + suffix, - cmd = ' && '.join([ - 'cd $$TMP', - 'mkdir -p net/codemirror/{addon,lib,mode,theme}', - 'cp $$ROOT/$(location :css%s) net/codemirror/lib/cm.css' % suffix, - 'cp $$ROOT/$(location :cm%s) net/codemirror/lib/cm.js' % suffix] - + ['cp $$ROOT/$(location :mode_%s%s) net/codemirror/mode/%s.js' % (n, suffix, n) - for n in CM_MODES] - + ['cp $$ROOT/$(location :theme_%s%s) net/codemirror/theme/%s.css' % (n, suffix, n) - for n in CM_THEMES] - + ['cp $$ROOT/$(location :addon_merge_with_diff_match_patch%s) net/codemirror/addon/merge_bundled.js' % suffix] - + ['zip -qr $$ROOT/$@ net/codemirror/{addon,lib,mode,theme}']), - tools = [ - ':addon_merge_with_diff_match_patch%s' % suffix, - ':cm%s' % suffix, - ':css%s' % suffix, - ] + [ - ':mode_%s%s' % (n, suffix) for n in CM_MODES - ] + [ - ':theme_%s%s' % (n, suffix) for n in CM_THEMES - ], - outs = ['codemirror%s.jar' % suffix], - ) - - native.java_import( - name = 'codemirror' + suffix, - jars = [':jar%s' % suffix], - visibility = ['//visibility:public'], - data = [license], - ) diff --git a/starlark/src/syntax/testcases/common.bzl b/starlark/src/syntax/testcases/common.bzl deleted file mode 100644 index 0e3329c2..00000000 --- a/starlark/src/syntax/testcases/common.bzl +++ /dev/null @@ -1,157 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load("@io_bazel_rules_go//go/private:providers.bzl", "GoLibrary") -load("//go/private:skylib/lib/dicts.bzl", "dicts") -load("//go/private:skylib/lib/paths.bzl", "paths") -load("//go/private:skylib/lib/sets.bzl", "sets") -load("//go/private:skylib/lib/shell.bzl", "shell") -load("//go/private:skylib/lib/structs.bzl", "structs") -load("@io_bazel_rules_go//go/private:mode.bzl", "mode_string") - -DEFAULT_LIB = "go_default_library" -VENDOR_PREFIX = "/vendor/" - -go_exts = [ - ".go", -] - -asm_exts = [ - ".s", - ".S", - ".h", # may be included by .s -] - -# be consistent to cc_library. 
-hdr_exts = [ - ".h", - ".hh", - ".hpp", - ".hxx", - ".inc", -] - -c_exts = [ - ".c", - ".cc", - ".cxx", - ".cpp", - ".h", - ".hh", - ".hpp", - ".hxx", -] - -go_filetype = FileType(go_exts + asm_exts) -cc_hdr_filetype = FileType(hdr_exts) - -# Extensions of files we can build with the Go compiler or with cc_library. -# This is a subset of the extensions recognized by go/build. -cgo_filetype = FileType(go_exts + asm_exts + c_exts) - -def pkg_dir(workspace_root, package_name): - """Returns a relative path to a package directory from the root of the - sandbox. Useful at execution-time or run-time.""" - if workspace_root and package_name: - return workspace_root + "/" + package_name - if workspace_root: - return workspace_root - if package_name: - return package_name - return "." - -def split_srcs(srcs): - go = [] - headers = [] - asm = [] - c = [] - for src in srcs: - if any([src.basename.endswith(ext) for ext in go_exts]): - go.append(src) - elif any([src.basename.endswith(ext) for ext in hdr_exts]): - headers.append(src) - elif any([src.basename.endswith(ext) for ext in asm_exts]): - asm.append(src) - elif any([src.basename.endswith(ext) for ext in c_exts]): - c.append(src) - else: - fail("Unknown source type {0}".format(src.basename)) - return struct( - go = go, - headers = headers, - asm = asm, - c = c, - ) - -def join_srcs(source): - return source.go + source.headers + source.asm + source.c - - -def go_importpath(ctx): - """Returns the expected importpath of the go_library being built. - - Args: - ctx: The skylark Context - - Returns: - Go importpath of the library - """ - path = ctx.attr.importpath - if path != "": - return path - path = ctx.attr._go_prefix.go_prefix - if path.endswith("/"): - path = path[:-1] - if ctx.label.package: - path += "/" + ctx.label.package - if ctx.label.name != DEFAULT_LIB and not path.endswith(ctx.label.name): - path += "/" + ctx.label.name - if path.rfind(VENDOR_PREFIX) != -1: - path = path[len(VENDOR_PREFIX) + path.rfind(VENDOR_PREFIX):] - if path[0] == "/": - path = path[1:] - return path - -def env_execute(ctx, arguments, environment = {}, **kwargs): - """env_executes a command in a repository context. It prepends "env -i" - to "arguments" before calling "ctx.execute". - - Variables that aren't explicitly mentioned in "environment" - are removed from the environment. This should be preferred to "ctx.execut"e - in most situations. - """ - env_args = ["env", "-i"] - environment = dict(environment) - for var in ["TMP", "TMPDIR"]: - if var in ctx.os.environ and not var in environment: - environment[var] = ctx.os.environ[var] - for k, v in environment.items(): - env_args.append("%s=%s" % (k, v)) - return ctx.execute(env_args + arguments, **kwargs) - -def to_set(v): - if type(v) == "depset": - fail("Do not pass a depset to to_set") - return depset(v) - -def declare_file(ctx, path="", ext="", mode=None, name = ""): - filename = "" - if mode: - filename += mode_string(mode) + "/" - filename += name if name else ctx.label.name - if path: - filename += "~/" + path - if ext: - filename += ext - return ctx.actions.declare_file(filename) \ No newline at end of file diff --git a/starlark/src/syntax/testcases/compile.bzl b/starlark/src/syntax/testcases/compile.bzl deleted file mode 100644 index 32041ea3..00000000 --- a/starlark/src/syntax/testcases/compile.bzl +++ /dev/null @@ -1,103 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load("@io_bazel_rules_go//go/private:common.bzl", - "sets", -) -load("@io_bazel_rules_go//go/private:actions/action.bzl", - "add_go_env", - "bootstrap_action", -) - -def _importpath(l): - return [v.data.importpath for v in l] - -def _searchpath(l): - return [v.data.searchpath for v in l] - -def emit_compile(ctx, go_toolchain, - sources = None, - importpath = "", - archives = [], - mode = None, - out_lib = None, - gc_goopts = []): - """See go/toolchains.rst#compile for full documentation.""" - - if sources == None: fail("sources is a required parameter") - if out_lib == None: fail("out_lib is a required parameter") - if mode == None: fail("mode is a required parameter") - - # Add in any mode specific behaviours - if mode.race: - gc_goopts = gc_goopts + ["-race"] - if mode.msan: - gc_goopts = gc_goopts + ["-msan"] - - gc_goopts = [ctx.expand_make_variables("gc_goopts", f, {}) for f in gc_goopts] - inputs = sets.union(sources, [go_toolchain.data.package_list]) - go_sources = [s.path for s in sources if not s.basename.startswith("_cgo")] - cgo_sources = [s.path for s in sources if s.basename.startswith("_cgo")] - - inputs = sets.union(inputs, [archive.data.file for archive in archives]) - - stdlib = go_toolchain.stdlib.get(ctx, go_toolchain, mode) - inputs = sets.union(inputs, stdlib.files) - - args = ctx.actions.args() - add_go_env(args, stdlib, mode) - args.add(["-package_list", go_toolchain.data.package_list]) - args.add(go_sources, before_each="-src") - args.add(archives, before_each="-dep", map_fn=_importpath) - args.add(archives, before_each="-I", map_fn=_searchpath) - args.add(["-o", out_lib, "-trimpath", ".", "-I", "."]) - args.add(["--"]) - if importpath: - args.add(["-p", importpath]) - args.add(gc_goopts) - args.add(go_toolchain.flags.compile) - if mode.debug: - args.add(["-N", "-l"]) - args.add(cgo_sources) - ctx.actions.run( - inputs = inputs, - outputs = [out_lib], - mnemonic = "GoCompile", - executable = go_toolchain.tools.compile, - arguments = [args], - ) - -def bootstrap_compile(ctx, go_toolchain, - sources = None, - importpath = "", - archives = [], - mode = None, - out_lib = None, - gc_goopts = []): - """See go/toolchains.rst#compile for full documentation.""" - - if sources == None: fail("sources is a required parameter") - if out_lib == None: fail("out_lib is a required parameter") - if archives: fail("compile does not accept deps in bootstrap mode") - if mode == None: fail("mode is a required parameter") - - args = ["tool", "compile", "-o", out_lib.path] - args.extend(gc_goopts) - args.extend([s.path for s in sources]) - bootstrap_action(ctx, go_toolchain, mode, - inputs = sources, - outputs = [out_lib], - mnemonic = "GoCompile", - arguments = args, - ) diff --git a/starlark/src/syntax/testcases/compiler.bzl b/starlark/src/syntax/testcases/compiler.bzl deleted file mode 100644 index 5d55123f..00000000 --- a/starlark/src/syntax/testcases/compiler.bzl +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright 2017 The Bazel Authors. 
All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load("@io_bazel_rules_go//go/private:common.bzl", - "declare_file", - "sets", -) -load("@io_bazel_rules_go//go/private:providers.bzl", - "GoLibrary", -) - -GoProtoCompiler = provider() - -def go_proto_compile(ctx, compiler, proto, imports, importpath): - go_srcs = [] - outpath = None - for src in proto.direct_sources: - out = declare_file(ctx, path=importpath+"/"+src.basename[:-len(".proto")], ext=compiler.suffix) - go_srcs.append(out) - if outpath == None: - outpath = out.dirname[:-len(importpath)] - args = ctx.actions.args() - args.add([ - "--protoc", compiler.protoc, - "--importpath", importpath, - "--out_path", outpath, - "--plugin", compiler.plugin, - ]) - args.add(compiler.options, before_each = "--option") - args.add(proto.transitive_descriptor_sets, before_each = "--descriptor_set") - args.add(go_srcs, before_each = "--expected") - args.add(imports, before_each = "--import") - args.add(proto.direct_sources, map_fn=_all_proto_paths) - ctx.actions.run( - inputs = sets.union([ - compiler.go_protoc, - compiler.protoc, - compiler.plugin, - ], proto.transitive_descriptor_sets), - outputs = go_srcs, - progress_message = "Generating into %s" % go_srcs[0].dirname, - mnemonic = "GoProtocGen", - executable = compiler.go_protoc, - arguments = [args], - ) - return go_srcs - -def _all_proto_paths(protos): - return [_proto_path(proto) for proto in protos] - -def _proto_path(proto): - """ - The proto path is not really a file path - It's the path to the proto that was seen when the descriptor file was generated. 
- """ - path = proto.path - root = proto.root.path - ws = proto.owner.workspace_root - if path.startswith(root): path = path[len(root):] - if path.startswith("/"): path = path[1:] - if path.startswith(ws): path = path[len(ws):] - if path.startswith("/"): path = path[1:] - return path - - -def _go_proto_compiler_impl(ctx): - return [GoProtoCompiler( - deps = ctx.attr.deps, - compile = go_proto_compile, - options = ctx.attr.options, - suffix = ctx.attr.suffix, - go_protoc = ctx.file._go_protoc, - protoc = ctx.file._protoc, - plugin = ctx.file.plugin, - )] - -go_proto_compiler = rule( - _go_proto_compiler_impl, - attrs = { - "deps": attr.label_list(providers = [GoLibrary]), - "options": attr.string_list(), - "suffix": attr.string(default = ".pb.go"), - "plugin": attr.label( - allow_files = True, - single_file = True, - executable = True, - cfg = "host", - default = Label("@com_github_golang_protobuf//protoc-gen-go"), - ), - "_go_protoc": attr.label( - allow_files=True, - single_file=True, - executable = True, - cfg = "host", - default=Label("@io_bazel_rules_go//go/tools/builders:go-protoc"), - ), - "_protoc": attr.label( - allow_files = True, - single_file = True, - executable = True, - cfg = "host", - default = Label("@com_github_google_protobuf//:protoc"), - ), - } -) diff --git a/starlark/src/syntax/testcases/config.bzl b/starlark/src/syntax/testcases/config.bzl deleted file mode 100644 index d5575097..00000000 --- a/starlark/src/syntax/testcases/config.bzl +++ /dev/null @@ -1,17 +0,0 @@ -ADMIN_USERS = [ - "dmarting@google.com", - "dslomov@google.com", - "laszlocsomor@google.com", - "lberki@google.com", - "pcloudy@google.com", - "yueg@google.com", - "jcater@google.com", - "aehlig@google.com", - "elenairina@google.com", - "hlopko@google.com", - "vladmos@google.com", - "fisherii@google.com", - "philwo@google.com", - "buchgr@google.com", - "davidstanke@google.com", -] diff --git a/starlark/src/syntax/testcases/container.bzl b/starlark/src/syntax/testcases/container.bzl deleted file mode 100644 index 19abf44a..00000000 --- a/starlark/src/syntax/testcases/container.bzl +++ /dev/null @@ -1,146 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Rules for manipulation container images.""" - -load("//container:image.bzl", "container_image", "image") -load("//container:bundle.bzl", "container_bundle") -load("//container:flatten.bzl", "container_flatten") -load("//container:import.bzl", "container_import") -load("//container:pull.bzl", "container_pull") -load("//container:push.bzl", "container_push") - -container = struct( - image = image, -) - -# The release of the github.com/google/containerregistry to consume. 
-CONTAINERREGISTRY_RELEASE = "v0.0.19" - -def repositories(): - """Download dependencies of container rules.""" - excludes = native.existing_rules().keys() - - if "puller" not in excludes: - native.http_file( - name = "puller", - url = ("https://storage.googleapis.com/containerregistry-releases/" + - CONTAINERREGISTRY_RELEASE + "/puller.par"), - sha256 = "9a51a39a2ddaf13c62e5eab32385eb5b987899e526ce5b5e8b22b0fa6a09b229", - executable = True, - ) - - if "importer" not in excludes: - native.http_file( - name = "importer", - url = ("https://storage.googleapis.com/containerregistry-releases/" + - CONTAINERREGISTRY_RELEASE + "/importer.par"), - sha256 = "03de18f0bec9c81f0c969b8da65935b4658538ef56f6c9952262e899b4cbd84b", - executable = True, - ) - - if "containerregistry" not in excludes: - native.git_repository( - name = "containerregistry", - remote = "https://github.com/google/containerregistry.git", - tag = CONTAINERREGISTRY_RELEASE, - ) - - # TODO(mattmoor): Remove all of this (copied from google/containerregistry) - # once transitive workspace instantiation lands. - if "httplib2" not in excludes: - # TODO(mattmoor): Is there a clean way to override? - native.new_http_archive( - name = "httplib2", - url = "https://codeload.github.com/httplib2/httplib2/tar.gz/v0.10.3", - sha256 = "d1bee28a68cc665c451c83d315e3afdbeb5391f08971dcc91e060d5ba16986f1", - strip_prefix = "httplib2-0.10.3/python2/httplib2/", - type = "tar.gz", - build_file_content = """ -py_library( - name = "httplib2", - srcs = glob(["**/*.py"]), - data = ["cacerts.txt"], - visibility = ["//visibility:public"] -)""", - ) - - # Used by oauth2client - if "six" not in excludes: - # TODO(mattmoor): Is there a clean way to override? - native.new_http_archive( - name = "six", - url = "https://pypi.python.org/packages/source/s/six/six-1.9.0.tar.gz", - sha256 = "e24052411fc4fbd1f672635537c3fc2330d9481b18c0317695b46259512c91d5", - strip_prefix = "six-1.9.0/", - type = "tar.gz", - build_file_content = """ -# Rename six.py to __init__.py -genrule( - name = "rename", - srcs = ["six.py"], - outs = ["__init__.py"], - cmd = "cat $< >$@", -) -py_library( - name = "six", - srcs = [":__init__.py"], - visibility = ["//visibility:public"], -)""" - ) - - # Used for authentication in containerregistry - if "oauth2client" not in excludes: - # TODO(mattmoor): Is there a clean way to override? - native.new_http_archive( - name = "oauth2client", - url = "https://codeload.github.com/google/oauth2client/tar.gz/v4.0.0", - sha256 = "7230f52f7f1d4566a3f9c3aeb5ffe2ed80302843ce5605853bee1f08098ede46", - strip_prefix = "oauth2client-4.0.0/oauth2client/", - type = "tar.gz", - build_file_content = """ -py_library( - name = "oauth2client", - srcs = glob(["**/*.py"]), - visibility = ["//visibility:public"], - deps = [ - "@httplib2//:httplib2", - "@six//:six", - ] -)""" - ) - - # Used for parallel execution in containerregistry - if "concurrent" not in excludes: - # TODO(mattmoor): Is there a clean way to override? - native.new_http_archive( - name = "concurrent", - url = "https://codeload.github.com/agronholm/pythonfutures/tar.gz/3.0.5", - sha256 = "a7086ddf3c36203da7816f7e903ce43d042831f41a9705bc6b4206c574fcb765", - strip_prefix = "pythonfutures-3.0.5/concurrent/", - type = "tar.gz", - build_file_content = """ -py_library( - name = "concurrent", - srcs = glob(["**/*.py"]), - visibility = ["//visibility:public"] -)""" - ) - - # For packaging python tools. 
- if "subpar" not in excludes: - native.git_repository( - name = "subpar", - remote = "https://github.com/google/subpar", - commit = "7e12cc130eb8f09c8cb02c3585a91a4043753c56", - ) diff --git a/starlark/src/syntax/testcases/cover.bzl b/starlark/src/syntax/testcases/cover.bzl deleted file mode 100644 index 0598abae..00000000 --- a/starlark/src/syntax/testcases/cover.bzl +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load("@io_bazel_rules_go//go/private:actions/action.bzl", - "add_go_env", -) -load("@io_bazel_rules_go//go/private:providers.bzl", - "GoSource", - "GoSourceList", -) -load("@io_bazel_rules_go//go/private:common.bzl", - "declare_file", - "structs", -) - -def emit_cover(ctx, go_toolchain, - source = None, - mode = None, - importpath = ""): - """See go/toolchains.rst#cover for full documentation.""" - - if source == None: fail("source is a required parameter") - if mode == None: fail("mode is a required parameter") - if not importpath: fail("importpath is a required parameter") - - stdlib = go_toolchain.stdlib.get(ctx, go_toolchain, mode) - - covered = [] - cover_vars = [] - for s in source.entries: - if not s.want_coverage: - covered.append(s) - continue - outputs = [] - for src in s.srcs: - if not src.basename.endswith(".go"): - outputs.append(src) - continue - cover_var = "Cover_" + src.basename[:-3].replace("-", "_").replace(".", "_") - cover_vars.append("{}={}={}".format(cover_var, src.short_path, importpath)) - out = declare_file(ctx, path=cover_var, ext='.cover.go') - outputs.append(out) - args = ctx.actions.args() - add_go_env(args, stdlib, mode) - args.add(["--", "--mode=set", "-var=%s" % cover_var, "-o", out, src]) - ctx.actions.run( - inputs = [src] + stdlib.files, - outputs = [out], - mnemonic = "GoCover", - executable = go_toolchain.tools.cover, - arguments = [args], - ) - - members = structs.to_dict(s) - members["srcs"] = outputs - covered.append(GoSource(**members)) - return GoSourceList(entries=covered), cover_vars diff --git a/starlark/src/syntax/testcases/csharp.bzl b/starlark/src/syntax/testcases/csharp.bzl deleted file mode 100644 index 4fe0faa7..00000000 --- a/starlark/src/syntax/testcases/csharp.bzl +++ /dev/null @@ -1,600 +0,0 @@ -# Copyright 2015 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""CSharp bazel rules""" - -_MONO_UNIX_BIN = "/usr/local/bin/mono" - -# TODO(jeremy): Windows when it's available. 
- -def _make_csc_flag(flag_start, flag_name, flag_value=None): - return flag_start + flag_name + (":" + flag_value if flag_value else "") - -def _make_csc_deps(deps, extra_files=[]): - dlls = set() - refs = set() - transitive_dlls = set() - for dep in deps: - if hasattr(dep, "target_type"): - dep_type = getattr(dep, "target_type") - if dep_type == "exe": - fail("You can't use a binary target as a dependency", "deps") - if dep_type == "library": - dlls += [dep.out] - refs += [dep.name] - if dep_type == "library_set": - dlls += dep.out - refs += [d.basename for d in dep.out] - if dep.transitive_dlls: - transitive_dlls += dep.transitive_dlls - - return struct( - dlls = dlls + set(extra_files), - refs = refs, - transitive_dlls = transitive_dlls) - -def _get_libdirs(dlls, libdirs=[]): - return [dep.dirname for dep in dlls] + libdirs - -def _make_csc_arglist(ctx, output, depinfo, extra_refs=[]): - flag_start = ctx.attr._flag_start - args = [ - # /out: - _make_csc_flag(flag_start, "out", output.path), - # /target (exe for binary, library for lib, module for module) - _make_csc_flag(flag_start, "target", ctx.attr._target_type), - # /fullpaths - _make_csc_flag(flag_start, "fullpaths"), - # /warn - _make_csc_flag(flag_start, "warn", str(ctx.attr.warn)), - # /nologo - _make_csc_flag(flag_start, "nologo"), - ] - - # /modulename: only used for modules - libdirs = _get_libdirs(depinfo.dlls) - libdirs = _get_libdirs(depinfo.transitive_dlls, libdirs) - - # /lib:dir1,[dir1] - if libdirs: - args += [_make_csc_flag(flag_start, "lib", ",".join(list(libdirs)))] - - # /reference:filename[,filename2] - if depinfo.refs or extra_refs: - args += [_make_csc_flag(flag_start, "reference", - ",".join(list(depinfo.refs + extra_refs)))] - else: - args += extra_refs - - # /doc - if hasattr(ctx.outputs, "doc_xml"): - args += [_make_csc_flag(flag_start, "doc", ctx.outputs.doc_xml.path)] - - # /debug - debug = ctx.var.get("BINMODE", "") == "-dbg" - args += [_make_csc_flag(flag_start, "debug")] if debug else [] - - # /warnaserror - # TODO(jeremy): /define:name[;name2] - # TODO(jeremy): /resource:filename[,identifier[,accesibility-modifier]] - - # /main:class - if hasattr(ctx.attr, "main_class") and ctx.attr.main_class: - args += [_make_csc_flag(flag_start, "main", ctx.attr.main_class)] - - # TODO(jwall): /parallel - - return args - -_NUNIT_LAUNCHER_SCRIPT = """\ -#!/bin/bash - -if [[ -e "$0.runfiles" ]]; then - cd $0.runfiles/{workspace} -fi - -# Create top-level symlinks for lib files. -# TODO(jeremy): This is a gross and fragile hack. -# We should be able to do better than this. -for l in {libs}; do - if [[ ! -e $(basename $l) ]]; then - # Note: -f required because the symlink may exist - # even though its current target does not exist. - ln -s -f $l $(basename $l) - fi -done - -{mono_exe} {nunit_exe} {libs} "$@" -""" - -def _make_nunit_launcher(ctx, depinfo, output): - libs = ([d.short_path for d in depinfo.dlls] + - [d.short_path for d in depinfo.transitive_dlls]) - - content = _NUNIT_LAUNCHER_SCRIPT.format( - mono_exe=ctx.file.mono.short_path, - nunit_exe=ctx.files._nunit_exe[0].short_path, - libs=" ".join(list(set(libs))), - workspace=ctx.workspace_name) - - ctx.file_action(output=ctx.outputs.executable, content=content) - -_LAUNCHER_SCRIPT = """\ -#!/bin/bash - -set -e - -RUNFILES=$0.runfiles/{workspace} - -pushd $RUNFILES - -# Create top-level symlinks for .exe and lib files. -# TODO(jeremy): This is a gross and fragile hack. -# We should be able to do better than this. -if [[ ! 
-e $(basename {exe}) ]]; then - # Note: -f required because the symlink may exist - # even though its current target does not exist. - ln -s -f {exe} $(basename {exe}) -fi -for l in {libs}; do - if [[ ! -e $(basename {workspace}/$l) ]]; then - ln -s -f $l $(basename {workspace}/$l) - fi -done - -popd - -$RUNFILES/{mono_exe} $RUNFILES/$(basename {exe}) "$@" -""" - -def _make_launcher(ctx, depinfo, output): - libs = ([d.short_path for d in depinfo.dlls] + - [d.short_path for d in depinfo.transitive_dlls]) - - content = _LAUNCHER_SCRIPT.format(mono_exe=ctx.file.mono.path, - workspace=ctx.workspace_name, - exe=output.short_path, - libs=" ".join(libs)) - ctx.file_action(output=ctx.outputs.executable, content=content) - -def _csc_get_output(ctx): - output = None - if hasattr(ctx.outputs, "csc_lib"): - output = ctx.outputs.csc_lib - elif hasattr(ctx.outputs, "csc_exe"): - output = ctx.outputs.csc_exe - else: - fail("You must supply one of csc_lib or csc_exe") - return output - -def _csc_collect_inputs(ctx, extra_files=[]): - depinfo = _make_csc_deps(ctx.attr.deps, extra_files=extra_files) - inputs = (set(ctx.files.srcs) + depinfo.dlls + depinfo.transitive_dlls - + [ctx.file.csc]) - srcs = [src.path for src in ctx.files.srcs] - return struct(depinfo=depinfo, - inputs=inputs, - srcs=srcs) - -def _csc_compile_action(ctx, assembly, all_outputs, collected_inputs, - extra_refs=[]): - csc_args = _make_csc_arglist(ctx, assembly, collected_inputs.depinfo, - extra_refs=extra_refs) - command_script = " ".join([ctx.file.csc.path] + csc_args + - collected_inputs.srcs) - - ctx.action( - inputs = list(collected_inputs.inputs), - outputs = all_outputs, - command = command_script, - arguments = csc_args, - progress_message = ( - "Compiling " + ctx.label.package + ":" + ctx.label.name)) - -def _cs_runfiles(ctx, outputs, depinfo, add_mono=False): - mono_file = [] - if add_mono: - mono_file = [ctx.file.mono] - transitive_files = set(depinfo.dlls + depinfo.transitive_dlls + mono_file) or None - return ctx.runfiles( - files = outputs, - transitive_files = set(depinfo.dlls + depinfo.transitive_dlls + [ctx.file.mono]) or None) - -def _csc_compile_impl(ctx): - if hasattr(ctx.outputs, "csc_lib") and hasattr(ctx.outputs, "csc_exe"): - fail("exactly one of csc_lib and csc_exe must be defined") - - output = _csc_get_output(ctx) - outputs = [output] + ( - [ctx.outputs.doc_xml] if hasattr(ctx.outputs, "doc_xml") else []) - - collected = _csc_collect_inputs(ctx) - - depinfo = collected.depinfo - inputs = collected.inputs - srcs = collected.srcs - - runfiles = _cs_runfiles(ctx, outputs, depinfo) - - _csc_compile_action(ctx, output, outputs, collected) - - if hasattr(ctx.outputs, "csc_exe"): - _make_launcher(ctx, depinfo, output) - - return struct(name = ctx.label.name, - srcs = srcs, - target_type=ctx.attr._target_type, - out = output, - dlls = set([output]), - transitive_dlls = depinfo.dlls, - runfiles = runfiles) - -def _cs_nunit_run_impl(ctx): - if hasattr(ctx.outputs, "csc_lib") and hasattr(ctx.outputs, "csc_exe"): - fail("exactly one of csc_lib and csc_exe must be defined") - - output = _csc_get_output(ctx) - outputs = [output] + ( - [ctx.outputs.doc_xml] if hasattr(ctx.outputs, "doc_xml") else []) - outputs = outputs - - collected_inputs = _csc_collect_inputs(ctx, ctx.files._nunit_framework) - - depinfo = collected_inputs.depinfo - inputs = collected_inputs.inputs - srcs = collected_inputs.srcs - - runfiles = _cs_runfiles( - ctx, - outputs + ctx.files._nunit_exe + ctx.files._nunit_exe_libs, - depinfo) - - 
_csc_compile_action(ctx, output, outputs, collected_inputs, - extra_refs=["Nunit.Framework"]) - - _make_nunit_launcher(ctx, depinfo, output) - - return struct(name=ctx.label.name, - srcs=srcs, - target_type=ctx.attr._target_type, - out=output, - dlls = (set([output]) - if hasattr(ctx.outputs, "csc_lib") else None), - transitive_dlls = depinfo.dlls, - runfiles=runfiles) - -def _find_and_symlink(repository_ctx, binary, env_variable): - #repository_ctx.file("bin/empty") - if env_variable in repository_ctx.os.environ: - return repository_ctx.path(repository_ctx.os.environ[env_variable]) - else: - found_binary = repository_ctx.which(binary) - if found_binary == None: - fail("Cannot find %s. Either correct your path or set the " % binary + - "%s environment variable." % env_variable) - repository_ctx.symlink(found_binary, "bin/%s" % binary) - -def _csharp_autoconf(repository_ctx): - _find_and_symlink(repository_ctx, "mono", "MONO") - _find_and_symlink(repository_ctx, "mcs", "CSC") - toolchain_build = """\ -package(default_visibility = ["//visibility:public"]) -exports_files(["mono", "mcs"]) -""" - repository_ctx.file("bin/BUILD", toolchain_build) - -_COMMON_ATTRS = { - # configuration fragment that specifies - "_flag_start": attr.string(default="-"), - # code dependencies for this rule. - # all dependencies must provide an out field. - "deps": attr.label_list(providers=["out", "target_type"]), - # source files for this target. - "srcs": attr.label_list(allow_files = FileType([".cs", ".resx"])), - # resources to use as dependencies. - # TODO(jeremy): "resources_deps": attr.label_list(allow_files=True), - # TODO(jeremy): # name of the module if you are creating a module. - # TODO(jeremy): "modulename": attri.string(), - # warn level to use - "warn": attr.int(default=4), - # define preprocessor symbols. - # TODO(jeremy): "define": attr.string_list(), - # The mono binary and csharp compiler. - "mono": attr.label( - default = Label("@mono//bin:mono"), - allow_files = True, - single_file = True, - executable = True, - cfg = "host", - ), - "csc": attr.label( - default = Label("@mono//bin:mcs"), - allow_files = True, - single_file = True, - executable = True, - cfg = "host", - ), -} - -_LIB_ATTRS = { - "_target_type": attr.string(default="library") -} - -_NUGET_ATTRS = { - "srcs": attr.label_list(allow_files = FileType([".dll"])), - "_target_type": attr.string(default="library_set") -} - -_EXE_ATTRS = { - "_target_type": attr.string(default="exe"), - # main class to use as entry point. - "main_class": attr.string(), -} - -_NUNIT_ATTRS = { - "_nunit_exe": attr.label(default=Label("@nunit//:nunit_exe"), - single_file=True), - "_nunit_framework": attr.label(default=Label("@nunit//:nunit_framework")), - "_nunit_exe_libs": attr.label(default=Label("@nunit//:nunit_exe_libs")), -} - -_LIB_OUTPUTS = { - "csc_lib": "%{name}.dll", - "doc_xml": "%{name}.xml", -} - -_BIN_OUTPUTS = { - "csc_exe": "%{name}.exe", -} - -csharp_library = rule( - implementation = _csc_compile_impl, - attrs = dict(_COMMON_ATTRS.items() + _LIB_ATTRS.items()), - outputs = _LIB_OUTPUTS, -) -"""Builds a C# .NET library and its corresponding documentation. - -Args: - name: A unique name for this rule. - srcs: C# `.cs` or `.resx` files. - deps: Dependencies for this rule - warn: Compiler warning level for this library. (Defaults to 4). - csc: Override the default C# compiler. - - **Note:** This attribute may be removed in future versions. 
-""" - -csharp_binary = rule( - implementation = _csc_compile_impl, - attrs = dict(_COMMON_ATTRS.items() + _EXE_ATTRS.items()), - outputs = _BIN_OUTPUTS, - executable = True, -) -"""Builds a C# .NET binary. - -Args: - name: A unique name for this rule. - srcs: C# `.cs` or `.resx` files. - deps: Dependencies for this rule - main_class: Name of class with `main()` method to use as entry point. - warn: Compiler warning level for this library. (Defaults to 4). - csc: Override the default C# compiler. - - **Note:** This attribute may be removed in future versions. -""" - -csharp_nunit_test = rule( - implementation = _cs_nunit_run_impl, - executable = True, - attrs = dict(_COMMON_ATTRS.items() + _LIB_ATTRS.items() + - _NUNIT_ATTRS.items()), - outputs = _LIB_OUTPUTS, - test = True, -) -"""Builds a C# .NET test binary that uses the [NUnit](http://nunit.org) unit -testing framework. - -Args: - name: A unique name for this rule. - srcs: C# `.cs` or `.resx` files. - deps: Dependencies for this rule - warn: Compiler warning level for this library. (Defaults to 4). - csc: Override the default C# compiler. - - **Note:** This attribute may be removed in future versions. -""" - -def _dll_import_impl(ctx): - inputs = set(ctx.files.srcs) - return struct( - name = ctx.label.name, - target_type = ctx.attr._target_type, - out = inputs, - dlls = inputs, - transitive_dlls = set([]), - ) - -dll_import = rule( - implementation = _dll_import_impl, - attrs = _NUGET_ATTRS, -) - -def _nuget_package_impl(repository_ctx, - build_file = None, - build_file_content = None): - # figure out the output_path - package = repository_ctx.attr.package - output_dir = repository_ctx.path("") - - mono = repository_ctx.path(repository_ctx.attr.mono_exe) - nuget = repository_ctx.path(repository_ctx.attr.nuget_exe) - - # assemble our nuget command - nuget_cmd = [ - mono, - "--config", "%s/../etc/mono/config" % mono.dirname, - nuget, - "install", - "-Version", repository_ctx.attr.version, - "-OutputDirectory", output_dir, - ] - # add the sources from our source list to the command - for source in repository_ctx.attr.package_sources: - nuget_cmd += ["-Source", source] - - # Lastly we add the nuget package name. - nuget_cmd += [repository_ctx.attr.package] - # execute nuget download. - result = repository_ctx.execute(nuget_cmd) - if result.return_code: - fail("Nuget command failed: %s (%s)" % (result.stderr, " ".join(nuget_cmd))) - - if build_file_content: - repository_ctx.file("BUILD", build_file_content) - elif build_file: - repository_ctx.symlink(repository_ctx.path(build_file), "BUILD") - else: - tpl_file = Label("//dotnet:NUGET_BUILD.tpl") - # add the BUILD file - repository_ctx.template( - "BUILD", - tpl_file, - {"%{package}": repository_ctx.name, - "%{output_dir}": "%s" % output_dir}) - -_nuget_package_attrs = { - # Sources to download the nuget packages from - "package_sources":attr.string_list(), - # The name of the nuget package - "package":attr.string(mandatory=True), - # The version of the nuget package - "version":attr.string(mandatory=True), - # Reference to the mono binary - "mono_exe":attr.label( - executable=True, - default=Label("@mono//bin:mono"), - cfg="host", - ), - # Reference to the nuget.exe file - "nuget_exe":attr.label( - default=Label("@nuget//:nuget.exe"), - ), -} - -nuget_package = repository_rule( - implementation=_nuget_package_impl, - attrs=_nuget_package_attrs, -) -"""Fetches a nuget package as an external dependency. - -Args: - package_sources: list of sources to use for nuget package feeds. 
- package: name of the nuget package. - version: version of the nuget package (e.g. 0.1.2) - mono_exe: optional label to the mono executable. - nuget_exe: optional label to the nuget.exe file. -""" - -def _new_nuget_package_impl(repository_ctx): - build_file = repository_ctx.attr.build_file - build_file_content = repository_ctx.attr.build_file_content - if not (build_file_content or build_file): - fail("build_file or build_file_content is required") - _nuget_package_impl(repository_ctx, build_file, build_file_content) - -new_nuget_package = repository_rule( - implementation=_new_nuget_package_impl, - attrs=_nuget_package_attrs + { - "build_file": attr.label( - allow_files = True, - ), - "build_file_content": attr.string(), - }) -"""Fetches a nuget package as an external dependency with custom BUILD content. - -Args: - package_sources: list of sources to use for nuget package feeds. - package: name of the nuget package. - version: version of the nuget package (e.g. 0.1.2) - mono_exe: optional label to the mono executable. - nuget_exe: optional label to the nuget.exe file. - build_file: label to the BUILD file. - build_file_content: content for the BUILD file. -""" - -csharp_autoconf = repository_rule( - implementation = _csharp_autoconf, - local = True, -) - -def _mono_osx_repository_impl(repository_ctx): - download_output = repository_ctx.path("") - # download the package - repository_ctx.download_and_extract( - "http://bazel-mirror.storage.googleapis.com/download.mono-project.com/archive/4.2.3/macos-10-x86/MonoFramework-MDK-4.2.3.4.macos10.xamarin.x86.tar.gz", - download_output, - "a7afb92d4a81f17664a040c8f36147e57a46bb3c33314b73ec737ad73608e08b", - "", "mono") - - # now we create the build file. - toolchain_build = """ -package(default_visibility = ["//visibility:public"]) -exports_files(["mono", "mcs"]) -""" - repository_ctx.file("bin/BUILD", toolchain_build) - -def _mono_repository_impl(repository_ctx): - use_local = repository_ctx.os.environ.get( - "RULES_DOTNET_USE_LOCAL_MONO", repository_ctx.attr.use_local) - if use_local: - _csharp_autoconf(repository_ctx) - elif repository_ctx.os.name.find("mac") != -1: - _mono_osx_repository_impl(repository_ctx) - else: - fail("Unsupported operating system: %s" % repository_ctx.os.name) - -mono_package = repository_rule( - implementation = _mono_repository_impl, - attrs = { - "use_local": attr.bool(default=False), - }, - local = True, -) - -def csharp_repositories(use_local_mono=False): - """Adds the repository rules needed for using the C# rules.""" - - native.new_http_archive( - name = "nunit", - url = "http://bazel-mirror.storage.googleapis.com/github.com/nunit/nunitv2/releases/download/2.6.4/NUnit-2.6.4.zip", - sha256 = "1bd925514f31e7729ccde40a38a512c2accd86895f93465f3dfe6d0b593d7170", - type = "zip", - # This is a little weird but is necessary for the build file reference to - # work when Workspaces import this using a repository rule. 
- build_file = str(Label("//dotnet:nunit.BUILD")), - ) - - native.new_http_archive( - name = "nuget", - url = "https://github.com/mono/nuget-binary/archive/0811ba888a80aaff66a93a4c98567ce904ab2663.zip", # Sept 6, 2016 - sha256 = "28323d23b7e6e02d3ba8892f525a1457ad23adb7e3a48908d37c1b5ae37519f6", - strip_prefix = "nuget-binary-0811ba888a80aaff66a93a4c98567ce904ab2663", - type = "zip", - build_file_content = """ - package(default_visibility = ["//visibility:public"]) - exports_files(["nuget.exe"]) - """ - ) - - mono_package(name="mono", use_local=use_local_mono) diff --git a/starlark/src/syntax/testcases/def.bzl b/starlark/src/syntax/testcases/def.bzl deleted file mode 100644 index e703ed3b..00000000 --- a/starlark/src/syntax/testcases/def.bzl +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright (C) 2017 The Bazel Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -load("@io_bazel_rules_groovy//groovy:groovy.bzl", "groovy_library") - -def bazel_groovy_test(name=None, test_class=None, size="small", deps=[], srcs=[], **kwargs): - """A wrapper around groovy test to use the Bazel test runner.""" - test_class = "build.bazel.ci." + name if test_class == None else test_class - groovy_library( - name = name + "-lib", - deps = deps, - srcs = srcs) - native.java_test( - name = name, - test_class = test_class, - size = size, - runtime_deps = deps + [ - "@org_codehaus_groovy_groovy_all//jar", - "@org_hamcrest_hamcrest_all//jar", - name + "-lib"], - **kwargs) - -def jenkins_library_test(deps=[], data=[], **kwargs): - bazel_groovy_test( - deps=deps+[ - "//jenkins/lib/tests/build/bazel/ci/utils", - "//jenkins/lib/src/build/bazel/ci:lib"], - data=data+["//jenkins/lib:lib-files"], - **kwargs) diff --git a/starlark/src/syntax/testcases/dicts.bzl b/starlark/src/syntax/testcases/dicts.bzl deleted file mode 100644 index ee1076cb..00000000 --- a/starlark/src/syntax/testcases/dicts.bzl +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Skylib module containing functions that operate on dictionaries.""" - - -def _add(*dictionaries): - """Returns a new `dict` that has all the entries of the given dictionaries. - - If the same key is present in more than one of the input dictionaries, the - last of them in the argument list overrides any earlier ones. 
- - This function is designed to take zero or one arguments as well as multiple - dictionaries, so that it follows arithmetic identities and callers can avoid - special cases for their inputs: the sum of zero dictionaries is the empty - dictionary, and the sum of a single dictionary is a copy of itself. - - Args: - *dictionaries: Zero or more dictionaries to be added. - Returns: - A new `dict` that has all the entries of the given dictionaries. - """ - result = {} - for d in dictionaries: - result.update(d) - return result - - -dicts = struct( - add=_add, -) diff --git a/starlark/src/syntax/testcases/docker.bzl b/starlark/src/syntax/testcases/docker.bzl deleted file mode 100644 index f15b2eda..00000000 --- a/starlark/src/syntax/testcases/docker.bzl +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2015 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Rules for manipulation Docker images.""" - -load(":build.bzl", "docker_build") -load(":bundle.bzl", "docker_bundle") diff --git a/starlark/src/syntax/testcases/docker_base.bzl b/starlark/src/syntax/testcases/docker_base.bzl deleted file mode 100644 index 5101e77d..00000000 --- a/starlark/src/syntax/testcases/docker_base.bzl +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrapper to fetch the base docker images we need.""" - -load(":dockerfile_build.bzl", "dockerfile_build") - -def docker_bases(): - dockerfile_build( - name = "ubuntu-xenial-amd64-deploy", - dockerfile = "//base:Dockerfile.ubuntu-xenial-amd64-deploy", - tag = "local:ubuntu-xenial-amd64-deploy", - ) - dockerfile_build( - name = "ubuntu-xenial-amd64-ssh", - dockerfile = "//base:Dockerfile.ubuntu-xenial-amd64-ssh", - tag = "local:ubuntu-xenial-amd64-ssh", - ) diff --git a/starlark/src/syntax/testcases/docker_pull.bzl b/starlark/src/syntax/testcases/docker_pull.bzl deleted file mode 100644 index 61b39590..00000000 --- a/starlark/src/syntax/testcases/docker_pull.bzl +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright 2016 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Quick and not really nice docker_pull rules based on the docker daemon.""" - -def _impl(repository_ctx): - docker = repository_ctx.which("docker") - if docker == None and repository_ctx.attr.optional: - repository_ctx.file("BUILD", """ -load("@io_bazel//tools/build_defs/docker:docker.bzl", "docker_build") - -# an empty image to still allow building despite not having the base -# image. -docker_build( - name = "image", - visibility = ['//visibility:public'], -) -""") - repository_ctx.file("image.tar") - return - - repository_ctx.file("BUILD", """ -load("@io_bazel//tools/build_defs/docker:docker.bzl", "docker_build") - -docker_build( - name = "image", - base = ":base.tar", - visibility = ["//visibility:public"], -) -""") - tag = repository_ctx.attr.tag - cmd = "pull" - if repository_ctx.attr.dockerfile: - dockerfile = repository_ctx.path(repository_ctx.attr.dockerfile) - cmd = "build" - print("Running `docker build`") - result = repository_ctx.execute([ - docker, - "build", - "-q", - "-t", - tag, - "-f", - dockerfile, - dockerfile.dirname, - ], quiet=False, timeout=3600) - else: - print("Running `docker pull`") - result = repository_ctx.execute([docker, "pull", tag], quiet=False, timeout=3600) - if result.return_code: - fail("docker %s failed with error code %s:\n%s" % ( - cmd, - result.return_code, - result.stderr)) - result = repository_ctx.execute([ - docker, "save", "-o", repository_ctx.path("base.tar"), tag]) - if result.return_code: - fail("docker save failed with error code %s:\n%s" % ( - result.return_code, - result.stderr)) - -docker_pull = repository_rule( - implementation = _impl, - attrs = { - "tag": attr.string(mandatory=True), - "dockerfile": attr.label(default=None), - "optional": attr.bool(default=False), - }, -) diff --git a/starlark/src/syntax/testcases/docker_repository.bzl b/starlark/src/syntax/testcases/docker_repository.bzl deleted file mode 100644 index 4f961701..00000000 --- a/starlark/src/syntax/testcases/docker_repository.bzl +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright 2016 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Rule for importing the docker binary for tests (experimental).""" - -def _impl(ctx): - docker = ctx.which("docker") - if docker == None: - # We cannot find docker, we won't be able to run tests depending - # on it, silently ignoring. 
- ctx.file("BUILD", - "\n".join([ - "filegroup(", - " name = 'docker',", - " visibility = ['//visibility:public'],", - ")" - ])) - else: - exports = [] - for k in ctx.os.environ: - # DOCKER* environment variable are used by the docker client - # to know how to talk to the docker daemon. - if k.startswith("DOCKER"): - exports.append("export %s='%s'" % (k, ctx.os.environ[k])) - ctx.symlink(docker, "docker-bin") - ctx.file("docker.sh", "\n".join([ - "#!/bin/bash", - "\n".join(exports), -"""BIN="$0" -while [ -L "${BIN}" ]; do - BIN="$(readlink "${BIN}")" -done -exec "${BIN%%.sh}-bin" "$@" -"""])) - ctx.file("BUILD", "\n".join([ - "sh_binary(", - " name = 'docker',", - " srcs = ['docker.sh'],", - " data = [':docker-bin'],", - " visibility = ['//visibility:public'],", - ")"])) - -docker_repository_ = repository_rule(_impl) - -def docker_repository(): - """Declare a @docker repository that provide a docker binary.""" - docker_repository_(name = "docker") diff --git a/starlark/src/syntax/testcases/dockerfile_build.bzl b/starlark/src/syntax/testcases/dockerfile_build.bzl deleted file mode 100644 index 6c6f8883..00000000 --- a/starlark/src/syntax/testcases/dockerfile_build.bzl +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2016 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Repository rule to build image with `docker build`.""" - -def _impl(rctx): - tag = rctx.attr.tag - dockerfile = rctx.path(rctx.attr.dockerfile) - result = rctx.execute([ - "docker", - "build", - "-q", - "-t", - tag, - "-f", - dockerfile, - dockerfile.dirname, - ]) - if result.return_code: - fail("docker build failed with error code %s:\n%s" % ( - result.return_code, - result.stdout + result.stderr)) - base_tar = rctx.path("base.tar") - base_dir = rctx.path("base") - result = rctx.execute(["docker", "save", "-o", base_tar, tag]) - if result.return_code: - fail("docker save failed with error code %s:\n%s" % ( - result.return_code, - result.stderr)) - result = rctx.execute(["python", rctx.path(Label("//base:convert_image_to_build.py")), - base_tar, base_dir, rctx.path("BUILD")]) - if result.return_code: - fail("Converting the image failed with error code %s:\n%s" % ( - result.return_code, - result.stderr)) - -dockerfile_build = repository_rule( - implementation = _impl, - attrs = { - "tag": attr.string(mandatory=True), - "dockerfile": attr.label(mandatory=True), - }, -) diff --git a/starlark/src/syntax/testcases/dummy_toolchain.bzl b/starlark/src/syntax/testcases/dummy_toolchain.bzl deleted file mode 100644 index c787f731..00000000 --- a/starlark/src/syntax/testcases/dummy_toolchain.bzl +++ /dev/null @@ -1,23 +0,0 @@ -# pylint: disable=g-bad-file-header -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Skylark rule that stubs a toolchain.""" -def _dummy_toolchain_impl(ctx): - ctx = ctx # unused argument - toolchain = platform_common.ToolchainInfo() - return [toolchain] - -dummy_toolchain = rule(_dummy_toolchain_impl, attrs = {}) - diff --git a/starlark/src/syntax/testcases/e4b_aspect.bzl b/starlark/src/syntax/testcases/e4b_aspect.bzl deleted file mode 100644 index 55b80f7b..00000000 --- a/starlark/src/syntax/testcases/e4b_aspect.bzl +++ /dev/null @@ -1,171 +0,0 @@ -# Copyright 2016 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Aspect for e4b, taken from intellij_info.bzl - -DEPENDENCY_ATTRIBUTES = [ - "deps", - "runtime_deps", - "exports", -] - -def struct_omit_none(**kwargs): - d = {name: kwargs[name] for name in kwargs if kwargs[name] != None} - return struct(**d) - -def artifact_location(file): - return None if file == None else file.path - -def library_artifact(java_output): - if java_output == None or java_output.class_jar == None: - return None - return struct_omit_none( - jar = artifact_location(java_output.class_jar), - interface_jar = artifact_location(java_output.ijar), - source_jar = artifact_location(java_output.source_jar), - ) - -def annotation_processing_jars(annotation_processing): - return struct_omit_none( - jar = artifact_location(annotation_processing.class_jar), - source_jar = artifact_location(annotation_processing.source_jar), - ) - -def jars_from_output(output): - """ Collect jars for ide-resolve-files from Java output. 
- """ - if output == None: - return [] - return [jar - for jar in [output.class_jar, output.ijar, output.source_jar] - if jar != None and not jar.is_source] - -def java_rule_ide_info(target, ctx): - if hasattr(ctx.rule.attr, "srcs"): - sources = [artifact_location(file) - for src in ctx.rule.attr.srcs - for file in src.files] - else: - sources = [] - - jars = [library_artifact(output) for output in target.java.outputs.jars] - ide_resolve_files = depset([jar - for output in target.java.outputs.jars - for jar in jars_from_output(output)]) - - gen_jars = [] - if target.java.annotation_processing and target.java.annotation_processing.enabled: - gen_jars = [annotation_processing_jars(target.java.annotation_processing)] - ide_resolve_files = ide_resolve_files + depset([ jar - for jar in [target.java.annotation_processing.class_jar, - target.java.annotation_processing.source_jar] - if jar != None and not jar.is_source]) - - return (struct_omit_none( - sources = sources, - jars = jars, - generated_jars = gen_jars - ), - ide_resolve_files) - - -def _aspect_impl(target, ctx): - kind = ctx.rule.kind - rule_attrs = ctx.rule.attr - - ide_info_text = depset() - ide_resolve_files = depset() - all_deps = [] - - for attr_name in DEPENDENCY_ATTRIBUTES: - if hasattr(rule_attrs, attr_name): - deps = getattr(rule_attrs, attr_name) - if type(deps) == 'list': - for dep in deps: - ide_info_text = ide_info_text + dep.intellij_info_files.ide_info_text - ide_resolve_files = ide_resolve_files + dep.intellij_info_files.ide_resolve_files - all_deps += [str(dep.label) for dep in deps] - - if hasattr(target, "java"): - (java_rule_ide_info_struct, java_ide_resolve_files) = java_rule_ide_info(target, ctx) - info = struct( - label = str(target.label), - kind = kind, - dependencies = all_deps, - build_file_artifact_location = ctx.build_file_path, - ) + java_rule_ide_info_struct - ide_resolve_files = ide_resolve_files + java_ide_resolve_files - output = ctx.new_file(target.label.name + ".e4b-build.json") - ctx.file_action(output, info.to_json()) - ide_info_text += depset([output]) - - return struct( - output_groups = { - "ide-info-text" : ide_info_text, - "ide-resolve" : ide_resolve_files, - }, - intellij_info_files = struct( - ide_info_text = ide_info_text, - ide_resolve_files = ide_resolve_files, - ) - ) - -e4b_aspect = aspect(implementation = _aspect_impl, - attr_aspects = DEPENDENCY_ATTRIBUTES -) -"""Aspect for Eclipse 4 Bazel plugin. - -This aspect produces information for IDE integration with Eclipse. This only -produces information for Java targets. - -This aspect has two output groups: - - ide-info-text produces .e4b-build.json files that contains information - about target dependencies and sources files for the IDE. - - ide-resolve build the dependencies needed for the build (i.e., artifacts - generated by Java annotation processors). - -An e4b-build.json file is a json blob with the following keys: -```javascript -{ - // Label of the corresponding target - "label": "//package:target", - // Kind of the corresponding target, e.g., java_test, java_binary, ... - "kind": "java_library", - // List of dependencies of this target - "dependencies": ["//package1:dep1", "//package2:dep2"], - "Path, relative to the workspace root, of the build file containing the target. - "build_file_artifact_location": "package/BUILD", - // List of sources file, relative to the execroot - "sources": ["package/Test.java"], - // List of jars created when building this target. 
- "jars": [jar1, jar2], - // List of jars generated by java annotation processors when building this target. - "generated_jars": [genjar1, genjar2] -} -``` - -Jar files structure has the following keys: -```javascript -{ - // Location, relative to the execroot, of the jar file or null - "jar": "bazel-out/host/package/libtarget.jar", - // Location, relative to the execroot, of the interface jar file, - // containing only the interfaces of the target jar or null. - "interface_jar": "bazel-out/host/package/libtarget.interface-jar", - // Location, relative to the execroot, of the source jar file, - // containing the sources used to generate the target jar or null. - "source_jar": "bazel-out/host/package/libtarget.interface-jar", -} -``` -""" diff --git a/starlark/src/syntax/testcases/eclipse.bzl b/starlark/src/syntax/testcases/eclipse.bzl deleted file mode 100644 index b175b97b..00000000 --- a/starlark/src/syntax/testcases/eclipse.bzl +++ /dev/null @@ -1,286 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# TODO(dmarting): Provide checksums for those files. -_EQUINOX_MIRROR_URL="https://storage.googleapis.com/bazel-mirror/download.eclipse.org/eclipse/updates" -_ECLIPSE_VERSION="4.5.2-201602121500" -_DOWNLOAD_URL = "%s/%s/R-%s/plugins/%s_%s.jar" % ( - _EQUINOX_MIRROR_URL, - ".".join(_ECLIPSE_VERSION.split(".", 3)[0:2]), - _ECLIPSE_VERSION, - "%s", - "%s") - -# TODO(dmarting): make this configurable? -_DECLARED_DEPS = [ - "org.eclipse.ui.console", - "org.eclipse.ui", - "org.eclipse.core.resources", - "org.eclipse.ui.ide", - "org.eclipse.jdt.core", - "org.eclipse.core.runtime", - "javax.inject", -] - -_ECLIPSE_PLUGIN_DEPS = { - # Declared deps. - "org.eclipse.ui.console": "3.6.100.v20150822-1912", - "javax.inject": "1.0.0.v20091030", - "org.eclipse.core.runtime": "3.11.1.v20150903-1804", - "org.eclipse.ui": "3.107.0.v20150507-1945", - "org.eclipse.jdt.core": "3.11.2.v20160128-0629", - "org.eclipse.core.resources": "3.10.1.v20150725-1910", - "org.eclipse.ui.ide": "3.11.0.v20150825-2158", - # implicit deps - "org.eclipse.swt": "3.104.2.v20160212-1350", - # TODO(dmarting): make it works cross platform. This is not a problem while - # we are using the dependency to compile but this might become an issue if - # we need to run the plugin (e.g. to test it). 
- # - # Available platforms: cocoa.macosx.x86_64 gtk.aix.ppc gtk.aix.ppc64 - # gtk.hpux.ia64 gtk.linux.ppc gtk.linux.ppc64 gtk.linux.ppc64le gtk.linux.s390 - # gtk.linux.s390x gtk.linux.x86 gtk.linux.x86_64 gtk.solaris.sparc - # gtk.solaris.x86 win32.win32.x86 win32.win32.x86_64 - "org.eclipse.swt.gtk.linux.ppc": "3.104.2.v20160212-1350", - "org.eclipse.jface": "3.11.1.v20160128-1644", - "org.eclipse.core.commands": "3.7.0.v20150422-0725", - "org.eclipse.ui.workbench": "3.107.1.v20160120-2131", - "org.eclipse.e4.ui.workbench3": "0.13.0.v20150422-0725", - "org.eclipse.jdt.compiler.apt": "1.2.0.v20150514-0146", - "org.eclipse.jdt.compiler.tool": "1.1.0.v20150513-2007", - "javax.annotation": "1.2.0.v201401042248", - "org.eclipse.osgi": "3.10.102.v20160118-1700", - "org.eclipse.osgi.compatibility.state": "1.0.100.v20150402-1551", - "org.eclipse.equinox.common": "3.7.0.v20150402-1709", - "org.eclipse.core.jobs": "3.7.0.v20150330-2103", - "org.eclipse.core.runtime.compatibility.registry": "3.6.0.v20150318-1505", - "org.eclipse.equinox.registry": "3.6.0.v20150318-1503", - "org.eclipse.equinox.preferences": "3.5.300.v20150408-1437", - "org.eclipse.core.contenttype": "3.5.0.v20150421-2214", - "org.eclipse.equinox.app": "1.3.300.v20150423-1356", - "org.eclipse.ui.views": "3.8.0.v20150422-0725", -} - - -def _load_eclipse_dep(plugin, version): - native.http_file( - name = plugin.replace(".", "_"), - url = _DOWNLOAD_URL % (plugin, version), - ) - -load("//tools/build_defs:eclipse_platform.bzl", "eclipse_platform") - -def load_eclipse_deps(): - """Load dependencies of the Eclipse plugin.""" - for plugin in _ECLIPSE_PLUGIN_DEPS: - _load_eclipse_dep(plugin, _ECLIPSE_PLUGIN_DEPS[plugin]) - eclipse_platform(name="org_eclipse_equinox", version=_ECLIPSE_VERSION) - - -def eclipse_plugin(name, version, bundle_name, activator=None, - vendor=None, **kwargs): - """A macro to generate an eclipse plugin (see java_binary).""" - jars = ["@%s//file" % plugin.replace(".", "_") - for plugin in _ECLIPSE_PLUGIN_DEPS] - native.java_import( - name = name + "-deps", - neverlink = 1, - jars = jars, - ) - deps = [name + "-deps"] - if "deps" in kwargs: - deps = deps + kwargs["deps"] - args = {k: kwargs[k] - for k in kwargs - if k not in [ - "deps", - "classpath_resources", - "deploy_manifest_lines", - "visibility", - "main_class"]} - visibility = kwargs["visibility"] if "visibility" in kwargs else None - # Generate the .api_description to put in the final jar - native.genrule( - name = name + ".api_description", - srcs = [], - outs = [name + "/.api_description"], - cmd = """ -cat <$@ - - - - -EOF -""" % (name, version, name, version)) - # Generate the final jar (a deploy jar) - native.java_binary( - name = name + "-bin", - main_class = "does.not.exist", - classpath_resources = [ - ":%s/.api_description" % name, - # TODO(dmarting): this add the plugin.xml dependency here, maybe we - # should move that to the BUILD file to avoid surprise? - "plugin.xml", - ] + (kwargs["classpath_resources"] - if "classpath_resources" in kwargs else []), - deploy_manifest_lines = [ - "Bundle-ManifestVersion: 2", - "Bundle-Name: " + bundle_name, - # TODO(dmarting): We mark always as singleton, make it configurable? - "Bundle-SymbolicName: %s;singleton:=true" % name, - "Bundle-Version: " + version, - "Require-Bundle: " + ", ".join(_DECLARED_DEPS), - # TODO(dmarting): Take the java version from java_toolchain. 
- "Bundle-RequiredExecutionEnvironment: JavaSE-1.8", - "Bundle-ActivationPolicy: lazy", - "Bundle-ClassPath: .", - ] + ( - ["Bundle-Activator: " + activator] if activator else [] - ) + ( - ["Bundle-Vendor: " + vendor] if vendor else [] - ) + (kwargs["deploy_manifest_lines"] - if "deploy_manifest_lines" in kwargs else []), - deps = deps, - **args) - # Rename the output to the correct name - native.genrule( - name = name, - srcs = [":%s-bin_deploy.jar" % name], - outs = ["%s_%s.jar" % (name, version)], - cmd = "cp $< $@", - output_to_bindir = 1, - visibility = visibility, - ) - - -def _eclipse_feature_impl(ctx): - feature_xml = ctx.new_file(ctx.outputs.out, ctx.label.name + ".xml") - ctx.action( - outputs = [feature_xml], - inputs = [ctx.file.license], - executable = ctx.executable._builder, - arguments = [ - "--output=" + feature_xml.path, - "--id=" + ctx.label.name, - "--label=" + ctx.attr.label, - "--version=" + ctx.attr.version, - "--provider=" + ctx.attr.provider, - "--url=" + ctx.attr.url, - "--description=" + ctx.attr.description, - "--copyright=" + ctx.attr.copyright, - "--license_url=" + ctx.attr.license_url, - "--license=" + ctx.file.license.path] + [ - "--site=%s=%s" % (site, ctx.attr.sites[site]) - for site in ctx.attr.sites] + [ - "--plugin=" + p.basename for p in ctx.files.plugins]) - ctx.action( - outputs = [ctx.outputs.out], - inputs = [feature_xml], - executable = ctx.executable._zipper, - arguments = ["c", - ctx.outputs.out.path, - "feature.xml=" + feature_xml.path], - ) - return struct( - eclipse_feature=struct( - file=ctx.outputs.out, - id=ctx.label.name, - version=ctx.attr.version, - plugins=ctx.files.plugins - ) - ) - - -eclipse_feature = rule( - implementation=_eclipse_feature_impl, - attrs = { - "label": attr.string(mandatory=True), - "version": attr.string(mandatory=True), - "provider": attr.string(mandatory=True), - "description": attr.string(mandatory=True), - "url": attr.string(mandatory=True), - "copyright": attr.string(mandatory=True), - "license_url": attr.string(mandatory=True), - "license": attr.label(mandatory=True, allow_single_file=True), - "sites": attr.string_dict(), - # TODO(dmarting): restrict what can be passed to the plugins attribute. 
- "plugins": attr.label_list(), - "_zipper": attr.label(default=Label("@bazel_tools//tools/zip:zipper"), - executable=True, - cfg="host"), - "_builder": attr.label(default=Label("//tools/build_defs:feature_builder"), - executable=True, - cfg="host"), - }, - outputs = {"out": "%{name}_%{version}.jar"}) -"""Create an eclipse feature jar.""" - - -def _eclipse_p2updatesite_impl(ctx): - feat_files = [f.eclipse_feature.file for f in ctx.attr.eclipse_features] - args = [ - "--output=" + ctx.outputs.out.path, - "--java=" + ctx.executable._java.path, - "--eclipse_launcher=" + ctx.file._eclipse_launcher.path, - "--name=" + ctx.attr.label, - "--url=" + ctx.attr.url, - "--description=" + ctx.attr.description] - - _plugins = {} - for f in ctx.attr.eclipse_features: - args.append("--feature=" + f.eclipse_feature.file.path) - args.append("--feature_id=" + f.eclipse_feature.id) - args.append("--feature_version=" + f.eclipse_feature.version) - for p in f.eclipse_feature.plugins: - if p.path not in _plugins: - _plugins[p.path] = p - plugins = [_plugins[p] for p in _plugins] - - ctx.action( - outputs=[ctx.outputs.out], - inputs=[ - ctx.executable._java, - ctx.file._eclipse_launcher, - ] + ctx.files._jdk + ctx.files._eclipse_platform + feat_files + plugins, - executable = ctx.executable._site_builder, - arguments = args + ["--bundle=" + p.path for p in plugins]) - - -eclipse_p2updatesite = rule( - implementation=_eclipse_p2updatesite_impl, - attrs = { - "label": attr.string(mandatory=True), - "description": attr.string(mandatory=True), - "url": attr.string(mandatory=True), - "eclipse_features": attr.label_list(providers=["eclipse_feature"]), - "_site_builder": attr.label( - default=Label("//tools/build_defs:site_builder"), - executable=True, - cfg="host"), - "_zipper": attr.label( - default=Label("@bazel_tools//tools/zip:zipper"), - executable=True, - cfg="host"), - "_java": attr.label( - default=Label("@bazel_tools//tools/jdk:java"), - executable=True, - cfg="host"), - "_jdk": attr.label(default=Label("@bazel_tools//tools/jdk:jdk")), - "_eclipse_launcher": attr.label( - default=Label("@org_eclipse_equinox//:launcher"), - allow_single_file=True), - "_eclipse_platform": attr.label(default=Label("@org_eclipse_equinox//:platform")), - }, - outputs = {"out": "%{name}.zip"}) -"""Create an eclipse p2update site inside a ZIP file.""" diff --git a/starlark/src/syntax/testcases/eclipse_platform.bzl b/starlark/src/syntax/testcases/eclipse_platform.bzl deleted file mode 100644 index add06791..00000000 --- a/starlark/src/syntax/testcases/eclipse_platform.bzl +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# The Eclipse website provides SHA-512 but Bazel only support SHA256. -# Really Bazel should start supporting all "safe" checksum (and also -# drop support for SHA-1). -SHA256_SUM={ - # TODO(dmarting): we only support 4.5.2 right now because we need to - # download all version of eclipse to provide those checksums... 
- "4.5.2": { - "macosx-cocoa-x86_64": "755f8a75075f6310a8d0453b5766a84aca2fcc687808341b7a657259230b490f", - "linux-gtk-x86_64": "87f82b0c13c245ee20928557dbc4435657d1e029f72d9135683c8d585c69ba8d" - } -} - -def _get_file_url(version, platform, t): - drop = "drops" - if int(version.split(".", 1)[0]) >= 4: - drop = "drops4" - short_version = version.split("-", 1)[0] - sha256 = "" - if short_version in SHA256_SUM: - if platform in SHA256_SUM[short_version]: - sha256 = SHA256_SUM[short_version][platform] - - filename = "eclipse-SDK-%s-%s.%s" % (short_version, platform, t) - file = "/eclipse/downloads/%s/R-%s/%s" % ( - drop, - version, - filename) - # This is a mirror, original base url is http://www.eclipse.org/downloads/download.php?file= - base_url = "https://storage.googleapis.com/bazel-mirror/download.eclipse.org" - return (base_url + file, sha256) - - -def _eclipse_platform_impl(rctx): - version = rctx.attr.version - os_name = rctx.os.name.lower() - if os_name.startswith("mac os"): - platform = "macosx-cocoa-x86_64" - t = "tar.gz" - elif os_name.startswith("linux"): - platform = "linux-gtk-x86_64" - t = "tar.gz" - else: - fail("Cannot fetch Eclipse for platform %s" % rctx.os.name) - url, sha256 = _get_file_url(version, platform, t) - rctx.download_and_extract(url=url, type=t, sha256=sha256) - rctx.file("BUILD.bazel", """ -package(default_visibility = ["//visibility:public"]) -filegroup(name = "platform", srcs = glob(["**"], exclude = ["BUILD.bazel", "BUILD"])) -filegroup(name = "launcher", srcs = glob(["**/plugins/org.eclipse.equinox.launcher_*.jar"])) -""") - - -eclipse_platform = repository_rule( - implementation = _eclipse_platform_impl, - attrs = { - "version": attr.string(mandatory=True), - }, local=False) -"""A repository for downloading the good version eclipse depending on the platform.""" diff --git a/starlark/src/syntax/testcases/embed_data.bzl b/starlark/src/syntax/testcases/embed_data.bzl deleted file mode 100644 index d2d79f20..00000000 --- a/starlark/src/syntax/testcases/embed_data.bzl +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -load("@io_bazel_rules_go//go/private:common.bzl", - "declare_file", -) -load("@io_bazel_rules_go//go/private:providers.bzl", - "sources", -) - -def _go_embed_data_impl(ctx): - if ctx.attr.src and ctx.attr.srcs: - fail("%s: src and srcs attributes cannot both be specified" % ctx.label) - if ctx.attr.src and ctx.attr.flatten: - fail("%s: src and flatten attributes cannot both be specified" % ctx.label) - - args = ctx.actions.args() - if ctx.attr.src: - srcs = [ctx.file.src] - else: - srcs = ctx.files.srcs - args.add("-multi") - - if ctx.attr.package: - package = ctx.attr.package - else: - _, _, package = ctx.label.package.rpartition("/") - if package == "": - fail("%s: must provide package attribute for go_embed_data rules in the repository root directory" % ctx.label) - - out = declare_file(ctx, ext=".go") - args.add([ - "-workspace", ctx.workspace_name, - "-label", str(ctx.label), - "-out", out, - "-package", package, - "-var", ctx.attr.var, - ]) - if ctx.attr.flatten: - args.add("-flatten") - if ctx.attr.string: - args.add("-string") - args.add(srcs) - - ctx.actions.run( - outputs = [out], - inputs = srcs, - executable = ctx.executable._embed, - arguments = [args], - mnemonic = "GoSourcesData", - ) - return [ - DefaultInfo(files = depset([out])), - sources.new(srcs = [out], want_coverage = False), - ] - -go_embed_data = rule( - implementation = _go_embed_data_impl, - attrs = { - "package": attr.string(), - "var": attr.string(default = "Data"), - "src": attr.label(allow_single_file = True), - "srcs": attr.label_list(allow_files = True), - "flatten": attr.bool(), - "string": attr.bool(), - "_embed": attr.label( - default = Label("@io_bazel_rules_go//go/tools/builders:embed"), - executable = True, - cfg = "host", - ), - }, -) -"""See go/extras.rst#go_embed_data for full documentation.""" diff --git a/starlark/src/syntax/testcases/embedded_tools.bzl b/starlark/src/syntax/testcases/embedded_tools.bzl deleted file mode 100644 index b36cc142..00000000 --- a/starlark/src/syntax/testcases/embedded_tools.bzl +++ /dev/null @@ -1,50 +0,0 @@ -# pylint: disable=g-bad-file-header -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http:#www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Contains Skylark rules used to build the embedded_tools.zip.""" - -def _embedded_tools(ctx): - # The list of arguments we pass to the script. - args_file = ctx.new_file(ctx.label.name + ".params") - ctx.file_action(output=args_file, content="\n".join([f.path for f in ctx.files.srcs])) - # Action to call the script. 
- ctx.action( - inputs=ctx.files.srcs, - outputs=[ctx.outputs.out], - arguments=[ctx.outputs.out.path, args_file.path], - progress_message="Creating embedded tools: %s" % ctx.outputs.out.short_path, - executable=ctx.executable.tool) - -embedded_tools = rule( - implementation=_embedded_tools, - attrs={ - "srcs": attr.label_list(allow_files=True), - "out": attr.output(mandatory=True), - "tool": attr.label(executable=True, cfg="host", allow_files=True, - default=Label("//src:create_embedded_tools_sh")) - } -) - -def _srcsfile(ctx): - ctx.file_action( - output=ctx.outputs.out, - content="\n".join([f.path for f in ctx.files.srcs])) - -srcsfile = rule( - implementation=_srcsfile, - attrs={ - "srcs": attr.label_list(allow_files=True), - "out": attr.output(mandatory=True), - } -) diff --git a/starlark/src/syntax/testcases/empty.bzl b/starlark/src/syntax/testcases/empty.bzl deleted file mode 100644 index f18a5bae..00000000 --- a/starlark/src/syntax/testcases/empty.bzl +++ /dev/null @@ -1,8 +0,0 @@ -"""Minimalist example of a rule that does nothing.""" - -def _empty_impl(ctx): - # This function is called when the rule is analyzed. - # You may use print for debugging. - print("This rule does nothing") - -empty = rule(implementation=_empty_impl) diff --git a/starlark/src/syntax/testcases/executable.bzl b/starlark/src/syntax/testcases/executable.bzl deleted file mode 100644 index 656754eb..00000000 --- a/starlark/src/syntax/testcases/executable.bzl +++ /dev/null @@ -1,19 +0,0 @@ -"""This example creates an executable rule. - -An executable rule, like `cc_library`, can be run using 'bazel run'. It -can also be executed as part of the build. -""" - -def _impl(ctx): - # The implementation function must generate the file 'ctx.outputs.executable'. - ctx.actions.write( - output=ctx.outputs.executable, - content="#!/bin/bash\necho Hello!", - is_executable=True - ) - # The executable output is added automatically to this target. - -executable_rule = rule( - implementation=_impl, - executable=True -) diff --git a/starlark/src/syntax/testcases/extension.bzl b/starlark/src/syntax/testcases/extension.bzl deleted file mode 100644 index 88019387..00000000 --- a/starlark/src/syntax/testcases/extension.bzl +++ /dev/null @@ -1,21 +0,0 @@ -"""This example shows how to create custom (user defined) outputs for a rule. - -This rule takes a list of output files from the user and writes content in -each of them. -""" - -def _impl(ctx): - # Access the custom outputs using ctx.outputs.. - for output in ctx.outputs.outs: - ctx.actions.write( - output=output, - content="I am " + output.short_path + "\n" - ) - # The custom outputs are added automatically to this target. - -rule_with_outputs = rule( - implementation=_impl, - attrs={ - "outs": attr.output_list() - } -) diff --git a/starlark/src/syntax/testcases/external_plugin_deps.bzl b/starlark/src/syntax/testcases/external_plugin_deps.bzl deleted file mode 100644 index 391f9208..00000000 --- a/starlark/src/syntax/testcases/external_plugin_deps.bzl +++ /dev/null @@ -1,2 +0,0 @@ -def external_plugin_deps(): - pass \ No newline at end of file diff --git a/starlark/src/syntax/testcases/files_equal_test.bzl b/starlark/src/syntax/testcases/files_equal_test.bzl deleted file mode 100644 index 4c0e3b7f..00000000 --- a/starlark/src/syntax/testcases/files_equal_test.bzl +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright 2016 The Bazel Go Rules Authors. All rights reserved. -# Copyright 2016 The Closure Rules Authors. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Tests that two files contain the same data.""" - -def _impl(ctx): - if ctx.file.golden == ctx.file.actual: - fail("GOLDEN and ACTUAL should be different files") - ctx.actions.write( - output=ctx.outputs.executable, - content="\n".join([ - "#!/bin/bash", - "function checksum() {", - " if command -v openssl >/dev/null; then", - " openssl sha1 $1 | cut -f 2 -d ' '", - " elif command -v sha256sum >/dev/null; then", - " sha256sum $1 | cut -f 1 -d ' '", - " elif command -v shasum >/dev/null; then", - " cat $1 | shasum -a 256 | cut -f 1 -d ' '", - " else", - " echo please install openssl >&2", - " exit 1", - " fi", - "}", - "SUM1=$(checksum %s)" % _runpath(ctx.file.golden), - "SUM2=$(checksum %s)" % _runpath(ctx.file.actual), - "if [[ ${SUM1} != ${SUM2} ]]; then", - " echo ERROR: %s >&2" % ctx.attr.error_message, - " echo %s ${SUM1} >&2" % _runpath(ctx.file.golden), - " echo %s ${SUM2} >&2" % _runpath(ctx.file.actual), - " exit 1", - "fi", - ]), - is_executable=True) - return struct(runfiles=ctx.runfiles([ctx.file.golden, - ctx.file.actual])) - -def _runpath(f): - """Figures out the proper runfiles path for a file, using voodoo""" - if f.path.startswith('bazel-out/'): - return f.short_path - else: - return f.path - -files_equal_test = rule( - attrs = { - "golden": attr.label( - mandatory = True, - allow_files = True, - single_file = True), - "actual": attr.label( - mandatory = True, - allow_files = True, - single_file = True), - "error_message": attr.string( - default="FILES DO NOT HAVE EQUAL CONTENTS"), - }, - implementation = _impl, - test = True) diff --git a/starlark/src/syntax/testcases/filetype.bzl b/starlark/src/syntax/testcases/filetype.bzl deleted file mode 100644 index 59332132..00000000 --- a/starlark/src/syntax/testcases/filetype.bzl +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Filetype constants.""" - -# Filetype to restrict inputs -tar = [".tar", ".tar.gz", ".tgz", ".tar.xz"] -deb = [".deb", ".udeb"] - -# Docker files are tarballs, should we allow other extensions than tar? -docker = tar diff --git a/starlark/src/syntax/testcases/flatten.bzl b/starlark/src/syntax/testcases/flatten.bzl deleted file mode 100644 index ae4bf463..00000000 --- a/starlark/src/syntax/testcases/flatten.bzl +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""A rule to flatten container images.""" - -load( - "//skylib:path.bzl", - _get_runfile_path = "runfile", -) -load( - "//container:layers.bzl", - _get_layers = "get_from_target", - _layer_tools = "tools", -) - -def _impl(ctx): - """Core implementation of container_flatten.""" - - image = _get_layers(ctx, ctx.attr.image, ctx.files.image) - - # Leverage our efficient intermediate representation to push. - legacy_base_arg = [] - legacy_files = [] - if image.get("legacy"): - # TODO(mattmoor): warn about legacy base. - legacy_files += [image["legacy"]] - legacy_base_arg = ["--tarball=%s" % image["legacy"].path] - - blobsums = image.get("blobsum", []) - digest_args = ["--digest=" + f.path for f in blobsums] - blobs = image.get("zipped_layer", []) - layer_args = ["--layer=" + f.path for f in blobs] - config_arg = "--config=%s" % image["config"].path - - ctx.action( - executable = ctx.executable._flattener, - arguments = legacy_base_arg + digest_args + layer_args + [ - config_arg, - "--filesystem=" + ctx.outputs.filesystem.path, - "--metadata=" + ctx.outputs.metadata.path, - ], - inputs = blobsums + blobs + [image["config"]] + legacy_files, - outputs = [ctx.outputs.filesystem, ctx.outputs.metadata], - use_default_shell_env=True, - mnemonic="Flatten" - ) - return struct() - -container_flatten = rule( - attrs = { - "image": attr.label( - allow_files = [".tar"], - single_file = True, - mandatory = True, - ), - "_flattener": attr.label( - default = Label("@containerregistry//:flatten"), - cfg = "host", - executable = True, - allow_files = True, - ), - } + _layer_tools, - outputs = { - "filesystem": "%{name}.tar", - "metadata": "%{name}.json", - }, - implementation = _impl, -) diff --git a/starlark/src/syntax/testcases/flavours.bzl b/starlark/src/syntax/testcases/flavours.bzl deleted file mode 100644 index 713c7c52..00000000 --- a/starlark/src/syntax/testcases/flavours.bzl +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Create base images for docker tests.""" -# TODO(dmarting): Right now we use a custom docker_pull that can build -# docker images, which is not reproducible and as a high cost, ideally -# we would switch to the docker_pull from bazelbuild/rules_docker but -# we do not have an easy mean to create and maintain the images we need -# for those tests. 
-load("//src/test/docker:docker_pull.bzl", "docker_pull") - -FLAVOURS = [ - "centos6.7", - "debian-stretch", - "fedora23", - "ubuntu-15.04", - "ubuntu-16.04", -] - -def pull_images_for_docker_tests(): - for flavour in FLAVOURS: - docker_pull( - name = "docker-" + flavour, - tag = "bazel_tools_cpp_test:" + flavour, - dockerfile = "//src/test/docker:Dockerfile." + flavour, - optional = True, - ) diff --git a/starlark/src/syntax/testcases/foo.bzl b/starlark/src/syntax/testcases/foo.bzl deleted file mode 100644 index f1661c3b..00000000 --- a/starlark/src/syntax/testcases/foo.bzl +++ /dev/null @@ -1,49 +0,0 @@ -# A provider with one field, transitive_sources. -FooFiles = provider() - -def get_transitive_srcs(srcs, deps): - """Obtain the source files for a target and its transitive dependencies. - - Args: - srcs: a list of source files - deps: a list of targets that are direct dependencies - Returns: - a collection of the transitive sources - """ - return depset( - srcs, - transitive = [dep[FooFiles].transitive_sources for dep in deps]) - -def _foo_library_impl(ctx): - trans_srcs = get_transitive_srcs(ctx.files.srcs, ctx.attr.deps) - return [FooFiles(transitive_sources=trans_srcs)] - -foo_library = rule( - implementation = _foo_library_impl, - attrs = { - "srcs": attr.label_list(allow_files=True), - "deps": attr.label_list(), - }, -) - -def _foo_binary_impl(ctx): - foocc = ctx.executable._foocc - out = ctx.outputs.out - trans_srcs = get_transitive_srcs(ctx.files.srcs, ctx.attr.deps) - srcs_list = trans_srcs.to_list() - ctx.actions.run(executable = foocc, - arguments = [out.path] + [src.path for src in srcs_list], - inputs = srcs_list + [foocc], - outputs = [out]) - -foo_binary = rule( - implementation = _foo_binary_impl, - attrs = { - "srcs": attr.label_list(allow_files=True), - "deps": attr.label_list(), - "_foocc": attr.label(default=Label("//depsets:foocc"), - allow_files=True, executable=True, cfg="host") - }, - outputs = {"out": "%{name}.out"}, -) - diff --git a/starlark/src/syntax/testcases/gazelle.bzl b/starlark/src/syntax/testcases/gazelle.bzl deleted file mode 100644 index 785d82b2..00000000 --- a/starlark/src/syntax/testcases/gazelle.bzl +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright 2016 The Bazel Go Rules Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -load("@io_bazel_rules_go//go/private:common.bzl", - "declare_file", -) - -_script_content = """ -BASE=$(pwd) -WORKSPACE=$(dirname $(readlink WORKSPACE)) -cd "$WORKSPACE" -$BASE/{gazelle} {args} $@ -""" - -def _gazelle_script_impl(ctx): - prefix = ctx.attr.prefix if ctx.attr.prefix else ctx.attr._go_prefix.go_prefix - args = [ctx.attr.command] - args += [ - "-repo_root", "$WORKSPACE", - "-go_prefix", prefix, - "-external", ctx.attr.external, - "-mode", ctx.attr.mode, - ] - if ctx.attr.build_tags: - args += ["-build_tags", ",".join(ctx.attr.build_tags)] - args += ctx.attr.args - script_content = _script_content.format(gazelle=ctx.file._gazelle.short_path, args=" ".join(args)) - script_file = declare_file(ctx, ext=".bash") - ctx.actions.write(output=script_file, is_executable=True, content=script_content) - return struct( - files = depset([script_file]), - runfiles = ctx.runfiles([ctx.file._gazelle]) - ) - -def _go_prefix_default(prefix): - return (None - if prefix - else Label("//:go_prefix", relative_to_caller_repository = True)) - -_gazelle_script = rule( - _gazelle_script_impl, - attrs = { - "command": attr.string(values=["update", "fix"], default="update"), - "mode": attr.string(values=["print", "fix", "diff"], default="fix"), - "external": attr.string(values=["external", "vendored"], default="external"), - "build_tags": attr.string_list(), - "args": attr.string_list(), - "prefix": attr.string(), - "_gazelle": attr.label( - default = Label("@io_bazel_rules_go//go/tools/gazelle/gazelle:gazelle"), - allow_files = True, - single_file = True, - executable = True, - cfg = "host" - ), - "_go_prefix": attr.label(default = _go_prefix_default), - } -) - -def gazelle(name, **kwargs): - """See go/extras.rst#gazelle for full documentation.""" - script_name = name+"_script" - _gazelle_script( - name = script_name, - tags = ["manual"], - **kwargs - ) - native.sh_binary( - name = name, - srcs = [script_name], - data = ["//:WORKSPACE"], - tags = ["manual"], - ) diff --git a/starlark/src/syntax/testcases/generate_test.bzl b/starlark/src/syntax/testcases/generate_test.bzl deleted file mode 100644 index 14d98ff9..00000000 --- a/starlark/src/syntax/testcases/generate_test.bzl +++ /dev/null @@ -1,16 +0,0 @@ -def _generate_script_impl(ctx): - script_file = ctx.actions.declare_file(ctx.label.name + ".bash") - ctx.actions.write(output=script_file, is_executable=True, content=""" -{0} -""".format(ctx.file.binary.short_path)) - return struct( - files = depset([script_file]), - ) - - -generate_script = rule( - _generate_script_impl, - attrs = { - "binary": attr.label(allow_files=True, single_file=True), - }, -) diff --git a/starlark/src/syntax/testcases/generate_workspace.bzl b/starlark/src/syntax/testcases/generate_workspace.bzl deleted file mode 100644 index b20410d3..00000000 --- a/starlark/src/syntax/testcases/generate_workspace.bzl +++ /dev/null @@ -1,405 +0,0 @@ -# The following dependencies were calculated from: -# -# generate_workspace -a org.apache.maven:maven-artifact:3.5.0 -a org.apache.maven:maven-aether-provider:3.3.9 -a org.eclipse.aether:aether-util:1.1.0 -a com.google.guava:guava:20.0 -a org.powermock:powermock-module-junit4:1.6.6 -a com.google.code.findbugs:jsr305:3.0.1 -a org.codehaus.plexus:plexus-utils:jar:3.0.24 -a org.codehaus.plexus:plexus-component-annotations:1.7.1 -a org.codehaus.plexus:plexus-interpolation:1.24 -a org.mockito:mockito-all:1.9.5 -a junit:junit:4.4 -a com.google.truth:truth:0.30 -a org.apache.httpcomponents:httpcore:4.4.6 -a org.apache.httpcomponents:httpclient:4.5.3 - - 
-def generated_maven_jars(): - native.maven_jar( - name = "com_google_code_findbugs_jsr305", - artifact = "com.google.code.findbugs:jsr305:3.0.1", - ) - - - # org.apache.httpcomponents:httpclient:jar:4.5.3 got requested version - native.maven_jar( - name = "org_apache_httpcomponents_httpcore", - artifact = "org.apache.httpcomponents:httpcore:4.4.6", - ) - - - # org.powermock:powermock-core:jar:1.6.6 - # org.powermock:powermock-module-junit4-common:jar:1.6.6 got requested version - native.maven_jar( - name = "org_powermock_powermock_reflect", - artifact = "org.powermock:powermock-reflect:1.6.6", - sha1 = "3fa5d0acee85c5662102ab2ef7a49bbb5a56bae5", - ) - - - # org.apache.httpcomponents:httpclient:jar:4.5.3 - native.maven_jar( - name = "commons_codec_commons_codec", - artifact = "commons-codec:commons-codec:1.9", - sha1 = "9ce04e34240f674bc72680f8b843b1457383161a", - ) - - - native.maven_jar( - name = "org_apache_maven_maven_aether_provider", - artifact = "org.apache.maven:maven-aether-provider:3.3.9", - ) - - - native.maven_jar( - name = "org_powermock_powermock_module_junit4", - artifact = "org.powermock:powermock-module-junit4:1.6.6", - ) - - - native.maven_jar( - name = "org_apache_maven_maven_artifact", - artifact = "org.apache.maven:maven-artifact:3.5.0", - ) - - - native.maven_jar( - name = "com_google_truth_truth", - artifact = "com.google.truth:truth:0.30", - ) - - - # com.google.truth:truth:jar:0.30 - native.maven_jar( - name = "com_google_errorprone_error_prone_annotations", - artifact = "com.google.errorprone:error_prone_annotations:2.0.8", - sha1 = "54e2d56cb157df08cbf183149bcf50c9f5151ed4", - ) - - - native.maven_jar( - name = "org_codehaus_plexus_plexus_interpolation", - artifact = "org.codehaus.plexus:plexus-interpolation:1.24", - ) - - - # org.powermock:powermock-reflect:jar:1.6.6 - native.maven_jar( - name = "org_objenesis_objenesis", - artifact = "org.objenesis:objenesis:2.4", - sha1 = "2916b6c96b50c5b3ec4452ed99401db745aabb27", - ) - - - # com.google.truth:truth:jar:0.30 wanted version 4.10 - native.maven_jar( - name = "junit_junit", - artifact = "junit:junit:4.4", - ) - - - # org.powermock:powermock-module-junit4-common:jar:1.6.6 - native.maven_jar( - name = "org_powermock_powermock_core", - artifact = "org.powermock:powermock-core:1.6.6", - sha1 = "8085fae46f60d7ff960f1cc711359c00b35c5887", - ) - - - native.maven_jar( - name = "org_codehaus_plexus_plexus_component_annotations", - artifact = "org.codehaus.plexus:plexus-component-annotations:1.7.1", - ) - - - native.maven_jar( - name = "org_codehaus_plexus_plexus_utils", - artifact = "org.codehaus.plexus:plexus-utils:3.0.24", - ) - - - # org.apache.httpcomponents:httpclient:jar:4.5.3 - native.maven_jar( - name = "commons_logging_commons_logging", - artifact = "commons-logging:commons-logging:1.2", - sha1 = "4bfc12adfe4842bf07b657f0369c4cb522955686", - ) - - - # junit:junit:jar:4.12 - # junit:junit:jar:4.12 got requested version - native.maven_jar( - name = "org_hamcrest_hamcrest_core", - artifact = "org.hamcrest:hamcrest-core:1.3", - sha1 = "42a25dc3219429f0e5d060061f71acb49bf010a0", - ) - - - native.maven_jar( - name = "org_eclipse_aether_aether_util", - artifact = "org.eclipse.aether:aether-util:1.1.0", - ) - - - # org.powermock:powermock-module-junit4:jar:1.6.6 - native.maven_jar( - name = "org_powermock_powermock_module_junit4_common", - artifact = "org.powermock:powermock-module-junit4-common:1.6.6", - sha1 = "6302c934d03f76fa348ec91c603e11ce05b61f44", - ) - - - native.maven_jar( - name = 
"org_apache_httpcomponents_httpclient", - artifact = "org.apache.httpcomponents:httpclient:4.5.3", - ) - - - native.maven_jar( - name = "org_mockito_mockito_all", - artifact = "org.mockito:mockito-all:1.9.5", - ) - - - # org.apache.maven:maven-artifact:jar:3.5.0 - native.maven_jar( - name = "org_apache_commons_commons_lang3", - artifact = "org.apache.commons:commons-lang3:3.5", - sha1 = "6c6c702c89bfff3cd9e80b04d668c5e190d588c6", - ) - - - # org.powermock:powermock-core:jar:1.6.6 - native.maven_jar( - name = "org_javassist_javassist", - artifact = "org.javassist:javassist:3.22.0-CR2", - sha1 = "44eaf0990dea92f4bca4b9931b2239c0e8756ee7", - ) - - - # org.eclipse.aether:aether-util:jar:1.1.0 - native.maven_jar( - name = "org_eclipse_aether_aether_api", - artifact = "org.eclipse.aether:aether-api:1.1.0", - sha1 = "05dd291e788f50dfb48822dab29defc16ad70860", - ) - - - # com.google.truth:truth:jar:0.30 wanted version 19.0 - native.maven_jar( - name = "com_google_guava_guava", - artifact = "com.google.guava:guava:20.0", - ) - - - - -def generated_java_libraries(): - native.java_library( - name = "com_google_code_findbugs_jsr305", - visibility = ["//visibility:public"], - exports = ["@com_google_code_findbugs_jsr305//jar"], - ) - - - native.java_library( - name = "org_apache_httpcomponents_httpcore", - visibility = ["//visibility:public"], - exports = ["@org_apache_httpcomponents_httpcore//jar"], - ) - - - native.java_library( - name = "org_powermock_powermock_reflect", - visibility = ["//visibility:public"], - exports = ["@org_powermock_powermock_reflect//jar"], - runtime_deps = [ - ":org_objenesis_objenesis", - ], - ) - - - native.java_library( - name = "commons_codec_commons_codec", - visibility = ["//visibility:public"], - exports = ["@commons_codec_commons_codec//jar"], - ) - - - native.java_library( - name = "org_apache_maven_maven_aether_provider", - visibility = ["//visibility:public"], - exports = ["@org_apache_maven_maven_aether_provider//jar"], - ) - - - native.java_library( - name = "org_powermock_powermock_module_junit4", - visibility = ["//visibility:public"], - exports = ["@org_powermock_powermock_module_junit4//jar"], - runtime_deps = [ - ":junit_junit", - ":org_hamcrest_hamcrest_core", - ":org_javassist_javassist", - ":org_objenesis_objenesis", - ":org_powermock_powermock_core", - ":org_powermock_powermock_module_junit4_common", - ":org_powermock_powermock_reflect", - ], - ) - - - native.java_library( - name = "org_apache_maven_maven_artifact", - visibility = ["//visibility:public"], - exports = ["@org_apache_maven_maven_artifact//jar"], - runtime_deps = [ - ":org_apache_commons_commons_lang3", - ":org_codehaus_plexus_plexus_utils", - ], - ) - - - native.java_library( - name = "com_google_truth_truth", - visibility = ["//visibility:public"], - exports = ["@com_google_truth_truth//jar"], - runtime_deps = [ - ":com_google_errorprone_error_prone_annotations", - ":com_google_guava_guava", - ":junit_junit", - ], - ) - - - native.java_library( - name = "com_google_errorprone_error_prone_annotations", - visibility = ["//visibility:public"], - exports = ["@com_google_errorprone_error_prone_annotations//jar"], - ) - - - native.java_library( - name = "org_codehaus_plexus_plexus_interpolation", - visibility = ["//visibility:public"], - exports = ["@org_codehaus_plexus_plexus_interpolation//jar"], - ) - - - native.java_library( - name = "org_objenesis_objenesis", - visibility = ["//visibility:public"], - exports = ["@org_objenesis_objenesis//jar"], - ) - - - native.java_library( - name = 
"junit_junit", - visibility = ["//visibility:public"], - exports = ["@junit_junit//jar"], - runtime_deps = [ - ":org_hamcrest_hamcrest_core", - ], - ) - - - native.java_library( - name = "org_powermock_powermock_core", - visibility = ["//visibility:public"], - exports = ["@org_powermock_powermock_core//jar"], - runtime_deps = [ - ":org_javassist_javassist", - ":org_objenesis_objenesis", - ":org_powermock_powermock_reflect", - ], - ) - - - native.java_library( - name = "org_codehaus_plexus_plexus_component_annotations", - visibility = ["//visibility:public"], - exports = ["@org_codehaus_plexus_plexus_component_annotations//jar"], - ) - - - native.java_library( - name = "org_codehaus_plexus_plexus_utils", - visibility = ["//visibility:public"], - exports = ["@org_codehaus_plexus_plexus_utils//jar"], - ) - - - native.java_library( - name = "commons_logging_commons_logging", - visibility = ["//visibility:public"], - exports = ["@commons_logging_commons_logging//jar"], - ) - - - native.java_library( - name = "org_hamcrest_hamcrest_core", - visibility = ["//visibility:public"], - exports = ["@org_hamcrest_hamcrest_core//jar"], - ) - - - native.java_library( - name = "org_eclipse_aether_aether_util", - visibility = ["//visibility:public"], - exports = ["@org_eclipse_aether_aether_util//jar"], - runtime_deps = [ - ":org_eclipse_aether_aether_api", - ], - ) - - - native.java_library( - name = "org_powermock_powermock_module_junit4_common", - visibility = ["//visibility:public"], - exports = ["@org_powermock_powermock_module_junit4_common//jar"], - runtime_deps = [ - ":junit_junit", - ":org_javassist_javassist", - ":org_objenesis_objenesis", - ":org_powermock_powermock_core", - ":org_powermock_powermock_reflect", - ], - ) - - - native.java_library( - name = "org_apache_httpcomponents_httpclient", - visibility = ["//visibility:public"], - exports = ["@org_apache_httpcomponents_httpclient//jar"], - runtime_deps = [ - ":commons_codec_commons_codec", - ":commons_logging_commons_logging", - ":org_apache_httpcomponents_httpcore", - ], - ) - - - native.java_library( - name = "org_mockito_mockito_all", - visibility = ["//visibility:public"], - exports = ["@org_mockito_mockito_all//jar"], - ) - - - native.java_library( - name = "org_apache_commons_commons_lang3", - visibility = ["//visibility:public"], - exports = ["@org_apache_commons_commons_lang3//jar"], - ) - - - native.java_library( - name = "org_javassist_javassist", - visibility = ["//visibility:public"], - exports = ["@org_javassist_javassist//jar"], - ) - - - native.java_library( - name = "org_eclipse_aether_aether_api", - visibility = ["//visibility:public"], - exports = ["@org_eclipse_aether_aether_api//jar"], - ) - - - native.java_library( - name = "com_google_guava_guava", - visibility = ["//visibility:public"], - exports = ["@com_google_guava_guava//jar"], - ) - - diff --git a/starlark/src/syntax/testcases/genproto.bzl b/starlark/src/syntax/testcases/genproto.bzl deleted file mode 100644 index f935e52b..00000000 --- a/starlark/src/syntax/testcases/genproto.bzl +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# This is a quick and dirty rule to make Bazel compile itself. It -# only supports Java. - -proto_filetype = [".proto"] - -def cc_grpc_library(name, src): - basename = src[:-len(".proto")] - protoc_label = str(Label("//third_party/protobuf:protoc")) - cpp_plugin_label = str(Label("//third_party/grpc:cpp_plugin")) - native.genrule( - name = name + "_codegen", - srcs = [src], - tools = [protoc_label, cpp_plugin_label], - cmd = "\\\n".join([ - "$(location " + protoc_label + ")", - " --plugin=protoc-gen-grpc=$(location " + cpp_plugin_label + ")", - " --cpp_out=$(GENDIR)", - " --grpc_out=$(GENDIR)", - " $(location " + src + ")"]), - outs = [basename + ".grpc.pb.h", basename + ".grpc.pb.cc", basename + ".pb.cc", basename + ".pb.h"]) - - native.cc_library( - name = name, - srcs = [basename + ".grpc.pb.cc", basename + ".pb.cc"], - hdrs = [basename + ".grpc.pb.h", basename + ".pb.h"], - deps = [str(Label("//third_party/grpc:grpc++_unsecure"))], - includes = ["."]) - diff --git a/starlark/src/syntax/testcases/genrule2.bzl b/starlark/src/syntax/testcases/genrule2.bzl deleted file mode 100644 index 563a9efb..00000000 --- a/starlark/src/syntax/testcases/genrule2.bzl +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright (C) 2016 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Syntactic sugar for native genrule() rule: -# expose ROOT shell variable -# expose TMP shell variable - -def genrule2(cmd, **kwargs): - cmd = ' && '.join([ - 'ROOT=$$PWD', - 'TMP=$$(mktemp -d || mktemp -d -t bazel-tmp)', - '(' + cmd + ')', - ]) - native.genrule( - cmd = cmd, - **kwargs) diff --git a/starlark/src/syntax/testcases/git.bzl b/starlark/src/syntax/testcases/git.bzl deleted file mode 100644 index 2375b21f..00000000 --- a/starlark/src/syntax/testcases/git.bzl +++ /dev/null @@ -1,127 +0,0 @@ -# Copyright 2015 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Rules for cloning external git repositories.""" - -def _clone_or_update(ctx): - if (ctx.attr.verbose): - print('git.bzl: Cloning or updating repository %s' % ctx.name) - if ((not ctx.attr.tag and not ctx.attr.commit) or - (ctx.attr.tag and ctx.attr.commit)): - fail('Exactly one of commit and tag must be provided') - if ctx.attr.commit: - ref = ctx.attr.commit - else: - ref = 'tags/' + ctx.attr.tag - - st = ctx.execute(['bash', '-c', """ -set -ex -( cd {working_dir} && - if ! ( cd '{dir}' && git rev-parse --git-dir ) >/dev/null 2>&1; then - rm -rf '{dir}' - git clone '{remote}' '{dir}' - fi - cd '{dir}' - git reset --hard {ref} || (git fetch origin {ref}:{ref} && git reset --hard {ref}) - git clean -xdf ) - """.format( - working_dir=ctx.path('.').dirname, - dir=ctx.path('.'), - remote=ctx.attr.remote, - ref=ref, - )]) - if st.return_code: - fail('error cloning %s:\n%s' % (ctx.name, st.stderr)) - if ctx.attr.init_submodules: - st = ctx.execute(['bash', '-c', """ -set -ex -( cd '{dir}' - git submodule update --init --checkout --force ) - """.format( - dir=ctx.path('.'), - )]) - if st.return_code: - fail('error updating submodules %s:\n%s' % (ctx.name, st.stderr)) - - -def _new_git_repository_implementation(ctx): - if ((not ctx.attr.build_file and not ctx.attr.build_file_content) or - (ctx.attr.build_file and ctx.attr.build_file_content)): - fail('Exactly one of build_file and build_file_content must be provided.') - _clone_or_update(ctx) - ctx.file('WORKSPACE', 'workspace(name = \'{name}\')\n'.format(name=ctx.name)) - if ctx.attr.build_file: - ctx.symlink(ctx.attr.build_file, 'BUILD.bazel') - else: - ctx.file('BUILD.bazel', ctx.attr.build_file_content) - -def _git_repository_implementation(ctx): - _clone_or_update(ctx) - - -_common_attrs = { - 'remote': attr.string(mandatory=True), - 'commit': attr.string(default=''), - 'tag': attr.string(default=''), - 'init_submodules': attr.bool(default=False), - 'verbose': attr.bool(default=False), -} - - -new_git_repository = repository_rule( - implementation=_new_git_repository_implementation, - attrs=_common_attrs + { - 'build_file': attr.label(allow_single_file=True), - 'build_file_content': attr.string(), - } -) -"""Clone an external git repository. - -Clones a Git repository, checks out the specified tag, or commit, and -makes its targets available for binding. - -Args: - name: A unique name for this rule. - - build_file: The file to use as the BUILD file for this repository. - Either build_file or build_file_content must be specified. - - This attribute is a label relative to the main workspace. The file - does not need to be named BUILD, but can be (something like - BUILD.new-repo-name may work well for distinguishing it from the - repository's actual BUILD files. - - build_file_content: The content for the BUILD file for this repository. - Either build_file or build_file_content must be specified. - - init_submodules: Whether to clone submodules in the repository. - - remote: The URI of the remote Git repository. -""" - -git_repository = repository_rule( - implementation=_git_repository_implementation, - attrs=_common_attrs, -) -"""Clone an external git repository. - -Clones a Git repository, checks out the specified tag, or commit, and -makes its targets available for binding. - -Args: - name: A unique name for this rule. - - init_submodules: Whether to clone submodules in the repository. - - remote: The URI of the remote Git repository. 
-""" diff --git a/starlark/src/syntax/testcases/git_repositories.bzl b/starlark/src/syntax/testcases/git_repositories.bzl deleted file mode 100644 index 1519fbc1..00000000 --- a/starlark/src/syntax/testcases/git_repositories.bzl +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2015 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Redirects for git repository rules.""" - -load( - ":git.bzl", - original_git_repository = "git_repository", - original_new_git_repository = "new_git_repository", -) - -def git_repository(**kwargs): - print("The git_repository rule has been moved. Please load " + - "@bazel_tools//tools/build_defs/repo:git.bzl instead. This redirect " + - "will be removed in the future.") - original_git_repository(**kwargs) - -def new_git_repository(**kwargs): - print("The new_git_repository rule has been moved. Please load " + - "@bazel_tools//tools/build_defs/repo:git.bzl instead. This redirect " + - "will be removed in the future.") - original_new_git_repository(**kwargs) diff --git a/starlark/src/syntax/testcases/go.bzl b/starlark/src/syntax/testcases/go.bzl deleted file mode 100644 index 7ef5c5d9..00000000 --- a/starlark/src/syntax/testcases/go.bzl +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# !!!! THIS IS A GENERATED FILE TO NOT EDIT IT BY HAND !!!! -# -# To regenerate this file, run ./update_deps.sh from the root of the -# git repository. - -DIGESTS = { - # "gcr.io/distroless/base:debug" circa 2017-10-10 04:09 +0000 - "debug": "sha256:fa2c87f23ce19665607fe43c701f0496fef87a5dbcf821c2ae64bfe415247822", - # "gcr.io/distroless/base:latest" circa 2017-10-10 04:09 +0000 - "latest": "sha256:4a8979a768c3ef8d0a8ed8d0af43dc5920be45a51749a9c611d178240f136eb4", -} diff --git a/starlark/src/syntax/testcases/go_proto_library.bzl b/starlark/src/syntax/testcases/go_proto_library.bzl deleted file mode 100644 index 8a03c3b9..00000000 --- a/starlark/src/syntax/testcases/go_proto_library.bzl +++ /dev/null @@ -1,321 +0,0 @@ -"""A basic go_proto_library. - -Takes .proto as srcs and go_proto_library as deps -Note: can also work with a go_library(name=name,...) - and a filegroup of .protos (name=name+"_protos",...) - -A go_proto_library can then be a dependency of go_library or another go_proto_library. - -Requires/Uses: -@io_bazel_rules_go for go_* macros/rules. - -Does: -Generate protos using the open-source protoc and protoc-gen-go. -Handles transitive dependencies. 
-gRPC for service generation -Handles bazel-style names like 'foo_proto', -and also Go package-style like 'go_default_library' - -Does not yet: -Gets confused if local protos use 'option go_package' - -Usage: - -In the BUILD file where protos are - -load("@io_bazel_rules_go//proto:go_proto_library.bzl", "go_proto_library") - -go_proto_library( - name = "my_proto", - srcs = ["my.proto"], - deps = [ - ":other_proto", - "@com_github_golang_protobuf//ptypes/duration:go_default_library", - ], -) -""" - -load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_repository") - -_DEFAULT_LIB = "go_default_library" # matching go_library - -_PROTOS_SUFFIX = "_protos" - -_GO_GOOGLE_PROTOBUF = "go_google_protobuf" - -_WELL_KNOWN_REPO = "@com_github_golang_protobuf//ptypes/" - -def _collect_protos_import(ctx): - """Collect the list of transitive protos and m_import_path. - - Paths of the form Mpath/to.proto=foo.com/bar specify a mapping into the global Go namespace. - https://github.com/golang/protobuf#parameters - - Args: - ctx: the standard bazel rule ctx object. - - Returns: - (list of unique protos, list of m_import paths) - """ - protos = depset() - m_import_path = [] - for d in ctx.attr.deps: - if not hasattr(d, "_protos"): - # should be a raw filegroup then - protos += list(d.files) - continue - protos += d._protos - m_import_path.append(d._m_import_path) - return list(protos), m_import_path - -def _drop_external(path): - """Drop leading '../' indicating an external dir of the form ../$some-repo. - - Non-generated external protos show up in a parallel directory. - e.g. ptypes/any/any.proto is at ../com_github_golang_protobuf/ptypes/any/any.proto - So this function detects and drops the 2 leading directories in this case. - - Args: - path: short_path of a proto file - - Returns: - A cleaned path. - """ - if not path.startswith("../"): - return path - return "/".join(path.split("/")[2:]) - -def _check_bazel_style(ctx): - """If the library name is not 'go_default_library', then we have to create an extra level of indirection.""" - if ctx.label.name == _DEFAULT_LIB + _PROTOS_SUFFIX: - return ctx.outputs.outs, "" - proto_outs = [ - ctx.new_file( - ctx.configuration.bin_dir, - s.basename[:-len(".proto")] + ".pb.go") - for s in ctx.files.srcs - ] - for proto_out, ctx_out in zip(proto_outs, ctx.outputs.outs): - ctx.action( - inputs=[proto_out], - outputs=[ctx_out], - command="cp %s %s" % (proto_out.path, ctx_out.path), - mnemonic="GoProtocGenCp") - return proto_outs, "/" + ctx.label.name[:-len(_PROTOS_SUFFIX)] - -def _go_proto_library_gen_impl(ctx): - """Rule implementation that generates Go using protoc.""" - proto_outs, go_package_name = _check_bazel_style(ctx) - - go_prefix = ctx.attr.go_prefix.go_prefix - if go_prefix and ctx.label.package and not go_prefix.endswith("/"): - go_prefix = go_prefix + "/" - source_go_package = "%s%s%s" % (go_prefix, ctx.label.package, go_package_name) - - m_imports = ["M%s=%s" % (_drop_external(f.short_path), source_go_package) - for f in ctx.files.srcs] - - protos, mi = _collect_protos_import(ctx) - m_import_path = ",".join(m_imports + mi) - use_grpc = "plugins=grpc," if ctx.attr.grpc else "" - - # Create work dir, copy all protos there stripping of any external/bazel- prefixes. - work_dir = ctx.outputs.outs[0].path + ".protoc" - root_prefix = "/".join([".." 
for _ in work_dir.split("/")]) - cmds = ["set -e", "/bin/rm -rf %s; /bin/mkdir -p %s" % (work_dir, work_dir)] - srcs = list(ctx.files.srcs) - dirs = depset([s.short_path[:-1-len(s.basename)] for s in srcs + protos]) - cmds += ["mkdir -p %s/%s" % (work_dir, _drop_external(d)) for d in dirs if d] - - if ctx.attr.ignore_go_package_option: - # Strip the "option go_package" line from the proto file before compiling, - # c.f., https://github.com/bazelbuild/rules_go/issues/323 - # - # NOTE: Using sed does not provide a perfect solution, build will break if - # the go_package option splits multiple lines. Use with caution. - cmds += ["sed '/^ *option *go_package/d' %s > %s/%s" % - (s.path, work_dir, _drop_external(s.short_path)) for s in srcs] - cmds += ["cp %s %s/%s" % (s.path, work_dir, _drop_external(s.short_path)) - for s in protos] - else: - cmds += ["cp %s %s/%s" % (s.path, work_dir, _drop_external(s.short_path)) - for s in srcs + protos] - cmds += ["cd %s" % work_dir, - "%s/%s --go_out=%s%s:. %s" % (root_prefix, ctx.executable.protoc.path, - use_grpc, m_import_path, - " ".join([_drop_external(f.short_path) for f in srcs]))] - cmds += ["cp %s %s/%s" % (_drop_external(p.short_path), root_prefix, p.path) - for p in proto_outs] - run = ctx.new_file(ctx.configuration.bin_dir, ctx.outputs.outs[0].basename + ".run") - ctx.file_action( - output = run, - content = "\n".join(cmds), - executable = True) - - ctx.action( - inputs=srcs + protos + ctx.files.protoc_gen_go + [ctx.executable.protoc, run], - outputs=proto_outs, - progress_message="Generating into %s" % ctx.outputs.outs[0].dirname, - mnemonic="GoProtocGen", - env = {"PATH": root_prefix + "/" + ctx.files.protoc_gen_go[0].dirname + - ":/bin:/usr/bin"}, # /bin/sed for linux, /usr/bin/sed for macos. - executable=run) - return struct(_protos=protos+srcs, - _m_import_path=m_import_path) - -_go_proto_library_gen = rule( - attrs = { - "deps": attr.label_list(), - "srcs": attr.label_list( - mandatory = True, - allow_files = True, - ), - "grpc": attr.int(default = 0), - "outs": attr.output_list(mandatory = True), - "ignore_go_package_option": attr.int(default = 0), - "protoc": attr.label( - executable = True, - single_file = True, - allow_files = True, - cfg = "host", - ), - "protoc_gen_go": attr.label( - allow_files = True, - cfg = "host", - ), - "_protos": attr.label_list(default = []), - "go_prefix": attr.label( - providers = ["go_prefix"], - default = Label( - "//:go_prefix", - relative_to_caller_repository = True, - ), - allow_files = False, - cfg = "host", - ), - }, - output_to_genfiles = True, - implementation = _go_proto_library_gen_impl, -) - -def _add_target_suffix(target, suffix): - idx = target.find(":") - if idx != -1: - return target + suffix - toks = target.split("/") - return target + ":" + toks[-1] + suffix - -def _well_known_proto_deps(deps, repo): - for d in deps: - if d.startswith(_WELL_KNOWN_REPO): - return [repo + "//:" + _GO_GOOGLE_PROTOBUF] - return [] - -def go_proto_library(name, srcs = None, deps = None, - has_services = 0, - testonly = 0, visibility = None, - ignore_go_package_option = 0, - protoc = "@com_github_google_protobuf//:protoc", - protoc_gen_go = "@com_github_golang_protobuf//protoc-gen-go", - x_net_context = "@org_golang_x_net//context:go_default_library", - google_grpc = "@org_golang_google_grpc//:go_default_library", - golang_protobuf = "@com_github_golang_protobuf//proto:go_default_library", - rules_go_repo_only_for_internal_use = "@io_bazel_rules_go", - **kwargs): - """Macro which generates and compiles 
protobufs for Go. - - Args: - name: name assigned to the underlying go_library, - typically "foo_proto" for ["foo.proto"] - srcs: a list of .proto source files, currently only 1 supported - deps: a mixed list of either go_proto_libraries, or - any go_library which has a companion - filegroup(name=name+"_protos",...) - which contains the protos which were used - has_services: indicates the proto has gRPC services and deps - testonly: mark as testonly - visibility: visibility to use on underlying go_library - ignore_go_package_option: if 1, ignore the "option go_package" statement in - the srcs proto files. - protoc: override the default version of protoc. Most users won't need this. - protoc_gen_go: override the default version of protoc_gen_go. - Most users won't need this. - x_net_context: override the default version of the context package. Most - users won't need this. - google_grpc: override the default version of grpc. Most users won't need - this. - golang_protobuf: override the default version of proto. Most users won't - need this. - rules_go_repo_only_for_internal_use: don't use this, only to allow - internal tests to work. - **kwargs: any other args which are passed through to the underlying go_library - """ - if not name: - fail("name is required", "name") - if not srcs: - fail("srcs required", "srcs") - if not deps: - deps = [] - # bazel-style - outs = [name + "/" + s[:-len(".proto")] + ".pb.go" - for s in srcs] - if name == _DEFAULT_LIB: - outs = [s[:-len(".proto")] + ".pb.go" - for s in srcs] - - _go_proto_library_gen( - name = name + _PROTOS_SUFFIX, - srcs = srcs, - deps = [_add_target_suffix(s, _PROTOS_SUFFIX) - for s in deps] + _well_known_proto_deps( - deps, repo=rules_go_repo_only_for_internal_use), - outs = outs, - testonly = testonly, - visibility = visibility, - ignore_go_package_option = ignore_go_package_option, - grpc = has_services, - protoc = protoc, - protoc_gen_go = protoc_gen_go, - ) - grpc_deps = [] - if has_services: - grpc_deps += [x_net_context, google_grpc] - go_library( - name = name, - srcs = [":" + name + _PROTOS_SUFFIX], - deps = deps + grpc_deps + [golang_protobuf], - testonly = testonly, - visibility = visibility, - **kwargs - ) - -def _well_known_import_key(name): - return "%s%s:go_default_library" % (_WELL_KNOWN_REPO, name) - -_well_known_imports = [ - "any", - "duration", - "empty", - "struct", - "timestamp", - "wrappers", -] - -# If you have well_known proto deps, rules_go will add a magic -# google/protobuf/ directory at the import root -def go_google_protobuf(name = _GO_GOOGLE_PROTOBUF): - deps = [_add_target_suffix(_well_known_import_key(wk), _PROTOS_SUFFIX) - for wk in _well_known_imports] - outs = [wk + ".proto" for wk in _well_known_imports] - - native.genrule( - name = name, - srcs = deps, - outs = ["google/protobuf/"+o for o in outs], - cmd = "cp $(SRCS) $(@D)/google/protobuf/", - visibility = ["//visibility:public"], - ) - -def go_proto_repositories(shared = 1): - """Add this to your WORKSPACE to pull in all of the needed dependencies.""" - print("DEPRECATED: go_proto_repositories is redundant and will be removed soon") diff --git a/starlark/src/syntax/testcases/go_repository.bzl b/starlark/src/syntax/testcases/go_repository.bzl deleted file mode 100644 index 80daba0b..00000000 --- a/starlark/src/syntax/testcases/go_repository.bzl +++ /dev/null @@ -1,127 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load("@io_bazel_rules_go//go/private:common.bzl", "env_execute") -load("@io_bazel_rules_go//go/private:toolchain.bzl", "executable_extension") - -def _go_repository_impl(ctx): - if ctx.attr.urls: - # explicit source url - if ctx.attr.vcs: - fail("cannot specify both of urls and vcs", "vcs") - if ctx.attr.commit: - fail("cannot specify both of urls and commit", "commit") - if ctx.attr.tag: - fail("cannot specify both of urls and tag", "tag") - ctx.download_and_extract( - url = ctx.attr.urls, - sha256 = ctx.attr.sha256, - stripPrefix = ctx.attr.strip_prefix, - type = ctx.attr.type, - ) - else: - if ctx.attr.commit and ctx.attr.tag: - fail("cannot specify both of commit and tag", "commit") - if ctx.attr.commit: - rev = ctx.attr.commit - elif ctx.attr.tag: - rev = ctx.attr.tag - else: - fail("neither commit or tag is specified", "commit") - - # Using fetch repo - if ctx.attr.vcs and not ctx.attr.remote: - fail("if vcs is specified, remote must also be") - - fetch_repo_env = { - "PATH": ctx.os.environ["PATH"], # to find git - } - if "SSH_AUTH_SOCK" in ctx.os.environ: - fetch_repo_env["SSH_AUTH_SOCK"] = ctx.os.environ["SSH_AUTH_SOCK"] - if "HTTP_PROXY" in ctx.os.environ: - fetch_repo_env["HTTP_PROXY"] = ctx.os.environ["HTTP_PROXY"] - if "HTTPS_PROXY" in ctx.os.environ: - fetch_repo_env["HTTPS_PROXY"] = ctx.os.environ["HTTPS_PROXY"] - - # TODO(yugui): support submodule? - # c.f. 
https://www.bazel.io/versions/master/docs/be/workspace.html#git_repository.init_submodules - _fetch_repo = "@io_bazel_rules_go_repository_tools//:bin/fetch_repo{}".format(executable_extension(ctx)) - result = env_execute( - ctx, - [ - ctx.path(Label(_fetch_repo)), - '--dest', ctx.path(''), - '--remote', ctx.attr.remote, - '--rev', rev, - '--vcs', ctx.attr.vcs, - '--importpath', ctx.attr.importpath, - ], - environment = fetch_repo_env, - ) - if result.return_code: - fail("failed to fetch %s: %s" % (ctx.name, result.stderr)) - - generate = ctx.attr.build_file_generation == "on" - if ctx.attr.build_file_generation == "auto": - generate = True - for name in ['BUILD', 'BUILD.bazel', ctx.attr.build_file_name]: - path = ctx.path(name) - if path.exists and not env_execute(ctx, ['test', '-f', path]).return_code: - generate = False - break - if generate: - # Build file generation is needed - _gazelle = "@io_bazel_rules_go_repository_tools//:bin/gazelle{}".format(executable_extension(ctx)) - gazelle = ctx.path(Label(_gazelle)) - cmds = [gazelle, '--go_prefix', ctx.attr.importpath, '--mode', 'fix', - '--repo_root', ctx.path(''), - "--build_tags", ",".join(ctx.attr.build_tags), - "--external", ctx.attr.build_external, - "--proto", ctx.attr.build_file_proto_mode] - if ctx.attr.build_file_name: - cmds.extend(["--build_file_name", ctx.attr.build_file_name]) - cmds.append(ctx.path('')) - result = env_execute(ctx, cmds) - if result.return_code: - fail("failed to generate BUILD files for %s: %s" % ( - ctx.attr.importpath, result.stderr)) - - -go_repository = repository_rule( - implementation = _go_repository_impl, - attrs = { - # Fundamental attributes of a go repository - "importpath": attr.string(mandatory = True), - "commit": attr.string(), - "tag": attr.string(), - - # Attributes for a repository that cannot be inferred from the import path - "vcs": attr.string(default="", values=["", "git", "hg", "svn", "bzr"]), - "remote": attr.string(), - - # Attributes for a repository that comes from a source blob not a vcs - "urls": attr.string_list(), - "strip_prefix": attr.string(), - "type": attr.string(), - "sha256": attr.string(), - - # Attributes for a repository that needs automatic build file generation - "build_external": attr.string(default="external", values=["external", "vendored"]), - "build_file_name": attr.string(default="BUILD.bazel,BUILD"), - "build_file_generation": attr.string(default="auto", values=["on", "auto", "off"]), - "build_tags": attr.string_list(), - "build_file_proto_mode": attr.string(default="default", values=["default", "disable", "legacy"]), - }, -) -"""See go/workspace.rst#go-repository for full documentation.""" diff --git a/starlark/src/syntax/testcases/go_toolchain.bzl b/starlark/src/syntax/testcases/go_toolchain.bzl deleted file mode 100644 index 182b98cf..00000000 --- a/starlark/src/syntax/testcases/go_toolchain.bzl +++ /dev/null @@ -1,223 +0,0 @@ -# Copyright 2016 The Bazel Go Rules Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-""" -Toolchain rules used by go. -""" -load("@io_bazel_rules_go//go/private:actions/archive.bzl", "emit_archive") -load("@io_bazel_rules_go//go/private:actions/asm.bzl", "emit_asm") -load("@io_bazel_rules_go//go/private:actions/binary.bzl", "emit_binary") -load("@io_bazel_rules_go//go/private:actions/compile.bzl", "emit_compile", "bootstrap_compile") -load("@io_bazel_rules_go//go/private:actions/cover.bzl", "emit_cover") -load("@io_bazel_rules_go//go/private:actions/library.bzl", "emit_library") -load("@io_bazel_rules_go//go/private:actions/link.bzl", "emit_link", "bootstrap_link") -load("@io_bazel_rules_go//go/private:actions/pack.bzl", "emit_pack") -load("@io_bazel_rules_go//go/private:providers.bzl", "GoStdLib") -load("@io_bazel_rules_go//go/platform:list.bzl", "GOOS_GOARCH") -load("@io_bazel_rules_go//go/private:mode.bzl", "mode_string") - -def _get_stdlib(ctx, go_toolchain, mode): - for stdlib in go_toolchain.stdlib.all: - stdlib = stdlib[GoStdLib] - if (stdlib.goos == mode.goos and - stdlib.goarch == mode.goarch and - stdlib.race == mode.race and - stdlib.pure == mode.pure): - return stdlib - fail("No matching standard library for "+mode_string(mode)) - -def _goos_to_extension(goos): - if goos == "windows": - return ".exe" - return "" - -def _go_toolchain_impl(ctx): - return [platform_common.ToolchainInfo( - name = ctx.label.name, - cross_compile = ctx.attr.cross_compile, - default_goos = ctx.attr.goos, - default_goarch = ctx.attr.goarch, - stdlib = struct( - all = ctx.attr._stdlib_all, - get = _get_stdlib, - ), - actions = struct( - archive = emit_archive, - asm = emit_asm, - binary = emit_binary, - compile = emit_compile if ctx.executable._compile else bootstrap_compile, - cover = emit_cover, - library = emit_library, - link = emit_link if ctx.executable._link else bootstrap_link, - pack = emit_pack, - ), - tools = struct( - asm = ctx.executable._asm, - compile = ctx.executable._compile, - pack = ctx.executable._pack, - link = ctx.executable._link, - cgo = ctx.executable._cgo, - test_generator = ctx.executable._test_generator, - cover = ctx.executable._cover, - ), - flags = struct( - compile = (), - link = ctx.attr.link_flags, - link_cgo = ctx.attr.cgo_link_flags, - ), - data = struct( - crosstool = ctx.files._crosstool, - package_list = ctx.file._package_list, - extension = _goos_to_extension(ctx.attr.goos), - ), - )] - -def _stdlib_all(): - stdlibs = [] - for goos, goarch in GOOS_GOARCH: - stdlibs.extend([ - Label("@go_stdlib_{}_{}_cgo".format(goos, goarch)), - Label("@go_stdlib_{}_{}_pure".format(goos, goarch)), - Label("@go_stdlib_{}_{}_cgo_race".format(goos, goarch)), - Label("@go_stdlib_{}_{}_pure_race".format(goos, goarch)), - ]) - return stdlibs - -def _asm(bootstrap): - if bootstrap: - return None - return Label("//go/tools/builders:asm") - -def _compile(bootstrap): - if bootstrap: - return None - return Label("//go/tools/builders:compile") - -def _pack(bootstrap): - if bootstrap: - return None - return Label("//go/tools/builders:pack") - -def _link(bootstrap): - if bootstrap: - return None - return Label("//go/tools/builders:link") - -def _cgo(bootstrap): - if bootstrap: - return None - return Label("//go/tools/builders:cgo") - -def _test_generator(bootstrap): - if bootstrap: - return None - return Label("//go/tools/builders:generate_test_main") - -def _cover(bootstrap): - if bootstrap: - return None - return Label("//go/tools/builders:cover") - -_go_toolchain = rule( - _go_toolchain_impl, - attrs = { - # Minimum requirements to specify a toolchain - "goos": 
attr.string(mandatory = True), - "goarch": attr.string(mandatory = True), - "cross_compile": attr.bool(default = False), - # Optional extras to a toolchain - "link_flags": attr.string_list(default = []), - "cgo_link_flags": attr.string_list(default = []), - "bootstrap": attr.bool(default = False), - # Tools, missing from bootstrap toolchains - "_asm": attr.label(allow_files = True, single_file = True, executable = True, cfg = "host", default = _asm), - "_compile": attr.label(allow_files = True, single_file = True, executable = True, cfg = "host", default = _compile), - "_pack": attr.label(allow_files = True, single_file = True, executable = True, cfg = "host", default = _pack), - "_link": attr.label(allow_files = True, single_file = True, executable = True, cfg = "host", default = _link), - "_cgo": attr.label(allow_files = True, single_file = True, executable = True, cfg = "host", default = _cgo), - "_test_generator": attr.label(allow_files = True, single_file = True, executable = True, cfg = "host", default = _test_generator), - "_cover": attr.label(allow_files = True, single_file = True, executable = True, cfg = "host", default = _cover), - # Hidden internal attributes - "_stdlib_all": attr.label_list(default = _stdlib_all()), - "_crosstool": attr.label(default=Label("//tools/defaults:crosstool")), - "_package_list": attr.label(allow_files = True, single_file = True, default="@go_sdk//:packages.txt"), - }, -) - -def go_toolchain(name, target, host=None, constraints=[], **kwargs): - """See go/toolchains.rst#go-toolchain for full documentation.""" - - if not host: host = target - cross = host != target - goos, _, goarch = target.partition("_") - target_constraints = constraints + [ - "@io_bazel_rules_go//go/toolchain:" + goos, - "@io_bazel_rules_go//go/toolchain:" + goarch, - ] - host_goos, _, host_goarch = host.partition("_") - exec_constraints = [ - "@io_bazel_rules_go//go/toolchain:" + host_goos, - "@io_bazel_rules_go//go/toolchain:" + host_goarch, - ] - - impl_name = name + "-impl" - _go_toolchain( - name = impl_name, - goos = goos, - goarch = goarch, - cross_compile = cross, - bootstrap = False, - tags = ["manual"], - visibility = ["//visibility:public"], - **kwargs - ) - native.toolchain( - name = name, - toolchain_type = "@io_bazel_rules_go//go:toolchain", - exec_compatible_with = exec_constraints, - target_compatible_with = target_constraints, - toolchain = ":"+impl_name, - ) - - if not cross: - # If not cross, register a bootstrap toolchain - name = name + "-bootstrap" - impl_name = name + "-impl" - _go_toolchain( - name = impl_name, - goos = goos, - goarch = goarch, - bootstrap = True, - tags = ["manual"], - visibility = ["//visibility:public"], - **kwargs - ) - native.toolchain( - name = name, - toolchain_type = "@io_bazel_rules_go//go:bootstrap_toolchain", - exec_compatible_with = exec_constraints, - target_compatible_with = target_constraints, - toolchain = ":"+impl_name, - ) - -def _go_toolchain_flags(ctx): - return struct( - strip = ctx.attr.strip, - ) - -go_toolchain_flags = rule( - _go_toolchain_flags, - attrs = { - "strip": attr.string(mandatory=True), - }, -) diff --git a/starlark/src/syntax/testcases/guava.bzl b/starlark/src/syntax/testcases/guava.bzl deleted file mode 100644 index c71379eb..00000000 --- a/starlark/src/syntax/testcases/guava.bzl +++ /dev/null @@ -1,5 +0,0 @@ -GUAVA_VERSION = "21.0" - -GUAVA_BIN_SHA1 = "3a3d111be1be1b745edfa7d91678a12d7ed38709" - -GUAVA_DOC_URL = "https://google.github.io/guava/releases/" + GUAVA_VERSION + "/api/docs/" diff --git 
a/starlark/src/syntax/testcases/gwt.bzl b/starlark/src/syntax/testcases/gwt.bzl deleted file mode 100644 index deeb5d58..00000000 --- a/starlark/src/syntax/testcases/gwt.bzl +++ /dev/null @@ -1,308 +0,0 @@ -# Copyright (C) 2016 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Port of Buck native gwt_binary() rule. See discussion in context of -# https://github.com/facebook/buck/issues/109 -load("//tools/bzl:genrule2.bzl", "genrule2") -load("//tools/bzl:java.bzl", "java_library2") - -jar_filetype = FileType([".jar"]) - -BROWSERS = [ - "chrome", - "firefox", - "gecko1_8", - "safari", - "msie", - "ie8", - "ie9", - "ie10", - "edge", -] - -ALIASES = { - "chrome": "safari", - "firefox": "gecko1_8", - "msie": "ie10", - "edge": "gecko1_8", -} - -MODULE = "com.google.gerrit.GerritGwtUI" - -GWT_COMPILER = "com.google.gwt.dev.Compiler" - -GWT_JVM_ARGS = ["-Xmx512m"] - -GWT_COMPILER_ARGS = [ - "-XdisableClassMetadata", -] - -GWT_COMPILER_ARGS_RELEASE_MODE = GWT_COMPILER_ARGS + [ - "-XdisableCastChecking", -] - -PLUGIN_DEPS_NEVERLINK = [ - "//gerrit-plugin-api:lib-neverlink", -] - -GWT_PLUGIN_DEPS_NEVERLINK = [ - "//gerrit-plugin-gwtui:gwtui-api-lib-neverlink", - "//lib/gwt:user-neverlink", -] - -GWT_PLUGIN_DEPS = [ - "//gerrit-plugin-gwtui:gwtui-api-lib", -] - -GWT_TRANSITIVE_DEPS = [ - "//lib/gwt:ant", - "//lib/gwt:colt", - "//lib/gwt:javax-validation", - "//lib/gwt:javax-validation_src", - "//lib/gwt:jsinterop-annotations", - "//lib/gwt:jsinterop-annotations_src", - "//lib/gwt:tapestry", - "//lib/gwt:w3c-css-sac", - "//lib/ow2:ow2-asm", - "//lib/ow2:ow2-asm-analysis", - "//lib/ow2:ow2-asm-commons", - "//lib/ow2:ow2-asm-tree", - "//lib/ow2:ow2-asm-util", -] - -DEPS = GWT_TRANSITIVE_DEPS + [ - "//gerrit-gwtexpui:CSS", - "//lib:gwtjsonrpc", - "//lib/gwt:dev", - "//lib/jgit/org.eclipse.jgit:jgit-source", -] - -USER_AGENT_XML = """ - - - - -""" - -def gwt_module(gwt_xml=None, resources=[], srcs=[], **kwargs): - if gwt_xml: - resources += [gwt_xml] - - java_library2( - srcs = srcs, - resources = resources, - **kwargs) - -def _gwt_user_agent_module(ctx): - """Generate user agent specific GWT module.""" - if not ctx.attr.user_agent: - return None - - ua = ctx.attr.user_agent - impl = ua - if ua in ALIASES: - impl = ALIASES[ua] - - # intermediate artifact: user agent speific GWT xml file - gwt_user_agent_xml = ctx.new_file(ctx.label.name + "_gwt.xml") - ctx.file_action(output = gwt_user_agent_xml, - content=USER_AGENT_XML % (MODULE, impl)) - - # intermediate artifact: user agent specific zip with GWT module - gwt_user_agent_zip = ctx.new_file(ctx.label.name + "_gwt.zip") - gwt = '%s_%s.gwt.xml' % (MODULE.replace('.', '/'), ua) - dir = gwt_user_agent_zip.path + ".dir" - cmd = " && ".join([ - "p=$PWD", - "mkdir -p %s" % dir, - "cd %s" % dir, - "mkdir -p $(dirname %s)" % gwt, - "cp $p/%s %s" % (gwt_user_agent_xml.path, gwt), - "$p/%s cC $p/%s $(find . 
| sed 's|^./||')" % (ctx.executable._zip.path, gwt_user_agent_zip.path) - ]) - ctx.action( - inputs = [gwt_user_agent_xml] + ctx.files._zip, - outputs = [gwt_user_agent_zip], - command = cmd, - mnemonic = "GenerateUserAgentGWTModule") - - return struct( - zip=gwt_user_agent_zip, - module=MODULE + '_' + ua - ) - -def _gwt_binary_impl(ctx): - module = ctx.attr.module[0] - output_zip = ctx.outputs.output - output_dir = output_zip.path + '.gwt_output' - deploy_dir = output_zip.path + '.gwt_deploy' - - deps = _get_transitive_closure(ctx) - - paths = [] - for dep in deps: - paths.append(dep.path) - - gwt_user_agent_modules = [] - ua = _gwt_user_agent_module(ctx) - if ua: - paths.append(ua.zip.path) - gwt_user_agent_modules.append(ua.zip) - module = ua.module - - cmd = "external/local_jdk/bin/java %s -Dgwt.normalizeTimestamps=true -cp %s %s -war %s -deploy %s " % ( - " ".join(ctx.attr.jvm_args), - ":".join(paths), - GWT_COMPILER, - output_dir, - deploy_dir, - ) - # TODO(davido): clean up command concatenation - cmd += " ".join([ - "-style %s" % ctx.attr.style, - "-optimize %s" % ctx.attr.optimize, - "-strict", - " ".join(ctx.attr.compiler_args), - module + "\n", - "rm -rf %s/gwt-unitCache\n" % output_dir, - "root=`pwd`\n", - "cd %s; $root/%s Cc ../%s $(find .)\n" % ( - output_dir, - ctx.executable._zip.path, - output_zip.basename, - ) - ]) - - ctx.action( - inputs = list(deps) + ctx.files._jdk + ctx.files._zip + gwt_user_agent_modules, - outputs = [output_zip], - mnemonic = "GwtBinary", - progress_message = "GWT compiling " + output_zip.short_path, - command = "set -e\n" + cmd, - ) - -def _get_transitive_closure(ctx): - deps = set() - for dep in ctx.attr.module_deps: - deps += dep.java.transitive_runtime_deps - deps += dep.java.transitive_source_jars - for dep in ctx.attr.deps: - if hasattr(dep, 'java'): - deps += dep.java.transitive_runtime_deps - elif hasattr(dep, 'files'): - deps += dep.files - - return deps - -gwt_binary = rule( - attrs = { - "user_agent": attr.string(), - "style": attr.string(default = "OBF"), - "optimize": attr.string(default = "9"), - "deps": attr.label_list(allow_files = jar_filetype), - "module": attr.string_list(default = [MODULE]), - "module_deps": attr.label_list(allow_files = jar_filetype), - "compiler_args": attr.string_list(), - "jvm_args": attr.string_list(), - "_jdk": attr.label( - default = Label("//tools/defaults:jdk"), - ), - "_zip": attr.label( - default = Label("@bazel_tools//tools/zip:zipper"), - cfg = "host", - executable = True, - single_file = True, - ), - }, - outputs = { - "output": "%{name}.zip", - }, - implementation = _gwt_binary_impl, -) - -def gwt_genrule(suffix = ""): - dbg = 'ui_dbg' + suffix - opt = 'ui_opt' + suffix - module_dep = ':ui_module' + suffix - args = GWT_COMPILER_ARGS_RELEASE_MODE if suffix == "_r" else GWT_COMPILER_ARGS - - genrule2( - name = 'ui_optdbg' + suffix, - srcs = [ - ':' + dbg, - ':' + opt, - ], - cmd = 'cd $$TMP;' + - 'unzip -q $$ROOT/$(location :%s);' % dbg + - 'mv' + - ' gerrit_ui/gerrit_ui.nocache.js' + - ' gerrit_ui/dbg_gerrit_ui.nocache.js;' + - 'unzip -qo $$ROOT/$(location :%s);' % opt + - 'mkdir -p $$(dirname $@);' + - 'zip -qrD $$ROOT/$@ .', - outs = ['ui_optdbg' + suffix + '.zip'], - visibility = ['//visibility:public'], - ) - - gwt_binary( - name = opt, - module = [MODULE], - module_deps = [module_dep], - deps = DEPS, - compiler_args = args, - jvm_args = GWT_JVM_ARGS, - ) - - gwt_binary( - name = dbg, - style = 'PRETTY', - optimize = "0", - module_deps = [module_dep], - deps = DEPS, - compiler_args = 
GWT_COMPILER_ARGS, - jvm_args = GWT_JVM_ARGS, - ) - -def gen_ui_module(name, suffix = ""): - gwt_module( - name = name + suffix, - srcs = native.glob(['src/main/java/**/*.java']), - gwt_xml = 'src/main/java/%s.gwt.xml' % MODULE.replace('.', '/'), - resources = native.glob( - ['src/main/java/**/*'], - exclude = ['src/main/java/**/*.java'] + - ['src/main/java/%s.gwt.xml' % MODULE.replace('.', '/')]), - deps = [ - '//gerrit-gwtui-common:diffy_logo', - '//gerrit-gwtui-common:client', - '//gerrit-gwtexpui:CSS', - '//lib/codemirror:codemirror' + suffix, - '//lib/gwt:user', - ], - visibility = ['//visibility:public'], - ) - -def gwt_user_agent_permutations(): - for ua in BROWSERS: - gwt_binary( - name = "ui_%s" % ua, - user_agent = ua, - style = 'PRETTY', - optimize = "0", - module = [MODULE], - module_deps = [':ui_module'], - deps = DEPS, - compiler_args = GWT_COMPILER_ARGS, - jvm_args = GWT_JVM_ARGS, - ) diff --git a/starlark/src/syntax/testcases/hash.bzl b/starlark/src/syntax/testcases/hash.bzl deleted file mode 100644 index 62741540..00000000 --- a/starlark/src/syntax/testcases/hash.bzl +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Functions for producing the hash of an artifact.""" - -def sha256(ctx, artifact): - """Create an action to compute the SHA-256 of an artifact.""" - out = ctx.new_file(artifact.basename + ".sha256") - ctx.action( - executable = ctx.executable.sha256, - arguments = [artifact.path, out.path], - inputs = [artifact], - outputs = [out], - mnemonic = "SHA256") - return out - - -tools = { - "sha256": attr.label( - default=Label("//tools/build_defs/hash:sha256"), - cfg="host", - executable=True, - allow_files=True) -} diff --git a/starlark/src/syntax/testcases/hello.bzl b/starlark/src/syntax/testcases/hello.bzl deleted file mode 100644 index ffb6b073..00000000 --- a/starlark/src/syntax/testcases/hello.bzl +++ /dev/null @@ -1,20 +0,0 @@ -# Label of the template file to use. -_TEMPLATE = "//expand_template:hello.cc" - -def _hello_impl(ctx): - ctx.actions.expand_template( - template=ctx.file._template, - output=ctx.outputs.source_file, - substitutions={ - "{FIRSTNAME}": ctx.attr.firstname - }) - -hello = rule( - implementation=_hello_impl, - attrs={ - "firstname": attr.string(mandatory=True), - "_template": attr.label( - default=Label(_TEMPLATE), allow_files=True, single_file=True), - }, - outputs={"source_file": "%{name}.cc"}, -) diff --git a/starlark/src/syntax/testcases/http.bzl b/starlark/src/syntax/testcases/http.bzl deleted file mode 100644 index 28c4a79b..00000000 --- a/starlark/src/syntax/testcases/http.bzl +++ /dev/null @@ -1,217 +0,0 @@ -# Copyright 2016 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Rules for downloading files and archives over HTTP. - -### Setup - -To use these rules, load them in your `WORKSPACE` file as follows: - -```python -load( - "@bazel_tools//tools/build_defs/repo:http.bzl", - "http_archive", - "http_file", -) -``` - -These rules are improved versions of the native http rules and will eventually -replace the native rules. -""" - - -def _http_archive_impl(ctx): - """Implementation of the http_archive rule.""" - if ctx.attr.build_file and ctx.attr.build_file_content: - ctx.fail("Only one of build_file and build_file_content can be provided.") - - ctx.download_and_extract(ctx.attr.urls, "", ctx.attr.sha256, ctx.attr.type, - ctx.attr.strip_prefix) - ctx.file("WORKSPACE", "workspace(name = \"{name}\")\n".format(name=ctx.name)) - if ctx.attr.build_file: - print("ctx.attr.build_file %s" % str(ctx.attr.build_file)) - ctx.symlink(ctx.attr.build_file, "BUILD") - else: - ctx.file("BUILD", ctx.attr.build_file_content) - - -_HTTP_FILE_BUILD = """ -package(default_visibility = ["//visibility:public"]) - -filegroup( - name = "file", - srcs = ["downloaded"], -) -""" - -def _http_file_impl(ctx): - """Implementation of the http_file rule.""" - ctx.download(ctx.attr.urls, "file/downloaded", ctx.attr.sha256, - ctx.attr.executable) - ctx.file("WORKSPACE", "workspace(name = \"{name}\")".format(name=ctx.name)) - ctx.file("file/BUILD", _HTTP_FILE_BUILD) - - -_http_archive_attrs = { - "urls": attr.string_list(mandatory=True), - "sha256": attr.string(), - "strip_prefix": attr.string(), - "type": attr.string(), - "build_file": attr.label(), - "build_file_content": attr.string(), -} - - -http_archive = repository_rule( - implementation = _http_archive_impl, - attrs = _http_archive_attrs, -) -"""Downloads a Bazel repository as a compressed archive file, decompresses it, -and makes its targets available for binding. - -The repository should already contain a BUILD file. If it does not, use -`new_http_archive` instead. - -It supports the following file extensions: `"zip"`, `"jar"`, `"war"`, -`"tar.gz"`, `"tgz"`, `"tar.xz"`, and `tar.bz2`. - -Examples: - Suppose the current repository contains the source code for a chat program, - rooted at the directory `~/chat-app`. It needs to depend on an SSL library - which is available from http://example.com/openssl.zip. This `.zip` file - contains the following directory structure: - - ``` - WORKSPACE - src/ - openssl.cc - openssl.h - ``` - - In the local repository, the user creates a `openssl.BUILD` file which - contains the following target definition: - - ```python - cc_library( - name = "openssl-lib", - srcs = ["src/openssl.cc"], - hdrs = ["src/openssl.h"], - ) - ``` - - Targets in the `~/chat-app` repository can depend on this target if the - following lines are added to `~/chat-app/WORKSPACE`: - - ```python - http_archive( - name = "my_ssl", - urls = ["http://example.com/openssl.zip"], - sha256 = "03a58ac630e59778f328af4bcc4acb4f80208ed4", - build_file = "//:openssl.BUILD", - ) - ``` - - Then targets would specify `@my_ssl//:openssl-lib` as a dependency. - -Args: - name: A unique name for this rule. 
- build_file: The file to use as the `BUILD` file for this repository. - - Either `build_file` or `build_file_content` can be specified. - - This attribute is a label relative to the main workspace. The file does not - need to be named `BUILD`, but can be (something like `BUILD.new-repo-name` - may work well for distinguishing it from the repository's actual `BUILD` - files. - - build_file_content: The content for the BUILD file for this repository. - - Either `build_file` or `build_file_content` can be specified. - sha256: The expected SHA-256 of the file downloaded. - - This must match the SHA-256 of the file downloaded. _It is a security risk - to omit the SHA-256 as remote files can change._ At best omitting this - field will make your build non-hermetic. It is optional to make development - easier but should be set before shipping. - strip_prefix: A directory prefix to strip from the extracted files. - - Many archives contain a top-level directory that contains all of the useful - files in archive. Instead of needing to specify this prefix over and over - in the `build_file`, this field can be used to strip it from all of the - extracted files. - - For example, suppose you are using `foo-lib-latest.zip`, which contains the - directory `foo-lib-1.2.3/` under which there is a `WORKSPACE` file and are - `src/`, `lib/`, and `test/` directories that contain the actual code you - wish to build. Specify `strip_prefix = "foo-lib-1.2.3"` to use the - `foo-lib-1.2.3` directory as your top-level directory. - - Note that if there are files outside of this directory, they will be - discarded and inaccessible (e.g., a top-level license file). This includes - files/directories that start with the prefix but are not in the directory - (e.g., `foo-lib-1.2.3.release-notes`). If the specified prefix does not - match a directory in the archive, Bazel will return an error. - type: The archive type of the downloaded file. - - By default, the archive type is determined from the file extension of the - URL. If the file has no extension, you can explicitly specify one of the - following: `"zip"`, `"jar"`, `"war"`, `"tar.gz"`, `"tgz"`, `"tar.xz"`, - or `tar.bz2`. - urls: A URL to a file that will be made available to Bazel. - - This must be an file, http or https URL. Redirections are followed. - Authentication is not supported. -""" - - -http_file = repository_rule( - implementation = _http_file_impl, - attrs = { - "executable": attr.bool(), - "sha256": attr.string(), - "urls": attr.string_list(mandatory=True), - }, -) -"""Downloads a file from a URL and makes it available to be used as a file -group. - -Examples: - Suppose you need to have a debian package for your custom rules. This package - is available from http://example.com/package.deb. Then you can add to your - WORKSPACE file: - - ```python - http_file( - name = "my_deb", - urls = ["http://example.com/package.deb"], - sha256 = "03a58ac630e59778f328af4bcc4acb4f80208ed4", - ) - ``` - - Targets would specify `@my_deb//file` as a dependency to depend on this file. - -Args: - name: A unique name for this rule. - executable: If the downloaded file should be made executable. Defaults to - False. - sha256: The expected SHA-256 of the file downloaded. - - This must match the SHA-256 of the file downloaded. _It is a security risk - to omit the SHA-256 as remote files can change._ At best omitting this - field will make your build non-hermetic. It is optional to make development - easier but should be set before shipping. 
- urls: A URL to a file that will be made available to Bazel. - - This must be an file, http, or https URL. Redirections are followed. - Authentication is not supported. -""" diff --git a/starlark/src/syntax/testcases/image.bzl b/starlark/src/syntax/testcases/image.bzl deleted file mode 100644 index a64a12c9..00000000 --- a/starlark/src/syntax/testcases/image.bzl +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright 2017 Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""A rule for creating a D container image. - -The signature of this rule is compatible with d_binary. -""" - -load( - "//lang:image.bzl", - "dep_layer", - "app_layer", -) -load( - "//cc:image.bzl", - "DEFAULT_BASE", - _repositories = "repositories", -) -load("@io_bazel_rules_d//d:d.bzl", "d_binary") - -def repositories(): - _repositories() - -def d_image(name, base=None, deps=[], layers=[], **kwargs): - """Constructs a container image wrapping a d_binary target. - - Args: - layers: Augments "deps" with dependencies that should be put into - their own layers. - **kwargs: See d_binary. - """ - binary_name = name + "_binary" - - if layers: - print("d_image does not benefit from layers=[], got: %s" % layers) - - d_binary(name=binary_name, deps=deps + layers, **kwargs) - - index = 0 - base = base or DEFAULT_BASE - for dep in layers: - this_name = "%s_%d" % (name, index) - dep_layer(name=this_name, base=base, dep=dep) - base = this_name - index += 1 - - app_layer(name=name, base=base, binary=binary_name, layers=layers) diff --git a/starlark/src/syntax/testcases/import.bzl b/starlark/src/syntax/testcases/import.bzl deleted file mode 100644 index 9d0d5ebb..00000000 --- a/starlark/src/syntax/testcases/import.bzl +++ /dev/null @@ -1,113 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Rule for importing a container image.""" - -load( - "//skylib:filetype.bzl", - tgz_filetype = "tgz", -) -load( - "@bazel_tools//tools/build_defs/hash:hash.bzl", - _hash_tools = "tools", - _sha256 = "sha256", -) -load( - "//skylib:zip.bzl", - _gunzip = "gunzip", -) -load( - "//container:layers.bzl", - _assemble_image = "assemble", - _incr_load = "incremental_load", - _layer_tools = "tools", -) -load( - "//skylib:path.bzl", - "dirname", - "strip_prefix", - _canonicalize_path = "canonicalize", - _join_path = "join", -) - -def _unzip_layer(ctx, zipped_layer): - unzipped_layer = _gunzip(ctx, zipped_layer) - return unzipped_layer, _sha256(ctx, unzipped_layer) - -def _repository_name(ctx): - """Compute the repository name for the current rule.""" - return _join_path(ctx.attr.repository, ctx.label.package) - -def _container_import_impl(ctx): - """Implementation for the container_import rule.""" - - blobsums = [] - unzipped_layers = [] - diff_ids = [] - for layer in ctx.files.layers: - blobsums += [_sha256(ctx, layer)] - unzipped, diff_id = _unzip_layer(ctx, layer) - unzipped_layers += [unzipped] - diff_ids += [diff_id] - - # These are the constituent parts of the Container image, which each - # rule in the chain must preserve. - container_parts = { - # The path to the v2.2 configuration file. - "config": ctx.files.config[0], - "config_digest": _sha256(ctx, ctx.files.config[0]), - - # A list of paths to the layer .tar.gz files - "zipped_layer": ctx.files.layers, - # A list of paths to the layer digests. - "blobsum": blobsums, - - # A list of paths to the layer .tar files - "unzipped_layer": unzipped_layers, - # A list of paths to the layer diff_ids. - "diff_id": diff_ids, - - # We do not have a "legacy" field, because we are importing a - # more efficient form. - } - - # We support incrementally loading or assembling this single image - # with a temporary name given by its build rule. - images = { - _repository_name(ctx) + ":" + ctx.label.name: container_parts - } - - _incr_load(ctx, images, ctx.outputs.executable) - _assemble_image(ctx, images, ctx.outputs.out) - - runfiles = ctx.runfiles( - files = (container_parts["unzipped_layer"] + - container_parts["diff_id"] + - [container_parts["config"], - container_parts["config_digest"]])) - return struct(runfiles = runfiles, - files = depset([ctx.outputs.out]), - container_parts = container_parts) - -container_import = rule( - attrs = { - "config": attr.label(allow_files = [".json"]), - "layers": attr.label_list(allow_files = tgz_filetype), - "repository": attr.string(default = "bazel"), - } + _hash_tools + _layer_tools, - executable = True, - outputs = { - "out": "%{name}.tar", - }, - implementation = _container_import_impl, -) diff --git a/starlark/src/syntax/testcases/info.bzl b/starlark/src/syntax/testcases/info.bzl deleted file mode 100644 index 7a3da0b9..00000000 --- a/starlark/src/syntax/testcases/info.bzl +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -load("@io_bazel_rules_go//go/private:mode.bzl", - "get_mode", -) -load("@io_bazel_rules_go//go/private:actions/action.bzl", - "add_go_env", -) -load("@io_bazel_rules_go//go/private:common.bzl", - "declare_file", -) - -def _go_info_script_impl(ctx): - go_toolchain = ctx.toolchains["@io_bazel_rules_go//go:toolchain"] - mode = get_mode(ctx, ctx.attr._go_toolchain_flags) - stdlib = go_toolchain.stdlib.get(ctx, go_toolchain, mode) - out = declare_file(ctx, ext=".bash") - args = ctx.actions.args() - add_go_env(args, stdlib, mode) - args.add(["-script", "-out", out]) - ctx.actions.run( - inputs = [], - outputs = [out], - mnemonic = "GoInfo", - executable = ctx.executable._go_info, - arguments = [args], - ) - return [ - DefaultInfo( - files = depset([out]), - ), - ] - -_go_info_script = rule( - _go_info_script_impl, - attrs = { - "_go_info": attr.label( - allow_files = True, - single_file = True, - executable = True, - cfg = "host", - default="@io_bazel_rules_go//go/tools/builders:info"), - "_go_toolchain_flags": attr.label(default=Label("@io_bazel_rules_go//go/private:go_toolchain_flags")), - }, - toolchains = ["@io_bazel_rules_go//go:toolchain"], -) - -def go_info(): - _go_info_script( - name = "go_info_script", - tags = ["manual"], - ) - native.sh_binary( - name = "go_info", - srcs = ["go_info_script"], - tags = ["manual"], - ) \ No newline at end of file diff --git a/starlark/src/syntax/testcases/intellij_plugin.bzl b/starlark/src/syntax/testcases/intellij_plugin.bzl deleted file mode 100644 index 207435d8..00000000 --- a/starlark/src/syntax/testcases/intellij_plugin.bzl +++ /dev/null @@ -1,220 +0,0 @@ -"""IntelliJ plugin target rule. - -Creates a plugin jar with the given plugin xml and any -optional plugin xmls. - -To provide optional plugin xmls, use the 'optional_plugin_xml' -rule. These will be renamed, put in the META-INF directory, -and the main plugin xml stamped with optional plugin dependencies -that point to the correct META-INF optional plugin xmls. 
- -optional_plugin_xml( - name = "optional_python_xml", - plugin_xml = "my_optional_python_plugin.xml", - module = "com.idea.python.module.id", -) - -intellij_plugin( - name = "my_plugin", - plugin_xml = ["my_plugin.xml"], - optional_plugin_xmls = [":optional_python_xml"], - deps = [ - ":code_deps", - ], -) - -""" - -optional_plugin_xml_provider = provider() - -def _optional_plugin_xml_impl(ctx): - attr = ctx.attr - optional_plugin_xmls = [] - if ctx.file.plugin_xml: - optional_plugin_xmls.append(struct( - plugin_xml = ctx.file.plugin_xml, - module = attr.module, - )) - return struct( - optional_plugin_xml_data = optional_plugin_xml_provider( - optional_plugin_xmls = optional_plugin_xmls, - ), - ) - -optional_plugin_xml = rule( - implementation = _optional_plugin_xml_impl, - attrs = { - "plugin_xml": attr.label(mandatory=True, allow_single_file=[".xml"]), - "module": attr.string(mandatory=True), - }, -) - -def _merge_optional_plugin_xmls(ctx): - # Collect optional plugin xmls - module_to_xmls = {} - for target in ctx.attr.optional_plugin_xmls: - if not hasattr(target, "optional_plugin_xml_data"): - fail("optional_plugin_xmls only accepts optional_plugin_xml targets") - for xml in target.optional_plugin_xml_data.optional_plugin_xmls: - module = xml.module - plugin_xmls = module_to_xmls.setdefault(module, []) - plugin_xmls.append(xml.plugin_xml) - - # Merge xmls with the same module dependency - module_to_merged_xmls = {} - for module, plugin_xmls in module_to_xmls.items(): - merged_name = "merged_xml_for_" + module + "_" + ctx.label.name + ".xml" - merged_file = ctx.new_file(merged_name) - ctx.action( - executable = ctx.executable._merge_xml_binary, - arguments = ["--output", merged_file.path] + [plugin_xml.path for plugin_xml in plugin_xmls], - inputs = list(plugin_xmls), - outputs = [merged_file], - progress_message = "Merging optional xmls", - mnemonic = "MergeOptionalXmls", - ) - module_to_merged_xmls[module] = merged_file - return module_to_merged_xmls - -def _add_optional_dependencies_to_plugin_xml(ctx, modules): - input_plugin_xml_file = ctx.file.plugin_xml - if not modules: - return input_plugin_xml_file - - # Add optional dependencies into the plugin xml - args = [] - final_plugin_xml_file = ctx.new_file("final_plugin_xml_" + ctx.label.name + ".xml") - args.extend(["--plugin_xml", input_plugin_xml_file.path]) - args.extend(["--output", final_plugin_xml_file.path]) - for module in modules: - args.append(module) - args.append(_filename_for_module_dependency(module)) - ctx.action( - executable = ctx.executable._append_optional_xml_elements, - arguments = args, - inputs = [input_plugin_xml_file], - outputs = [final_plugin_xml_file], - progress_message = "Adding optional dependencies to final plugin xml", - mnemonic = "AddModuleDependencies", - ) - return final_plugin_xml_file - -def _only_file(target): - return list(target.files)[0] - -def _filename_for_module_dependency(module): - """A unique filename for the optional xml dependency for a given module.""" - return "optional-" + module + ".xml" - -def _package_meta_inf_files(ctx, final_plugin_xml_file, module_to_merged_xmls): - jar_name = ctx.attr.jar_name - jar_file = ctx.new_file(jar_name) - - args = [] - args.extend(["--deploy_jar", ctx.file.deploy_jar.path]) - args.extend(["--output", jar_file.path]) - args.extend([final_plugin_xml_file.path, "plugin.xml"]) - for module, merged_xml in module_to_merged_xmls.items(): - args.append(merged_xml.path) - args.append(_filename_for_module_dependency(module)) - ctx.action( - executable = 
ctx.executable._package_meta_inf_files, - arguments = args, - inputs = [ctx.file.deploy_jar, final_plugin_xml_file] + module_to_merged_xmls.values(), - outputs = [jar_file], - mnemonic = "PackagePluginJar", - progress_message = "Packaging plugin jar", - ) - return jar_file - -def _intellij_plugin_jar_impl(ctx): - module_to_merged_xmls = _merge_optional_plugin_xmls(ctx) - final_plugin_xml_file = _add_optional_dependencies_to_plugin_xml(ctx, module_to_merged_xmls.keys()) - jar_file = _package_meta_inf_files(ctx, final_plugin_xml_file, module_to_merged_xmls) - files = set([jar_file]) - return struct( - files = files, - ) - -_intellij_plugin_jar = rule( - implementation = _intellij_plugin_jar_impl, - attrs = { - "deploy_jar": attr.label(mandatory=True, allow_single_file=[".jar"]), - "plugin_xml": attr.label(mandatory=True, allow_single_file=[".xml"]), - "optional_plugin_xmls": attr.label_list(), - "jar_name": attr.string(mandatory=True), - "_merge_xml_binary": attr.label( - default = Label("//build_defs:merge_xml"), - executable = True, - cfg = "host", - ), - "_append_optional_xml_elements": attr.label( - default = Label("//build_defs:append_optional_xml_elements"), - executable = True, - cfg = "host", - ), - "_package_meta_inf_files": attr.label( - default = Label("//build_defs:package_meta_inf_files"), - executable = True, - cfg = "host", - ), - }, -) - -def intellij_plugin(name, deps, plugin_xml, optional_plugin_xmls=[], jar_name=None, **kwargs): - """Creates an intellij plugin from the given deps and plugin.xml. - - Args: - name: The name of the target - deps: Any java dependencies rolled up into the plugin jar. - plugin_xml: An xml file to be placed in META-INF/plugin.jar - optional_plugin_xmls: A list of optional_plugin_xml targets. - jar_name: The name of the final plugin jar, or .jar if None - **kwargs: Any further arguments to be passed to the final target - """ - binary_name = name + "_binary" - deploy_jar = binary_name + "_deploy.jar" - native.java_binary( - name = binary_name, - runtime_deps = deps, - create_executable = 0, - ) - jar_target_name = name + "_intellij_plugin_jar" - _intellij_plugin_jar( - name = jar_target_name, - deploy_jar = deploy_jar, - jar_name = jar_name or (name + ".jar"), - plugin_xml = plugin_xml, - optional_plugin_xmls = optional_plugin_xmls, - ) - # included (with tag) as a hack so that IJwB can recognize this is an intellij plugin - native.java_import( - name = name, - jars = [jar_target_name], - tags = ["intellij-plugin"], - **kwargs) - -def _append_optional_dependencies(name, plugin_xml, module_to_merged_xml): - """Appends optional dependency xml elements to plugin xml.""" - append_elements_tool = "//build_defs:append_optional_xml_elements" - args = [ - "./$(location {append_elements_tool})", - "--plugin_xml=$(location {plugin_xml})", - "--optional_xml_files={merged_optional_xml_files}", - ] - dictionary = {k: _filename_for_module_dependency(k) for k in module_to_merged_xml.keys()} - cmd = " ".join(args).format( - append_elements_tool=append_elements_tool, - plugin_xml=plugin_xml, - merged_optional_xml_files='"%s"' % str(dictionary).replace('"', '\\"'), - ) + "> $@" - - srcs = module_to_merged_xml.values() + [plugin_xml] - - native.genrule( - name = name, - srcs = srcs, - outs = [name + ".xml"], - cmd = cmd, - tools = [append_elements_tool], - ) diff --git a/starlark/src/syntax/testcases/intellij_plugin_debug_target.bzl b/starlark/src/syntax/testcases/intellij_plugin_debug_target.bzl deleted file mode 100644 index 37ec6f6b..00000000 --- 
a/starlark/src/syntax/testcases/intellij_plugin_debug_target.bzl +++ /dev/null @@ -1,126 +0,0 @@ -"""IntelliJ plugin debug target rule used for debugging IntelliJ plugins. - -Creates a plugin target debuggable from IntelliJ. Any files in -the 'deps' attribute are deployed to the plugin sandbox. - -Any files are stripped of their prefix and installed into -/plugins. If you need structure, first put the files -into a pkgfilegroup. The files will be installed relative to the -'plugins' directory if present in the pkgfilegroup prefix. - -intellij_plugin_debug_targets can be nested. - -pkgfilegroup( - name = "foo_files", - srcs = [ - ":my_plugin_jar", - ":my_additional_plugin_files", - ], - prefix = "plugins/foo/lib", -) - -intellij_plugin_debug_target( - name = "my_debug_target", - deps = [ - ":my_jar", - ], -) - -""" - -SUFFIX = ".intellij-plugin-debug-target-deploy-info" - -def _trim_start(path, prefix): - return path[len(prefix):] if path.startswith(prefix) else path - -def _pkgfilegroup_deploy_file(ctx, f): - strip_prefix = ctx.rule.attr.strip_prefix - prefix = ctx.rule.attr.prefix - if strip_prefix == ".": - stripped_relative_path = f.basename - elif strip_prefix.startswith("/"): - stripped_relative_path = _trim_start(f.short_path, strip_prefix[1:]) - else: - stripped_relative_path = _trim_start(f.short_path, PACKAGE_NAME) - stripped_relative_path = _trim_start(stripped_relative_path, strip_prefix) - stripped_relative_path = _trim_start(stripped_relative_path, "/") - - # If there's a 'plugins' directory, make destination relative to that - plugini = prefix.find("plugins/") - plugins_prefix = prefix[plugini + len("plugins/"):] if plugini >= 0 else prefix - - # If the install location is still absolute, fail - if plugins_prefix.startswith("/"): - fail("Cannot compute plugins-relative install directory for pkgfilegroup") - - dest = plugins_prefix + "/" + stripped_relative_path if plugins_prefix else stripped_relative_path - return struct( - src = f, - deploy_location = dest, - ) - -def _flat_deploy_file(f): - return struct( - src = f, - deploy_location = f.basename, - ) - -def _intellij_plugin_debug_target_aspect_impl(target, ctx): - aspect_intellij_plugin_deploy_info = None - - if ctx.rule.kind == "intellij_plugin_debug_target": - aspect_intellij_plugin_deploy_info = target.intellij_plugin_deploy_info - elif ctx.rule.kind == "pkgfilegroup": - aspect_intellij_plugin_deploy_info = struct( - deploy_files = [_pkgfilegroup_deploy_file(ctx, f) for f in target.files], - ) - else: - aspect_intellij_plugin_deploy_info = struct( - deploy_files = [_flat_deploy_file(f) for f in target.files], - ) - - return struct( - files = target.files, - aspect_intellij_plugin_deploy_info = aspect_intellij_plugin_deploy_info, - ) - -_intellij_plugin_debug_target_aspect = aspect( - implementation = _intellij_plugin_debug_target_aspect_impl, -) - -def _build_deploy_info_file(deploy_file): - return struct( - execution_path = deploy_file.src.path, - deploy_location = deploy_file.deploy_location, - ) - -def _intellij_plugin_debug_target_impl(ctx): - files = set() - deploy_files = [] - for target in ctx.attr.deps: - files = files | target.files - deploy_files.extend(target.aspect_intellij_plugin_deploy_info.deploy_files) - deploy_info = struct( - deploy_files = [_build_deploy_info_file(f) for f in deploy_files] - ) - output = ctx.new_file(ctx.label.name + SUFFIX) - ctx.file_action(output, deploy_info.to_proto()) - - # We've already consumed any dependent intellij_plugin_debug_targets into our own, - # do not build or 
report these - files = set([f for f in files if not f.path.endswith(SUFFIX)]) - files = files | set([output]) - - return struct( - files = files, - intellij_plugin_deploy_info = struct( - deploy_files = deploy_files, - ) - ) - -intellij_plugin_debug_target = rule( - implementation = _intellij_plugin_debug_target_impl, - attrs = { - "deps": attr.label_list(aspects = [_intellij_plugin_debug_target_aspect]), - }, -) diff --git a/starlark/src/syntax/testcases/java.bzl b/starlark/src/syntax/testcases/java.bzl deleted file mode 100644 index b0c3619a..00000000 --- a/starlark/src/syntax/testcases/java.bzl +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) 2016 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Syntactic sugar for native java_library() rule: -# accept exported_deps attributes - -def java_library2(deps=[], exported_deps=[], exports=[], **kwargs): - if exported_deps: - deps += exported_deps - exports += exported_deps - native.java_library( - deps = deps, - exports = exports, - **kwargs) diff --git a/starlark/src/syntax/testcases/java_rules_skylark.bzl b/starlark/src/syntax/testcases/java_rules_skylark.bzl deleted file mode 100644 index 4b12958c..00000000 --- a/starlark/src/syntax/testcases/java_rules_skylark.bzl +++ /dev/null @@ -1,263 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -java_filetype = FileType([".java"]) -jar_filetype = FileType([".jar"]) -srcjar_filetype = FileType([".jar", ".srcjar"]) - -# This is a quick and dirty rule to make Bazel compile itself. It's not -# production ready. - -def java_library_impl(ctx): - javac_options = ctx.fragments.java.default_javac_flags - class_jar = ctx.outputs.class_jar - compile_time_jars = depset(order="topological") - runtime_jars = depset(order="topological") - for dep in ctx.attr.deps: - compile_time_jars += dep.compile_time_jars - runtime_jars += dep.runtime_jars - - jars = jar_filetype.filter(ctx.files.jars) - neverlink_jars = jar_filetype.filter(ctx.files.neverlink_jars) - compile_time_jars += jars + neverlink_jars - runtime_jars += jars - compile_time_jars_list = list(compile_time_jars) # TODO: This is weird. 
- - build_output = class_jar.path + ".build_output" - java_output = class_jar.path + ".build_java" - javalist_output = class_jar.path + ".build_java_list" - sources = ctx.files.srcs - - sources_param_file = ctx.new_file(ctx.bin_dir, class_jar, "-2.params") - ctx.file_action( - output = sources_param_file, - content = cmd_helper.join_paths("\n", depset(sources)), - executable = False) - - # Cleaning build output directory - cmd = "set -e;rm -rf " + build_output + " " + java_output + " " + javalist_output + "\n" - cmd += "mkdir " + build_output + " " + java_output + "\n" - files = " @" + sources_param_file.path - - if ctx.files.srcjars: - files += " @" + javalist_output - for file in ctx.files.srcjars: - cmd += "%s tf %s | grep '\.java$' | sed 's|^|%s/|' >> %s\n" % (ctx.file._jar.path, file.path, java_output, javalist_output) - cmd += "unzip %s -d %s >/dev/null\n" % (file.path, java_output) - - if ctx.files.srcs or ctx.files.srcjars: - cmd += ctx.file._javac.path - cmd += " " + " ".join(javac_options) - if compile_time_jars: - cmd += " -classpath '" + cmd_helper.join_paths(ctx.configuration.host_path_separator, compile_time_jars) + "'" - cmd += " -d " + build_output + files + "\n" - - # We haven't got a good story for where these should end up, so - # stick them in the root of the jar. - for r in ctx.files.resources: - cmd += "cp %s %s\n" % (r.path, build_output) - cmd += (ctx.file._jar.path + " cf " + class_jar.path + " -C " + build_output + " .\n" + - "touch " + build_output + "\n") - ctx.action( - inputs = (sources + compile_time_jars_list + [sources_param_file] + - [ctx.file._jar] + ctx.files._jdk + ctx.files.resources + ctx.files.srcjars), - outputs = [class_jar], - mnemonic='JavacBootstrap', - command=cmd, - use_default_shell_env=True) - - runfiles = ctx.runfiles(collect_data = True) - - return struct(files = depset([class_jar]), - compile_time_jars = compile_time_jars + [class_jar], - runtime_jars = runtime_jars + [class_jar], - runfiles = runfiles) - - -def java_binary_impl(ctx): - library_result = java_library_impl(ctx) - - deploy_jar = ctx.outputs.deploy_jar - manifest = ctx.outputs.manifest - build_output = deploy_jar.path + ".build_output" - main_class = ctx.attr.main_class - ctx.file_action( - output = manifest, - content = "Main-Class: " + main_class + "\n", - executable = False) - - # Cleaning build output directory - cmd = "set -e;rm -rf " + build_output + ";mkdir " + build_output + "\n" - for jar in library_result.runtime_jars: - cmd += "unzip -qn " + jar.path + " -d " + build_output + "\n" - cmd += (ctx.file._jar.path + " cmf " + manifest.path + " " + - deploy_jar.path + " -C " + build_output + " .\n" + - "touch " + build_output + "\n") - - ctx.action( - inputs=list(library_result.runtime_jars) + [manifest] + ctx.files._jdk, - outputs=[deploy_jar], - mnemonic='Deployjar', - command=cmd, - use_default_shell_env=True) - - # Write the wrapper. - executable = ctx.outputs.executable - ctx.file_action( - output = executable, - content = '\n'.join([ - "#!/bin/bash", - "# autogenerated - do not edit.", - "case \"$0\" in", - "/*) self=\"$0\" ;;", - "*) self=\"$PWD/$0\";;", - "esac", - "", - "if [[ -z \"$JAVA_RUNFILES\" ]]; then", - " if [[ -e \"${self}.runfiles\" ]]; then", - " export JAVA_RUNFILES=\"${self}.runfiles\"", - " fi", - " if [[ -n \"$JAVA_RUNFILES\" ]]; then", - " export TEST_SRCDIR=${TEST_SRCDIR:-$JAVA_RUNFILES}", - " fi", - "fi", - "", - - "jvm_bin=%s" % (ctx.file._java.path), - "if [[ ! 
-x ${jvm_bin} ]]; then", - " jvm_bin=$(which java)", - "fi", - - # We extract the .so into a temp dir. If only we could mmap - # directly from the zip file. - "DEPLOY=$(dirname $self)/$(basename %s)" % deploy_jar.path, - - # This works both on Darwin and Linux, with the darwin path - # looking like tmp.XXXXXXXX.{random} - "SO_DIR=$(mktemp -d -t tmp.XXXXXXXXX)", - "function cleanup() {", - " rm -rf ${SO_DIR}", - "}", - "trap cleanup EXIT", - "unzip -q -d ${SO_DIR} ${DEPLOY} \"*.so\" \"*.dll\" \"*.dylib\" >& /dev/null", - ("${jvm_bin} -Djava.library.path=${SO_DIR} %s -jar $DEPLOY \"$@\"" - % ' '.join(ctx.attr.jvm_flags)) , - "", - ]), - executable = True) - - runfiles = ctx.runfiles(files = [deploy_jar, executable] + ctx.files._jdk, collect_data = True) - files_to_build = depset([deploy_jar, manifest, executable]) - files_to_build += library_result.files - - return struct(files = files_to_build, runfiles = runfiles) - - -def java_import_impl(ctx): - # TODO(bazel-team): Why do we need to filter here? The attribute - # already says only jars are allowed. - jars = depset(jar_filetype.filter(ctx.files.jars)) - neverlink_jars = depset(jar_filetype.filter(ctx.files.neverlink_jars)) - runfiles = ctx.runfiles(collect_data = True) - return struct(files = jars, - compile_time_jars = jars + neverlink_jars, - runtime_jars = jars, - runfiles = runfiles) - - -java_library_attrs = { - "_java": attr.label(default=Label("//tools/jdk:java"), single_file=True), - "_javac": attr.label(default=Label("//tools/jdk:javac"), single_file=True), - "_jar": attr.label(default=Label("//tools/jdk:jar"), single_file=True), - "_jdk": attr.label(default=Label("//tools/jdk:jdk"), allow_files=True), - "data": attr.label_list(allow_files=True, cfg="data"), - "resources": attr.label_list(allow_files=True), - "srcs": attr.label_list(allow_files=java_filetype), - "jars": attr.label_list(allow_files=jar_filetype), - "neverlink_jars": attr.label_list(allow_files=jar_filetype), - "srcjars": attr.label_list(allow_files=srcjar_filetype), - "deps": attr.label_list( - allow_files=False, - providers = ["compile_time_jars", "runtime_jars"]), - } - -java_library = rule( - java_library_impl, - attrs = java_library_attrs, - outputs = { - "class_jar": "lib%{name}.jar", - }, - fragments = ['java', 'cpp'], -) - -# A copy to avoid conflict with native rule. 
-bootstrap_java_library = rule( - java_library_impl, - attrs = java_library_attrs, - outputs = { - "class_jar": "lib%{name}.jar", - }, - fragments = ['java'], -) - -java_binary_attrs_common = java_library_attrs + { - "jvm_flags": attr.string_list(), - "jvm": attr.label(default=Label("//tools/jdk:jdk"), allow_files=True), -} - -java_binary_attrs = java_binary_attrs_common + { - "main_class": attr.string(mandatory=True), -} - -java_binary_outputs = { - "class_jar": "lib%{name}.jar", - "deploy_jar": "%{name}_deploy.jar", - "manifest": "%{name}_MANIFEST.MF" -} - -java_binary = rule(java_binary_impl, - executable = True, - attrs = java_binary_attrs, - outputs = java_binary_outputs, - fragments = ['java', 'cpp'], -) - -# A copy to avoid conflict with native rule -bootstrap_java_binary = rule(java_binary_impl, - executable = True, - attrs = java_binary_attrs, - outputs = java_binary_outputs, - fragments = ['java'], -) - -java_test = rule(java_binary_impl, - executable = True, - attrs = java_binary_attrs_common + { - "main_class": attr.string(default="org.junit.runner.JUnitCore"), - # TODO(bazel-team): it would be better if we could offer a - # test_class attribute, but the "args" attribute is hard - # coded in the bazel infrastructure. - }, - outputs = java_binary_outputs, - test = True, - fragments = ['java', 'cpp'], -) - -java_import = rule( - java_import_impl, - attrs = { - "jars": attr.label_list(allow_files=jar_filetype), - "srcjar": attr.label(allow_files=srcjar_filetype), - "neverlink_jars": attr.label_list(allow_files=jar_filetype, default=[]), - }) diff --git a/starlark/src/syntax/testcases/javadoc.bzl b/starlark/src/syntax/testcases/javadoc.bzl deleted file mode 100644 index 341b9c18..00000000 --- a/starlark/src/syntax/testcases/javadoc.bzl +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright (C) 2016 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Javadoc rule. 
- -def _impl(ctx): - zip_output = ctx.outputs.zip - - transitive_jar_set = set() - source_jars = set() - for l in ctx.attr.libs: - source_jars += l.java.source_jars - transitive_jar_set += l.java.transitive_deps - - transitive_jar_paths = [j.path for j in transitive_jar_set] - dir = ctx.outputs.zip.path + ".dir" - source = ctx.outputs.zip.path + ".source" - external_docs = ["http://docs.oracle.com/javase/8/docs/api"] + ctx.attr.external_docs - cmd = [ - "rm -rf %s" % source, - "mkdir %s" % source, - " && ".join(["unzip -qud %s %s" % (source, j.path) for j in source_jars]), - "rm -rf %s" % dir, - "mkdir %s" % dir, - " ".join([ - ctx.file._javadoc.path, - "-Xdoclint:-missing", - "-protected", - "-encoding UTF-8", - "-charset UTF-8", - "-notimestamp", - "-quiet", - "-windowtitle '%s'" % ctx.attr.title, - " ".join(['-link %s' % url for url in external_docs]), - "-sourcepath %s" % source, - "-subpackages ", - ":".join(ctx.attr.pkgs), - " -classpath ", - ":".join(transitive_jar_paths), - "-d %s" % dir]), - "find %s -exec touch -t 198001010000 '{}' ';'" % dir, - "(cd %s && zip -qr ../%s *)" % (dir, ctx.outputs.zip.basename), - ] - ctx.action( - inputs = list(transitive_jar_set) + list(source_jars) + ctx.files._jdk, - outputs = [zip_output], - command = " && ".join(cmd)) - -java_doc = rule( - attrs = { - "libs": attr.label_list(allow_files = False), - "pkgs": attr.string_list(), - "title": attr.string(), - "external_docs": attr.string_list(), - "_javadoc": attr.label( - default = Label("@local_jdk//:bin/javadoc"), - single_file = True, - allow_files = True, - ), - "_jdk": attr.label( - default = Label("@local_jdk//:jdk-default"), - allow_files = True, - ), - }, - outputs = {"zip": "%{name}.zip"}, - implementation = _impl, -) diff --git a/starlark/src/syntax/testcases/jekyll.bzl b/starlark/src/syntax/testcases/jekyll.bzl deleted file mode 100644 index 623b161b..00000000 --- a/starlark/src/syntax/testcases/jekyll.bzl +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
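[Editor's note: the following is an illustrative usage sketch, not part of the deleted javadoc.bzl.] A minimal example of how the java_doc rule defined above might be invoked from a BUILD file; only the attributes the rule declares (libs, pkgs, title, external_docs) are used, and all target names, package names and URLs are placeholders:

java_doc(
    name = "api-javadoc",
    libs = [":api"],              # java_library targets whose source jars are documented; placeholder label
    pkgs = ["com.example.api"],   # packages passed to javadoc -subpackages
    title = "Example API",
    external_docs = ["https://example.com/extra/apidocs"],  # appended to the default JDK link
)

Per the outputs declaration, this would produce api-javadoc.zip containing the generated HTML.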
- -def _bucket_from_worspace_name(wname): - revlist = [] - for part in wname.split("_"): - revlist.insert(0, part) - return ".".join(revlist) - -def _impl(ctx): - """Quick and non-hermetic rule to build a Jekyll site.""" - source = ctx.actions.declare_directory(ctx.attr.name + "-srcs") - output = ctx.actions.declare_directory(ctx.attr.name + "-build") - - ctx.actions.run_shell(inputs = ctx.files.srcs, - outputs = [source], - command = ("mkdir -p %s\n" % (source.path)) + - "\n".join([ - "tar xf %s -C %s" % (src.path, source.path) for src in ctx.files.srcs]) - ) - ctx.actions.run( - inputs = [source], - outputs = [output], - executable = "jekyll", - use_default_shell_env = True, - arguments = ["build", "-q", "-s", source.path, "-d", output.path] - ) - ctx.actions.run( - inputs = [output], - outputs = [ctx.outputs.out], - executable = "tar", - arguments = ["cf", ctx.outputs.out.path, "-C", output.path, "."] - ) - - # Create a shell script to serve the site locally or push with the --push - # flag. - bucket = ctx.attr.bucket if ctx.attr.bucket else _bucket_from_worspace_name(ctx.workspace_name) - - ctx.actions.expand_template( - template=ctx.file._jekyll_build_tpl, - output=ctx.outputs.executable, - substitutions={ - "%{workspace_name}": ctx.workspace_name, - "%{source_dir}": source.short_path, - "%{prod_dir}": output.short_path, - "%{bucket}": bucket, - }, - is_executable=True) - return [DefaultInfo(runfiles=ctx.runfiles(files=[source, output]))] - -jekyll_build = rule( - implementation = _impl, - executable = True, - attrs = { - "srcs": attr.label_list(allow_empty=False), - "bucket": attr.string(), - "_jekyll_build_tpl": attr.label( - default=":jekyll_build.sh.tpl", - allow_files=True, - single_file=True)}, - outputs = {"out": "%{name}.tar"}) diff --git a/starlark/src/syntax/testcases/jenkins.bzl b/starlark/src/syntax/testcases/jenkins.bzl deleted file mode 100644 index 53d45f75..00000000 --- a/starlark/src/syntax/testcases/jenkins.bzl +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Some definition to setup jenkins and build the corresponding docker images -load(":jenkins_docker_build.bzl", "jenkins_docker_build") -load(":jenkins_node.bzl", "jenkins_node") -load(":jenkins_nodes.bzl", "jenkins_nodes", "jenkins_node_names") -load(":jenkins_job.bzl", "jenkins_job", "bazel_git_job", "bazel_github_job") - diff --git a/starlark/src/syntax/testcases/jenkins_base.bzl b/starlark/src/syntax/testcases/jenkins_base.bzl deleted file mode 100644 index 937d0bdc..00000000 --- a/starlark/src/syntax/testcases/jenkins_base.bzl +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright 2016 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Some definition to create a base image for jenkins in docker - -load("@io_bazel_rules_docker//docker:docker.bzl", "docker_pull") -JENKINS_PLUGINS_URL = "http://mirrors.xmission.com/jenkins/plugins/{name}/{version}/{name}.hpi" - -def _jenkins_image_impl(repository_ctx): - repository_ctx.file("plugins/BUILD", """ -load("@bazel_tools//tools/build_defs/pkg:pkg.bzl", "pkg_tar") -pkg_tar( - name = "plugins", - files = glob(["**/*.jpi"]), - mode = "0644", - strip_prefix = ".", - package_dir = "/usr/share/jenkins/ref/plugins", - visibility = ["//:__pkg__"], -) -""") - for plugin in repository_ctx.attr.plugins: - config = repository_ctx.attr.plugins[plugin] - dest = "plugins/" + plugin + ".jpi" - repository_ctx.download( - JENKINS_PLUGINS_URL.format(name=plugin, version=config[0]), dest, - config[1]) - if len(config) >= 3 and config[2] == "pinned": - repository_ctx.file(dest + ".pinned", "") - repository_ctx.file("BUILD", """ -load("@io_bazel_rules_docker//docker:docker.bzl", "docker_build") - -docker_build( - name = "image", - base = "{base}", - tars = ["//plugins"], - directory = "/", - volumes = [{volumes}], - visibility = ["//visibility:public"], -) -""".format( - base = repository_ctx.attr.base, - volumes = ", ".join(['"%s"' % f for f in repository_ctx.attr.volumes]) -)) - -jenkins_image_ = repository_rule( - implementation = _jenkins_image_impl, - attrs = { - "base": attr.string(mandatory=True), - "plugins": attr.string_list_dict(mandatory=True), - "volumes": attr.string_list(default=[]), - }) - -def jenkins_base(name, plugins, volumes=[], digest=None, version="1.642.4"): - base = "jenkins_" + version.replace(".", "_") - if not native.existing_rule(base): - kwargs = {} - if digest: - kwargs["digest"] = digest - else: - kwargs["tag"] = version - docker_pull( - name = base, - registry = "index.docker.io", - repository = "jenkins/jenkins", - **kwargs - ) - jenkins_image_( - name=name, - plugins=plugins, - base="@%s//image" % base, - volumes=volumes) diff --git a/starlark/src/syntax/testcases/jenkins_docker_build.bzl b/starlark/src/syntax/testcases/jenkins_docker_build.bzl deleted file mode 100644 index 8b186e8c..00000000 --- a/starlark/src/syntax/testcases/jenkins_docker_build.bzl +++ /dev/null @@ -1,110 +0,0 @@ -# Copyright 2015 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Creation of the docker container for the jenkins master. 
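[Editor's note: illustrative sketch, not part of the deleted jenkins_base.bzl.] Since jenkins_base above wraps repository rules (docker_pull and jenkins_image_), it would be called from a WORKSPACE file roughly as follows; the plugin names, versions and sha1 values are placeholders, with each entry following the [version, sha1] or [version, sha1, "pinned"] shape the implementation reads:

jenkins_base(
    name = "jenkins",
    plugins = {
        "git": ["3.0.0", "0000000000000000000000000000000000000000", "pinned"],
        "greenballs": ["1.15", "1111111111111111111111111111111111111111"],
    },
    volumes = ["/opt/secrets"],
    version = "1.642.4",   # used as the docker tag unless a digest is given
)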
- -load("@io_bazel_rules_docker//docker:docker.bzl", "docker_build") -load(":templates.bzl", "merge_files", "strip_suffix") -load(":vars.bzl", "MAIL_SUBSTITUTIONS") - -def _build_jobs_impl(ctx): - output = ctx.outputs.out - folders_to_create = {} - args = [ - "--output=" + output.path, - "--mode=0644", - "--directory=/usr/share/jenkins/ref/jobs", - ] - suffixes = ctx.attr.strip_suffixes - # Group fob by folders - for f in ctx.files.jobs: - if f.owner and "/" in f.owner.name: - segments = f.owner.name.split("/") - for i in range(1, len(segments)): - folders_to_create["/jobs/".join(segments[:i])] = True - p = strip_suffix(f.owner.name.replace("/", "/jobs/"), suffixes) - args.append("--file=%s=%s/config.xml" % (f.path, p)) - else: - p = strip_suffix(f.basename[:-len(f.extension)-1], suffixes) - args.append("--file=%s=%s/config.xml" % (f.path, p)) - - for folder in folders_to_create: - args.append("--file=%s=%s/config.xml" % (ctx.file._folder_xml.path, folder)) - - ctx.action( - executable = ctx.executable._build_tar, - arguments = args, - inputs = ctx.files.jobs + [ctx.file._folder_xml], - outputs = [output], - mnemonic="TarJobs" - ) - -_build_jobs = rule( - attrs = { - "jobs": attr.label_list(allow_files=True), - "strip_suffixes": attr.string_list(default=["-staging", "-test"]), - "_folder_xml": attr.label( - default=Label("//jenkins/build_defs:folder.xml"), - allow_files=True, - single_file=True), - "_build_tar": attr.label( - default=Label("@bazel_tools//tools/build_defs/pkg:build_tar"), - cfg="host", - executable=True, - allow_files=True), - }, - outputs = {"out": "%{name}.tar"}, - implementation = _build_jobs_impl, -) - - -def jenkins_docker_build(name, plugins = None, base = "//jenkins/base", configs = [], - jobs = [], substitutions = {}, visibility = None, tars = []): - """Build the docker image for the Jenkins instance.""" - substitutions = substitutions + MAIL_SUBSTITUTIONS - # Expands config files in a tar ball - merge_files( - name = "%s-configs" % name, - srcs = configs, - directory = "/usr/share/jenkins/ref", - strip_prefixes = [ - "jenkins/config", - "jenkins", - ], - substitutions = substitutions) - tars += ["%s-configs" % name] - - # Create the structures for jobs - _build_jobs(name=name + "-jobs", jobs=jobs) - tars += ["%s-jobs" % name] - - ### FINAL IMAGE ### - docker_build( - name = name, - tars = tars, - # Workaround no way to specify owner in pkg_tar - # TODO(dmarting): use https://cr.bazel.build/10255 when it hits a release. - user = "root", - entrypoint = [ - "/bin/tini", - "--", - "/bin/bash", - "-c", - "[ -d /opt/lib ] && chown -R jenkins /opt/lib; su jenkins -c /usr/local/bin/jenkins.sh", - ], - # End of workaround - base = base, - directory = "/", - visibility = visibility, - ) diff --git a/starlark/src/syntax/testcases/jenkins_job.bzl b/starlark/src/syntax/testcases/jenkins_job.bzl deleted file mode 100644 index dc09f0a9..00000000 --- a/starlark/src/syntax/testcases/jenkins_job.bzl +++ /dev/null @@ -1,221 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -# Jenkins job creation - -load(":templates.bzl", "expand_template") -load(":vars.bzl", "MAIL_SUBSTITUTIONS") - -def _to_groovy_list(lst): - return "[%s]" % (",".join(['"%s"' % e for e in lst])) - -def jenkins_job(name, config, substitutions = {}, deps = [], deps_aliases = {}, - project='bazel', org='bazelbuild', git_url=None, project_url=None, - folder=None, test_platforms=["linux-x86_64"], - create_filegroups=True): - """Create a job configuration on Jenkins. - - Args: - name: the name of the job to create - config: the configuration file for the job - substitutions: additional substitutions to pass to the template generation - deps: list of dependencies (templates included by the config file) - project: the project name on github - org: the project organization on github, default 'bazelbuild' - git_url: the URL to the git project, defaulted to the Github URL - project_url: the project url, defaulted to the Git URL - test_platforms: platforms on which to run that job when inside of a - dockerized test, by default only 'linux-x86_64' - create_filegroups: create filegroups named /all, /staging - and /test that contains the files needed to be included - to include that job respectively for the production service, the - staging service and the docker test version. This is to be set - to false is the calling macros already creates those filegroups. - """ - github_project = "%s/%s" % (org, project) - github_url = "https://github.com/" + github_project - if not git_url: - git_url = github_url - if not project_url: - project_url = git_url - deps = deps + [deps_aliases[k] for k in deps_aliases] - substitutions = substitutions + { - "GITHUB_URL": github_url, - "GIT_URL": git_url, - "GITHUB_PROJECT": github_project, - "PROJECT_URL": project_url, - "production": "true", - } + MAIL_SUBSTITUTIONS - substitutions["SEND_EMAIL"] = "1" - # RESTRICT_CONFIGURATION can be use to restrict configuration of the groovy jobs - if (not "RESTRICT_CONFIGURATION" in substitutions) or ( - not substitutions["RESTRICT_CONFIGURATION"]): - substitutions["RESTRICT_CONFIGURATION"] = "[:]" - expand_template( - name = name, - template = config, - out = "%s.xml" % name, - deps = deps, - deps_aliases = deps_aliases, - substitutions = substitutions, - ) - if create_filegroups: - native.filegroup(name = name + "/all", srcs = [name]) - substitutions["SEND_EMAIL"] = "0" - substitutions["BAZEL_BUILD_RECIPIENT"] = "" - substitutions["production"] = "false" - expand_template( - name = name + "-staging", - template = config, - out = "%s-staging.xml" % name, - deps = deps, - deps_aliases = deps_aliases, - substitutions = substitutions, - ) - if create_filegroups: - native.filegroup(name = name + "/staging", srcs = [name + "-staging"]) - - if test_platforms: - substitutions["RESTRICT_CONFIGURATION"] += " + [node:%s]" % _to_groovy_list(test_platforms) - expand_template( - name = name + "-test", - template = config, - out = "%s-test.xml" % name, - deps = deps, - deps_aliases = deps_aliases, - substitutions = substitutions, - ) - if create_filegroups: - native.filegroup(name = name + "/test", srcs = [name + "-test"]) - -def bazel_git_job(**kwargs): - """Override bazel_github_job to test a project that is not on GitHub.""" - kwargs["github_enabled"] = False - if not "git_url" in kwargs: - if not "project_url" in kwargs: - fail("Neither project_url nor git_url was specified") - kwargs["git_url"] = kwargs - bazel_github_job(**kwargs) - 
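[Editor's note: illustrative sketch, not part of the deleted jenkins_job.bzl.] A possible invocation of the jenkins_job macro defined above; the config template, substitution values and organization are placeholders chosen to match the documented parameters:

jenkins_job(
    name = "my-project",
    config = ":my-project-job.xml.tpl",    # job template to expand; placeholder label
    substitutions = {"BRANCH": "master"},
    deps = [":common-macros.xml.tpl"],     # templates included by the config; placeholder
    project = "my-project",
    org = "example-org",
    test_platforms = ["linux-x86_64"],
)

Following the macro body, this would generate my-project.xml, my-project-staging.xml and my-project-test.xml, plus the my-project/all, my-project/staging and my-project/test filegroups when create_filegroups is left at its default.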
-def bazel_github_job(name, branch="master", project=None, org="bazelbuild", - project_url=None, workspace=".", git_url=None, - config="//jenkins/build_defs:default.json", - test_platforms=["linux-x86_64"], - enable_trigger=True, - poll=None, - gerrit_project=None, - enabled=True, - pr_enabled=True, - github_enabled=True, - run_sequential=False, - sauce_enabled=False, - use_upstream_branch=False): - """Create a generic github job configuration to build against Bazel head.""" - if poll == None: - poll = org != "bazelbuild" - if not project: - project = name - - substitutions = { - "WORKSPACE": workspace, - "PROJECT_NAME": project, - "BRANCH": branch, - "NAME": name, - "disabled": str(not enabled).lower(), - "enable_trigger": str(enable_trigger and github_enabled).lower(), - "poll": str(poll).lower(), - "github": str(github_enabled), - "GERRIT_PROJECT": str(gerrit_project) if gerrit_project else "", - "RUN_SEQUENTIAL": str(run_sequential).lower(), - "SAUCE_ENABLED": str(sauce_enabled).lower(), - "GLOBAL_USE_UPSTREAM_BRANCH": str(use_upstream_branch) - } - - all_files = [name + ".xml"] - test_files = [name + "-test.xml"] - staging_files = [name + "-staging.xml"] - - kwargs = {} - if not github_enabled: - kwargs["git_url"] = git_url - - jenkins_job( - name = name, - config = "//jenkins/build_defs:bazel-job.xml.tpl", - deps_aliases = { - "JSON_CONFIGURATION": config, - }, - substitutions=substitutions, - project=project, - org=org, - project_url=project_url, - test_platforms=test_platforms, - create_filegroups=False, - **kwargs) - - if enabled and config: - jenkins_job( - name = "Global/" + name, - config = "//jenkins/build_defs:bazel-job-Global.xml.tpl", - deps_aliases = { - "JSON_CONFIGURATION": config, - }, - substitutions=substitutions, - git_url=git_url, - project=project, - org=org, - project_url=project_url, - test_platforms=test_platforms, - create_filegroups=False) - all_files.append("Global/%s.xml" % name) - test_files.append("Global/%s-test.xml" % name) - staging_files.append("Global/%s-staging.xml" % name) - - if pr_enabled and config: - jenkins_job( - name = "PR/" + name, - config = "//jenkins/build_defs:bazel-job-PR.xml.tpl", - deps_aliases = { - "JSON_CONFIGURATION": config, - }, - substitutions=substitutions, - project=project, - org=org, - project_url=project_url, - test_platforms=test_platforms, - create_filegroups=False) - all_files.append("PR/%s.xml" % name) - test_files.append("PR/%s-test.xml" % name) - staging_files.append("PR/%s-staging.xml" % name) - - if gerrit_project: - jenkins_job( - name = "CR/" + name, - config = "//jenkins/build_defs:bazel-job-Gerrit.xml.tpl", - deps_aliases = { - "JSON_CONFIGURATION": config, - }, - substitutions=substitutions, - project=project, - org=org, - project_url=project_url, - test_platforms=test_platforms) - all_files.append("CR/%s.xml" % name) - test_files.append("CR/%s-test.xml" % name) - staging_files.append("CR/%s-staging.xml" % name) - - native.filegroup(name = "%s/all" % name, srcs = all_files) - if test_platforms: - native.filegroup(name = "%s/test" % name, srcs = test_files) - native.filegroup(name = "%s/staging" % name, srcs = staging_files) diff --git a/starlark/src/syntax/testcases/jenkins_node.bzl b/starlark/src/syntax/testcases/jenkins_node.bzl deleted file mode 100644 index ebe00564..00000000 --- a/starlark/src/syntax/testcases/jenkins_node.bzl +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Setup jenkins and build the corresponding docker images - -load("@io_bazel_rules_docker//docker:docker.bzl", "docker_build") -load(":templates.bzl", "expand_template") - -JENKINS_SERVER = "http://jenkins:80" - -def jenkins_node(name, remote_fs = "/home/ci", num_executors = 1, mode = "NORMAL", - labels = [], docker_base = None, preference = 1, - visibility = None): - """Create a node configuration on Jenkins, with possible docker image. - - Args: - name: Name of the node on Jenkins. - remote_fs: path to the home of the Jenkins user. - num_executors: number of executors (i.e. concurrent build) this machine can have. - mode: NORMAL for "Utilize this node as much as possible" - EXCLUSIVE for "Only build jobs with label restrictions matching this node" - labels: list of Jenkins labels for this node (the node name is always added). - docker_base: base for the corresponding docker image to create if we should create one - (if docker_base is not specified, then a corresponding machine should be configured - to connect to the Jenkins master). - preference: A preference factor, if a node as a factor of 1 and another a factor of - 4, then the second one will be scheduled 4 time more jobs than the first one. - visibility: rule visibility. - """ - native.genrule( - name = name, - cmd = """cat >$@ <<'EOF' - - - %s - - %s - %s - %s - - - - - - %s - - - -EOF -""" % (name, remote_fs, num_executors, mode, " ".join([name] + labels), preference), - outs = ["nodes/%s/config.xml" % name], - visibility = visibility, - ) - if docker_base: - # Generate docker image startup script - expand_template( - name = name + ".docker-launcher", - out = name + ".docker-launcher.sh", - template = "slave_setup.sh", - substitutions = { - "NODE_NAME": name, - "HOME_FS": remote_fs, - "JENKINS_SERVER": JENKINS_SERVER, - }, - executable = True, - ) - # Generate docker image - docker_build( - name = name + ".docker", - base = docker_base, - volumes = [remote_fs], - files = [":%s.docker-launcher.sh" % name], - data_path = ".", - entrypoint = [ - "/bin/bash", - "/%s.docker-launcher.sh" % name, - ], - visibility = visibility, - ) diff --git a/starlark/src/syntax/testcases/jenkins_nodes.bzl b/starlark/src/syntax/testcases/jenkins_nodes.bzl deleted file mode 100644 index 80a805f2..00000000 --- a/starlark/src/syntax/testcases/jenkins_nodes.bzl +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright 2016 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
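[Editor's note: illustrative sketch, not part of the deleted jenkins_node.bzl.] A hypothetical call to the jenkins_node macro defined above; the node name, labels and docker base target are placeholders:

jenkins_node(
    name = "ubuntu-16.04-x86_64-1",         # node name as it appears in Jenkins; placeholder
    remote_fs = "/home/ci",
    num_executors = 2,
    mode = "EXCLUSIVE",
    labels = ["linux-x86_64", "install-bazel"],
    docker_base = "//jenkins/base:ubuntu",  # hypothetical docker base image target
    preference = 2,
)

Because docker_base is set here, the macro would also emit an ubuntu-16.04-x86_64-1.docker image target wrapping the generated launcher script.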
- -# Macros to ease the creation of machines -load(":jenkins_node.bzl", "jenkins_node") - -def jenkins_node_names(name, count): - """Returns the names for `count` production jenkins node prefixed by `name`.""" - return ["%s-%s" % (name, i) for i in range(1, count+1)] - -def _extend_kwargs(kwargs, extra_args): - result = {} - for k,v in kwargs.items(): - result[k] = v - if extra_args: - for k,v in extra_args.items(): - result[k] = v - return result - -def jenkins_nodes(name, - count, - labels=None, - prod_args=None, - staging_args=None, - install_bazel=True, - **kwargs): - """Create a set of Jenkins nodes on the system. - - It creates `count` production nodes with name prefix `name`. - - Example: - If `name` is `darwin-x86_64` and `count` is two, it will - create two production nodes `darwin-x86_64-1` and - `darwin-x86_64-2` and one staging node `darwin-x86_64-staging`. - - Args: - name: prefix of each node name, it should be the platform - name (e.g., darwin-x86_64, ubuntu-14.04-x86_64, ...). - count: number of production node to create. - labels: Jenkins node labels to apply to this node (in addition to - the "install-bazel" label and the `name` itself). - prod_args: dictionary of aditional arguments for production only - nodes that will be passed to `jenkins_node`. - staging_args: dictionary of aditional arguments for staging only - nodes that will be passed to `jenkins_node`. - install_bazel: if the "install-bazel" label should be added to labels. - **kwargs: other arguments to be passed verbatim to `jenkins_node`. - """ - labels = [ - name] + (["install-bazel"] if install_bazel else []) + (labels if labels else []) - prod_kwargs = _extend_kwargs(kwargs, prod_args) - [jenkins_node( - name = n, - labels = labels, - **prod_kwargs - ) for n in jenkins_node_names(name, count)] - staging_kwargs = _extend_kwargs(kwargs, staging_args) - jenkins_node( - name = "%s-staging" % name, - labels = labels, - **staging_kwargs) diff --git a/starlark/src/syntax/testcases/jetty.bzl b/starlark/src/syntax/testcases/jetty.bzl deleted file mode 100644 index 3c1b2a57..00000000 --- a/starlark/src/syntax/testcases/jetty.bzl +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# !!!! THIS IS A GENERATED FILE TO NOT EDIT IT BY HAND !!!! -# -# To regenerate this file, run ./update_deps.sh from the root of the -# git repository. 
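[Editor's note: illustrative sketch, not part of the deleted jenkins_nodes.bzl.] A possible use of the jenkins_nodes macro defined above; the platform prefix and extra arguments are placeholders, and any keyword not consumed by the macro is forwarded to jenkins_node:

jenkins_nodes(
    name = "linux-x86_64",
    count = 2,                       # creates linux-x86_64-1, linux-x86_64-2 and linux-x86_64-staging
    labels = ["docker"],
    prod_args = {"preference": 2},   # applied only to the production nodes
    staging_args = {"num_executors": 1},
    remote_fs = "/home/ci",          # forwarded verbatim to jenkins_node
)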
- -DIGESTS = { - # "gcr.io/distroless/java/jetty:debug" circa 2017-10-10 04:10 +0000 - "debug": "sha256:93a701b4152061f784ecaed2cddd19214ea84079f607a23360f39293214cd20f", - # "gcr.io/distroless/java/jetty:latest" circa 2017-10-10 04:10 +0000 - "latest": "sha256:9969e4cf3b485181bf6d3de1df06bb518d79ff45e2ba70e4cb87ca48b31db2a0", -} diff --git a/starlark/src/syntax/testcases/jgit.bzl b/starlark/src/syntax/testcases/jgit.bzl deleted file mode 100644 index 38eb7919..00000000 --- a/starlark/src/syntax/testcases/jgit.bzl +++ /dev/null @@ -1,66 +0,0 @@ -load("//tools/bzl:maven_jar.bzl", "GERRIT", "MAVEN_LOCAL", "MAVEN_CENTRAL", "maven_jar") - -_JGIT_VERS = "4.7.0.201704051617-r.37-gc80d8c590" - -_DOC_VERS = "4.7.0.201704051617-r" # Set to _JGIT_VERS unless using a snapshot - -JGIT_DOC_URL = "http://download.eclipse.org/jgit/site/" + _DOC_VERS + "/apidocs" - -_JGIT_REPO = GERRIT # Leave here even if set to MAVEN_CENTRAL. - -# set this to use a local version. -# "/home//projects/jgit" -LOCAL_JGIT_REPO = "" - -def jgit_repos(): - if LOCAL_JGIT_REPO: - native.local_repository( - name = "jgit", - path = LOCAL_JGIT_REPO, - ) - else: - jgit_maven_repos() - -def jgit_maven_repos(): - maven_jar( - name = "jgit_lib", - artifact = "org.eclipse.jgit:org.eclipse.jgit:" + _JGIT_VERS, - repository = _JGIT_REPO, - sha1 = "edb739cd1e7c72dab361a8f6011807ae7fae35e2", - src_sha1 = "ddf922143dd88ec8fbd2c44f48f203340e6b4d54", - unsign = True, - ) - maven_jar( - name = "jgit_servlet", - artifact = "org.eclipse.jgit:org.eclipse.jgit.http.server:" + _JGIT_VERS, - repository = _JGIT_REPO, - sha1 = "e864cb9f7e16d77ff75805708cd82e6f82a73246", - unsign = True, - ) - maven_jar( - name = "jgit_archive", - artifact = "org.eclipse.jgit:org.eclipse.jgit.archive:" + _JGIT_VERS, - repository = _JGIT_REPO, - sha1 = "cc944356eb8ca74446341729d539f5b9faccb698", - ) - maven_jar( - name = "jgit_junit", - artifact = "org.eclipse.jgit:org.eclipse.jgit.junit:" + _JGIT_VERS, - repository = _JGIT_REPO, - sha1 = "eae23cc952d8b9d332287f7a4d4200c17ae78411", - unsign = True, - ) - -def jgit_dep(name): - mapping = { - "@jgit_junit//jar": "@jgit//org.eclipse.jgit.junit:junit", - "@jgit_lib//jar:src": "@jgit//org.eclipse.jgit:libjgit-src.jar", - "@jgit_lib//jar": "@jgit//org.eclipse.jgit:jgit", - "@jgit_servlet//jar":"@jgit//org.eclipse.jgit.http.server:jgit-servlet", - "@jgit_archive//jar": "@jgit//org.eclipse.jgit.archive:jgit-archive", - } - - if LOCAL_JGIT_REPO: - return mapping[name] - else: - return name diff --git a/starlark/src/syntax/testcases/jobs.bzl b/starlark/src/syntax/testcases/jobs.bzl deleted file mode 100644 index 53288a9f..00000000 --- a/starlark/src/syntax/testcases/jobs.bzl +++ /dev/null @@ -1,37 +0,0 @@ -def _is_staging(job): - job_desc = native.existing_rule(job + "-staging") - job_subs = job_desc["substitutions"] - is_bazel = "PROJECT_NAME" in job_subs - is_gerrit = "GERRIT_PROJECT" in job_subs and job_subs["GERRIT_PROJECT"] != "" - # Take job with Gerrit review, or jobs that are not bazel jovbs - is_gerrit_or_not_bazel = is_gerrit or not is_bazel - # Gold jobs are some bazel job that we include for testing - is_gold = job in ["TensorFlow", "Tutorial", "rules_k8s", "rules_python"] - return (is_gold or is_gerrit_or_not_bazel) - - -def _is_testing(job): - # We include all test but the docker ones (they needs access to the docker server). 
- return not "docker" in job and job != "continuous-integration" - - -def job_lists(name = "jobs", visibility = None): - jobs = native.existing_rules() - - native.filegroup( - name = name, - srcs = [j for j in jobs if j.endswith("/all")], - visibility = visibility, - ) - - native.filegroup( - name = "staging-" + name, - srcs = [j for j in jobs if j.endswith("/staging") and _is_staging(j[:-8])], - visibility = visibility, - ) - - native.filegroup( - name = "test-" + name, - srcs = [j for j in jobs if j.endswith("/test") and _is_testing(j[:-5])], - visibility = visibility, - ) diff --git a/starlark/src/syntax/testcases/js.bzl b/starlark/src/syntax/testcases/js.bzl deleted file mode 100644 index 788301c1..00000000 --- a/starlark/src/syntax/testcases/js.bzl +++ /dev/null @@ -1,383 +0,0 @@ -NPMJS = "NPMJS" - -GERRIT = "GERRIT:" - -NPM_VERSIONS = { - "bower": "1.8.0", - "crisper": "2.0.2", - "vulcanize": "1.14.8", -} - -NPM_SHA1S = { - "bower": "55dbebef0ad9155382d9e9d3e497c1372345b44a", - "crisper": "7183c58cea33632fb036c91cefd1b43e390d22a2", - "vulcanize": "679107f251c19ab7539529b1e3fdd40829e6fc63", -} - -def _npm_tarball(name): - return "%s@%s.npm_binary.tgz" % (name, NPM_VERSIONS[name]) - -def _npm_binary_impl(ctx): - """rule to download a NPM archive.""" - name = ctx.name - version= NPM_VERSIONS[name] - sha1 = NPM_VERSIONS[name] - - dir = '%s-%s' % (name, version) - filename = '%s.tgz' % dir - base = '%s@%s.npm_binary.tgz' % (name, version) - dest = ctx.path(base) - repository = ctx.attr.repository - if repository == GERRIT: - url = 'http://gerrit-maven.storage.googleapis.com/npm-packages/%s' % filename - elif repository == NPMJS: - url = 'http://registry.npmjs.org/%s/-/%s' % (name, filename) - else: - fail('repository %s not in {%s,%s}' % (repository, GERRIT, NPMJS)) - - python = ctx.which("python") - script = ctx.path(ctx.attr._download_script) - - sha1 = NPM_SHA1S[name] - args = [python, script, "-o", dest, "-u", url, "-v", sha1] - out = ctx.execute(args) - if out.return_code: - fail("failed %s: %s" % (args, out.stderr)) - ctx.file("BUILD", "package(default_visibility=['//visibility:public'])\nfilegroup(name='tarball', srcs=['%s'])" % base, False) - -npm_binary = repository_rule( - attrs = { - # Label resolves within repo of the .bzl file. - "_download_script": attr.label(default = Label("//tools:download_file.py")), - "repository": attr.string(default = NPMJS), - }, - local = True, - implementation = _npm_binary_impl, -) - -# for use in repo rules. 
-def _run_npm_binary_str(ctx, tarball, args): - python_bin = ctx.which("python") - return " ".join([ - python_bin, - ctx.path(ctx.attr._run_npm), - ctx.path(tarball)] + args) - -def _bower_archive(ctx): - """Download a bower package.""" - download_name = '%s__download_bower.zip' % ctx.name - renamed_name = '%s__renamed.zip' % ctx.name - version_name = '%s__version.json' % ctx.name - - cmd = [ - ctx.which("python"), - ctx.path(ctx.attr._download_bower), - '-b', '%s' % _run_npm_binary_str(ctx, ctx.attr._bower_archive, []), - '-n', ctx.name, - '-p', ctx.attr.package, - '-v', ctx.attr.version, - '-s', ctx.attr.sha1, - '-o', download_name, - ] - - out = ctx.execute(cmd) - if out.return_code: - fail("failed %s: %s" % (" ".join(cmd), out.stderr)) - - _bash(ctx, " && " .join([ - "TMP=$(mktemp -d || mktemp -d -t bazel-tmp)", - "cd $TMP", - "mkdir bower_components", - "cd bower_components", - "unzip %s" % ctx.path(download_name), - "cd ..", - "zip -r %s bower_components" % renamed_name,])) - - dep_version = ctx.attr.semver if ctx.attr.semver else ctx.attr.version - ctx.file(version_name, - '"%s":"%s#%s"' % (ctx.name, ctx.attr.package, dep_version)) - ctx.file( - "BUILD", - "\n".join([ - "package(default_visibility=['//visibility:public'])", - "filegroup(name = 'zipfile', srcs = ['%s'], )" % download_name, - "filegroup(name = 'version_json', srcs = ['%s'], visibility=['//visibility:public'])" % version_name, - ]), False) - -def _bash(ctx, cmd): - cmd_list = ["bash", "-c", cmd] - out = ctx.execute(cmd_list) - if out.return_code: - fail("failed %s: %s" % (" ".join(cmd_list), out.stderr)) - -bower_archive = repository_rule( - _bower_archive, - attrs = { - "_bower_archive": attr.label(default = Label("@bower//:%s" % _npm_tarball("bower"))), - "_run_npm": attr.label(default = Label("//tools/js:run_npm_binary.py")), - "_download_bower": attr.label(default = Label("//tools/js:download_bower.py")), - "sha1": attr.string(mandatory = True), - "version": attr.string(mandatory = True), - "package": attr.string(mandatory = True), - "semver": attr.string(), - }, -) - -def _bower_component_impl(ctx): - transitive_zipfiles = set([ctx.file.zipfile]) - for d in ctx.attr.deps: - transitive_zipfiles += d.transitive_zipfiles - - transitive_licenses = set() - if ctx.file.license: - transitive_licenses += set([ctx.file.license]) - - for d in ctx.attr.deps: - transitive_licenses += d.transitive_licenses - - transitive_versions = set(ctx.files.version_json) - for d in ctx.attr.deps: - transitive_versions += d.transitive_versions - - return struct( - transitive_zipfiles=transitive_zipfiles, - transitive_versions=transitive_versions, - transitive_licenses=transitive_licenses, - ) - -_common_attrs = { - "deps": attr.label_list(providers = [ - "transitive_zipfiles", - "transitive_versions", - "transitive_licenses", - ]), -} - -def _js_component(ctx): - dir = ctx.outputs.zip.path + ".dir" - name = ctx.outputs.zip.basename - if name.endswith(".zip"): - name = name[:-4] - dest = "%s/%s" % (dir, name) - cmd = " && ".join([ - "mkdir -p %s" % dest, - "cp %s %s/" % (' '.join([s.path for s in ctx.files.srcs]), dest), - "cd %s" % dir, - "find . 
-exec touch -t 198001010000 '{}' ';'", - "zip -qr ../%s *" % ctx.outputs.zip.basename - ]) - - ctx.action( - inputs = ctx.files.srcs, - outputs = [ctx.outputs.zip], - command = cmd, - mnemonic = "GenBowerZip") - - licenses = set() - if ctx.file.license: - licenses += set([ctx.file.license]) - - return struct( - transitive_zipfiles=list([ctx.outputs.zip]), - transitive_versions=set([]), - transitive_licenses=licenses) - -js_component = rule( - _js_component, - attrs = _common_attrs + { - "srcs": attr.label_list(allow_files = [".js"]), - "license": attr.label(allow_single_file = True), - }, - outputs = { - "zip": "%{name}.zip", - }, -) - -_bower_component = rule( - _bower_component_impl, - attrs = _common_attrs + { - "zipfile": attr.label(allow_single_file = [".zip"]), - "license": attr.label(allow_single_file = True), - "version_json": attr.label(allow_files = [".json"]), - - # If set, define by hand, and don't regenerate this entry in bower2bazel. - "seed": attr.bool(default = False), - }, -) - -# TODO(hanwen): make license mandatory. -def bower_component(name, license=None, **kwargs): - prefix = "//lib:LICENSE-" - if license and not license.startswith(prefix): - license = prefix + license - _bower_component( - name=name, - license=license, - zipfile="@%s//:zipfile"% name, - version_json="@%s//:version_json" % name, - **kwargs) - -def _bower_component_bundle_impl(ctx): - """A bunch of bower components zipped up.""" - zips = set([]) - for d in ctx.attr.deps: - zips += d.transitive_zipfiles - - versions = set([]) - for d in ctx.attr.deps: - versions += d.transitive_versions - - licenses = set([]) - for d in ctx.attr.deps: - licenses += d.transitive_versions - - out_zip = ctx.outputs.zip - out_versions = ctx.outputs.version_json - - ctx.action( - inputs=list(zips), - outputs=[out_zip], - command=" && ".join([ - "p=$PWD", - "rm -rf %s.dir" % out_zip.path, - "mkdir -p %s.dir/bower_components" % out_zip.path, - "cd %s.dir/bower_components" % out_zip.path, - "for z in %s; do unzip -q $p/$z ; done" % " ".join(sorted([z.path for z in zips])), - "cd ..", - "find . -exec touch -t 198001010000 '{}' ';'", - "zip -qr $p/%s bower_components/*" % out_zip.path, - ]), - mnemonic="BowerCombine") - - ctx.action( - inputs=list(versions), - outputs=[out_versions], - mnemonic="BowerVersions", - command="(echo '{' ; for j in %s ; do cat $j; echo ',' ; done ; echo \\\"\\\":\\\"\\\"; echo '}') > %s" % (" ".join([v.path for v in versions]), out_versions.path)) - - return struct( - transitive_zipfiles=zips, - transitive_versions=versions, - transitive_licenses=licenses) - -bower_component_bundle = rule( - _bower_component_bundle_impl, - attrs = _common_attrs, - outputs = { - "zip": "%{name}.zip", - "version_json": "%{name}-versions.json", - }, -) -"""Groups a set of bower components together in a zip file. - -Outputs: - NAME-versions.json: - a JSON file containing a PKG-NAME => PKG-NAME#VERSION mapping for the - transitive dependencies. - NAME.zip: - a zip file containing the transitive dependencies for this bundle. -""" - -def _vulcanize_impl(ctx): - # intermediate artifact. 
- vulcanized = ctx.new_file( - ctx.configuration.genfiles_dir, ctx.outputs.html, ".vulcanized.html") - destdir = ctx.outputs.html.path + ".dir" - zips = [z for d in ctx.attr.deps for z in d.transitive_zipfiles ] - - hermetic_npm_binary = " ".join([ - 'python', - "$p/" + ctx.file._run_npm.path, - "$p/" + ctx.file._vulcanize_archive.path, - '--inline-scripts', - '--inline-css', - '--strip-comments', - '--out-html', "$p/" + vulcanized.path, - ctx.file.app.path - ]) - - pkg_dir = ctx.attr.pkg.lstrip("/") - cmd = " && ".join([ - # unpack dependencies. - "export PATH", - "p=$PWD", - "rm -rf %s" % destdir, - "mkdir -p %s/%s/bower_components" % (destdir, pkg_dir), - "for z in %s; do unzip -qd %s/%s/bower_components/ $z; done" % ( - ' '.join([z.path for z in zips]), destdir, pkg_dir), - "tar -cf - %s | tar -C %s -xf -" % (" ".join([s.path for s in ctx.files.srcs]), destdir), - "cd %s" % destdir, - hermetic_npm_binary, - ]) - - # Node/NPM is not (yet) hermeticized, so we have to get the binary - # from the environment, and it may be under $HOME, so we can't run - # in the sandbox. - node_tweaks = dict( - use_default_shell_env = True, - execution_requirements = {"local": "1"}, - ) - ctx.action( - mnemonic = "Vulcanize", - inputs = [ctx.file._run_npm, ctx.file.app, - ctx.file._vulcanize_archive - ] + list(zips) + ctx.files.srcs, - outputs = [vulcanized], - command = cmd, - **node_tweaks) - - hermetic_npm_command = "export PATH && " + " ".join([ - 'python', - ctx.file._run_npm.path, - ctx.file._crisper_archive.path, - "--always-write-script", - "--source", vulcanized.path, - "--html", ctx.outputs.html.path, - "--js", ctx.outputs.js.path]) - - ctx.action( - mnemonic = "Crisper", - inputs = [ctx.file._run_npm, ctx.file.app, - ctx.file._crisper_archive, vulcanized], - outputs = [ctx.outputs.js, ctx.outputs.html], - command = hermetic_npm_command, - **node_tweaks) - -_vulcanize_rule = rule( - _vulcanize_impl, - attrs = { - "deps": attr.label_list(providers = ["transitive_zipfiles"]), - "app": attr.label( - mandatory = True, - allow_single_file = True, - ), - "srcs": attr.label_list(allow_files = [ - ".js", - ".html", - ".txt", - ".css", - ".ico", - ]), - "pkg": attr.string(mandatory = True), - "_run_npm": attr.label( - default = Label("//tools/js:run_npm_binary.py"), - allow_single_file = True, - ), - "_vulcanize_archive": attr.label( - default = Label("@vulcanize//:%s" % _npm_tarball("vulcanize")), - allow_single_file = True, - ), - "_crisper_archive": attr.label( - default = Label("@crisper//:%s" % _npm_tarball("crisper")), - allow_single_file = True, - ), - }, - outputs = { - "html": "%{name}.html", - "js": "%{name}.js", - }, -) - -def vulcanize(*args, **kwargs): - """Vulcanize runs vulcanize and crisper on a set of sources.""" - _vulcanize_rule(*args, pkg=PACKAGE_NAME, **kwargs) diff --git a/starlark/src/syntax/testcases/junit.bzl b/starlark/src/syntax/testcases/junit.bzl deleted file mode 100644 index 19974a76..00000000 --- a/starlark/src/syntax/testcases/junit.bzl +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright (C) 2016 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -# Skylark rule to generate a Junit4 TestSuite -# Assumes srcs are all .java Test files -# Assumes junit4 is already added to deps by the user. - -# See https://github.com/bazelbuild/bazel/issues/1017 for background. - -_OUTPUT = """import org.junit.runners.Suite; -import org.junit.runner.RunWith; - -@RunWith(Suite.class) -@Suite.SuiteClasses({%s}) -public class %s {} -""" - -_PREFIXES = ("org", "com", "edu") - -def _SafeIndex(l, val): - for i, v in enumerate(l): - if val == v: - return i - return -1 - -def _AsClassName(fname): - fname = [x.path for x in fname.files][0] - toks = fname[:-5].split("/") - findex = -1 - for s in _PREFIXES: - findex = _SafeIndex(toks, s) - if findex != -1: - break - if findex == -1: - fail("%s does not contain any of %s", - fname, _PREFIXES) - return ".".join(toks[findex:]) + ".class" - -def _impl(ctx): - classes = ",".join( - [_AsClassName(x) for x in ctx.attr.srcs]) - ctx.file_action(output=ctx.outputs.out, content=_OUTPUT % ( - classes, ctx.attr.outname)) - -_GenSuite = rule( - attrs = { - "srcs": attr.label_list(allow_files = True), - "outname": attr.string(), - }, - outputs = {"out": "%{name}.java"}, - implementation = _impl, -) - -def junit_tests(name, srcs, **kwargs): - s_name = name + "TestSuite" - _GenSuite(name = s_name, - srcs = srcs, - outname = s_name) - native.java_test(name = name, - test_class = s_name, - srcs = srcs + [":"+s_name], - **kwargs) diff --git a/starlark/src/syntax/testcases/label.bzl b/starlark/src/syntax/testcases/label.bzl deleted file mode 100644 index 8297f456..00000000 --- a/starlark/src/syntax/testcases/label.bzl +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Rules for dealing with labels and their string form.""" - -def string_to_label(label_list, string_list): - """Form a mapping from label strings to the resolved label.""" - label_string_dict = dict() - for i in range(len(label_list)): - string = string_list[i] - label = label_list[i] - label_string_dict[string] = label - return label_string_dict diff --git a/starlark/src/syntax/testcases/layers.bzl b/starlark/src/syntax/testcases/layers.bzl deleted file mode 100644 index e011f025..00000000 --- a/starlark/src/syntax/testcases/layers.bzl +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Tools for dealing with Docker Image layers.""" - -load(":list.bzl", "reverse") -load(":path.bzl", _get_runfile_path="runfile") - -def get_from_target(unused_ctx, target): - if hasattr(target, "docker_layers"): - return target.docker_layers - else: - # TODO(mattmoor): Use containerregistry.client's FromTarball - # to create an entry from a tarball base image. - return [] - - -def assemble(ctx, layers, tags_to_names, output): - """Create the full image from the list of layers.""" - layers = [l["layer"] for l in layers] - args = [ - "--output=" + output.path, - ] + [ - "--tags=" + tag + "=@" + tags_to_names[tag].path - for tag in tags_to_names - ] + ["--layer=" + l.path for l in layers] - inputs = layers + tags_to_names.values() - ctx.action( - executable = ctx.executable.join_layers, - arguments = args, - inputs = inputs, - outputs = [output], - mnemonic = "JoinLayers" - ) - - -def incremental_load(ctx, layers, images, output): - """Generate the incremental load statement.""" - ctx.template_action( - template = ctx.file.incremental_load_template, - substitutions = { - "%{load_statements}": "\n".join([ - "incr_load '%s' '%s' '%s'" % (_get_runfile_path(ctx, l["name"]), - _get_runfile_path(ctx, l["id"]), - _get_runfile_path(ctx, l["layer"])) - # The last layer is the first in the list of layers. - # We reverse to load the layer from the parent to the child. - for l in reverse(layers)]), - "%{tag_statements}": "\n".join([ - "tag_layer '%s' '%s' '%s'" % ( - img, - _get_runfile_path(ctx, images[img]["name"]), - _get_runfile_path(ctx, images[img]["id"])) - for img in images - ]) - }, - output = output, - executable = True) - - -tools = { - "incremental_load_template": attr.label( - default=Label("//tools/build_defs/docker:incremental_load_template"), - single_file=True, - allow_files=True), - "join_layers": attr.label( - default=Label("//tools/build_defs/docker:join_layers"), - cfg="host", - executable=True, - allow_files=True) -} diff --git a/starlark/src/syntax/testcases/lib_cc_configure.bzl b/starlark/src/syntax/testcases/lib_cc_configure.bzl deleted file mode 100644 index 2287955a..00000000 --- a/starlark/src/syntax/testcases/lib_cc_configure.bzl +++ /dev/null @@ -1,123 +0,0 @@ -# pylint: disable=g-bad-file-header -# Copyright 2016 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Base library for configuring the C++ toolchain.""" - - -def escape_string(arg): - """Escape percent sign (%) in the string so it can appear in the Crosstool.""" - if arg != None: - return str(arg).replace("%", "%%") - else: - return None - - -def auto_configure_fail(msg): - """Output failure message when auto configuration fails.""" - red = "\033[0;31m" - no_color = "\033[0m" - fail("\n%sAuto-Configuration Error:%s %s\n" % (red, no_color, msg)) - - -def auto_configure_warning(msg): - """Output warning message during auto configuration.""" - yellow = "\033[1;33m" - no_color = "\033[0m" - print("\n%sAuto-Configuration Warning:%s %s\n" % (yellow, no_color, msg)) - - -def get_env_var(repository_ctx, name, default = None, enable_warning = True): - """Find an environment variable in system path. Doesn't %-escape the value!""" - if name in repository_ctx.os.environ: - return repository_ctx.os.environ[name] - if default != None: - if enable_warning: - auto_configure_warning("'%s' environment variable is not set, using '%s' as default" % (name, default)) - return default - auto_configure_fail("'%s' environment variable is not set" % name) - - -def which(repository_ctx, cmd, default = None): - """A wrapper around repository_ctx.which() to provide a fallback value. Doesn't %-escape the value!""" - result = repository_ctx.which(cmd) - return default if result == None else str(result) - - -def which_cmd(repository_ctx, cmd, default = None): - """Find cmd in PATH using repository_ctx.which() and fail if cannot find it. Doesn't %-escape the cmd!""" - result = repository_ctx.which(cmd) - if result != None: - return str(result) - path = get_env_var(repository_ctx, "PATH") - if default != None: - auto_configure_warning("Cannot find %s in PATH, using '%s' as default.\nPATH=%s" % (cmd, default, path)) - return default - auto_configure_fail("Cannot find %s in PATH, please make sure %s is installed and add its directory in PATH.\nPATH=%s" % (cmd, cmd, path)) - return str(result) - - -def execute(repository_ctx, command, environment = None, - expect_failure = False): - """Execute a command, return stdout if succeed and throw an error if it fails. Doesn't %-escape the result!""" - if environment: - result = repository_ctx.execute(command, environment = environment) - else: - result = repository_ctx.execute(command) - if expect_failure != (result.return_code != 0): - if expect_failure: - auto_configure_fail( - "expected failure, command %s, stderr: (%s)" % ( - command, result.stderr)) - else: - auto_configure_fail( - "non-zero exit code: %d, command %s, stderr: (%s)" % ( - result.return_code, command, result.stderr)) - stripped_stdout = result.stdout.strip() - if not stripped_stdout: - auto_configure_fail( - "empty output from command %s, stderr: (%s)" % (command, result.stderr)) - return stripped_stdout - - -def get_cpu_value(repository_ctx): - """Compute the cpu_value based on the OS name. 
Doesn't %-escape the result!""" - os_name = repository_ctx.os.name.lower() - if os_name.startswith("mac os"): - return "darwin" - if os_name.find("freebsd") != -1: - return "freebsd" - if os_name.find("windows") != -1: - return "x64_windows" - # Use uname to figure out whether we are on x86_32 or x86_64 - result = repository_ctx.execute(["uname", "-m"]) - if result.stdout.strip() in ["power", "ppc64le", "ppc", "ppc64"]: - return "ppc" - if result.stdout.strip() in ["arm", "armv7l", "aarch64"]: - return "arm" - return "k8" if result.stdout.strip() in ["amd64", "x86_64", "x64"] else "piii" - - -def tpl(repository_ctx, template, substitutions={}, out=None): - if not out: - out = template - repository_ctx.template( - out, - Label("@bazel_tools//tools/cpp:%s.tpl" % template), - substitutions) - - -def is_cc_configure_debug(repository_ctx): - """Returns True if CC_CONFIGURE_DEBUG is set to 1.""" - env = repository_ctx.os.environ - return "CC_CONFIGURE_DEBUG" in env and env["CC_CONFIGURE_DEBUG"] == "1" diff --git a/starlark/src/syntax/testcases/library.bzl b/starlark/src/syntax/testcases/library.bzl deleted file mode 100644 index 842d081c..00000000 --- a/starlark/src/syntax/testcases/library.bzl +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -load("@io_bazel_rules_go//go/private:common.bzl", - "go_importpath", -) -load("@io_bazel_rules_go//go/private:mode.bzl", - "get_mode", -) -load("@io_bazel_rules_go//go/private:providers.bzl", - "GoLibrary", - "GoSourceList", - "sources", -) -load("@io_bazel_rules_go//go/private:rules/prefix.bzl", - "go_prefix_default", -) -load("@io_bazel_rules_go//go/private:rules/aspect.bzl", - "collect_src", -) - -def _go_library_impl(ctx): - """Implements the go_library() rule.""" - go_toolchain = ctx.toolchains["@io_bazel_rules_go//go:toolchain"] - mode = get_mode(ctx, ctx.attr._go_toolchain_flags) - - gosource = collect_src(ctx) - golib, goarchive = go_toolchain.actions.library(ctx, - go_toolchain = go_toolchain, - mode = mode, - source = gosource, - importpath = go_importpath(ctx), - importable = True, - ) - - return [ - golib, gosource, goarchive, - DefaultInfo( - files = depset([goarchive.data.file]), - ), - OutputGroupInfo( - cgo_exports = goarchive.cgo_exports, - ), - ] - -go_library = rule( - _go_library_impl, - attrs = { - "data": attr.label_list(allow_files = True, cfg = "data"), - "srcs": attr.label_list(allow_files = True), - "deps": attr.label_list(providers = [GoLibrary]), - "importpath": attr.string(), - "embed": attr.label_list(providers = [GoSourceList]), - "gc_goopts": attr.string_list(), - "_go_prefix": attr.label(default = go_prefix_default), - "_go_toolchain_flags": attr.label(default=Label("@io_bazel_rules_go//go/private:go_toolchain_flags")), - }, - toolchains = ["@io_bazel_rules_go//go:toolchain"], -) -"""See go/core.rst#go_library for full documentation.""" diff --git a/starlark/src/syntax/testcases/license.bzl b/starlark/src/syntax/testcases/license.bzl deleted file mode 100644 index 38dfbe5e..00000000 --- a/starlark/src/syntax/testcases/license.bzl +++ /dev/null @@ -1,57 +0,0 @@ -def normalize_target_name(target): - return target.replace("//", "").replace("/", "__").replace(":", "___") - -def license_map(name, targets = [], opts = [], **kwargs): - """Generate XML for all targets that depend directly on a LICENSE file""" - xmls = [] - tools = [ "//tools/bzl:license-map.py", "//lib:all-licenses" ] - for target in targets: - subname = name + "_" + normalize_target_name(target) + ".xml" - xmls.append("$(location :%s)" % subname) - tools.append(subname) - native.genquery( - name = subname, - scope = [ target ], - - # Find everything that depends on a license file, but remove - # the license files themselves from this list. - expression = 'rdeps(%s, filter("//lib:LICENSE.*", deps(%s)),1) - filter("//lib:LICENSE.*", deps(%s))' % (target, target, target), - - # We are interested in the edges of the graph ({java_library, - # license-file} tuples). 'query' provides this in the XML output. - opts = [ "--output=xml", ], - ) - - # post process the XML into our favorite format. - native.genrule( - name = "gen_license_txt_" + name, - cmd = "python $(location //tools/bzl:license-map.py) %s %s > $@" % (" ".join(opts), " ".join(xmls)), - outs = [ name + ".txt" ], - tools = tools, - **kwargs - ) - -def license_test(name, target): - """Make sure a target doesn't depend on DO_NOT_DISTRIBUTE license""" - txt = name + "-forbidden.txt" - - # fully qualify target name. 
- if target[0] not in ":/": - target = ":" + target - if target[0] != "/": - target = "//" + PACKAGE_NAME + target - - forbidden = "//lib:LICENSE-DO_NOT_DISTRIBUTE" - native.genquery( - name = txt, - scope = [ target, forbidden ], - # Find everything that depends on a license file, but remove - # the license files themselves from this list. - expression = 'rdeps(%s, "%s", 1) - rdeps(%s, "%s", 0)' % (target, forbidden, target, forbidden), - ) - native.sh_test( - name = name, - srcs = [ "//tools/bzl:test_license.sh" ], - args = [ "$(location :%s)" % txt ], - data = [ txt ], - ) diff --git a/starlark/src/syntax/testcases/line_length.bzl b/starlark/src/syntax/testcases/line_length.bzl deleted file mode 100644 index 2fabf120..00000000 --- a/starlark/src/syntax/testcases/line_length.bzl +++ /dev/null @@ -1,35 +0,0 @@ -"""Test rule that fails if a source file has too long lines.""" - -def _check_file(f, columns): - """Return shell commands for testing file 'f'.""" - # We write information to stdout. It will show up in logs, so that the user - # knows what happened if the test fails. - return """ -echo Testing that {file} has at most {columns} columns... -grep -E '^.{{{columns}}}' {path} && err=1 -echo -""".format(columns=columns, path=f.path, file=f.short_path) - -def _impl(ctx): - script = "\n".join( - ["err=0"] + - [_check_file(f, ctx.attr.columns) for f in ctx.files.srcs] + - ["exit $err"]) - - # Write the file, it is executed by 'bazel test'. - ctx.actions.write( - output=ctx.outputs.executable, - content=script) - - # To ensure the files needed by the script are available, we put them in - # the runfiles. - runfiles = ctx.runfiles(files=ctx.files.srcs) - return [DefaultInfo(runfiles=runfiles)] - -line_length_test = rule( - implementation=_impl, - attrs={ - "columns": attr.int(default = 100), - "srcs": attr.label_list(allow_files=True) - }, - test=True) diff --git a/starlark/src/syntax/testcases/lines_sorted_test.bzl b/starlark/src/syntax/testcases/lines_sorted_test.bzl deleted file mode 100644 index c7f7f2b5..00000000 --- a/starlark/src/syntax/testcases/lines_sorted_test.bzl +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright 2016 The Bazel Go Rules Authors. All rights reserved. -# Copyright 2016 The Closure Rules Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -load("@io_bazel_rules_go//go/private:tools/files_equal_test.bzl", "files_equal_test") - -def lines_sorted_test(name, file, cmd="cat $< >$@", visibility=None, **kwargs): - """Tests that lines within a file are sorted.""" - - native.genrule( - name = name + "_lines", - testonly = True, - srcs = [file], - outs = [name + "_lines.txt"], - cmd = cmd, - visibility = visibility, - ) - - native.genrule( - name = name + "_lines_sorted", - testonly = True, - srcs = [name + "_lines.txt"], - outs = [name + "_lines_sorted.txt"], - cmd = "sort $< >$@", - visibility = visibility, - ) - - files_equal_test( - name = name, - actual = name + "_lines.txt", - golden = name + "_lines_sorted.txt", - visibility = visibility, - **kwargs - ) diff --git a/starlark/src/syntax/testcases/link.bzl b/starlark/src/syntax/testcases/link.bzl deleted file mode 100644 index 71f73acc..00000000 --- a/starlark/src/syntax/testcases/link.bzl +++ /dev/null @@ -1,171 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load("@io_bazel_rules_go//go/private:common.bzl", - "sets", - "to_set", -) -load("@io_bazel_rules_go//go/private:mode.bzl", - "LINKMODE_NORMAL", -) -load("@io_bazel_rules_go//go/private:actions/action.bzl", - "add_go_env", - "bootstrap_action", -) - -def emit_link(ctx, go_toolchain, - archive = None, - mode = None, - executable = None, - gc_linkopts = [], - x_defs = {}): - """See go/toolchains.rst#link for full documentation.""" - - if archive == None: fail("archive is a required parameter") - if executable == None: fail("executable is a required parameter") - if mode == None: fail("mode is a required parameter") - - stdlib = go_toolchain.stdlib.get(ctx, go_toolchain, mode) - - config_strip = len(ctx.configuration.bin_dir.path) + 1 - pkg_depth = executable.dirname[config_strip:].count('/') + 1 - - ld = None - extldflags = [] - if stdlib.cgo_tools: - ld = stdlib.cgo_tools.compiler_executable - extldflags.extend(stdlib.cgo_tools.options) - extldflags.extend(["-Wl,-rpath,$ORIGIN/" + ("../" * pkg_depth)]) - - gc_linkopts, extldflags = _extract_extldflags(gc_linkopts, extldflags) - - # Add in any mode specific behaviours - if mode.race: - gc_linkopts.append("-race") - if mode.msan: - gc_linkopts.append("-msan") - if mode.static: - gc_linkopts.extend(["-linkmode", "external"]) - extldflags.append("-static") - if mode.link != LINKMODE_NORMAL: - fail("Link mode {} is not yet supported".format(mode.link)) - - link_opts = ["-L", "."] - - for p in archive.searchpaths: #TODO delay this depset expansion: - link_opts.extend(["-L", p]) - - for d in archive.cgo_deps: - if d.basename.endswith('.so'): - short_dir = d.dirname[len(d.root.path):] - extldflags.extend(["-Wl,-rpath,$ORIGIN/" + ("../" * pkg_depth) + short_dir]) - - link_opts.extend(["-o", executable.path]) - link_opts.extend(gc_linkopts) - - # Process x_defs, either adding them directly to linker options, or - # saving them to process through stamping support. 
- stamp_x_defs = {} - for k, v in x_defs.items(): - if v.startswith("{") and v.endswith("}"): - stamp_x_defs[k] = v[1:-1] - else: - link_opts.extend(["-X", "%s=%s" % (k, v)]) - - link_opts.extend(go_toolchain.flags.link) - if mode.strip: - link_opts.extend(["-w"]) - - if ld: - link_opts.extend([ - "-extld", ld, - "-extldflags", " ".join(extldflags), - ]) - link_opts.append(archive.data.file.path) - link_args = ctx.actions.args() - add_go_env(link_args, stdlib, mode) - # Stamping support - stamp_inputs = [] - if stamp_x_defs or ctx.attr.linkstamp: - stamp_inputs = [ctx.info_file, ctx.version_file] - link_args.add(stamp_inputs, before_each="-stamp") - for k,v in stamp_x_defs.items(): - link_args.add(["-X", "%s=%s" % (k, v)]) - # linkstamp option support: read workspace status files, - # converting "KEY value" lines to "-X $linkstamp.KEY=value" arguments - # to the go linker. - if ctx.attr.linkstamp: - link_args.add(["-linkstamp", ctx.attr.linkstamp]) - - link_args.add("--") - link_args.add(link_opts) - - ctx.actions.run( - inputs = sets.union(archive.libs, archive.cgo_deps, - go_toolchain.data.crosstool, stamp_inputs, stdlib.files), - outputs = [executable], - mnemonic = "GoLink", - executable = go_toolchain.tools.link, - arguments = [link_args], - ) - -def bootstrap_link(ctx, go_toolchain, - archive = None, - mode = None, - executable = None, - gc_linkopts = [], - x_defs = {}): - """See go/toolchains.rst#link for full documentation.""" - - if archive == None: fail("archive is a required parameter") - if executable == None: fail("executable is a required parameter") - if mode == None: fail("mode is a required parameter") - - if x_defs: fail("link does not accept x_defs in bootstrap mode") - - inputs = depset([archive.data.file]) - args = ["tool", "link", "-o", executable.path] - args.extend(gc_linkopts) - args.append(archive.data.file.path) - bootstrap_action(ctx, go_toolchain, mode, - inputs = inputs, - outputs = [executable], - mnemonic = "GoCompile", - arguments = args, - ) - -def _extract_extldflags(gc_linkopts, extldflags): - """Extracts -extldflags from gc_linkopts and combines them into a single list. - - Args: - gc_linkopts: a list of flags passed in through the gc_linkopts attributes. - ctx.expand_make_variables should have already been applied. - extldflags: a list of flags to be passed to the external linker. - - Return: - A tuple containing the filtered gc_linkopts with external flags removed, - and a combined list of external flags. - """ - filtered_gc_linkopts = [] - is_extldflags = False - for opt in gc_linkopts: - if is_extldflags: - is_extldflags = False - extldflags.append(opt) - elif opt == "-extldflags": - is_extldflags = True - else: - filtered_gc_linkopts.append(opt) - return filtered_gc_linkopts, extldflags - diff --git a/starlark/src/syntax/testcases/list.bzl b/starlark/src/syntax/testcases/list.bzl deleted file mode 100644 index 488d947a..00000000 --- a/starlark/src/syntax/testcases/list.bzl +++ /dev/null @@ -1,103 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -GOOS = { - "android": None, - "darwin": "@bazel_tools//platforms:osx", - "dragonfly": None, - "freebsd": "@bazel_tools//platforms:freebsd", - "linux": "@bazel_tools//platforms:linux", - "nacl": None, - "netbsd": None, - "openbsd": None, - "plan9": None, - "solaris": None, - "windows": "@bazel_tools//platforms:windows", -} - -GOARCH = { - "386": "@bazel_tools//platforms:x86_32", - "amd64": "@bazel_tools//platforms:x86_64", - "amd64p32": None, - "arm": "@bazel_tools//platforms:arm", - "arm64": None, - "mips": None, - "mips64": None, - "mips64le": None, - "mipsle": None, - "ppc64": "@bazel_tools//platforms:ppc", - "ppc64le": None, - "s390x": "@bazel_tools//platforms:s390x", -} - -GOOS_GOARCH = ( - ("android", "386"), - ("android", "amd64"), - ("android", "arm"), - ("android", "arm64"), - ("darwin", "386"), - ("darwin", "amd64"), - ("darwin", "arm"), - ("darwin", "arm64"), - ("dragonfly", "amd64"), - ("freebsd", "386"), - ("freebsd", "amd64"), - ("freebsd", "arm"), - ("linux", "386"), - ("linux", "amd64"), - ("linux", "arm"), - ("linux", "arm64"), - ("linux", "mips"), - ("linux", "mips64"), - ("linux", "mips64le"), - ("linux", "mipsle"), - ("linux", "ppc64"), - ("linux", "ppc64le"), - ("linux", "s390x"), - ("nacl", "386"), - ("nacl", "amd64p32"), - ("nacl", "arm"), - ("netbsd", "386"), - ("netbsd", "amd64"), - ("netbsd", "arm"), - ("openbsd", "386"), - ("openbsd", "amd64"), - ("openbsd", "arm"), - ("plan9", "386"), - ("plan9", "amd64"), - ("plan9", "arm"), - ("solaris", "amd64"), - ("windows", "386"), - ("windows", "amd64"), -) - -def declare_config_settings(): - for goos in GOOS: - native.config_setting( - name = goos, - constraint_values = ["//go/toolchain:" + goos], - ) - for goarch in GOARCH: - native.config_setting( - name = goarch, - constraint_values = ["//go/toolchain:" + goarch], - ) - for goos, goarch in GOOS_GOARCH: - native.config_setting( - name = goos + "_" + goarch, - constraint_values = [ - "//go/toolchain:" + goos, - "//go/toolchain:" + goarch, - ], - ) diff --git a/starlark/src/syntax/testcases/list_source_repository.bzl b/starlark/src/syntax/testcases/list_source_repository.bzl deleted file mode 100644 index bb19a601..00000000 --- a/starlark/src/syntax/testcases/list_source_repository.bzl +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Create a repository that produce the list of sources of Bazel in order to test -# that all sources in Bazel are contained in the //:srcs filegroup. CI systems -# can set the SRCS_EXCLUDES environment variable to exclude certain files from -# being considered as sources. 
-"""A repository definition to fetch all sources in Bazel.""" - -def _impl(rctx): - workspace = rctx.path(Label("//:BUILD")).dirname - srcs_excludes = "XXXXXXXXXXXXXX1268778dfsdf4" - # Depending in ~/.git/logs/HEAD is a trick to depends on something that - # change everytime the workspace content change. - r = rctx.execute(["test", "-f", "%s/.git/logs/HEAD" % workspace]) - if r.return_code == 0: - # We only add the dependency if it exists. - unused_var = rctx.path(Label("//:.git/logs/HEAD")) # pylint: disable=unused-variable - - if "SRCS_EXCLUDES" in rctx.os.environ: - srcs_excludes = rctx.os.environ["SRCS_EXCLUDES"] - r = rctx.execute(["find", str(workspace), "-type", "f"]) - rctx.file("find.result.raw", r.stdout.replace(str(workspace) + "/", "")) - rctx.file("BUILD", """ -genrule( - name = "sources", - outs = ["sources.txt"], - srcs = ["find.result.raw"], - visibility = ["//visibility:public"], - cmd = " | ".join([ - "cat $<", - "grep -Ev '^(\\\\.git|out/|output/|bazel-|derived|tools/defaults/BUILD)'", - "grep -Ev '%s'", - "sort -u > $@", - ]), -) -""" % srcs_excludes) - -list_source_repository = repository_rule( - implementation = _impl, - environ = ["SRCS_EXCLUDES"]) -"""Create a //:sources target containing the list of sources of Bazel. - -SRCS_EXCLUDES give a regex of files to excludes in the list.""" diff --git a/starlark/src/syntax/testcases/load.bzl b/starlark/src/syntax/testcases/load.bzl deleted file mode 100644 index 21b26d2b..00000000 --- a/starlark/src/syntax/testcases/load.bzl +++ /dev/null @@ -1,7 +0,0 @@ -def declare_maven(item): - sha = item.get("sha1") - if sha != None: - native.maven_jar(name = item["name"], artifact = item["artifact"], sha1 = sha) - else: - native.maven_jar(name = item["name"], artifact = item["artifact"]) - native.bind(name = item["bind"], actual = item["actual"]) diff --git a/starlark/src/syntax/testcases/maven.bzl b/starlark/src/syntax/testcases/maven.bzl deleted file mode 100644 index c255c0ca..00000000 --- a/starlark/src/syntax/testcases/maven.bzl +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (C) 2016 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# Merge maven files - -def cmd(jars): - return ('$(location //tools:merge_jars) $@ ' - + ' '.join(['$(location %s)' % j for j in jars])) - -def merge_maven_jars(name, srcs, **kwargs): - native.genrule( - name = '%s__merged_bin' % name, - cmd = cmd(srcs), - tools = srcs + ['//tools:merge_jars'], - outs = ['%s__merged.jar' % name], - ) - native.java_import( - name = name, - jars = [':%s__merged_bin' % name], - **kwargs - ) diff --git a/starlark/src/syntax/testcases/maven_jar.bzl b/starlark/src/syntax/testcases/maven_jar.bzl deleted file mode 100644 index 2dbeae76..00000000 --- a/starlark/src/syntax/testcases/maven_jar.bzl +++ /dev/null @@ -1,162 +0,0 @@ -GERRIT = "GERRIT:" - -GERRIT_API = "GERRIT_API:" - -MAVEN_CENTRAL = "MAVEN_CENTRAL:" - -MAVEN_LOCAL = "MAVEN_LOCAL:" - -def _maven_release(ctx, parts): - """induce jar and url name from maven coordinates.""" - if len(parts) not in [3, 4]: - fail('%s:\nexpected id="groupId:artifactId:version[:classifier]"' - % ctx.attr.artifact) - if len(parts) == 4: - group, artifact, version, classifier = parts - file_version = version + '-' + classifier - else: - group, artifact, version = parts - file_version = version - - jar = artifact.lower() + '-' + file_version - url = '/'.join([ - ctx.attr.repository, - group.replace('.', '/'), - artifact, - version, - artifact + '-' + file_version]) - - return jar, url - -# Creates a struct containing the different parts of an artifact's FQN -def _create_coordinates(fully_qualified_name): - parts = fully_qualified_name.split(":") - packaging = None - classifier = None - - if len(parts) == 3: - group_id, artifact_id, version = parts - elif len(parts) == 4: - group_id, artifact_id, version, packaging = parts - elif len(parts) == 5: - group_id, artifact_id, version, packaging, classifier = parts - else: - fail("Invalid fully qualified name for artifact: %s" % fully_qualified_name) - - return struct( - fully_qualified_name = fully_qualified_name, - group_id = group_id, - artifact_id = artifact_id, - packaging = packaging, - classifier = classifier, - version = version, - ) - -def _format_deps(attr, deps): - formatted_deps = "" - if deps: - if len(deps) == 1: - formatted_deps += "%s = [\'%s\']," % (attr, deps[0]) - else: - formatted_deps += "%s = [\n" % attr - for dep in deps: - formatted_deps += " \'%s\',\n" % dep - formatted_deps += " ]," - return formatted_deps - -def _generate_build_file(ctx, binjar, srcjar): - srcjar_attr = "" - if srcjar: - srcjar_attr = 'srcjar = "%s",' % srcjar - contents = """ -# DO NOT EDIT: automatically generated BUILD file for maven_jar rule {rule_name} -package(default_visibility = ['//visibility:public']) -java_import( - name = 'jar', - jars = ['{binjar}'], - {srcjar_attr} - {deps} - {exports} -) -java_import( - name = 'neverlink', - jars = ['{binjar}'], - neverlink = 1, - {deps} - {exports} -) -\n""".format(srcjar_attr = srcjar_attr, - rule_name = ctx.name, - binjar = binjar, - deps = _format_deps("deps", ctx.attr.deps), - exports = _format_deps("exports", ctx.attr.exports)) - if srcjar: - contents += """ -java_import( - name = 'src', - jars = ['{srcjar}'], -) -""".format(srcjar = srcjar) - ctx.file('%s/BUILD' % ctx.path("jar"), contents, False) - -def _maven_jar_impl(ctx): - """rule to download a Maven archive.""" - coordinates = _create_coordinates(ctx.attr.artifact) - - name = ctx.name - sha1 = ctx.attr.sha1 - - parts = ctx.attr.artifact.split(':') - # TODO(davido): Only releases for now, implement handling snapshots - jar, url = _maven_release(ctx, parts) - - binjar = jar + '.jar' 
- binjar_path = ctx.path('/'.join(['jar', binjar])) - binurl = url + '.jar' - - python = ctx.which("python") - script = ctx.path(ctx.attr._download_script) - - args = [python, script, "-o", binjar_path, "-u", binurl] - if ctx.attr.sha1: - args.extend(["-v", sha1]) - if ctx.attr.unsign: - args.append('--unsign') - for x in ctx.attr.exclude: - args.extend(['-x', x]) - - out = ctx.execute(args) - - if out.return_code: - fail("failed %s: %s" % (' '.join(args), out.stderr)) - - srcjar = None - if ctx.attr.src_sha1 or ctx.attr.attach_source: - srcjar = jar + '-src.jar' - srcurl = url + '-sources.jar' - srcjar_path = ctx.path('jar/' + srcjar) - args = [python, script, "-o", srcjar_path, "-u", srcurl] - if ctx.attr.src_sha1: - args.extend(['-v', ctx.attr.src_sha1]) - out = ctx.execute(args) - if out.return_code: - fail("failed %s: %s" % (args, out.stderr)) - - _generate_build_file(ctx, binjar, srcjar) - -maven_jar = repository_rule( - attrs = { - "artifact": attr.string(mandatory = True), - "sha1": attr.string(), - "src_sha1": attr.string(), - "_download_script": attr.label(default = Label("//tools:download_file.py")), - "repository": attr.string(default = MAVEN_CENTRAL), - "attach_source": attr.bool(default = True), - "unsign": attr.bool(default = False), - "deps": attr.string_list(), - "exports": attr.string_list(), - "exclude": attr.string_list(), - }, - local = True, - implementation = _maven_jar_impl, -) diff --git a/starlark/src/syntax/testcases/maven_rules.bzl b/starlark/src/syntax/testcases/maven_rules.bzl deleted file mode 100644 index a27431cf..00000000 --- a/starlark/src/syntax/testcases/maven_rules.bzl +++ /dev/null @@ -1,362 +0,0 @@ -# Copyright 2016 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Implementations of Maven rules in Skylark: -# 1) maven_jar(name, artifact, repository, sha1, settings) -# The API of this is largely the same as the native maven_jar rule, -# except for the server attribute, which is not implemented. The optional -# settings supports passing a custom Maven settings.xml to download the JAR. -# 2) maven_aar(name, artifact, repository, sha1) -# The API of this rule is the same as maven_jar except that the artifact must -# be the Maven coordinate of an AAR and it does not support the historical -# repository and server attributes. -# 3) maven_dependency_plugin() -# This rule downloads the maven-dependency-plugin used internally -# for testing and the implementation for the fetching of artifacts. 
-# -# Maven coordinates are expected to be in this form: -# groupId:artifactId:version[:packaging][:classifier] -# -# Installation requirements prior to using this rule: -# 1) Maven binary: `mvn` -# 2) Maven plugin: `maven-dependency-plugin:2.10` -# Get it: $ mvn org.apache.maven.plugins:maven-dependency-plugin:2.10:get \ -# -Dartifact=org.apache.maven.plugins:maven-dependency-plugin:2.10 \ -# -Dmaven.repo.local=$HOME/.m2/repository # or specify your own local repository - -"""Rules for retrieving Maven dependencies (experimental)""" - -MAVEN_CENTRAL_URL = "https://repo1.maven.org/maven2" - -# Binary dependencies needed for running the bash commands -DEPS = ["mvn", "openssl", "awk"] - -MVN_PLUGIN = "org.apache.maven.plugins:maven-dependency-plugin:2.10" - - -def _execute(ctx, command): - return ctx.execute(["bash", "-c", """ -set -ex -%s""" % command]) - - -# Fail fast -def _check_dependencies(ctx): - for dep in DEPS: - if ctx.which(dep) == None: - fail("%s requires %s as a dependency. Please check your PATH." % (ctx.name, dep)) - - -def _validate_attr(ctx): - if hasattr(ctx.attr, "server") and (ctx.attr.server != None): - fail("%s specifies a 'server' attribute which is currently not supported." % ctx.name) - - -def _artifact_dir(coordinates): - return "/".join(coordinates.group_id.split(".") + - [coordinates.artifact_id, coordinates.version]) - - -# Creates a struct containing the different parts of an artifact's FQN. -# If the fully_qualified_name does not specify a packaging and the rule does -# not set a default packaging then JAR is assumed. -def _create_coordinates(fully_qualified_name, packaging="jar"): - parts = fully_qualified_name.split(":") - classifier = None - - if len(parts) == 3: - group_id, artifact_id, version = parts - # Updates the FQN with the default packaging so that the Maven plugin - # downloads the correct artifact. - fully_qualified_name = "%s:%s" % (fully_qualified_name, packaging) - elif len(parts) == 4: - group_id, artifact_id, version, packaging = parts - elif len(parts) == 5: - group_id, artifact_id, version, packaging, classifier = parts - else: - fail("Invalid fully qualified name for artifact: %s" % fully_qualified_name) - - return struct( - fully_qualified_name = fully_qualified_name, - group_id = group_id, - artifact_id = artifact_id, - packaging = packaging, - classifier = classifier, - version = version, - ) - - -# NOTE: Please use this method to define ALL paths that the maven_* -# rules use. Doing otherwise will lead to inconsistencies and/or errors. -# -# CONVENTION: *_path refers to files, *_dir refers to directories. -def _create_paths(ctx, coordinates): - """Creates a struct that contains the paths to create the cache WORKSPACE""" - - # e.g. guava-18.0.jar - artifact_filename = "%s-%s" % (coordinates.artifact_id, - coordinates.version) - if coordinates.classifier: - artifact_filename += "-" + coordinates.classifier - artifact_filename += "." + coordinates.packaging - sha1_filename = "%s.sha1" % artifact_filename - - # e.g. com/google/guava/guava/18.0 - relative_artifact_dir = _artifact_dir(coordinates) - - # The symlink to the actual artifact is stored in this dir, along with the - # BUILD file. The dir has the same name as the packaging to support syntax - # like @guava//jar and @google_play_services//aar. 
- symlink_dir = coordinates.packaging - - m2 = ".m2" - m2_repo = "/".join([m2, "repository"]) # .m2/repository - - return struct( - artifact_filename = artifact_filename, - sha1_filename = sha1_filename, - - symlink_dir = ctx.path(symlink_dir), - - # e.g. external/com_google_guava_guava/ \ - # .m2/repository/com/google/guava/guava/18.0/guava-18.0.jar - artifact_path = ctx.path("/".join([m2_repo, relative_artifact_dir, artifact_filename])), - artifact_dir = ctx.path("/".join([m2_repo, relative_artifact_dir])), - - sha1_path = ctx.path("/".join([m2_repo, relative_artifact_dir, sha1_filename])), - - # e.g. external/com_google_guava_guava/jar/guava-18.0.jar - symlink_artifact_path = ctx.path("/".join([symlink_dir, artifact_filename])), - ) - -_maven_jar_build_file_template = """ -# DO NOT EDIT: automatically generated BUILD file for maven_jar rule {rule_name} - -java_import( - name = 'jar', - jars = ['{artifact_filename}'], - deps = [ -{deps_string} - ], - visibility = ['//visibility:public'] -) - -filegroup( - name = 'file', - srcs = ['{artifact_filename}'], - visibility = ['//visibility:public'] -)\n""" - -_maven_aar_build_file_template = """ -# DO NOT EDIT: automatically generated BUILD file for maven_aar rule {rule_name} - -aar_import( - name = 'aar', - aar = '{artifact_filename}', - deps = [ -{deps_string} - ], - visibility = ['//visibility:public'], -) - -filegroup( - name = 'file', - srcs = ['{artifact_filename}'], - visibility = ['//visibility:public'] -)\n""" - -# Provides the syntax "@jar_name//jar" for dependencies -def _generate_build_file(ctx, template, paths): - deps_string = "\n".join(["'%s'," % dep for dep in ctx.attr.deps]) - contents = template.format( - rule_name = ctx.name, - artifact_filename = paths.artifact_filename, - deps_string = deps_string) - ctx.file('%s/BUILD' % paths.symlink_dir, contents, False) - - -def _file_exists(ctx, filename): - return _execute(ctx, "[[ -f %s ]] && exit 0 || exit 1" % filename).return_code == 0 - - -# Constructs the maven command to retrieve the dependencies from remote -# repositories using the dependency plugin, and executes it. -def _mvn_download(ctx, paths, fully_qualified_name): - # If a custom settings file exists, we'll use that. If not, Maven will use the default settings. - mvn_flags = "" - if hasattr(ctx.attr, "settings") and ctx.attr.settings != None: - ctx.symlink(ctx.attr.settings, "settings.xml") - mvn_flags += "-s %s " % "settings.xml" - - # dependency:get step. Downloads the artifact into the local repository. - mvn_get = MVN_PLUGIN + ":get" - mvn_artifact = "-Dartifact=%s" % fully_qualified_name - mvn_transitive = "-Dtransitive=false" - if hasattr(ctx.attr, "repository") and ctx.attr.repository != "": - mvn_flags += "-Dmaven.repo.remote=%s " % ctx.attr.repository - command = " ".join(["mvn", mvn_flags, mvn_get, mvn_transitive, mvn_artifact]) - exec_result = _execute(ctx, command) - if exec_result.return_code != 0: - fail("%s\n%s\nFailed to fetch Maven dependency" % (exec_result.stdout, exec_result.stderr)) - - # dependency:copy step. Moves the artifact from the local repository into //external. 
- mvn_copy = MVN_PLUGIN + ":copy" - mvn_output_dir = "-DoutputDirectory=%s" % paths.artifact_dir - command = " ".join(["mvn", mvn_flags, mvn_copy, mvn_artifact, mvn_output_dir]) - exec_result = _execute(ctx, command) - if exec_result.return_code != 0: - fail("%s\n%s\nFailed to fetch Maven dependency" % (exec_result.stdout, exec_result.stderr)) - - -def _check_sha1(ctx, paths, sha1): - actual_sha1 = _execute(ctx, "openssl sha1 %s | awk '{printf $2}'" % paths.artifact_path).stdout - - if sha1.lower() != actual_sha1.lower(): - fail(("{rule_name} has SHA-1 of {actual_sha1}, " + - "does not match expected SHA-1 ({expected_sha1})").format( - rule_name = ctx.name, - expected_sha1 = sha1, - actual_sha1 = actual_sha1)) - else: - _execute(ctx, "echo %s %s > %s" % (sha1, paths.artifact_path, paths.sha1_path)) - - -def _maven_artifact_impl(ctx, default_rule_packaging, build_file_template): - # Ensure that we have all of the dependencies installed - _check_dependencies(ctx) - - # Provide warnings and errors about attributes - _validate_attr(ctx) - - # Create a struct to contain the different parts of the artifact FQN - coordinates = _create_coordinates(ctx.attr.artifact, default_rule_packaging) - - # Create a struct to store the relative and absolute paths needed for this rule - paths = _create_paths(ctx, coordinates) - - _generate_build_file( - ctx = ctx, - template = build_file_template, - paths = paths, - ) - - if _execute(ctx, "mkdir -p %s" % paths.symlink_dir).return_code != 0: - fail("%s: Failed to create dirs in execution root.\n" % ctx.name) - - # Download the artifact - _mvn_download( - ctx = ctx, - paths = paths, - fully_qualified_name = coordinates.fully_qualified_name - ) - - if (ctx.attr.sha1 != ""): - _check_sha1( - ctx = ctx, - paths = paths, - sha1 = ctx.attr.sha1, - ) - - ctx.symlink(paths.artifact_path, paths.symlink_artifact_path) - - -_common_maven_rule_attrs = { - "artifact": attr.string( - default = "", - mandatory = True, - ), - "sha1": attr.string(default = ""), - "settings": attr.label(default = None), - # Allow the user to specify deps for the generated java_import or aar_import - # since maven_jar and maven_aar do not automatically pull in transitive - # dependencies. - "deps": attr.label_list(), -} - -def _maven_jar_impl(ctx): - _maven_artifact_impl(ctx, "jar", _maven_jar_build_file_template) - - -def _maven_aar_impl(ctx): - _maven_artifact_impl(ctx, "aar", _maven_aar_build_file_template) - -maven_jar = repository_rule( - implementation=_maven_jar_impl, - attrs=_common_maven_rule_attrs + { - # Needed for compatability reasons with the native maven_jar rule. 
- "repository": attr.string(default = ""), - "server": attr.label(default = None), - }, - local=False, -) - -maven_aar = repository_rule( - implementation=_maven_aar_impl, - attrs=_common_maven_rule_attrs, - local=False, -) - - -def _maven_dependency_plugin_impl(ctx): - _BUILD_FILE = """ -# DO NOT EDIT: automatically generated BUILD file for maven_dependency_plugin - -filegroup( - name = 'files', - srcs = glob(['**']), - visibility = ['//visibility:public'] -) -""" - ctx.file("BUILD", _BUILD_FILE, False) - - _SETTINGS_XML = """ - - - {localRepository} - - - central - {mirror} - *,default - - - -""".format( - localRepository = ctx.path("repository"), - mirror = MAVEN_CENTRAL_URL, - ) - settings_path = ctx.path("settings.xml") - ctx.file("%s" % settings_path, _SETTINGS_XML, False) - - # Download the plugin with transitive dependencies - mvn_flags = "-s %s" % settings_path - mvn_get = MVN_PLUGIN + ":get" - mvn_artifact = "-Dartifact=%s" % MVN_PLUGIN - command = " ".join(["mvn", mvn_flags, mvn_get, mvn_artifact]) - - exec_result = _execute(ctx, command) - if exec_result.return_code != 0: - fail("%s\nFailed to fetch Maven dependency" % exec_result.stderr) - - -_maven_dependency_plugin = repository_rule( - implementation=_maven_dependency_plugin_impl, -) - - -def maven_dependency_plugin(): - _maven_dependency_plugin(name = "m2") diff --git a/starlark/src/syntax/testcases/mode.bzl b/starlark/src/syntax/testcases/mode.bzl deleted file mode 100644 index fe8b3a8f..00000000 --- a/starlark/src/syntax/testcases/mode.bzl +++ /dev/null @@ -1,109 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# Modes are documented in go/modes.rst#compilation-modes - -LINKMODE_NORMAL = "normal" -LINKMODE_SHARED = "shared" -LINKMODE_PIE = "pie" -LINKMODE_PLUGIN = "plugin" - -def mode_string(mode): - result = [mode.goos, mode.goarch] - if mode.static: - result.append("static") - if mode.race: - result.append("race") - if mode.msan: - result.append("msan") - if mode.pure: - result.append("pure") - if mode.debug: - result.append("debug") - if mode.strip: - result.append("stripped") - if not result or not mode.link == LINKMODE_NORMAL: - result.append(mode.link) - return "_".join(result) - -def _ternary(*values): - for v in values: - if v == None: continue - if type(v) == "bool": return v - if type(v) != "string": fail("Invalid value type {}".format(type(v))) - v = v.lower() - if v == "on": return True - if v == "off": return False - if v == "auto": continue - fail("Invalid value {}".format(v)) - fail("_ternary failed to produce a final result from {}".format(values)) - -def get_mode(ctx, toolchain_flags): - if "@io_bazel_rules_go//go:toolchain" in ctx.toolchains: - go_toolchain = ctx.toolchains["@io_bazel_rules_go//go:toolchain"] - else: - go_toolchain = ctx.toolchains["@io_bazel_rules_go//go:bootstrap_toolchain"] - - # We always have to use the pure stdlib in cross compilation mode - force_pure = "on" if go_toolchain.cross_compile else "auto" - - #TODO: allow link mode selection - static = _ternary( - getattr(ctx.attr, "static", None), - "static" in ctx.features, - ) - race = _ternary( - getattr(ctx.attr, "race", None), - "race" in ctx.features, - ) - msan = _ternary( - getattr(ctx.attr, "msan", None), - "msan" in ctx.features, - ) - pure = _ternary( - getattr(ctx.attr, "pure", None), - force_pure, - "pure" in ctx.features, - ) - debug = ctx.var["COMPILATION_MODE"] == "debug" - strip_mode = "sometimes" - if toolchain_flags: - strip_mode = toolchain_flags.strip - strip = True - if strip_mode == "always": - strip = True - elif strip_mode == "sometimes": - strip = not debug - goos = getattr(ctx.attr, "goos", None) - if goos == None or goos == "auto": - goos = go_toolchain.default_goos - elif not pure: - fail("If goos is set, pure must be true") - goarch = getattr(ctx.attr, "goarch", None) - if goarch == None or goarch == "auto": - goarch = go_toolchain.default_goarch - elif not pure: - fail("If goarch is set, pure must be true") - - return struct( - static = static, - race = race, - msan = msan, - pure = pure, - link = LINKMODE_NORMAL, - debug = debug, - strip = strip, - goos = goos, - goarch = goarch, - ) diff --git a/starlark/src/syntax/testcases/oci.bzl b/starlark/src/syntax/testcases/oci.bzl deleted file mode 100644 index 1e056aa7..00000000 --- a/starlark/src/syntax/testcases/oci.bzl +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Rules for manipulation OCI images.""" - -load( - "//container:container.bzl", - "container_push", - oci_bundle = "container_bundle", - oci_flatten = "container_flatten", - oci_image = "container_image", - oci_import = "container_import", - oci_pull = "container_pull", -) - -def oci_push(*args, **kwargs): - if "format" in kwargs: - fail("Cannot override 'format' attribute on oci_push", - attr="format") - kwargs["format"] = "OCI" - container_push(*args, **kwargs) diff --git a/starlark/src/syntax/testcases/osx_archs.bzl b/starlark/src/syntax/testcases/osx_archs.bzl deleted file mode 100644 index a55b3efa..00000000 --- a/starlark/src/syntax/testcases/osx_archs.bzl +++ /dev/null @@ -1,31 +0,0 @@ -"""Information regarding crosstool-supported architectures.""" -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# List of architectures supported by osx crosstool. -OSX_TOOLS_NON_DEVICE_ARCHS = [ - "darwin_x86_64", - "ios_i386", - "ios_x86_64", - "watchos_i386", - "tvos_x86_64", -] - -OSX_TOOLS_ARCHS = [ - "armeabi-v7a", - "ios_armv7", - "ios_arm64", - "watchos_armv7k", - "tvos_arm64", -] + OSX_TOOLS_NON_DEVICE_ARCHS diff --git a/starlark/src/syntax/testcases/osx_cc_configure.bzl b/starlark/src/syntax/testcases/osx_cc_configure.bzl deleted file mode 100644 index bac3c0df..00000000 --- a/starlark/src/syntax/testcases/osx_cc_configure.bzl +++ /dev/null @@ -1,98 +0,0 @@ -# pylint: disable=g-bad-file-header -# Copyright 2016 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Configuring the C++ toolchain on macOS.""" - -load("@bazel_tools//tools/osx:xcode_configure.bzl", "run_xcode_locator") - -load( - "@bazel_tools//tools/cpp:lib_cc_configure.bzl", - "escape_string", -) - -load( - "@bazel_tools//tools/cpp:unix_cc_configure.bzl", - "get_escaped_cxx_inc_directories", - "tpl", - "get_env", - "find_cc", - "configure_unix_toolchain" -) - - -def _get_escaped_xcode_cxx_inc_directories(repository_ctx, cc, xcode_toolchains): - """Compute the list of default C++ include paths on Xcode-enabled darwin. - - Args: - repository_ctx: The repository context. - cc: The default C++ compiler on the local system. - xcode_toolchains: A list containing the xcode toolchains available - Returns: - include_paths: A list of builtin include paths. - """ - - # TODO(cparsons): Falling back to the default C++ compiler builtin include - # paths shouldn't be unnecessary once all actions are using xcrun. 
- include_dirs = get_escaped_cxx_inc_directories(repository_ctx, cc) - for toolchain in xcode_toolchains: - include_dirs.append(escape_string(toolchain.developer_dir)) - return include_dirs - - -def configure_osx_toolchain(repository_ctx): - """Configure C++ toolchain on macOS.""" - xcode_toolchains = [] - (xcode_toolchains, xcodeloc_err) = run_xcode_locator( - repository_ctx, - Label("@bazel_tools//tools/osx:xcode_locator.m")) - if xcode_toolchains: - cc = find_cc(repository_ctx) - tpl(repository_ctx, "osx_cc_wrapper.sh", { - "%{cc}": escape_string(str(cc)), - "%{env}": escape_string(get_env(repository_ctx)) - }, "cc_wrapper.sh") - repository_ctx.symlink( - Label("@bazel_tools//tools/objc:xcrunwrapper.sh"), "xcrunwrapper.sh") - repository_ctx.symlink( - Label("@bazel_tools//tools/objc:libtool.sh"), "libtool") - repository_ctx.symlink( - Label("@bazel_tools//tools/objc:make_hashed_objlist.py"), - "make_hashed_objlist.py") - repository_ctx.symlink( - Label("@bazel_tools//tools/osx/crosstool:wrapped_ar.tpl"), - "wrapped_ar") - repository_ctx.symlink( - Label("@bazel_tools//tools/osx/crosstool:wrapped_clang.tpl"), - "wrapped_clang") - repository_ctx.symlink( - Label("@bazel_tools//tools/osx/crosstool:wrapped_clang_pp.tpl"), - "wrapped_clang_pp") - repository_ctx.symlink( - Label("@bazel_tools//tools/osx/crosstool:BUILD.tpl"), - "BUILD") - repository_ctx.symlink( - Label("@bazel_tools//tools/osx/crosstool:osx_archs.bzl"), - "osx_archs.bzl") - escaped_include_paths = _get_escaped_xcode_cxx_inc_directories(repository_ctx, cc, xcode_toolchains) - escaped_cxx_include_directories = [] - for path in escaped_include_paths: - escaped_cxx_include_directories.append(("cxx_builtin_include_directory: \"%s\"" % path)) - if xcodeloc_err: - escaped_cxx_include_directories.append("# Error: " + xcodeloc_err + "\n") - repository_ctx.template( - "CROSSTOOL", - Label("@bazel_tools//tools/osx/crosstool:CROSSTOOL.tpl"), - {"%{cxx_builtin_include_directory}": "\n".join(escaped_cxx_include_directories)}) - else: - configure_unix_toolchain(repository_ctx, cpu_value = "darwin") diff --git a/starlark/src/syntax/testcases/pack.bzl b/starlark/src/syntax/testcases/pack.bzl deleted file mode 100644 index a4f13400..00000000 --- a/starlark/src/syntax/testcases/pack.bzl +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -load("@io_bazel_rules_go//go/private:actions/action.bzl", - "add_go_env", -) - -def emit_pack(ctx, go_toolchain, - in_lib = None, - out_lib = None, - objects = [], - archive = None, - mode = None): - """See go/toolchains.rst#pack for full documentation.""" - - if in_lib == None: fail("in_lib is a required parameter") - if out_lib == None: fail("out_lib is a required parameter") - if mode == None: fail("mode is a required parameter") - - stdlib = go_toolchain.stdlib.get(ctx, go_toolchain, mode) - inputs = [in_lib] + stdlib.files - - arguments = ctx.actions.args() - add_go_env(arguments, stdlib, mode) - arguments.add([ - "-in", in_lib, - "-out", out_lib, - ]) - inputs.extend(objects) - arguments.add(objects, before_each="-obj") - - if archive: - inputs.append(archive) - arguments.add(["-arc", archive]) - - ctx.actions.run( - inputs = inputs, - outputs = [out_lib], - mnemonic = "GoPack", - executable = go_toolchain.tools.pack, - arguments = [arguments], - ) diff --git a/starlark/src/syntax/testcases/package.bzl b/starlark/src/syntax/testcases/package.bzl deleted file mode 100644 index 3c32bb25..00000000 --- a/starlark/src/syntax/testcases/package.bzl +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright (C) 2016 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -sh_bang_template = (" && ".join([ - "echo '#!/usr/bin/env bash' > $@", - "echo \"# this script should run from the root of your workspace.\" >> $@", - "echo \"set -e\" >> $@", - "echo \"\" >> $@", - "echo 'if [[ \"$$VERBOSE\" ]]; then set -x ; fi' >> $@", - "echo \"\" >> $@", - "echo %s >> $@", - "echo \"\" >> $@", - "echo %s >> $@", -])) - -def maven_package( - version, - repository = None, - url = None, - jar = {}, - src = {}, - doc = {}, - war = {}): - - build_cmd = ['bazel', 'build'] - mvn_cmd = ['python', 'tools/maven/mvn.py', '-v', version] - api_cmd = mvn_cmd[:] - api_targets = [] - for type,d in [('jar', jar), ('java-source', src), ('javadoc', doc)]: - for a,t in sorted(d.items()): - api_cmd.append('-s %s:%s:$(location %s)' % (a,type,t)) - api_targets.append(t) - - native.genrule( - name = 'gen_api_install', - cmd = sh_bang_template % ( - ' '.join(build_cmd + api_targets), - ' '.join(api_cmd + ['-a', 'install'])), - srcs = api_targets, - outs = ['api_install.sh'], - executable = True, - testonly = 1, - ) - - if repository and url: - native.genrule( - name = 'gen_api_deploy', - cmd = sh_bang_template % ( - ' '.join(build_cmd + api_targets), - ' '.join(api_cmd + ['-a', 'deploy', - '--repository', repository, - '--url', url])), - srcs = api_targets, - outs = ['api_deploy.sh'], - executable = True, - testonly = 1, - ) - - war_cmd = mvn_cmd[:] - war_targets = [] - for a,t in sorted(war.items()): - war_cmd.append('-s %s:war:$(location %s)' % (a,t)) - war_targets.append(t) - - native.genrule( - name = 'gen_war_install', - cmd = sh_bang_template % (' '.join(build_cmd + war_targets), - ' '.join(war_cmd + ['-a', 'install'])), - srcs = war_targets, - outs = ['war_install.sh'], - executable = True, - ) - - if repository and url: - native.genrule( - name = 'gen_war_deploy', - cmd = sh_bang_template % ( - ' '.join(build_cmd + war_targets), - ' '.join(war_cmd + [ - '-a', 'deploy', - '--repository', repository, - '--url', url])), - srcs = war_targets, - outs = ['war_deploy.sh'], - executable = True, - ) diff --git a/starlark/src/syntax/testcases/passwd.bzl b/starlark/src/syntax/testcases/passwd.bzl deleted file mode 100644 index 62f0012f..00000000 --- a/starlark/src/syntax/testcases/passwd.bzl +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -load("@bazel_tools//tools/build_defs/pkg:pkg.bzl", "pkg_tar") - -def _impl(ctx): - """Core implementation of passwd_file.""" - - f = "%s:x:%s:%s:%s:%s:%s\n" % ( - ctx.attr.username, - ctx.attr.uid, - ctx.attr.gid, - ctx.attr.info, - ctx.attr.home, - ctx.attr.shell - ) - ctx.file_action( - output = ctx.outputs.out, - content = f, - executable=False - ) - build_tar = ctx.executable.build_tar - args = [ - "--output=" + ctx.outputs.tar.path, - "--file=%s=/etc/passwd" % ctx.outputs.out.path - ] - arg_file = ctx.new_file(ctx.attr.name + ".args") - ctx.file_action(arg_file, "\n".join(args)) - - ctx.action( - executable = build_tar, - arguments = ["--flagfile=" + arg_file.path], - inputs = [ctx.outputs.out, arg_file], - outputs = [ctx.outputs.tar], - use_default_shell_env = True - ) - -passwd_file = rule( - attrs = { - "username": attr.string(mandatory = True), - "uid": attr.int(default = 1000), - "gid": attr.int(default = 1000), - "info": attr.string(default = "user"), - "home": attr.string(default = "/home"), - "shell": attr.string(default = "/bin/bash"), - "build_tar": attr.label( - default = Label("@bazel_tools//tools/build_defs/pkg:build_tar"), - cfg = "host", - executable = True, - allow_files = True, - ), - }, - executable = False, - outputs = { - "out": "%{name}.passwd", - "tar": "%{name}.passwd.tar", - }, - implementation = _impl, -) diff --git a/starlark/src/syntax/testcases/path.bzl b/starlark/src/syntax/testcases/path.bzl deleted file mode 100644 index 4c8d1169..00000000 --- a/starlark/src/syntax/testcases/path.bzl +++ /dev/null @@ -1,104 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load("@io_bazel_rules_go//go/private:providers.bzl", "GoLibrary", "GoPath") -load("@io_bazel_rules_go//go/private:common.bzl", "declare_file") - - -def _tag(ctx, path, outputs): - """this generates a existance tag file for dependancies, and returns the path to the tag file""" - tag = declare_file(ctx, path=path+".tag") - path, _, _ = tag.short_path.rpartition("/") - ctx.actions.write(tag, content="") - outputs.append(tag) - return path - -def _go_path_impl(ctx): - print(""" -EXPERIMENTAL: the go_path rule is still very experimental -Please do not rely on it for production use, but feel free to use it and file issues -""") - go_toolchain = ctx.toolchains["@io_bazel_rules_go//go:toolchain"] - # First gather all the library rules - golibs = depset() - for dep in ctx.attr.deps: - golibs += dep[GoLibrary].transitive - - # Now scan them for sources - seen_libs = {} - seen_paths = {} - outputs = [] - packages = [] - for golib in golibs: - if golib.importpath in seen_libs: - # We found two different library rules that map to the same import path - # This is legal in bazel, but we can't build a valid go path for it. 
- # TODO: we might be able to ignore this if the content is identical - print("""Duplicate package -Found {} in - {} - {} -""".format(golib.importpath, golib.name, seen_libs[golib.importpath].name)) - # for now we don't fail if we see duplicate packages - # the most common case is the same source from two different workspaces - continue - seen_libs[golib.importpath] = golib - package_files = [] - prefix = "src/" + golib.importpath + "/" - for src in golib.srcs: - outpath = prefix + src.basename - if outpath in seen_paths: - # If we see the same path twice, it's a fatal error - fail("Duplicate path {}".format(outpath)) - seen_paths[outpath] = True - out = declare_file(ctx, path=outpath) - package_files += [out] - outputs += [out] - if ctx.attr.mode == "copy": - ctx.actions.expand_template(template=src, output=out, substitutions={}) - elif ctx.attr.mode == "link": - ctx.actions.run_shell( - command='ln -s $(readlink "$1") "$2"', - arguments=[src.path, out.path], - mnemonic = "GoLn", - inputs=[src], - outputs=[out], - ) - else: - fail("Invalid go path mode '{}'".format(ctx.attr.mode)) - packages += [struct( - golib = golib, - dir = _tag(ctx, prefix, outputs), - files = package_files, - )] - gopath = _tag(ctx, "", outputs) - return [ - DefaultInfo( - files = depset(outputs), - ), - GoPath( - gopath = gopath, - packages = packages, - srcs = outputs, - ) - ] - -go_path = rule( - _go_path_impl, - attrs = { - "deps": attr.label_list(providers=[GoLibrary]), - "mode": attr.string(default="copy", values=["link", "copy"]), - }, - toolchains = ["@io_bazel_rules_go//go:toolchain"], -) diff --git a/starlark/src/syntax/testcases/paths.bzl b/starlark/src/syntax/testcases/paths.bzl deleted file mode 100644 index 925abccc..00000000 --- a/starlark/src/syntax/testcases/paths.bzl +++ /dev/null @@ -1,245 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Skylib module containing file path manipulation functions. - -NOTE: The functions in this module currently only support paths with Unix-style -path separators (forward slash, "/"); they do not handle Windows-style paths -with backslash separators or drive letters. -""" - - -def _basename(p): - """Returns the basename (i.e., the file portion) of a path. - - Note that if `p` ends with a slash, this function returns an empty string. - This matches the behavior of Python's `os.path.basename`, but differs from - the Unix `basename` command (which would return the path segment preceding - the final slash). - - Args: - p: The path whose basename should be returned. - Returns: - The basename of the path, which includes the extension. - """ - return p.rpartition("/")[-1] - - -def _dirname(p): - """Returns the dirname of a path. - - The dirname is the portion of `p` up to but not including the file portion - (i.e., the basename). Any slashes immediately preceding the basename are not - included, unless omitting them would make the dirname empty. - - Args: - p: The path whose dirname should be returned. 
- Returns: - The dirname of the path. - """ - prefix, sep, _ = p.rpartition("/") - if not prefix: - return sep - else: - # If there are multiple consecutive slashes, strip them all out as Python's - # os.path.dirname does. - return prefix.rstrip("/") - - -def _is_absolute(path): - """Returns `True` if `path` is an absolute path. - - Args: - path: A path (which is a string). - Returns: - `True` if `path` is an absolute path. - """ - return path.startswith("/") - - -def _join(path, *others): - """Joins one or more path components intelligently. - - This function mimics the behavior of Python's `os.path.join` function on POSIX - platform. It returns the concatenation of `path` and any members of `others`, - inserting directory separators before each component except the first. The - separator is not inserted if the path up until that point is either empty or - already ends in a separator. - - If any component is an absolute path, all previous components are discarded. - - Args: - path: A path segment. - *others: Additional path segments. - Returns: - A string containing the joined paths. - """ - result = path - - for p in others: - if _is_absolute(p): - result = p - elif not result or result.endswith("/"): - result += p - else: - result += "/" + p - - return result - - -def _normalize(path): - """Normalizes a path, eliminating double slashes and other redundant segments. - - This function mimics the behavior of Python's `os.path.normpath` function on - POSIX platforms; specifically: - - - If the entire path is empty, "." is returned. - - All "." segments are removed, unless the path consists solely of a single - "." segment. - - Trailing slashes are removed, unless the path consists solely of slashes. - - ".." segments are removed as long as there are corresponding segments - earlier in the path to remove; otherwise, they are retained as leading ".." - segments. - - Single and double leading slashes are preserved, but three or more leading - slashes are collapsed into a single leading slash. - - Multiple adjacent internal slashes are collapsed into a single slash. - - Args: - path: A path. - Returns: - The normalized path. - """ - if not path: - return "." - - if path.startswith("//") and not path.startswith("///"): - initial_slashes = 2 - elif path.startswith("/"): - initial_slashes = 1 - else: - initial_slashes = 0 - is_relative = (initial_slashes == 0) - - components = path.split("/") - new_components = [] - - for component in components: - if component in ("", "."): - continue - if component == "..": - if new_components and new_components[-1] != "..": - # Only pop the last segment if it isn't another "..". - new_components.pop() - elif is_relative: - # Preserve leading ".." segments for relative paths. - new_components.append(component) - else: - new_components.append(component) - - path = "/".join(new_components) - if not is_relative: - path = ("/" * initial_slashes) + path - - return path or "." - - -def _relativize(path, start): - """Returns the portion of `path` that is relative to `start`. - - Because we do not have access to the underlying file system, this - implementation differs slightly from Python's `os.path.relpath` in that it - will fail if `path` is not beneath `start` (rather than use parent segments to - walk up to the common file system root). - - Relativizing paths that start with parent directory references is not allowed. - - Args: - path: The path to relativize. - start: The ancestor path against which to relativize. 
- Returns: - The portion of `path` that is relative to `start`. - """ - segments = _normalize(path).split("/") - start_segments = _normalize(start).split("/") - if start_segments == ["."]: - start_segments = [] - start_length = len(start_segments) - - if (path.startswith("..") or start.startswith("..")): - fail("Cannot relativize paths above the current (unknown) directory") - - if (path.startswith("/") != start.startswith("/") or - len(segments) < start_length): - fail("Path '%s' is not beneath '%s'" % (path, start)) - - for ancestor_segment, segment in zip(start_segments, segments): - if ancestor_segment != segment: - fail("Path '%s' is not beneath '%s'" % (path, start)) - - length = len(segments) - start_length - result_segments = segments[-length:] - return "/".join(result_segments) - - -def _replace_extension(p, new_extension): - """Replaces the extension of the file at the end of a path. - - If the path has no extension, the new extension is added to it. - - Args: - p: The path whose extension should be replaced. - new_extension: The new extension for the file. The new extension should - begin with a dot if you want the new filename to have one. - Returns: - The path with the extension replaced (or added, if it did not have one). - """ - return _split_extension(p)[0] + new_extension - - -def _split_extension(p): - """Splits the path `p` into a tuple containing the root and extension. - - Leading periods on the basename are ignored, so - `path.split_extension(".bashrc")` returns `(".bashrc", "")`. - - Args: - p: The path whose root and extension should be split. - Returns: - A tuple `(root, ext)` such that the root is the path without the file - extension, and `ext` is the file extension (which, if non-empty, contains - the leading dot). The returned tuple always satisfies the relationship - `root + ext == p`. - """ - b = _basename(p) - last_dot_in_basename = b.rfind(".") - - # If there is no dot or the only dot in the basename is at the front, then - # there is no extension. - if last_dot_in_basename <= 0: - return (p, "") - - dot_distance_from_end = len(b) - last_dot_in_basename - return (p[:-dot_distance_from_end], p[-dot_distance_from_end:]) - - -paths = struct( - basename=_basename, - dirname=_dirname, - is_absolute=_is_absolute, - join=_join, - normalize=_normalize, - relativize=_relativize, - replace_extension=_replace_extension, - split_extension=_split_extension, -) diff --git a/starlark/src/syntax/testcases/pkg.bzl b/starlark/src/syntax/testcases/pkg.bzl deleted file mode 100644 index edcd63ad..00000000 --- a/starlark/src/syntax/testcases/pkg.bzl +++ /dev/null @@ -1,246 +0,0 @@ -# Copyright 2015 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Rules for manipulation of various packaging.""" -load(":path.bzl", "dest_path", "compute_data_path") - -# Filetype to restrict inputs -tar_filetype = [".tar", ".tar.gz", ".tgz", ".tar.xz", ".tar.bz2"] -deb_filetype = [".deb", ".udeb"] - -def _pkg_tar_impl(ctx): - """Implementation of the pkg_tar rule.""" - # Compute the relative path - data_path = compute_data_path(ctx.outputs.out, ctx.attr.strip_prefix) - - build_tar = ctx.executable.build_tar - args = [ - "--output=" + ctx.outputs.out.path, - "--directory=" + ctx.attr.package_dir, - "--mode=" + ctx.attr.mode, - "--owner=" + ctx.attr.owner, - "--owner_name=" + ctx.attr.ownername, - ] - file_inputs = ctx.files.srcs[:] - args += ["--file=%s=%s" % (f.path, dest_path(f, data_path)) - for f in ctx.files.srcs] - for target, f_dest_path in ctx.attr.files.items(): - target_files = target.files.to_list() - if len(target_files) != 1: - fail("Inputs to pkg_tar.files_map must describe exactly one file.") - file_inputs += [target_files[0]] - args += ["--file=%s=%s" % (target_files[0].path, f_dest_path)] - if ctx.attr.modes: - args += ["--modes=%s=%s" % (key, ctx.attr.modes[key]) for key in ctx.attr.modes] - if ctx.attr.owners: - args += ["--owners=%s=%s" % (key, ctx.attr.owners[key]) for key in ctx.attr.owners] - if ctx.attr.ownernames: - args += ["--owner_names=%s=%s" % (key, ctx.attr.ownernames[key]) - for key in ctx.attr.ownernames] - if ctx.attr.extension: - dotPos = ctx.attr.extension.find('.') - if dotPos > 0: - dotPos += 1 - args += ["--compression=%s" % ctx.attr.extension[dotPos:]] - args += ["--tar=" + f.path for f in ctx.files.deps] - args += ["--link=%s:%s" % (k, ctx.attr.symlinks[k]) - for k in ctx.attr.symlinks] - arg_file = ctx.new_file(ctx.label.name + ".args") - ctx.file_action(arg_file, "\n".join(args)) - - ctx.action( - command = "%s --flagfile=%s" % (build_tar.path, arg_file.path), - inputs = file_inputs + ctx.files.deps + [arg_file, build_tar], - outputs = [ctx.outputs.out], - mnemonic="PackageTar", - use_default_shell_env = True, - ) - - -def _pkg_deb_impl(ctx): - """The implementation for the pkg_deb rule.""" - files = [ctx.file.data] - args = [ - "--output=" + ctx.outputs.deb.path, - "--changes=" + ctx.outputs.changes.path, - "--data=" + ctx.file.data.path, - "--package=" + ctx.attr.package, - "--architecture=" + ctx.attr.architecture, - "--maintainer=" + ctx.attr.maintainer, - ] - if ctx.attr.preinst: - args += ["--preinst=@" + ctx.file.preinst.path] - files += [ctx.file.preinst] - if ctx.attr.postinst: - args += ["--postinst=@" + ctx.file.postinst.path] - files += [ctx.file.postinst] - if ctx.attr.prerm: - args += ["--prerm=@" + ctx.file.prerm.path] - files += [ctx.file.prerm] - if ctx.attr.postrm: - args += ["--postrm=@" + ctx.file.postrm.path] - files += [ctx.file.postrm] - - # Conffiles can be specified by a file or a string list - if ctx.attr.conffiles_file: - if ctx.attr.conffiles: - fail("Both conffiles and conffiles_file attributes were specified") - args += ["--conffile=@" + ctx.file.conffiles_file.path] - files += [ctx.file.conffiles_file] - elif ctx.attr.conffiles: - args += ["--conffile=%s" % cf for cf in ctx.attr.conffiles] - - # Version and description can be specified by a file or inlined - if ctx.attr.version_file: - if ctx.attr.version: - fail("Both version and version_file attributes were specified") - args += ["--version=@" + ctx.file.version_file.path] - files += [ctx.file.version_file] - elif ctx.attr.version: - args += ["--version=" + ctx.attr.version] - else: - fail("Neither version_file nor version 
attribute was specified") - - if ctx.attr.description_file: - if ctx.attr.description: - fail("Both description and description_file attributes were specified") - args += ["--description=@" + ctx.file.description_file.path] - files += [ctx.file.description_file] - elif ctx.attr.description: - args += ["--description=" + ctx.attr.description] - else: - fail("Neither description_file nor description attribute was specified") - - # Built using can also be specified by a file or inlined (but is not mandatory) - if ctx.attr.built_using_file: - if ctx.attr.built_using: - fail("Both build_using and built_using_file attributes were specified") - args += ["--built_using=@" + ctx.file.built_using_file.path] - files += [ctx.file.built_using_file] - elif ctx.attr.built_using: - args += ["--built_using=" + ctx.attr.built_using] - - if ctx.attr.priority: - args += ["--priority=" + ctx.attr.priority] - if ctx.attr.section: - args += ["--section=" + ctx.attr.section] - if ctx.attr.homepage: - args += ["--homepage=" + ctx.attr.homepage] - - args += ["--distribution=" + ctx.attr.distribution] - args += ["--urgency=" + ctx.attr.urgency] - args += ["--depends=" + d for d in ctx.attr.depends] - args += ["--suggests=" + d for d in ctx.attr.suggests] - args += ["--enhances=" + d for d in ctx.attr.enhances] - args += ["--conflicts=" + d for d in ctx.attr.conflicts] - args += ["--pre_depends=" + d for d in ctx.attr.predepends] - args += ["--recommends=" + d for d in ctx.attr.recommends] - - ctx.action( - executable = ctx.executable.make_deb, - arguments = args, - inputs = files, - outputs = [ctx.outputs.deb, ctx.outputs.changes], - mnemonic="MakeDeb" - ) - ctx.action( - command = "ln -s %s %s" % (ctx.outputs.deb.basename, ctx.outputs.out.path), - inputs = [ctx.outputs.deb], - outputs = [ctx.outputs.out]) - -# A rule for creating a tar file, see README.md -_real_pkg_tar = rule( - implementation = _pkg_tar_impl, - attrs = { - "strip_prefix": attr.string(), - "package_dir": attr.string(default="/"), - "deps": attr.label_list(allow_files=tar_filetype), - "srcs": attr.label_list(allow_files=True), - "files": attr.label_keyed_string_dict(allow_files=True), - "mode": attr.string(default="0555"), - "modes": attr.string_dict(), - "owner": attr.string(default="0.0"), - "ownername": attr.string(default="."), - "owners": attr.string_dict(), - "ownernames": attr.string_dict(), - "extension": attr.string(default="tar"), - "symlinks": attr.string_dict(), - # Implicit dependencies. - "build_tar": attr.label( - default=Label("//tools/build_defs/pkg:build_tar"), - cfg="host", - executable=True, - allow_files=True) - }, - outputs = { - "out": "%{name}.%{extension}", - }, - executable = False) - -def pkg_tar(**kwargs): - # Compatibility with older versions of pkg_tar that define files as - # a flat list of labels. - if "srcs" not in kwargs: - if "files" in kwargs: - if not hasattr(kwargs["files"], "items"): - label = "%s//%s:%s" % (REPOSITORY_NAME, PACKAGE_NAME, kwargs["name"]) - print("%s: you provided a non dictionary to the pkg_tar `files` attribute. " % (label,) + - "This attribute was renamed to `srcs`. 
" + - "Consider renaming it in your BUILD file.") - kwargs["srcs"] = kwargs.pop("files") - _real_pkg_tar(**kwargs) - -# A rule for creating a deb file, see README.md -pkg_deb = rule( - implementation = _pkg_deb_impl, - attrs = { - "data": attr.label(mandatory=True, allow_files=tar_filetype, single_file=True), - "package": attr.string(mandatory=True), - "architecture": attr.string(default="all"), - "distribution": attr.string(default="unstable"), - "urgency": attr.string(default="medium"), - "maintainer": attr.string(mandatory=True), - "preinst": attr.label(allow_files=True, single_file=True), - "postinst": attr.label(allow_files=True, single_file=True), - "prerm": attr.label(allow_files=True, single_file=True), - "postrm": attr.label(allow_files=True, single_file=True), - "conffiles_file": attr.label(allow_files=True, single_file=True), - "conffiles": attr.string_list(default=[]), - "version_file": attr.label(allow_files=True, single_file=True), - "version": attr.string(), - "description_file": attr.label(allow_files=True, single_file=True), - "description": attr.string(), - "built_using_file": attr.label(allow_files=True, single_file=True), - "built_using": attr.string(), - "priority": attr.string(), - "section": attr.string(), - "homepage": attr.string(), - "depends": attr.string_list(default=[]), - "suggests": attr.string_list(default=[]), - "enhances": attr.string_list(default=[]), - "conflicts": attr.string_list(default=[]), - "predepends": attr.string_list(default=[]), - "recommends": attr.string_list(default=[]), - # Implicit dependencies. - "make_deb": attr.label( - default=Label("//tools/build_defs/pkg:make_deb"), - cfg="host", - executable=True, - allow_files=True) - }, - outputs = { - "out": "%{name}.deb", - "deb": "%{package}_%{version}_%{architecture}.deb", - "changes": "%{package}_%{version}_%{architecture}.changes" - }, - executable = False) diff --git a/starlark/src/syntax/testcases/pkg_war.bzl b/starlark/src/syntax/testcases/pkg_war.bzl deleted file mode 100644 index edaaab04..00000000 --- a/starlark/src/syntax/testcases/pkg_war.bzl +++ /dev/null @@ -1,154 +0,0 @@ -# Copyright (C) 2016 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# War packaging. 
- -jar_filetype = FileType([".jar"]) - -LIBS = [ - "//gerrit-war:init", - "//gerrit-war:log4j-config", - "//gerrit-war:version", - "//lib:postgresql", - "//lib/bouncycastle:bcpkix", - "//lib/bouncycastle:bcprov", - "//lib/bouncycastle:bcpg", - "//lib/log:impl_log4j", -] - -PGMLIBS = [ - "//gerrit-pgm:pgm", -] - -def _add_context(in_file, output): - input_path = in_file.path - return [ - 'unzip -qd %s %s' % (output, input_path) - ] - -def _add_file(in_file, output): - output_path = output - input_path = in_file.path - short_path = in_file.short_path - n = in_file.basename - - if short_path.startswith('gerrit-'): - n = short_path.split('/')[0] + '-' + n - - output_path += n - return [ - 'test -L %s || ln -s $(pwd)/%s %s' % (output_path, input_path, output_path) - ] - -def _make_war(input_dir, output): - return '(%s)' % ' && '.join([ - 'root=$(pwd)', - 'cd %s' % input_dir, - "find . -exec touch -t 198001010000 '{}' ';' 2> /dev/null", - 'zip -9qr ${root}/%s .' % (output.path), - ]) - -def _war_impl(ctx): - war = ctx.outputs.war - build_output = war.path + '.build_output' - inputs = [] - - # Create war layout - cmd = [ - 'set -e;rm -rf ' + build_output, - 'mkdir -p ' + build_output, - 'mkdir -p %s/WEB-INF/lib' % build_output, - 'mkdir -p %s/WEB-INF/pgm-lib' % build_output, - ] - - # Add lib - transitive_lib_deps = set() - for l in ctx.attr.libs: - if hasattr(l, 'java'): - transitive_lib_deps += l.java.transitive_runtime_deps - elif hasattr(l, 'files'): - transitive_lib_deps += l.files - - for dep in transitive_lib_deps: - cmd += _add_file(dep, build_output + '/WEB-INF/lib/') - inputs.append(dep) - - # Add pgm lib - transitive_pgmlib_deps = set() - for l in ctx.attr.pgmlibs: - transitive_pgmlib_deps += l.java.transitive_runtime_deps - - for dep in transitive_pgmlib_deps: - if dep not in inputs: - cmd += _add_file(dep, build_output + '/WEB-INF/pgm-lib/') - inputs.append(dep) - - # Add context - transitive_context_deps = set() - if ctx.attr.context: - for jar in ctx.attr.context: - if hasattr(jar, 'java'): - transitive_context_deps += jar.java.transitive_runtime_deps - elif hasattr(jar, 'files'): - transitive_context_deps += jar.files - for dep in transitive_context_deps: - cmd += _add_context(dep, build_output) - inputs.append(dep) - - # Add zip war - cmd.append(_make_war(build_output, war)) - - ctx.action( - inputs = inputs, - outputs = [war], - mnemonic = 'WAR', - command = '\n'.join(cmd), - use_default_shell_env = True, - ) - -# context: go to the root directory -# libs: go to the WEB-INF/lib directory -# pgmlibs: go to the WEB-INF/pgm-lib directory -_pkg_war = rule( - attrs = { - "context": attr.label_list(allow_files = True), - "libs": attr.label_list(allow_files = jar_filetype), - "pgmlibs": attr.label_list(allow_files = False), - }, - outputs = {"war": "%{name}.war"}, - implementation = _war_impl, -) - -def pkg_war(name, ui = 'ui_optdbg', context = [], doc = False, **kwargs): - doc_ctx = [] - doc_lib = [] - ui_deps = [] - if ui == 'polygerrit' or ui == 'ui_optdbg' or ui == 'ui_optdbg_r': - ui_deps.append('//polygerrit-ui/app:polygerrit_ui') - if ui and ui != 'polygerrit': - ui_deps.append('//gerrit-gwtui:%s' % ui) - if doc: - doc_ctx.append('//Documentation:html') - doc_lib.append('//Documentation:index') - - _pkg_war( - name = name, - libs = LIBS + doc_lib, - pgmlibs = PGMLIBS, - context = doc_ctx + context + ui_deps + [ - '//gerrit-main:main_bin_deploy.jar', - '//gerrit-war:webapp_assets', - ], - **kwargs - ) diff --git a/starlark/src/syntax/testcases/plugin.bzl 
b/starlark/src/syntax/testcases/plugin.bzl deleted file mode 100644 index 59e7335e..00000000 --- a/starlark/src/syntax/testcases/plugin.bzl +++ /dev/null @@ -1,100 +0,0 @@ -load("//tools/bzl:genrule2.bzl", "genrule2") -load( - "//tools/bzl:gwt.bzl", - "GWT_PLUGIN_DEPS", - "GWT_PLUGIN_DEPS_NEVERLINK", - "GWT_TRANSITIVE_DEPS", - "GWT_COMPILER_ARGS", - "PLUGIN_DEPS_NEVERLINK", - "GWT_JVM_ARGS", - "gwt_binary", -) - -PLUGIN_DEPS = ["//gerrit-plugin-api:lib"] - -PLUGIN_TEST_DEPS = [ - "//gerrit-acceptance-framework:lib", - "//lib/bouncycastle:bcpg", - "//lib/bouncycastle:bcpkix", - "//lib/bouncycastle:bcprov", -] - -def gerrit_plugin( - name, - deps = [], - provided_deps = [], - srcs = [], - gwt_module = [], - resources = [], - manifest_entries = [], - target_suffix = "", - **kwargs): - native.java_library( - name = name + '__plugin', - srcs = srcs, - resources = resources, - deps = provided_deps + deps + GWT_PLUGIN_DEPS_NEVERLINK + PLUGIN_DEPS_NEVERLINK, - visibility = ['//visibility:public'], - **kwargs - ) - - static_jars = [] - if gwt_module: - static_jars = [':%s-static' % name] - - native.java_binary( - name = '%s__non_stamped' % name, - deploy_manifest_lines = manifest_entries + [ - "Gerrit-ApiType: plugin", - "Implementation-Vendor: Gerrit Code Review", - ], - main_class = 'Dummy', - runtime_deps = [ - ':%s__plugin' % name, - ] + static_jars, - visibility = ['//visibility:public'], - **kwargs - ) - - if gwt_module: - native.java_library( - name = name + '__gwt_module', - resources = list(set(srcs + resources)), - runtime_deps = deps + GWT_PLUGIN_DEPS, - visibility = ['//visibility:public'], - **kwargs - ) - genrule2( - name = '%s-static' % name, - cmd = ' && '.join([ - 'mkdir -p $$TMP/static', - 'unzip -qd $$TMP/static $(location %s__gwt_application)' % name, - 'cd $$TMP', - 'zip -qr $$ROOT/$@ .']), - tools = [':%s__gwt_application' % name], - outs = ['%s-static.jar' % name], - ) - gwt_binary( - name = name + '__gwt_application', - module = [gwt_module], - deps = GWT_PLUGIN_DEPS + GWT_TRANSITIVE_DEPS + ['//lib/gwt:dev'], - module_deps = [':%s__gwt_module' % name], - compiler_args = GWT_COMPILER_ARGS, - jvm_args = GWT_JVM_ARGS, - ) - - # TODO(davido): Remove manual merge of manifest file when this feature - # request is implemented: https://github.com/bazelbuild/bazel/issues/2009 - genrule2( - name = name + target_suffix, - stamp = 1, - srcs = ['%s__non_stamped_deploy.jar' % name], - cmd = " && ".join([ - "GEN_VERSION=$$(cat bazel-out/stable-status.txt | grep -w STABLE_BUILD_%s_LABEL | cut -d ' ' -f 2)" % name.upper(), - "cd $$TMP", - "unzip -q $$ROOT/$<", - "echo \"Implementation-Version: $$GEN_VERSION\n$$(cat META-INF/MANIFEST.MF)\" > META-INF/MANIFEST.MF", - "zip -qr $$ROOT/$@ ."]), - outs = ['%s%s.jar' % (name, target_suffix)], - visibility = ['//visibility:public'], - ) diff --git a/starlark/src/syntax/testcases/plugins.bzl b/starlark/src/syntax/testcases/plugins.bzl deleted file mode 100644 index 1127e549..00000000 --- a/starlark/src/syntax/testcases/plugins.bzl +++ /dev/null @@ -1,445 +0,0 @@ -# Copyright 2015 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Plugins for Jenkins -JENKINS_PLUGINS = { - "conditional-buildstep": [ - "1.3.5", - "bb505823eef199eef3b67bff5d7826d075329ce93c64784ae72d22fff88110d9", - ], - "javadoc": [ - "1.4", - "2236e563d057a98b1bb2e56975756a008986289aa01a61c4655a31b62c760a22", - ], - "scm-api": [ - "2.2.0", - "31f60f8ab713237a03e6357c97fe0e6f0bb493d274573751fc5483b489392670", - ], - "ssh-credentials": [ - "1.13", - "d4f979223cc543dfd9bf72a1177471bb08748d2c79e1115d7bb12198bbbf2010", - ], - "parameterized-trigger": [ - "2.35", - "5193a8e8bfc20fa2147210913874fa58c81ad0bc688df1ecf3fefe65303e727c", - ], - "copyartifact": [ - "1.38.1", - "0171f402f064615a14b0adca348c2d49ee338cd49d54539793105a191d941501", - ], - "mailer": [ - "1.20", - "e78b7bb32957d11dcf58e57f2d7b60903bf31ace7858d19febb3b11a374a1187", - ], - "run-condition": [ - "1.0", - "4e55ebf4bde1202784404d856f98f7de85470ed145cde06feb45f641891780fb", - ], - "script-security": [ - "1.31", - "fbd2f2fc8f7cb7fda738eef74eb9c732e79bb75a72561b5f8031f116a90ff2c7", - ], - "matrix-project": [ - "1.10", - "dd09d0c70ed566ac61accbf5afd4b9db3cd21c8bb4372d0d302df9052c6258dd", - ], - "junit": [ - "1.20", - "be0c9079c1ed7e3bc9d7157e30d0e8fdd733a83cba11ab184ddb48cea1a579c3", - ], - "credentials": [ - "2.1.13", - "e793895fb727b59d83a9e395984243cfee9abdf4d56238fb6f2594948b0487e5", - ], - "git-parameter": [ - "0.8.0", - "c7251742e90110a0c6345a47373a20f8d97e0562bf543f04e6d2e7c9313e6ef1", - ], - "github-api": [ - "1.86", - "b759e7365ca217623c1bbf068fd9401a491637afb55523a19e4f30122066ac45", - ], - "git-client": [ - "2.5.0", - "b26f4098bb8969cdf7125b79a937c2bec8df4065785c2eaeccc66d1bc3287b0f", - ], - "github": [ - "1.27.0", - "11038b94997b698b6b7898dc68fcc7e28202c308ba6af927fc22c7bba8cbfef0", - ], - "maven-plugin": [ - "2.15.1", - "61aeb7d787e312702b438c75ffd752394bbcaaf2b6d212a4fc169b3f28651c6e", - ], - "git": [ - "3.5.1", - "1f0a26022b2b61fe25679c49a6fabeabb0cfc9452d0dc40def73709d95cc3da3", - ], - "token-macro": [ - "2.1", - "a7e4a70beefb64aa715ba8679cd0a2d41eec0446e65d6d89311a92b63edfed2d", - ], - "nodelabelparameter": [ - "1.7.2", - "0f556ae48573db6bae28b24eae561121ed9931e8b1bc31ac4e586bc8d3238edf", - ], - "jquery": [ - "1.11.2-0", - "acf4940bd5a0d918d781b51a6f42f7a0cb9381ede8235582c629a5d347495029", - ], - "email-ext": [ - "2.57.2", - "b576b6c2cd69a159699341fe07f32ac824a02c309ec175902b7eba843d4c31e4", - ], - "google-login": [ - "1.3", - "4b1482347ddd0a2a54c1fdedfe46a519cc2ada60dfc774d2435f554287c52d25", - ], - "fail-the-build-plugin": [ - "1.0", - "c97db02dc6fef269780b77d2001a9bfb49bcdc9ac2ee242cd10445709bb7d09e", - ], - "scoring-load-balancer": [ - "1.0.1", - "a7229d2945e347afb472d3c45e83ea3c4409c8710c4168912601eb46684dd3a3", - ], - "greenballs": [ - "1.15", - "6c3722fb9ce2a446f0266e1911d87ef50898a10f38890bb6963e7e1e1c4296fb", - ], - "plain-credentials": [ - "1.4", - "4f1834f99ffd7f2a7db1972fe46ee42d06af819b27297193ac89eb33a14292d8", - ], - "ssh-agent": [ - "1.15", - "d1c955b883a3e82522a1b97ff044812193bc325c5eb095e23ba8673c2b16e181", - ], - "workflow-step-api": [ - "2.10", - "ebc6d6555805848bdfd20636fec9a57384f74ece082608dae70e303b4f45e762", - ], - "icon-shim": [ - "2.0.3", 
- "8ab2f1617b68561a7f0254fb27578840aa9b7e158d0bd8c51628dbc64e8ab0ca", - ], - "ghprb": [ - "1.36.2", - "ecf7cd881136e60e48e220172bd0ddedb77c50129bc9dac271b32c715be38541", - ], - "embeddable-build-status": [ - "1.9", - "9d950ce4bfdcb67e1b1198ea2b54e18de95ec3214b1cf1135141b5b54a62edb2", - ], - "build-timeout": [ - "1.18", - "6ea3eaa31d13314af1bcd7576fb07ee4a007c84ae56639eabc03e503de485dba", - ], - "build-monitor-plugin": [ - "1.11+build.201701152243", - "4e8d5e0a00410f06e07a05cf1011563df7cd169e1961d3f334fc159a55b6a29b", - ], - "ace-editor": [ - "1.1", - "abc97028893c8a71581a5f559ea48e8e1f1a65164faee96dabfed9e95e9abad2", - ], - "display-url-api": [ - "2.0", - "4e438996e2b262075e5c4ec0d1dd93a767456205bca735e80db633ac392fd21f", - ], - "jquery-detached": [ - "1.2.1", - "a05273cd20c11557ffcb7dcb75150f21d35dc8be28355548b831c2960d7f11c0", - ], - "sauce-ondemand": [ - "1.164", - "48649855b80bc30dad03ced7a4ba76ef3aa5daa939f0521720e5aec5b5d1d949", - ], - "structs": [ - "1.9", - "f8f8afdc02ecc4464d2efd82426c2983ad73d1322a7a64575734a7a8f5f572b9", - ], - "workflow-api": [ - "2.18", - "c272bb4030edd79febac8f1b89e18bbfc0c2c3b5ef23ddb911bb070c92cabcc8", - ], - "workflow-basic-steps": [ - "2.4", - "c57d5308a01df1990a7f491ddb6dfc7aab5b7b713ad9cfc458cbef76d27fdbab", - ], - "workflow-cps": [ - "2.39", - "2fe72eb2b644ffd269a449b44d7b7de4a07ea099cef970c58a6be0056336a85b", - ], - "workflow-job": [ - "2.10", - "fd5d5a68270fae99a4514eac6cc2b58ebc37673f5e535f778286a4d36d94405a", - ], - "workflow-scm-step": [ - "2.4", - "69d58ceb58111663e82928c8db367ba8e857c29aaee9c4264aeb4100fc459e22", - ], - "workflow-support": [ - "2.14", - "49bf65953ad684e5d4f0f0750dce6f5efed9e67f5d0de26da405945bbd46f01d", - ], - "ldap": [ - "1.15", - "2ace1e160dab885680d0eee1b543d456fa97328c4befc911c2c531bf7697dbe6", - ], - "matrix-auth": [ - "1.5", - "ac31e6736f8e6aed26064d00c6aad7f4b39bfc49d80d32be757b035d4224863b", - ], - "antisamy-markup-formatter": [ - "1.5", - "8e8e3e917d76b0432ab3c32a000e824f4ef32011ba5e77dd8b6b476310df8f1a", - ], - "pam-auth": [ - "1.3", - "1b1d32dca618f43f6c06786f3fde6cc0e0fa0c805cbb735fafd464cf2cfcf1e3", - ], - "ssh-slaves": [ - "1.17", - "5120c316190a160c85e6c77f97529f93a5a602658eeb18a4d527cf89a9d838df", - ], - "subversion": [ - "2.9", - "509d42da6a1011bd83a0dd0562894ec08ff6e9a11220574be3ca662579801e9a", - ], - "windows-slaves": [ - "1.3.1", - "4364f88286745a48962b86b53df3739a6978886e72ec83289a3cfc750f1adcc6", - ], - "translation": [ - "1.15", - "11a0dd4aaa66d506d1bfc32d367e9c1f28b957296b5729ae9bf0947f5f1301ce", - ], - "bouncycastle-api": [ - "2.16.1", - "87bbc7e1f385524d6c5f5f49365c1f7e1cce7a1aee908ddd3ac0884d6b9055fc", - ], - "mapdb-api": [ - "1.0.9.0", - "072c11a34cf21f87f9c44bf01b430c5ea77e8096d077e8533de654ef00f3f871", - ], - "pipeline-stage-step": [ - "2.2", - "08f077bc8cd98fbf6cb7383d8bcfe3db69796f89cedcfb6f13c8c9375b2a11c8", - ], - "pipeline-stage-view": [ - "2.6", - "e4b82a523fe8151a117c150073c8a0cad65277651ed6565150b6b058111d4c98", - ], - "handlebars": [ - "1.1.1", - "bc5cc7b3eca17ba6cec0a8def94f3aa78ad7a19387a19aa3a56f857a18966afa", - ], - "pipeline-rest-api": [ - "2.6", - "4ecc8c111f86902a86860943390e1b98463437a33116df4feb888226daf42aaa", - ], - "momentjs": [ - "1.1.1", - "ca3c2d264cff55f71e900dc7de1f13c0bfbffdb9b3419b854dce175bcb8a4848", - ], - "workflow-durable-task-step": [ - "2.11", - "8783a0717d4ab67aaecde4b6c12c77a90784a2f604b75e1ac98506b407c3c5c8", - ], - "durable-task": [ - "1.13", - "e242f459b6662b84f3ab99f51cdbdb048d7a2bb1f1a27ffafb5f0c4bd6d5735c", - ], - "pipeline-graph-analysis": [ - "1.3", - 
"bb3235a0b5a62cd7be3983bfd40a202c582757c9633d23858aaa38af88ee61ea", - ], - "pipeline-input-step": [ - "2.8", - "8e9fa1654ec0c0ec726c894b079828737b8ce6d9ea3d05cd4378c37acc3235db", - ], - "ansicolor": [ - "0.5.0", - "4fd5b38959812712b62576ce98d0302f799bd950f5a6f777a8c0d862df336526", - ], - "workflow-cps-global-lib": [ - "2.8", - "0f09eb5ea402141769d8d923519101d51baaa0e187ecdd3efe275483e4b0ca79", - ], - "cloudbees-folder": [ - "6.0.4", - "4372996561c5118cfdf2ca801d7436ca6f4131da99b47dee3c7d9cc099da8c32", - ], - "git-server": [ - "1.7", - "1a5dc733495681a6d1a6adce07e614e50f4e5d22580e6fafbd5ca260aa4367fc", - ], - "pipeline-build-step": [ - "2.5.1", - "a8ce93220bcd66b982757c2698504e9be2a0ec5445a3d27cc235d82de643f5c1", - ], - "credentials-binding": [ - "1.13", - "92284982f52e785cf802a1a83a3cd59c61f4d76c775f653a27ba2c21204fa4a3", - ], - "docker-workflow": [ - "1.10", - "72669ece071e808edbd21d18f1021c8830ba7591d7b18e63af5b67148d35bc02", - ], - "pubsub-light": [ - "1.8", - "08de8779e50f731cb1b62be27ebe30d93a358ec54e22852a1c639419f7e16dd3", - ], - "workflow-multibranch": [ - "2.14", - "823986f18fc02508d9edae9c9876d0fa08e0b4f4b785707e224eab01bda9f018", - ], - "blueocean-web": [ - "1.1.6", - "d5e7db75a84fe0ed7e77494c806a64c72bb0bd127a4968505d9f19fc2e4cdfa9", - ], - "blueocean-autofavorite": [ - "1.0.0", - "4ecaac0bbe0214469d16fe9a6953efb4c54d9cfcab6d96007890554c6eb04423", - ], - "blueocean-dashboard": [ - "1.1.6", - "8bd926d7ab1643c1bb0e7ed2af344c6b53199afc66860e1fe5badda660f76c56", - ], - "branch-api": [ - "2.0.9", - "f54cf77dc09d1d92ef2ec5ad6d04cb79fce7ac1d251dab7cb085c2ef398f2863", - ], - "blueocean-commons": [ - "1.1.6", - "c1768341bcd83c2e81d2438d7e350485cfa1f9f0fc84922a9efdfec26622a1a9", - ], - "pipeline-model-api": [ - "1.1.4", - "6b3ac82391981a5a6068819508bbd127e7acc0b9f68ef75c4641d488b1df57df", - ], - "pipeline-model-definition": [ - "1.1.4", - "ce98022ed5580b7fbd35adc57ee10d36b0ab79a43aa0f635d8907e008863ee48", - ], - "blueocean": [ - "1.1.6", - "f46548a9af58f47b3ddace3e56d6841d616a13c6fb4adcb3b041818f6e008cd7", - ], - "variant": [ - "1.1", - "971893fb05da213631b8ea977a32a5d4953f1cb7ab6fbdec9020d503550275ff", - ], - "blueocean-rest": [ - "1.1.6", - "91990d0e971346d418f2b09c0de013f83698611ced9a108828c96306e1cf1092", - ], - "favorite": [ - "2.3.0", - "cefc897311ce31aa0c02b290db8b353d81683272a3f3791aa84fb9572865ab82", - ], - "blueocean-display-url": [ - "2.0", - "ce46e5d2a0476cff482f7033ba2e8731fd5c4de010ba8fb7572f68ed22bbfb13", - ], - "blueocean-personalization": [ - "1.1.6", - "91b910c346c267bc17438b0423d240393df96cab5edfca7089ddfd47d191a8c1", - ], - "blueocean-pipeline-api-impl": [ - "1.1.6", - "dae16da9cf6d03ee9f006c2e52d49917f2895f6eb0a3cd55febbba135a8ef347", - ], - "blueocean-jwt": [ - "1.1.6", - "4c42a529efc3d759396eb3ed9658da699e4d62d808726fb904cf8e47738ea960", - ], - "blueocean-events": [ - "1.1.6", - "919da868b56a7838241cdf1f2cee676272f6ec2a3495ad02af28eed9e280c061", - ], - "github-branch-source": [ - "2.2.2", - "6e23c6c2b30c3e38ed1f83f8afe090ff0921f9b3c64f43f9549d2f76bd198288", - ], - "blueocean-i18n": [ - "1.1.6", - "50f4e02930676f2298bcf2e81e1f1dbdcec6578f326c4385afc1e89e6275a2ce", - ], - "blueocean-config": [ - "1.1.6", - "2c2dec55146fafdd48e1c0ea41d085776337dc045752fdc6d3c2db59ab26ec89", - ], - "blueocean-git-pipeline": [ - "1.1.6", - "9d984e502ce0f914d942c091163f6b3251b68a41f0a385af6c4d0756a5bbd1d0", - ], - "pipeline-stage-tags-metadata": [ - "1.1.4", - "5420089bf9984412c47824ea98d69400c5f9bf7794651b200d7f82bec97e2b55", - ], - "blueocean-pipeline-editor": [ - "0.2.0", - 
"cd3d0a7aeee37aa8df6135dc575a4d0458dc63f206f61034f57a79a58be83c64", - ], - "sse-gateway": [ - "1.15", - "56b19b7a48f646dc8269bae4aca41ec11f71756d618d302b1f8b9d688441850f", - ], - "blueocean-github-pipeline": [ - "1.1.6", - "21dbac4b336b2b022e3abaf0afb600c7e5576403afd3b53720e95c9ed2b4fe57", - ], - "blueocean-pipeline-scm-api": [ - "1.1.6", - "7c2be0081b02496f798f46f261a3081003e55f3df72bbedd865ef55191c0a5d6", - ], - "jackson2-api": [ - "2.7.3", - "6dfa1ba3db32ae47630b239399c3fc8d9102ff0b523a5c33dd32bc88d9874837", - ], - "pipeline-model-extensions": [ - "1.1.4", - "ca6edd0013553de18031c7c5daf20cee5723ca8d4719121223707742d1c2c713", - ], - "metrics": [ - "3.1.2.9", - "0828fe8dbd36d7f614ddc89cd2940f4004ee49cd9dca86c48cc1fd2f4495a542", - ], - "pipeline-model-declarative-agent": [ - "1.1.1", - "d5540c3973bf5f568910f93b3a002c413148b9d3dd4211ce5faf4a2a1214d4ef", - ], - "blueocean-rest-impl": [ - "1.1.6", - "192d525cf0abccee383278a5427d5e95251638d6075beea8259974458c998606", - ], - "docker-commons": [ - "1.8", - "468211603d88a6ac07004abf606d79a87f92432e2d6b90251f442183c8053ea3", - ], - "authentication-tokens": [ - "1.3", - "f05736bc7da0df3bba5ceb4d893089706812ced3134adb2edc9ae341467f1ae3", - ], - "test-results-analyzer": [ - "0.3.4", - "b3aa8c11d59a5c1ac007f86a054a259766bc64ee0ad91be0c2dd1981bce3c6f8", - ], - "htmlpublisher": [ - "1.13", - "c5c3d99125110c0d8a63472f6b66b4a2481b78caadbb17632ef86a6f3a19ec4c", - ], -} diff --git a/starlark/src/syntax/testcases/popular_repos.bzl b/starlark/src/syntax/testcases/popular_repos.bzl deleted file mode 100644 index ac4d3eda..00000000 --- a/starlark/src/syntax/testcases/popular_repos.bzl +++ /dev/null @@ -1,71 +0,0 @@ - -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -############################## -# Generated file, do not edit! 
-############################## - -load("@io_bazel_rules_go//go/private:go_repository.bzl", "go_repository") - -def _maybe(repo_rule, name, **kwargs): - if name not in native.existing_rules(): - repo_rule(name=name, **kwargs) - -def popular_repos(): - _maybe( - go_repository, - name="org_golang_x_crypto", - importpath="golang.org/x/crypto", - strip_prefix="crypto-81e90905daefcd6fd217b62423c0908922eadb30", - type="zip", - urls=['https://codeload.github.com/golang/crypto/zip/81e90905daefcd6fd217b62423c0908922eadb30'], - ) - _maybe( - go_repository, - name="org_golang_x_net", - importpath="golang.org/x/net", - commit="57efc9c3d9f91fb3277f8da1cff370539c4d3dc5", - ) - _maybe( - go_repository, - name="org_golang_x_sys", - importpath="golang.org/x/sys", - commit="0b25a408a50076fbbcae6b7ac0ea5fbb0b085e79", - ) - _maybe( - go_repository, - name="org_golang_x_text", - importpath="golang.org/x/text", - commit="a9a820217f98f7c8a207ec1e45a874e1fe12c478", - ) - _maybe( - go_repository, - name="org_golang_x_tools", - importpath="golang.org/x/tools", - commit="663269851cdddc898f963782f74ea574bcd5c814", - ) - _maybe( - go_repository, - name="org_golang_google_grpc", - importpath="google.golang.org/grpc", - commit="3f10311ccf076b6b7cba28273df3290d42e60982", - build_file_proto_mode="disable", - ) - _maybe( - go_repository, - name="com_github_mattn_go_sqlite3", - importpath="github.com/mattn/go-sqlite3", - commit="83772a7051f5e30d8e59746a9e43dfa706b72f3b", - ) diff --git a/starlark/src/syntax/testcases/prefix.bzl b/starlark/src/syntax/testcases/prefix.bzl deleted file mode 100644 index 5035b7c5..00000000 --- a/starlark/src/syntax/testcases/prefix.bzl +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# In Go, imports are always fully qualified with a URL, -# eg. github.com/user/project. Hence, a label //foo:bar from within a -# Bazel workspace must be referred to as -# "github.com/user/project/foo/bar". To make this work, each rule must -# know the repository's URL. This is achieved, by having all go rules -# depend on a globally unique target that has a "go_prefix" transitive -# info provider. 
- -def _go_prefix_impl(ctx): - """go_prefix_impl provides the go prefix to use as a transitive info provider.""" - return struct(go_prefix = ctx.attr.prefix) - -_go_prefix_rule = rule( - _go_prefix_impl, - attrs = { - "prefix": attr.string(mandatory = True), - }, -) - -def go_prefix(prefix): - """go_prefix sets the Go import name to be used for this workspace.""" - _go_prefix_rule(name = "go_prefix", - prefix = prefix, - visibility = ["//visibility:public" ] - ) - -def go_prefix_default(importpath): - return (None - if importpath - else Label("//:go_prefix", relative_to_caller_repository = True)) - diff --git a/starlark/src/syntax/testcases/printer.bzl b/starlark/src/syntax/testcases/printer.bzl deleted file mode 100644 index e1feff65..00000000 --- a/starlark/src/syntax/testcases/printer.bzl +++ /dev/null @@ -1,19 +0,0 @@ -"""Example of a rule that accesses its attributes.""" - -def _impl(ctx): - # Print debug information about the target. - print("Target {} has {} deps".format(ctx.label, len(ctx.attr.deps))) - - # For each target in deps, print its label and files. - for i, d in enumerate(ctx.attr.deps): - print(" {}. label = {}".format(i+1, d.label)) - # A label can represent any number of files (possibly 0). - print(" files = " + str([f.path for f in d.files])) - -printer = rule( - implementation=_impl, - attrs={ - # Do not declare "name": It is added automatically. - "number": attr.int(default = 1), - "deps": attr.label_list(allow_files=True), - }) diff --git a/starlark/src/syntax/testcases/prolog.bzl b/starlark/src/syntax/testcases/prolog.bzl deleted file mode 100644 index cae85ad5..00000000 --- a/starlark/src/syntax/testcases/prolog.bzl +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright (C) 2016 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load("//tools/bzl:genrule2.bzl", "genrule2") - -def prolog_cafe_library( - name, - srcs, - deps = [], - **kwargs): - genrule2( - name = name + '__pl2j', - cmd = '$(location //lib/prolog:compiler_bin) ' + - '$$(dirname $@) $@ ' + - '$(SRCS)', - srcs = srcs, - tools = ['//lib/prolog:compiler_bin'], - outs = [ name + '.srcjar' ], - ) - native.java_library( - name = name, - srcs = [':' + name + '__pl2j'], - deps = ['//lib/prolog:runtime'] + deps, - **kwargs - ) diff --git a/starlark/src/syntax/testcases/proto_alias.bzl b/starlark/src/syntax/testcases/proto_alias.bzl deleted file mode 100644 index e47728f9..00000000 --- a/starlark/src/syntax/testcases/proto_alias.bzl +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2016 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -def proto_alias(name, version): - native.alias( - name = name, - actual = "//third_party/protobuf/" + version + ":" + name) diff --git a/starlark/src/syntax/testcases/protobuf.bzl b/starlark/src/syntax/testcases/protobuf.bzl deleted file mode 100644 index 23380bab..00000000 --- a/starlark/src/syntax/testcases/protobuf.bzl +++ /dev/null @@ -1,402 +0,0 @@ -def _GetPath(ctx, path): - if ctx.label.workspace_root: - return ctx.label.workspace_root + '/' + path - else: - return path - -def _IsNewExternal(ctx): - # Bazel 0.4.4 and older have genfiles paths that look like: - # bazel-out/local-fastbuild/genfiles/external/repo/foo - # After the exec root rearrangement, they look like: - # ../repo/bazel-out/local-fastbuild/genfiles/foo - return ctx.label.workspace_root.startswith("../") - -def _GenDir(ctx): - if _IsNewExternal(ctx): - # We are using the fact that Bazel 0.4.4+ provides repository-relative paths - # for ctx.genfiles_dir. - return ctx.genfiles_dir.path + ( - "/" + ctx.attr.includes[0] if ctx.attr.includes and ctx.attr.includes[0] else "") - # This means that we're either in the old version OR the new version in the local repo. - # Either way, appending the source path to the genfiles dir works. - return ctx.var["GENDIR"] + "/" + _SourceDir(ctx) - -def _SourceDir(ctx): - if not ctx.attr.includes: - return ctx.label.workspace_root - if not ctx.attr.includes[0]: - return _GetPath(ctx, ctx.label.package) - if not ctx.label.package: - return _GetPath(ctx, ctx.attr.includes[0]) - return _GetPath(ctx, ctx.label.package + '/' + ctx.attr.includes[0]) - -def _CcHdrs(srcs, use_grpc_plugin=False): - ret = [s[:-len(".proto")] + ".pb.h" for s in srcs] - if use_grpc_plugin: - ret += [s[:-len(".proto")] + ".grpc.pb.h" for s in srcs] - return ret - -def _CcSrcs(srcs, use_grpc_plugin=False): - ret = [s[:-len(".proto")] + ".pb.cc" for s in srcs] - if use_grpc_plugin: - ret += [s[:-len(".proto")] + ".grpc.pb.cc" for s in srcs] - return ret - -def _CcOuts(srcs, use_grpc_plugin=False): - return _CcHdrs(srcs, use_grpc_plugin) + _CcSrcs(srcs, use_grpc_plugin) - -def _PyOuts(srcs): - return [s[:-len(".proto")] + "_pb2.py" for s in srcs] - -def _RelativeOutputPath(path, include, dest=""): - if include == None: - return path - - if not path.startswith(include): - fail("Include path %s isn't part of the path %s." 
% (include, path)) - - if include and include[-1] != '/': - include = include + '/' - if dest and dest[-1] != '/': - dest = dest + '/' - - path = path[len(include):] - return dest + path - -def _proto_gen_impl(ctx): - """General implementation for generating protos""" - srcs = ctx.files.srcs - deps = [] - deps += ctx.files.srcs - source_dir = _SourceDir(ctx) - gen_dir = _GenDir(ctx) - if source_dir: - import_flags = ["-I" + source_dir, "-I" + gen_dir] - else: - import_flags = ["-I."] - - for dep in ctx.attr.deps: - import_flags += dep.proto.import_flags - deps += dep.proto.deps - - args = [] - if ctx.attr.gen_cc: - args += ["--cpp_out=" + gen_dir] - if ctx.attr.gen_py: - args += ["--python_out=" + gen_dir] - - inputs = srcs + deps - if ctx.executable.plugin: - plugin = ctx.executable.plugin - lang = ctx.attr.plugin_language - if not lang and plugin.basename.startswith('protoc-gen-'): - lang = plugin.basename[len('protoc-gen-'):] - if not lang: - fail("cannot infer the target language of plugin", "plugin_language") - - outdir = gen_dir - if ctx.attr.plugin_options: - outdir = ",".join(ctx.attr.plugin_options) + ":" + outdir - args += ["--plugin=protoc-gen-%s=%s" % (lang, plugin.path)] - args += ["--%s_out=%s" % (lang, outdir)] - inputs += [plugin] - - if args: - ctx.action( - inputs=inputs, - outputs=ctx.outputs.outs, - arguments=args + import_flags + [s.path for s in srcs], - executable=ctx.executable.protoc, - mnemonic="ProtoCompile", - use_default_shell_env=True, - ) - - return struct( - proto=struct( - srcs=srcs, - import_flags=import_flags, - deps=deps, - ), - ) - -proto_gen = rule( - attrs = { - "srcs": attr.label_list(allow_files = True), - "deps": attr.label_list(providers = ["proto"]), - "includes": attr.string_list(), - "protoc": attr.label( - cfg = "host", - executable = True, - single_file = True, - mandatory = True, - ), - "plugin": attr.label( - cfg = "host", - allow_files = True, - executable = True, - ), - "plugin_language": attr.string(), - "plugin_options": attr.string_list(), - "gen_cc": attr.bool(), - "gen_py": attr.bool(), - "outs": attr.output_list(), - }, - output_to_genfiles = True, - implementation = _proto_gen_impl, -) -"""Generates codes from Protocol Buffers definitions. - -This rule helps you to implement Skylark macros specific to the target -language. You should prefer more specific `cc_proto_library `, -`py_proto_library` and others unless you are adding such wrapper macros. - -Args: - srcs: Protocol Buffers definition files (.proto) to run the protocol compiler - against. - deps: a list of dependency labels; must be other proto libraries. - includes: a list of include paths to .proto files. - protoc: the label of the protocol compiler to generate the sources. - plugin: the label of the protocol compiler plugin to be passed to the protocol - compiler. - plugin_language: the language of the generated sources - plugin_options: a list of options to be passed to the plugin - gen_cc: generates C++ sources in addition to the ones from the plugin. - gen_py: generates Python sources in addition to the ones from the plugin. - outs: a list of labels of the expected outputs from the protocol compiler. -""" - -def cc_proto_library( - name, - srcs=[], - deps=[], - cc_libs=[], - include=None, - protoc="//:protoc", - internal_bootstrap_hack=False, - use_grpc_plugin=False, - default_runtime="//:protobuf", - **kargs): - """Bazel rule to create a C++ protobuf library from proto source files - - NOTE: the rule is only an internal workaround to generate protos. 
The - interface may change and the rule may be removed when bazel has introduced - the native rule. - - Args: - name: the name of the cc_proto_library. - srcs: the .proto files of the cc_proto_library. - deps: a list of dependency labels; must be cc_proto_library. - cc_libs: a list of other cc_library targets depended by the generated - cc_library. - include: a string indicating the include path of the .proto files. - protoc: the label of the protocol compiler to generate the sources. - internal_bootstrap_hack: a flag indicate the cc_proto_library is used only - for bootstraping. When it is set to True, no files will be generated. - The rule will simply be a provider for .proto files, so that other - cc_proto_library can depend on it. - use_grpc_plugin: a flag to indicate whether to call the grpc C++ plugin - when processing the proto files. - default_runtime: the implicitly default runtime which will be depended on by - the generated cc_library target. - **kargs: other keyword arguments that are passed to cc_library. - - """ - - includes = [] - if include != None: - includes = [include] - - if internal_bootstrap_hack: - # For pre-checked-in generated files, we add the internal_bootstrap_hack - # which will skip the codegen action. - proto_gen( - name=name + "_genproto", - srcs=srcs, - deps=[s + "_genproto" for s in deps], - includes=includes, - protoc=protoc, - visibility=["//visibility:public"], - ) - # An empty cc_library to make rule dependency consistent. - native.cc_library( - name=name, - **kargs) - return - - grpc_cpp_plugin = None - if use_grpc_plugin: - grpc_cpp_plugin = "//external:grpc_cpp_plugin" - - gen_srcs = _CcSrcs(srcs, use_grpc_plugin) - gen_hdrs = _CcHdrs(srcs, use_grpc_plugin) - outs = gen_srcs + gen_hdrs - - proto_gen( - name=name + "_genproto", - srcs=srcs, - deps=[s + "_genproto" for s in deps], - includes=includes, - protoc=protoc, - plugin=grpc_cpp_plugin, - plugin_language="grpc", - gen_cc=1, - outs=outs, - visibility=["//visibility:public"], - ) - - if default_runtime and not default_runtime in cc_libs: - cc_libs = cc_libs + [default_runtime] - if use_grpc_plugin: - cc_libs = cc_libs + ["//external:grpc_lib"] - - native.cc_library( - name=name, - srcs=gen_srcs, - hdrs=gen_hdrs, - deps=cc_libs + deps, - includes=includes, - **kargs) - -def internal_gen_well_known_protos_java(srcs): - """Bazel rule to generate the gen_well_known_protos_java genrule - - Args: - srcs: the well known protos - """ - root = Label("%s//protobuf_java" % (REPOSITORY_NAME)).workspace_root - pkg = PACKAGE_NAME + "/" if PACKAGE_NAME else "" - if root == "": - include = " -I%ssrc " % pkg - else: - include = " -I%s/%ssrc " % (root, pkg) - native.genrule( - name = "gen_well_known_protos_java", - srcs = srcs, - outs = [ - "wellknown.srcjar", - ], - cmd = "$(location :protoc) --java_out=$(@D)/wellknown.jar" + - " %s $(SRCS) " % include + - " && mv $(@D)/wellknown.jar $(@D)/wellknown.srcjar", - tools = [":protoc"], - ) - -def internal_copied_filegroup(name, srcs, strip_prefix, dest, **kwargs): - """Macro to copy files to a different directory and then create a filegroup. - - This is used by the //:protobuf_python py_proto_library target to work around - an issue caused by Python source files that are part of the same Python - package being in separate directories. - - Args: - srcs: The source files to copy and add to the filegroup. - strip_prefix: Path to the root of the files to copy. - dest: The directory to copy the source files into. 
- **kwargs: extra arguments that will be passesd to the filegroup. - """ - outs = [_RelativeOutputPath(s, strip_prefix, dest) for s in srcs] - - native.genrule( - name = name + "_genrule", - srcs = srcs, - outs = outs, - cmd = " && ".join( - ["cp $(location %s) $(location %s)" % - (s, _RelativeOutputPath(s, strip_prefix, dest)) for s in srcs]), - ) - - native.filegroup( - name = name, - srcs = outs, - **kwargs) - -def py_proto_library( - name, - srcs=[], - deps=[], - py_libs=[], - py_extra_srcs=[], - include=None, - default_runtime="//:protobuf_python", - protoc="//:protoc", - use_grpc_plugin=False, - **kargs): - """Bazel rule to create a Python protobuf library from proto source files - - NOTE: the rule is only an internal workaround to generate protos. The - interface may change and the rule may be removed when bazel has introduced - the native rule. - - Args: - name: the name of the py_proto_library. - srcs: the .proto files of the py_proto_library. - deps: a list of dependency labels; must be py_proto_library. - py_libs: a list of other py_library targets depended by the generated - py_library. - py_extra_srcs: extra source files that will be added to the output - py_library. This attribute is used for internal bootstrapping. - include: a string indicating the include path of the .proto files. - default_runtime: the implicitly default runtime which will be depended on by - the generated py_library target. - protoc: the label of the protocol compiler to generate the sources. - use_grpc_plugin: a flag to indicate whether to call the Python C++ plugin - when processing the proto files. - **kargs: other keyword arguments that are passed to cc_library. - - """ - outs = _PyOuts(srcs) - - includes = [] - if include != None: - includes = [include] - - grpc_python_plugin = None - if use_grpc_plugin: - grpc_python_plugin = "//external:grpc_python_plugin" - # Note: Generated grpc code depends on Python grpc module. This dependency - # is not explicitly listed in py_libs. Instead, host system is assumed to - # have grpc installed. - - proto_gen( - name=name + "_genproto", - srcs=srcs, - deps=[s + "_genproto" for s in deps], - includes=includes, - protoc=protoc, - gen_py=1, - outs=outs, - visibility=["//visibility:public"], - plugin=grpc_python_plugin, - plugin_language="grpc" - ) - - if default_runtime and not default_runtime in py_libs + deps: - py_libs = py_libs + [default_runtime] - - native.py_library( - name=name, - srcs=outs+py_extra_srcs, - deps=py_libs+deps, - imports=includes, - **kargs) - -def internal_protobuf_py_tests( - name, - modules=[], - **kargs): - """Bazel rules to create batch tests for protobuf internal. - - Args: - name: the name of the rule. - modules: a list of modules for tests. The macro will create a py_test for - each of the parameter with the source "google/protobuf/%s.py" - kargs: extra parameters that will be passed into the py_test. - - """ - for m in modules: - s = "python/google/protobuf/internal/%s.py" % m - native.py_test( - name="py_%s" % m, - srcs=[s], - main=s, - **kargs) diff --git a/starlark/src/syntax/testcases/providers.bzl b/starlark/src/syntax/testcases/providers.bzl deleted file mode 100644 index bec509b8..00000000 --- a/starlark/src/syntax/testcases/providers.bzl +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load("@io_bazel_rules_go//go/private:mode.bzl", "mode_string") - -GoLibrary = provider() -"""See go/providers.rst#GoLibrary for full documentation.""" - -GoPackage = provider() - -GoPath = provider() - -GoSource = provider() -"""See go/providers.rst#GoSource for full documentation.""" - -GoSourceList = provider() -"""See go/providers.rst#GoSourceList for full documentation.""" - -GoArchive = provider() -"""See go/providers.rst#GoArchive for full documentation.""" - -GoArchiveData = provider() - -GoStdLib = provider() - -def _merge_runfiles(a, b): - if not a: return b - if not b: return a - return a.merge(b) - -def _source_build_entry(srcs = [], deps = [], gc_goopts=[], runfiles=None, cgo_deps=[], cgo_exports=[], cgo_archive=None, want_coverage = False, source = None, exclude = None): - """Creates a new GoSource from a collection of values and an optional GoSourceList to merge in.""" - for e in (source.entries if source else []): - srcs = srcs + e.srcs - deps = deps + e.deps - gc_goopts = gc_goopts + e.gc_goopts - runfiles = _merge_runfiles(runfiles, e.runfiles) - cgo_deps = cgo_deps + e.cgo_deps - cgo_exports = cgo_exports + e.cgo_exports - if e.cgo_archive: - if cgo_archive: - fail("multiple libraries with cgo_archive embedded") - cgo_archive = e.cgo_archive - - return GoSource( - srcs = srcs, - deps = deps, - gc_goopts = gc_goopts, - runfiles = runfiles, - cgo_deps = cgo_deps, - cgo_exports = cgo_exports, - cgo_archive = cgo_archive, - want_coverage = want_coverage, - exclude = exclude, - ) - -def _source_new(**kwargs): - """Creates a new GoSourceList from a collection of values.""" - return GoSourceList(entries = [_source_build_entry(**kwargs)]) - -def _source_merge(source): - """Merges the entries of multiple GoSourceList providers to a single GoSourceList.""" - entries = [] - for e in source: - entries.extend(e.entries) - return GoSourceList(entries = entries) - -def _source_flatten(ctx, source): - """Flattens a GoSourceList to a single GoSource ready for use.""" - return _source_build_entry(source = source) - -def _source_filter(ctx, source, mode): - return GoSourceList(entries = [s for s in source.entries if not (s.exclude and s.exclude(ctx, mode))]) - -sources = struct( - new = _source_new, - merge = _source_merge, - flatten = _source_flatten, - filter = _source_filter, -) -"""sources holds the functions for manipulating GoSourceList providers.""" diff --git a/starlark/src/syntax/testcases/pull.bzl b/starlark/src/syntax/testcases/pull.bzl deleted file mode 100644 index 7686d640..00000000 --- a/starlark/src/syntax/testcases/pull.bzl +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -"""An implementation of container_pull based on google/containerregistry. - -This wraps the containerregistry.tools.fast_puller executable in a -Bazel rule for downloading base images without a Docker client to -construct new images. -""" - -def _python(repository_ctx): - if "BAZEL_PYTHON" in repository_ctx.os.environ: - return repository_ctx.os.environ.get("BAZEL_PYTHON") - - python_path = repository_ctx.which("python") - if not python_path: - python_path = repository_ctx.which("python.exe") - if python_path: - return python_path - - fail("rules_docker requires a python interpreter installed. " + - "Please set BAZEL_PYTHON, or put it on your path.") - -def _impl(repository_ctx): - """Core implementation of container_pull.""" - - # Add an empty top-level BUILD file. - repository_ctx.file("BUILD", "") - - repository_ctx.file("image/BUILD", """ -package(default_visibility = ["//visibility:public"]) - -load("@io_bazel_rules_docker//container:import.bzl", "container_import") - -container_import( - name = "image", - config = "config.json", - layers = glob(["*.tar.gz"]), -) -""") - - args = [ - _python(repository_ctx), - repository_ctx.path(repository_ctx.attr._puller), - "--directory", repository_ctx.path("image") - ] - - # If a digest is specified, then pull by digest. Otherwise, pull by tag. - if repository_ctx.attr.digest: - args += [ - "--name", "{registry}/{repository}@{digest}".format( - registry=repository_ctx.attr.registry, - repository=repository_ctx.attr.repository, - digest=repository_ctx.attr.digest) - ] - else: - args += [ - "--name", "{registry}/{repository}:{tag}".format( - registry=repository_ctx.attr.registry, - repository=repository_ctx.attr.repository, - tag=repository_ctx.attr.tag) - ] - - result = repository_ctx.execute(args) - if result.return_code: - fail("Pull command failed: %s (%s)" % (result.stderr, " ".join(args))) - -container_pull = repository_rule( - attrs = { - "registry": attr.string(mandatory = True), - "repository": attr.string(mandatory = True), - "digest": attr.string(), - "tag": attr.string(default = "latest"), - "_puller": attr.label( - executable = True, - default = Label("@puller//file:puller.par"), - cfg = "host", - ), - }, - implementation = _impl, -) - -"""Pulls a container image. - -This rule pulls a container image into our intermediate format. The -output of this rule can be used interchangeably with `docker_build`. - -Args: - name: name of the rule. - registry: the registry from which we are pulling. - repository: the name of the image. - tag: (optional) the tag of the image, default to 'latest' if this - and 'digest' remain unspecified. - digest: (optional) the digest of the image to pull. -""" diff --git a/starlark/src/syntax/testcases/push-all.bzl b/starlark/src/syntax/testcases/push-all.bzl deleted file mode 100644 index d1a81ba0..00000000 --- a/starlark/src/syntax/testcases/push-all.bzl +++ /dev/null @@ -1,149 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -"""An implementation of container_push based on google/containerregistry. - -This variant of container_push accepts a container_bundle target and publishes -the embedded image references. -""" - -load( - "//skylib:path.bzl", - "runfile", -) - -def _get_runfile_path(ctx, f): - return "${RUNFILES}/%s" % runfile(ctx, f) - -def _impl(ctx): - """Core implementation of container_push.""" - stamp = ctx.attr.bundle.stamp - images = ctx.attr.bundle.container_images - - stamp_inputs = [] - if stamp: - stamp_inputs = [ctx.info_file, ctx.version_file] - - stamp_arg = " ".join(["--stamp-info-file=%s" % _get_runfile_path(ctx, f) for f in stamp_inputs]) - - scripts = [] - runfiles = [] - index = 0 - for tag in images: - image = images[tag] - # Leverage our efficient intermediate representation to push. - legacy_base_arg = "" - if image.get("legacy"): - print("Pushing an image based on a tarball can be very " + - "expensive. If the image is the output of a " + - "docker_build, consider dropping the '.tar' extension. " + - "If the image is checked in, consider using " + - "docker_import instead.") - legacy_base_arg = "--tarball=%s" % _get_runfile_path(ctx, image["legacy"]) - runfiles += [image["legacy"]] - - blobsums = image.get("blobsum", []) - digest_arg = " ".join(["--digest=%s" % _get_runfile_path(ctx, f) for f in blobsums]) - blobs = image.get("zipped_layer", []) - layer_arg = " ".join(["--layer=%s" % _get_runfile_path(ctx, f) for f in blobs]) - config_arg = "--config=%s" % _get_runfile_path(ctx, image["config"]) - - runfiles += [image["config"]] + blobsums + blobs - - out = ctx.new_file("%s.%d.push" % (ctx.label.name, index)) - ctx.template_action( - template = ctx.file._tag_tpl, - substitutions = { - "%{stamp}": stamp_arg, - "%{tag}": ctx.expand_make_variables("tag", tag, {}), - "%{image}": "%s %s %s %s" % ( - legacy_base_arg, config_arg, digest_arg, layer_arg), - "%{format}": "--oci" if ctx.attr.format == "OCI" else "", - "%{container_pusher}": _get_runfile_path(ctx, ctx.executable._pusher), - }, - output = out, - executable=True, - ) - - scripts += [out] - runfiles += [out] - index += 1 - - ctx.template_action( - template = ctx.file._all_tpl, - substitutions = { - "%{push_statements}": "\n".join([ - 'async "%s"' % _get_runfile_path(ctx, command) - for command in scripts - ]), - }, - output = ctx.outputs.executable, - executable=True, - ) - - return struct(runfiles = ctx.runfiles(files = [ - ctx.executable._pusher - ] + stamp_inputs + runfiles + list(ctx.attr._pusher.default_runfiles.files))) - -container_push = rule( - attrs = { - "bundle": attr.label(mandatory = True), - "format": attr.string( - mandatory = True, - values = [ - "OCI", - "Docker", - ], - ), - "_all_tpl": attr.label( - default = Label("//contrib:push-all.sh.tpl"), - single_file = True, - allow_files = True, - ), - "_tag_tpl": attr.label( - default = Label("//container:push-tag.sh.tpl"), - single_file = True, - allow_files = True, - ), - "_pusher": attr.label( - default = Label("@containerregistry//:pusher"), - cfg = "host", - executable = True, - allow_files = True, - ), - }, - executable = True, - implementation = _impl, -) - -"""Pushes a bundle of container images. - -Args: - name: name of the rule. - bundle: the bundle of tagged images to publish. - format: the form to push: Docker or OCI. 
-""" - -def docker_push(*args, **kwargs): - if "format" in kwargs: - fail("Cannot override 'format' attribute on docker_push", - attr="format") - kwargs["format"] = "Docker" - container_push(*args, **kwargs) - -def oci_push(*args, **kwargs): - if "format" in kwargs: - fail("Cannot override 'format' attribute on oci_push", - attr="format") - kwargs["format"] = "OCI" - container_push(*args, **kwargs) diff --git a/starlark/src/syntax/testcases/push.bzl b/starlark/src/syntax/testcases/push.bzl deleted file mode 100644 index f4154a9e..00000000 --- a/starlark/src/syntax/testcases/push.bzl +++ /dev/null @@ -1,134 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""An implementation of container_push based on google/containerregistry. - -This wraps the containerregistry.tools.fast_pusher executable in a -Bazel rule for publishing images. -""" - -load( - "//skylib:path.bzl", - "runfile", -) -load( - "//container:layers.bzl", - _get_layers = "get_from_target", - _layer_tools = "tools", -) - -def _get_runfile_path(ctx, f): - return "${RUNFILES}/%s" % runfile(ctx, f) - -def _impl(ctx): - """Core implementation of container_push.""" - stamp_inputs = [] - if ctx.attr.stamp: - stamp_inputs = [ctx.info_file, ctx.version_file] - - image = _get_layers(ctx, ctx.attr.image, ctx.files.image) - - stamp_arg = " ".join(["--stamp-info-file=%s" % _get_runfile_path(ctx, f) for f in stamp_inputs]) - - # Leverage our efficient intermediate representation to push. - legacy_base_arg = "" - if image.get("legacy"): - print("Pushing an image based on a tarball can be very " + - "expensive. If the image is the output of a " + - "docker_build, consider dropping the '.tar' extension. 
" + - "If the image is checked in, consider using " + - "docker_import instead.") - legacy_base_arg = "--tarball=%s" % _get_runfile_path(ctx, image["legacy"]) - - blobsums = image.get("blobsum", []) - digest_arg = " ".join(["--digest=%s" % _get_runfile_path(ctx, f) for f in blobsums]) - blobs = image.get("zipped_layer", []) - layer_arg = " ".join(["--layer=%s" % _get_runfile_path(ctx, f) for f in blobs]) - config_arg = "--config=%s" % _get_runfile_path(ctx, image["config"]) - - ctx.template_action( - template = ctx.file._tag_tpl, - substitutions = { - "%{tag}": "{registry}/{repository}:{tag}".format( - registry=ctx.expand_make_variables( - "registry", ctx.attr.registry, {}), - repository=ctx.expand_make_variables( - "repository", ctx.attr.repository, {}), - tag=ctx.expand_make_variables( - "tag", ctx.attr.tag, {})), - "%{stamp}": stamp_arg, - "%{image}": "%s %s %s %s" % ( - legacy_base_arg, config_arg, digest_arg, layer_arg), - "%{format}": "--oci" if ctx.attr.format == "OCI" else "", - "%{container_pusher}": _get_runfile_path(ctx, ctx.executable._pusher), - }, - output = ctx.outputs.executable, - executable=True, - ) - - return struct(runfiles = ctx.runfiles(files = [ - ctx.executable._pusher, - image["config"] - ] + image.get("blobsum", []) + image.get("zipped_layer", []) + - stamp_inputs + ([image["legacy"]] if image.get("legacy") else []) + - list(ctx.attr._pusher.default_runfiles.files))) - -container_push = rule( - attrs = { - "image": attr.label( - allow_files = [".tar"], - single_file = True, - mandatory = True, - ), - "registry": attr.string(mandatory = True), - "repository": attr.string(mandatory = True), - "tag": attr.string(default = "latest"), - "format": attr.string( - mandatory = True, - values = [ - "OCI", - "Docker", - ], - ), - "_tag_tpl": attr.label( - default = Label("//container:push-tag.sh.tpl"), - single_file = True, - allow_files = True, - ), - "_pusher": attr.label( - default = Label("@containerregistry//:pusher"), - cfg = "host", - executable = True, - allow_files = True, - ), - "stamp": attr.bool( - default = False, - mandatory = False, - ), - } + _layer_tools, - executable = True, - implementation = _impl, -) - -"""Pushes a container image. - -This rule pushes a container image to a registry. - -Args: - name: name of the rule - image: the label of the image to push. - format: The form to push: Docker or OCI. - registry: the registry to which we are pushing. - repository: the name of the image. - tag: (optional) the tag of the image, default to 'latest'. -""" diff --git a/starlark/src/syntax/testcases/python.bzl b/starlark/src/syntax/testcases/python.bzl deleted file mode 100644 index f16c4afc..00000000 --- a/starlark/src/syntax/testcases/python.bzl +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# !!!! THIS IS A GENERATED FILE TO NOT EDIT IT BY HAND !!!! -# -# To regenerate this file, run ./update_deps.sh from the root of the -# git repository. 
- -DIGESTS = { - # "gcr.io/distroless/python2.7:debug" circa 2017-10-10 04:10 +0000 - "debug": "sha256:e17d365160da5d6a27bdccb7d00e98a8eb8ece2a3ad8721f39ec3f2b2083bb9a", - # "gcr.io/distroless/python2.7:latest" circa 2017-10-10 04:10 +0000 - "latest": "sha256:018229eb4b65acd8e2866da325b68e17f6c3b8e3164ed3c0994523f561c3b44f", -} diff --git a/starlark/src/syntax/testcases/redirects.bzl b/starlark/src/syntax/testcases/redirects.bzl deleted file mode 100644 index 8fb6954f..00000000 --- a/starlark/src/syntax/testcases/redirects.bzl +++ /dev/null @@ -1,218 +0,0 @@ -# Definition of redirects to generate in the Jekyll tree. - -def _make_doc_redirects(docs): - redirects = {} - for doc in docs: - old_unversioned = "docs/%s" % doc - old_versioned = "versions/master/%s" % old_unversioned - new = "https://docs.bazel.build/%s" % doc - redirects[old_unversioned] = new - redirects[old_versioned] = new - return redirects - -def _make_blog_redirects(posts): - redirects = { - "blog/index.html": "https://blog.bazel.build", - } - for post in posts: - old = "blog/%s" % post - new = "https://blog.bazel.build/%s" % post - redirects[old] = new - return redirects - -def _make_redirects(docs=[], blog=[], other={}): - return dict(_make_doc_redirects(docs).items() + - _make_blog_redirects(blog).items() + - other.items()) - -# Dict mapping path of site page to new URL to redirect to. -BAZEL_SITE_REDIRECTS = _make_redirects( - docs = [ - "bazel-overview.html", - "bazel-user-manual.html", - "best-practices.html", - "build-ref.html", - "command-line-reference.html", - "cpp.html", - "external.html", - "getting-started.html", - "install.html", - "install-compile-source.html", - "install-os-x.html", - "install-ubuntu.html", - "install-windows.html", - "mobile-install.html", - "output_directories.html", - "query-how-to.html", - "query.html", - "rule-challenges.html", - "skyframe.html", - "support.html", - "test-encyclopedia.html", - "windows.html", - - # Build Encyclopedia. 
- "be/android.html", - "be/c-cpp.html", - "be/common-definitions.html", - "be/docker.html", - "be/extra-actions.html", - "be/functions.html", - "be/general.html", - "be/java.html", - "be/make-variables.html", - "be/objective-c.html", - "be/overview.html", - "be/pkg.html", - "be/platform.html", - "be/predefined-python-variables.html", - "be/protocol-buffer.html", - "be/python.html", - "be/shell.html", - "be/workspace.html", - - # Skylark - "skylark/aspects.html", - "skylark/build-style.html", - "skylark/bzl-style.html", - "skylark/concepts.html", - "skylark/cookbook.html", - "skylark/deploying.html", - "skylark/depsets.html", - "skylark/index.html", - "skylark/language.html", - "skylark/macros.html", - "skylark/repository_rules.html", - "skylark/rules.html", - "skylark/errors/read-only-variable.html", - - # Skylark Library - "skylark/lib/Action.html", - "skylark/lib/AndroidSkylarkApiProvider.html", - "skylark/lib/AndroidSkylarkIdlInfo.html", - "skylark/lib/Aspect.html", - "skylark/lib/CcSkylarkApiProvider.html", - "skylark/lib/CcToolchainInfo.html", - "skylark/lib/ConfigurationTransition.html", - "skylark/lib/ConstraintSettingInfo.html", - "skylark/lib/ConstraintValueProvider.html", - "skylark/lib/DottedVersion.html", - "skylark/lib/FeatureFlagInfo.html", - "skylark/lib/File.html", - "skylark/lib/FileType.html", - "skylark/lib/FilesToRunProvider.html", - "skylark/lib/JavaRuntimeClasspathProvider.html", - "skylark/lib/JavaSkylarkApiProvider.html", - "skylark/lib/JavaToolchainSkylarkApiProvider.html", - "skylark/lib/Label.html", - "skylark/lib/MakeVariables.html", - "skylark/lib/ObjcProvider.html", - "skylark/lib/PlatformInfo.html", - "skylark/lib/ProtoSourcesProvider.html", - "skylark/lib/Provider.html", - "skylark/lib/Target.html", - "skylark/lib/ToolchainInfo.html", - "skylark/lib/XcTestAppProvider.html", - "skylark/lib/android_common.html", - "skylark/lib/apple.html", - "skylark/lib/apple_bitcode_mode.html", - "skylark/lib/apple_common.html", - "skylark/lib/apple_toolchain.html", - "skylark/lib/attr.html", - "skylark/lib/attr_definition.html", - "skylark/lib/attr_defintion.html", - "skylark/lib/bool.html", - "skylark/lib/cmd_helper.html", - "skylark/lib/config_common.html", - "skylark/lib/configuration.html", - "skylark/lib/cpp.html", - "skylark/lib/ctx.html", - "skylark/lib/depset.html", - "skylark/lib/dict.html", - "skylark/lib/exec_result.html", - "skylark/lib/file_provider.html", - "skylark/lib/fragments.html", - "skylark/lib/globals.html", - "skylark/lib/int.html", - "skylark/lib/java.html", - "skylark/lib/java_annotation_processing.html", - "skylark/lib/java_common.html", - "skylark/lib/java_compilation_info.html", - "skylark/lib/java_output.html", - "skylark/lib/java_output_jars.html", - "skylark/lib/java_proto_common.html", - "skylark/lib/jvm.html", - "skylark/lib/list.html", - "skylark/lib/native.html", - "skylark/lib/objc.html", - "skylark/lib/path.html", - "skylark/lib/platform.html", - "skylark/lib/platform_type.html", - "skylark/lib/proto.html", - "skylark/lib/provider.html", - "skylark/lib/repository_ctx.html", - "skylark/lib/repository_os.html", - "skylark/lib/root.html", - "skylark/lib/rule_attributes.html", - "skylark/lib/runfiles.html", - "skylark/lib/set.html", - "skylark/lib/skylark-builtin.html", - "skylark/lib/skylark-configuration-fragment.html", - "skylark/lib/skylark-overview.html", - "skylark/lib/skylark-provider.html", - "skylark/lib/string.html", - "skylark/lib/struct.html", - "skylark/lib/swift.html", - "skylark/lib/testing.html", - "skylark/lib/tuple.html", - 
- # Tutorial - "tutorial/android-app.html", - "tutorial/app.html", - "tutorial/backend-server.html", - "tutorial/cpp.html", - "tutorial/environment.html", - "tutorial/index.html", - "tutorial/ios-app.html", - "tutorial/java.html", - "tutorial/review.html", - "tutorial/workspace.html", - ], - blog = [ - "2015/03/27/Hello-World.html", - "2015/04/06/Simplified-Workspace-Creation.html", - "2015/04/10/bash-completion.html", - "2015/04/15/share-your-project.html", - "2015/04/22/thank-you-stickers.html", - "2015/06/17/visualize-your-build.html", - "2015/06/25/ErrorProne.html", - "2015/07/01/Configuration-File.html", - "2015/07/08/Java-Configuration.html", - "2015/07/23/tree-trimming.html", - "2015/07/28/docker_build.html", - "2015/07/29/dashboard-dogfood.html", - "2015/09/01/beta-release.html", - "2015/09/11/sandboxing.html", - "2015/12/10/java-workers.html", - "2016/01/27/continuous-integration.html", - "2016/02/23/0.2.0-release.html", - "2016/03/18/sandbox-easier-debug.html", - "2016/03/31/autoconfiguration.html", - "2016/06/10/0.3.0-release.html", - "2016/06/10/ide-support.html", - "2016/10/07/bazel-windows.html", - "2016/10/20/intellij-support.html", - "2016/11/02/0.4.0-release.html", - "2016/11/04/bazel-build.html", - "2017/02/22/repository-invalidation.html", - "2017/02/27/protocol-buffers.html", - "2017/02/28/google-summer-of-code.html", - "2017/03/07/java-sandwich.html", - "2017/03/21/design-of-skylark.html", - "2017/04/21/JDK7-deprecation.html", - "2017/05/26/Bazel-0-5-0-release.html", - "2017/05/31/google-summer-of-code-2017.html", - ], - other = { - "versions/master/docs/windows-chocolatey-maintenance.html": "https://www.bazel.build/windows-chocolatey-maintenance.html", - }) diff --git a/starlark/src/syntax/testcases/remote.bzl b/starlark/src/syntax/testcases/remote.bzl deleted file mode 100644 index 66f146ea..00000000 --- a/starlark/src/syntax/testcases/remote.bzl +++ /dev/null @@ -1,14 +0,0 @@ -def _test_chdir_remote_impl(ctx): - ctx.file("WORKSPACE", """workspace("test_chdir_remote")""") - ctx.file("BUILD.bazel", "") - for f in ["BUILD.bazel", "data_test.go", "data.txt"]: - input = Label("@io_bazel_rules_go//tests/test_chdir:{}".format(f)) - ctx.template("sub/" + f, input) - -_test_chdir_remote = repository_rule( - implementation = _test_chdir_remote_impl, - attrs = {}, -) - -def test_chdir_remote(): - _test_chdir_remote(name="test_chdir_remote") \ No newline at end of file diff --git a/starlark/src/syntax/testcases/repositories.bzl b/starlark/src/syntax/testcases/repositories.bzl deleted file mode 100644 index 4781ae92..00000000 --- a/starlark/src/syntax/testcases/repositories.bzl +++ /dev/null @@ -1,138 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Once nested repositories work, this file should cease to exist. 
- -load("@io_bazel_rules_go//go/private:repository_tools.bzl", "go_repository_tools") -load("@io_bazel_rules_go//go/private:go_repository.bzl", "go_repository") -load('@io_bazel_rules_go//go/private:rules/stdlib.bzl', "go_stdlib") -load('@io_bazel_rules_go//go/toolchain:toolchains.bzl', "go_register_toolchains") -load("@io_bazel_rules_go//go/platform:list.bzl", "GOOS_GOARCH") - -def go_rules_dependencies(): - """See /go/workspace.rst#go-rules-dependencies for full documentation.""" - - # Needed for gazelle and wtool - _maybe(native.http_archive, - name = "com_github_bazelbuild_buildtools", - # master, as of 2017-08-14 - url = "https://codeload.github.com/bazelbuild/buildtools/zip/799e530642bac55de7e76728fa0c3161484899f6", - strip_prefix = "buildtools-799e530642bac55de7e76728fa0c3161484899f6", - type = "zip", - ) - - # Needed for fetch repo - _maybe(go_repository, - name = "org_golang_x_tools", - # release-branch.go1.9, as of 2017-08-25 - importpath = "golang.org/x/tools", - urls = ["https://codeload.github.com/golang/tools/zip/5d2fd3ccab986d52112bf301d47a819783339d0e"], - strip_prefix = "tools-5d2fd3ccab986d52112bf301d47a819783339d0e", - type = "zip", - ) - - for goos, goarch in GOOS_GOARCH: - _maybe(go_stdlib, - name = "go_stdlib_{}_{}_cgo".format(goos, goarch), - goos = goos, - goarch = goarch, - race = False, - cgo = True, - ) - _maybe(go_stdlib, - name = "go_stdlib_{}_{}_pure".format(goos, goarch), - goos = goos, - goarch = goarch, - race = False, - cgo = False, - ) - _maybe(go_stdlib, - name = "go_stdlib_{}_{}_cgo_race".format(goos, goarch), - goos = goos, - goarch = goarch, - race = True, - cgo = True, - ) - _maybe(go_stdlib, - name = "go_stdlib_{}_{}_pure_race".format(goos, goarch), - goos = goos, - goarch = goarch, - race = True, - cgo = False, - ) - - _maybe(go_repository_tools, - name = "io_bazel_rules_go_repository_tools", - ) - - # Proto dependancies - _maybe(go_repository, - name = "com_github_golang_protobuf", - importpath = "github.com/golang/protobuf", - commit = "1e59b77b52bf8e4b449a57e6f79f21226d571845", # master, as of 2017-11-24 - ) - _maybe(native.http_archive, - name = "com_google_protobuf", - # v3.5.0, latest as of 2017-11-24 - url = "https://codeload.github.com/google/protobuf/zip/2761122b810fe8861004ae785cc3ab39f384d342", - strip_prefix = "protobuf-2761122b810fe8861004ae785cc3ab39f384d342", - type = "zip", - ) - - # Only used by deprecated go_proto_library implementation - _maybe(native.http_archive, - name = "com_github_google_protobuf", - url = "https://github.com/google/protobuf/archive/v3.4.0.tar.gz", - strip_prefix = "protobuf-3.4.0", - ) - - # GRPC dependancies - _maybe(go_repository, - name = "org_golang_x_net", - commit = "a04bdaca5b32abe1c069418fb7088ae607de5bd0", # master as of 2017-10-10 - importpath = "golang.org/x/net", - ) - _maybe(go_repository, - name = "org_golang_x_text", - commit = "ab5ac5f9a8deb4855a60fab02bc61a4ec770bd49", # v0.1.0, latest as of 2017-10-10 - importpath = "golang.org/x/text", - ) - _maybe(go_repository, - name = "org_golang_google_grpc", - commit = "f92cdcd7dcdc69e81b2d7b338479a19a8723cfa3", # v1.6.0, latest as of 2017-10-10 - importpath = "google.golang.org/grpc", - build_file_proto_mode = "disable", # use existing generated code - ) - _maybe(go_repository, - name = "org_golang_google_genproto", - commit = "f676e0f3ac6395ff1a529ae59a6670878a8371a6", # master on 2017-10-10 - importpath = "google.golang.org/genproto", - ) - - # Needed for examples - _maybe(go_repository, - name = "com_github_golang_glog", - commit = 
"23def4e6c14b4da8ac2ed8007337bc5eb5007998", - importpath = "github.com/golang/glog", - ) - _maybe(go_repository, - name = "com_github_jteeuwen_go_bindata", - importpath = "github.com/jteeuwen/go-bindata", - commit = "a0ff2567cfb70903282db057e799fd826784d41d", - ) - - -def _maybe(repo_rule, name, **kwargs): - if name not in native.existing_rules(): - repo_rule(name=name, **kwargs) diff --git a/starlark/src/syntax/testcases/repository_tools.bzl b/starlark/src/syntax/testcases/repository_tools.bzl deleted file mode 100644 index c3df0ee7..00000000 --- a/starlark/src/syntax/testcases/repository_tools.bzl +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load("@io_bazel_rules_go//go/private:go_repository.bzl", "go_repository", "env_execute") -load("@io_bazel_rules_go//go/private:toolchain.bzl", "executable_extension") - -_GO_REPOSITORY_TOOLS_BUILD_FILE = """ -package(default_visibility = ["//visibility:public"]) - -filegroup( - name = "fetch_repo", - srcs = ["bin/fetch_repo{extension}"], -) - -filegroup( - name = "gazelle", - srcs = ["bin/gazelle{extension}"], -) -""" - -def _go_repository_tools_impl(ctx): - # We work this out here because you can't use a toolchain from a repository rule - # TODO: This is an ugly non sustainable hack, we need to kill repository tools. 
- - extension = executable_extension(ctx) - go_tool = ctx.path(Label("@go_sdk//:bin/go{}".format(extension))) - - x_tools_commit = "3d92dd60033c312e3ae7cac319c792271cf67e37" - x_tools_path = ctx.path('tools-' + x_tools_commit) - buildtools_path = ctx.path(ctx.attr._buildtools).dirname - go_tools_path = ctx.path(ctx.attr._tools).dirname - - # We have to download this directly because the normal version is based on go_repository - # and thus requires the gazelle we build in here to generate it's BUILD files - # The commit used here should match the one in repositories.bzl - ctx.download_and_extract( - url = "https://codeload.github.com/golang/tools/zip/" + x_tools_commit, - type = "zip", - ) - - # Build something that looks like a normal GOPATH so go install will work - ctx.symlink(x_tools_path, "src/golang.org/x/tools") - ctx.symlink(buildtools_path, "src/github.com/bazelbuild/buildtools") - ctx.symlink(go_tools_path, "src/github.com/bazelbuild/rules_go/go/tools") - env = { - 'GOROOT': str(go_tool.dirname.dirname), - 'GOPATH': str(ctx.path('')), - } - - # build all the repository tools - for tool, importpath in ( - ("gazelle", 'github.com/bazelbuild/rules_go/go/tools/gazelle/gazelle'), - ("fetch_repo", 'github.com/bazelbuild/rules_go/go/tools/fetch_repo'), - ): - result = env_execute(ctx, [go_tool, "install", importpath], environment = env) - if result.return_code: - fail("failed to build {}: {}".format(tool, result.stderr)) - - # add a build file to export the tools - ctx.file('BUILD.bazel', _GO_REPOSITORY_TOOLS_BUILD_FILE.format(extension=executable_extension(ctx)), False) - -go_repository_tools = repository_rule( - _go_repository_tools_impl, - attrs = { - "linux_sdk": attr.string(), - "darwin_sdk": attr.string(), - "_tools": attr.label( - default = Label("//go/tools:BUILD.bazel"), - allow_files = True, - single_file = True, - ), - "_buildtools": attr.label( - default = Label("@com_github_bazelbuild_buildtools//:WORKSPACE"), - allow_files = True, - single_file = True, - ), - }, - environ = ["TMP"], -) diff --git a/starlark/src/syntax/testcases/rpm.bzl b/starlark/src/syntax/testcases/rpm.bzl deleted file mode 100644 index 2322dfae..00000000 --- a/starlark/src/syntax/testcases/rpm.bzl +++ /dev/null @@ -1,109 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Rules to create RPM archives.""" - -rpm_filetype = [".rpm"] -spec_filetype = [".spec"] - -def _pkg_rpm_impl(ctx): - """Implements to pkg_rpm rule.""" - - files = [] - args = ["--name=" + ctx.label.name] - - # Version can be specified by a file or inlined - if ctx.attr.version_file: - if ctx.attr.version: - fail("Both version and version_file attributes were specified") - args += ["--version=@" + ctx.file.version_file.path] - files += [ctx.file.version_file] - elif ctx.attr.version: - args += ["--version=" + ctx.attr.version] - else: - fail("Neither version_file nor version attribute was specified") - - if ctx.attr.architecture: - args += ["--arch=" + ctx.attr.architecture] - - if ctx.attr.spec_file: - args += ["--spec_file=" + ctx.file.spec_file.path] - files += [ctx.file.spec_file] - else: - fail("spec_file was not specified") - - args += ["--out_file=" + ctx.outputs.rpm.path] - - # Add data files. - files += [ctx.file.changelog] + ctx.files.data - args += [ctx.file.changelog.path] - for f in ctx.files.data: - args += [f.path] - - # Call the generator script. - # TODO(katre): Generate a source RPM. - ctx.action( - executable = ctx.executable._make_rpm, - arguments = args, - inputs = files, - outputs = [ctx.outputs.rpm], - mnemonic = "MakeRpm") - - # Link the RPM to the expected output name. - ctx.action( - command = "ln -s %s %s" % (ctx.outputs.rpm.basename, ctx.outputs.out.path), - inputs = [ctx.outputs.rpm], - outputs = [ctx.outputs.out]) - -# Define the rule. -pkg_rpm = rule( - implementation = _pkg_rpm_impl, - attrs = { - "spec_file" : attr.label(mandatory=True, allow_files=spec_filetype, single_file=True), - "architecture": attr.string(default="all"), - "version_file": attr.label(allow_files=True, single_file=True), - "version": attr.string(), - "changelog" : attr.label(mandatory=True, allow_files=True, single_file=True), - "data": attr.label_list(mandatory=True, allow_files=True), - - # Implicit dependencies. - "_make_rpm": attr.label( - default=Label("//tools/build_defs/pkg:make_rpm"), - cfg="host", - executable=True, - allow_files=True), - }, - outputs = { - "out": "%{name}.rpm", - "rpm": "%{name}-%{architecture}.rpm", - }, - executable = False) -"""Creates an RPM format package from the data files. - -This runs rpmbuild (and requires it to be installed beforehand) to generate -an RPM package based on the spec_file and data attributes. - -Args: - spec_file: The RPM spec file to use. If the version or version_file - attributes are provided, the Version in the spec will be overwritten. - Any Sources listed in the spec file must be provided as data dependencies. - version: The version of the package to generate. This will overwrite any - Version provided in the spec file. Only specify one of version and - version_file. - version_file: A file containing the version of the package to generate. This - will overwrite any Version provided in the spec file. Only specify one of - version and version_file. - changelog: A changelog file to include. This will not be written to the spec - file, which should only list changes to the packaging, not the software itself. - data: List all files to be included in the package here. -""" diff --git a/starlark/src/syntax/testcases/rust.bzl b/starlark/src/syntax/testcases/rust.bzl deleted file mode 100644 index 1fe2fcac..00000000 --- a/starlark/src/syntax/testcases/rust.bzl +++ /dev/null @@ -1,1305 +0,0 @@ -# Copyright 2015 The Bazel Authors. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Rust Rules - -These build rules are used for building [Rust][rust] projects with Bazel. - -[rust]: http://www.rust-lang.org/ - -### Setup - -To use the Rust rules, add the following to your `WORKSPACE` file to add the -external repositories for the Rust toolchain: - -```python -http_archive( - name = "io_bazel_rules_rust", - sha256 = "aa7ad550e2960143835c6a7d3bbc29e313aedf89ea879e5465e97f5d6a19e7f5", - strip_prefix = "rules_rust-0.0.5", - urls = [ - "http://bazel-mirror.storage.googleapis.com/github.com/bazelbuild/rules_rust/archive/0.0.5.tar.gz", - "https://github.com/bazelbuild/rules_rust/archive/0.0.5.tar.gz", - ], -) -load("@io_bazel_rules_rust//rust:rust.bzl", "rust_repositories") - -rust_repositories() -``` - -### Roadmap - -* Add `rust_toolchain` rule to make it easy to use a custom Rust toolchain. -* Add tool for taking `Cargo.toml` and generating a `WORKSPACE` file with - workspace rules for pulling external dependencies. -* Improve expressiveness of features and support for [Cargo's feature - groups](http://doc.crates.io/manifest.html#the-[features]-section). -* Add `cargo_crate` workspace rule for pulling crates from - [Cargo](https://crates.io/). -""" - -RUST_FILETYPE = FileType([".rs"]) - -A_FILETYPE = FileType([".a"]) - -LIBRARY_CRATE_TYPES = [ - "lib", - "rlib", - "dylib", - "staticlib", -] - -# Used by rust_doc -HTML_MD_FILETYPE = FileType([ - ".html", - ".md", -]) - -CSS_FILETYPE = FileType([".css"]) - -ZIP_PATH = "/usr/bin/zip" - -def _path_parts(path): - """Takes a path and returns a list of its parts with all "." elements removed. - - The main use case of this function is if one of the inputs to _relative() - is a relative path, such as "./foo". - - Args: - path_parts: A list containing parts of a path. - - Returns: - Returns a list containing the path parts with all "." elements removed. - """ - path_parts = path.split("/") - return [part for part in path_parts if part != "."] - -def _relative(src_path, dest_path): - """Returns the relative path from src_path to dest_path.""" - src_parts = _path_parts(src_path) - dest_parts = _path_parts(dest_path) - n = 0 - done = False - for src_part, dest_part in zip(src_parts, dest_parts): - if src_part != dest_part: - break - n += 1 - - relative_path = "" - for i in range(n, len(src_parts)): - relative_path += "../" - relative_path += "/".join(dest_parts[n:]) - - return relative_path - -def _create_setup_cmd(lib, deps_dir, in_runfiles): - """ - Helper function to construct a command for symlinking a library into the - deps directory. - """ - lib_path = lib.short_path if in_runfiles else lib.path - return ( - "ln -sf " + _relative(deps_dir, lib_path) + " " + - deps_dir + "/" + lib.basename + "\n" - ) - -def _setup_deps(deps, name, working_dir, allow_cc_deps=False, - in_runfiles=False): - """ - Walks through dependencies and constructs the necessary commands for linking - to all the necessary dependencies. - - Args: - deps: List of Labels containing deps from ctx.attr.deps. 
- name: Name of the current target. - working_dir: The output directory for the current target's outputs. - allow_cc_deps: True if the current target is allowed to depend on cc_library - targets, false otherwise. - in_runfiles: True if the setup commands will be run in a .runfiles - directory. In this case, the working dir should be '.', and the deps - will be symlinked into the .deps dir from the runfiles tree. - - Returns: - Returns a struct containing the following fields: - libs: - transitive_libs: - setup_cmd: - search_flags: - link_flags: - """ - deps_dir = working_dir + "/" + name + ".deps" - setup_cmd = ["rm -rf " + deps_dir + "; mkdir " + deps_dir + "\n"] - - has_rlib = False - has_native = False - - libs = set() - transitive_libs = set() - symlinked_libs = set() - link_flags = [] - for dep in deps: - if hasattr(dep, "rust_lib"): - # This dependency is a rust_library - libs += [dep.rust_lib] - transitive_libs += [dep.rust_lib] + dep.transitive_libs - symlinked_libs += [dep.rust_lib] + dep.transitive_libs - link_flags += [( - "--extern " + dep.label.name + "=" + - deps_dir + "/" + dep.rust_lib.basename - )] - has_rlib = True - - elif hasattr(dep, "cc"): - if not allow_cc_deps: - fail("Only rust_library, rust_binary, and rust_test targets can " + - "depend on cc_library") - - # This dependency is a cc_library - native_libs = A_FILETYPE.filter(dep.cc.libs) - libs += native_libs - transitive_libs += native_libs - symlinked_libs += native_libs - link_flags += ["-l static=" + dep.label.name] - has_native = True - - else: - fail("rust_library, rust_binary and rust_test targets can only depend " + - "on rust_library or cc_library targets.") - - for symlinked_lib in symlinked_libs: - setup_cmd += [_create_setup_cmd(symlinked_lib, deps_dir, in_runfiles)] - - search_flags = [] - if has_rlib: - search_flags += ["-L dependency=%s" % deps_dir] - if has_native: - search_flags += ["-L native=%s" % deps_dir] - - return struct( - libs = list(libs), - transitive_libs = list(transitive_libs), - setup_cmd = setup_cmd, - search_flags = search_flags, - link_flags = link_flags) - -def _get_features_flags(features): - """ - Constructs a string containing the feature flags from the features specified - in the features attribute. - """ - features_flags = [] - for feature in features: - features_flags += ["--cfg feature=\\\"%s\\\"" % feature] - return features_flags - -def _get_dirname(short_path): - return short_path[0:short_path.rfind('/')] - -def _rust_toolchain(ctx): - return struct( - rustc_path = ctx.file._rustc.path, - rustc_lib_path = ctx.files._rustc_lib[0].dirname, - rustc_lib_short_path = _get_dirname(ctx.files._rustc_lib[0].short_path), - rust_lib_path = ctx.files._rust_lib[0].dirname, - rust_lib_short_path = _get_dirname(ctx.files._rust_lib[0].short_path), - rustdoc_path = ctx.file._rustdoc.path, - rustdoc_short_path = ctx.file._rustdoc.short_path) - -def _build_rustc_command(ctx, crate_name, crate_type, src, output_dir, - depinfo, rust_flags=[]): - """Builds the rustc command. - - Constructs the rustc command used to build the current target. - - Args: - ctx: The ctx object for the current target. - crate_type: The type of crate to build ("lib" or "bin") - src: The File object for crate root source file ("lib.rs" or "main.rs") - output_dir: The output directory for the target. - depinfo: Struct containing information about dependencies as returned by - _setup_deps - - Return: - String containing the rustc command. - """ - - # Paths to the Rust compiler and standard libraries. 
- toolchain = _rust_toolchain(ctx) - - # Paths to cc (for linker) and ar - cpp_fragment = ctx.fragments.cpp - cc = cpp_fragment.compiler_executable - ar = cpp_fragment.ar_executable - # Currently, the CROSSTOOL config for darwin sets ar to "libtool". Because - # rust uses ar-specific flags, use /usr/bin/ar in this case. - # TODO(dzc): This is not ideal. Remove this workaround once ar_executable - # always points to an ar binary. - ar_str = "%s" % ar - if ar_str.find("libtool", 0) != -1: - ar = "/usr/bin/ar" - - # Construct features flags - features_flags = _get_features_flags(ctx.attr.crate_features) - - return " ".join( - ["set -e;"] + - depinfo.setup_cmd + - [ - "LD_LIBRARY_PATH=%s" % toolchain.rustc_lib_path, - "DYLD_LIBRARY_PATH=%s" % toolchain.rustc_lib_path, - toolchain.rustc_path, - src.path, - "--crate-name %s" % crate_name, - "--crate-type %s" % crate_type, - "-C opt-level=3", - "--codegen ar=%s" % ar, - "--codegen linker=%s" % cc, - "--codegen link-args='%s'" % ' '.join(cpp_fragment.link_options), - "-L all=%s" % toolchain.rust_lib_path, - "--out-dir %s" % output_dir, - "--emit=dep-info,link", - ] + - features_flags + - rust_flags + - depinfo.search_flags + - depinfo.link_flags + - ctx.attr.rustc_flags) - -def _find_crate_root_src(srcs, file_names=["lib.rs"]): - """Finds the source file for the crate root.""" - if len(srcs) == 1: - return srcs[0] - for src in srcs: - if src.basename in file_names: - return src - fail("No %s source file found." % " or ".join(file_names), "srcs") - -def _crate_root_src(ctx, file_names=["lib.rs"]): - if ctx.file.crate_root == None: - return _find_crate_root_src(ctx.files.srcs, file_names) - else: - return ctx.file.crate_root - -def _rust_library_impl(ctx): - """ - Implementation for rust_library Skylark rule. - """ - - # Find lib.rs - lib_rs = _crate_root_src(ctx) - - # Validate crate_type - crate_type = "" - if ctx.attr.crate_type != "": - if ctx.attr.crate_type not in LIBRARY_CRATE_TYPES: - fail("Invalid crate_type for rust_library. Allowed crate types are: %s" - % " ".join(LIBRARY_CRATE_TYPES), "crate_type") - crate_type += ctx.attr.crate_type - else: - crate_type += "lib" - - # Output library - rust_lib = ctx.outputs.rust_lib - output_dir = rust_lib.dirname - - # Dependencies - depinfo = _setup_deps(ctx.attr.deps, - ctx.label.name, - output_dir, - allow_cc_deps=True) - - # Build rustc command - cmd = _build_rustc_command( - ctx = ctx, - crate_name = ctx.label.name, - crate_type = crate_type, - src = lib_rs, - output_dir = output_dir, - depinfo = depinfo) - - # Compile action. - compile_inputs = ( - ctx.files.srcs + - ctx.files.data + - depinfo.libs + - depinfo.transitive_libs + - [ctx.file._rustc] + - ctx.files._rustc_lib + - ctx.files._rust_lib + - ctx.files._crosstool) - - ctx.action( - inputs = compile_inputs, - outputs = [rust_lib], - mnemonic = 'Rustc', - command = cmd, - use_default_shell_env = True, - progress_message = ("Compiling Rust library %s (%d files)" - % (ctx.label.name, len(ctx.files.srcs)))) - - return struct( - files = set([rust_lib]), - crate_type = crate_type, - crate_root = lib_rs, - rust_srcs = ctx.files.srcs, - rust_deps = ctx.attr.deps, - transitive_libs = depinfo.transitive_libs, - rust_lib = rust_lib) - -def _rust_binary_impl(ctx): - """Implementation for rust_binary Skylark rule.""" - - # Find main.rs. 
- main_rs = _crate_root_src(ctx, ["main.rs"]) - - # Output binary - rust_binary = ctx.outputs.executable - output_dir = rust_binary.dirname - - # Dependencies - depinfo = _setup_deps(ctx.attr.deps, - ctx.label.name, - output_dir, - allow_cc_deps=False) - - # Build rustc command. - cmd = _build_rustc_command(ctx = ctx, - crate_name = ctx.label.name, - crate_type = "bin", - src = main_rs, - output_dir = output_dir, - depinfo = depinfo) - - # Compile action. - compile_inputs = ( - ctx.files.srcs + - ctx.files.data + - depinfo.libs + - depinfo.transitive_libs + - [ctx.file._rustc] + - ctx.files._rustc_lib + - ctx.files._rust_lib + - ctx.files._crosstool) - - ctx.action( - inputs = compile_inputs, - outputs = [rust_binary], - mnemonic = "Rustc", - command = cmd, - use_default_shell_env = True, - progress_message = ("Compiling Rust binary %s (%d files)" - % (ctx.label.name, len(ctx.files.srcs)))) - - return struct(rust_srcs = ctx.files.srcs, - crate_root = main_rs, - rust_deps = ctx.attr.deps) - -def _rust_test_common(ctx, test_binary): - """Builds a Rust test binary. - - Args: - ctx: The ctx object for the current target. - test_binary: The File object for the test binary. - """ - output_dir = test_binary.dirname - - if len(ctx.attr.deps) == 1 and len(ctx.files.srcs) == 0: - # Target has a single dependency but no srcs. Build the test binary using - # the dependency's srcs. - dep = ctx.attr.deps[0] - crate_type = dep.crate_type if hasattr(dep, "crate_type") else "bin" - target = struct(name = ctx.label.name, - srcs = dep.rust_srcs, - deps = dep.rust_deps, - crate_root = dep.crate_root, - crate_type = crate_type) - else: - # Target is a standalone crate. Build the test binary as its own crate. - target = struct(name = ctx.label.name, - srcs = ctx.files.srcs, - deps = ctx.attr.deps, - crate_root = _crate_root_src(ctx), - crate_type = "lib") - - # Get information about dependencies - depinfo = _setup_deps(target.deps, - target.name, - output_dir, - allow_cc_deps=True) - - cmd = _build_rustc_command(ctx = ctx, - crate_name = test_binary.basename, - crate_type = target.crate_type, - src = target.crate_root, - output_dir = output_dir, - depinfo = depinfo, - rust_flags = ["--test"]) - - compile_inputs = (target.srcs + - depinfo.libs + - depinfo.transitive_libs + - [ctx.file._rustc] + - ctx.files._rustc_lib + - ctx.files._rust_lib + - ctx.files._crosstool) - - ctx.action( - inputs = compile_inputs, - outputs = [test_binary], - mnemonic = "RustcTest", - command = cmd, - use_default_shell_env = True, - progress_message = ("Compiling Rust test %s (%d files)" - % (ctx.label.name, len(target.srcs)))) - -def _rust_test_impl(ctx): - """ - Implementation for rust_test Skylark rule. 
- """ - _rust_test_common(ctx, ctx.outputs.executable) - -def _rust_bench_test_impl(ctx): - """Implementation for the rust_bench_test Skylark rule.""" - rust_bench_test = ctx.outputs.executable - test_binary = ctx.new_file(ctx.configuration.bin_dir, - "%s_bin" % rust_bench_test.basename) - _rust_test_common(ctx, test_binary) - - ctx.file_action( - output = rust_bench_test, - content = " ".join([ - "#!/bin/bash\n", - "set -e\n", - "%s --bench\n" % test_binary.short_path]), - executable = True) - - runfiles = ctx.runfiles(files = [test_binary], collect_data = True) - return struct(runfiles = runfiles) - -def _build_rustdoc_flags(ctx): - """Collects the rustdoc flags.""" - doc_flags = [] - doc_flags += [ - "--markdown-css %s" % css.path for css in ctx.files.markdown_css] - if hasattr(ctx.file, "html_in_header"): - doc_flags += ["--html-in-header %s" % ctx.file.html_in_header.path] - if hasattr(ctx.file, "html_before_content"): - doc_flags += ["--html-before-content %s" % - ctx.file.html_before_content.path] - if hasattr(ctx.file, "html_after_content"): - doc_flags += ["--html-after-content %s"] - return doc_flags - -def _rust_doc_impl(ctx): - """Implementation of the rust_doc rule.""" - rust_doc_zip = ctx.outputs.rust_doc_zip - - # Gather attributes about the rust_library target to generated rustdocs for. - target = struct(name = ctx.label.name, - srcs = ctx.attr.dep.rust_srcs, - deps = ctx.attr.dep.rust_deps, - crate_root = ctx.attr.dep.crate_root) - - # Find lib.rs - lib_rs = (_find_crate_root_src(target.srcs, ["lib.rs", "main.rs"]) - if target.crate_root == None else target.crate_root) - - # Get information about dependencies - output_dir = rust_doc_zip.dirname - depinfo = _setup_deps(target.deps, - target.name, - output_dir, - allow_cc_deps=False) - - # Rustdoc flags. - doc_flags = _build_rustdoc_flags(ctx) - - # Build rustdoc command. - toolchain = _rust_toolchain(ctx) - docs_dir = rust_doc_zip.dirname + "/_rust_docs" - doc_cmd = " ".join( - ["set -e;"] + - depinfo.setup_cmd + [ - "rm -rf %s;" % docs_dir, - "mkdir %s;" % docs_dir, - "LD_LIBRARY_PATH=%s" % toolchain.rustc_lib_path, - "DYLD_LIBRARY_PATH=%s" % toolchain.rustc_lib_path, - toolchain.rustdoc_path, - lib_rs.path, - "--crate-name %s" % target.name, - "-L all=%s" % toolchain.rust_lib_path, - "-o %s" % docs_dir, - ] + - doc_flags + - depinfo.search_flags + - depinfo.link_flags + [ - "&&", - "(cd %s" % docs_dir, - "&&", - ZIP_PATH, - "-qR", - rust_doc_zip.basename, - "$(find . -type f) )", - "&&", - "mv %s/%s %s" % (docs_dir, rust_doc_zip.basename, rust_doc_zip.path), - ]) - - # Rustdoc action - rustdoc_inputs = (target.srcs + - depinfo.libs + - [ctx.file._rustdoc] + - ctx.files._rustc_lib + - ctx.files._rust_lib) - - ctx.action( - inputs = rustdoc_inputs, - outputs = [rust_doc_zip], - mnemonic = "Rustdoc", - command = doc_cmd, - use_default_shell_env = True, - progress_message = ("Generating rustdoc for %s (%d files)" - % (target.name, len(target.srcs)))) - -def _rust_doc_test_impl(ctx): - """Implementation for the rust_doc_test rule.""" - rust_doc_test = ctx.outputs.executable - - # Gather attributes about the rust_library target to generated rustdocs for. 
- target = struct(name = ctx.label.name, - srcs = ctx.attr.dep.rust_srcs, - deps = ctx.attr.dep.rust_deps, - crate_root = ctx.attr.dep.crate_root) - - # Find lib.rs - lib_rs = (_find_crate_root_src(target.srcs, ["lib.rs", "main.rs"]) - if target.crate_root == None else target.crate_root) - - # Get information about dependencies - output_dir = rust_doc_test.dirname - depinfo = _setup_deps(target.deps, - target.name, - working_dir=".", - allow_cc_deps=False, - in_runfiles=True) - - # Construct rustdoc test command, which will be written to a shell script - # to be executed to run the test. - toolchain = _rust_toolchain(ctx) - doc_test_cmd = " ".join( - ["#!/bin/bash\n"] + - ["set -e\n"] + - depinfo.setup_cmd + - [ - "LD_LIBRARY_PATH=%s" % toolchain.rustc_lib_short_path, - "DYLD_LIBRARY_PATH=%s" % toolchain.rustc_lib_short_path, - toolchain.rustdoc_short_path, - "-L all=%s" % toolchain.rust_lib_short_path, - lib_rs.path, - ] + - depinfo.search_flags + - depinfo.link_flags) - - ctx.file_action(output = rust_doc_test, - content = doc_test_cmd, - executable = True) - - doc_test_inputs = (target.srcs + - depinfo.libs + - depinfo.transitive_libs + - [ctx.file._rustdoc] + - ctx.files._rustc_lib + - ctx.files._rust_lib) - - runfiles = ctx.runfiles(files = doc_test_inputs, collect_data = True) - return struct(runfiles = runfiles) - -_rust_common_attrs = { - "srcs": attr.label_list(allow_files = RUST_FILETYPE), - "crate_root": attr.label( - allow_files = RUST_FILETYPE, - single_file = True, - ), - "data": attr.label_list( - allow_files = True, - cfg = "data", - ), - "deps": attr.label_list(), - "crate_features": attr.string_list(), - "rustc_flags": attr.string_list(), -} - -_rust_toolchain_attrs = { - "_rustc": attr.label( - default = Label("//rust:rustc"), - executable = True, - cfg = "host", - single_file = True, - ), - "_rustc_lib": attr.label( - default = Label("//rust:rustc_lib"), - ), - "_rust_lib": attr.label( - default = Label("//rust:rust_lib"), - ), - "_rustdoc": attr.label( - default = Label("//rust:rustdoc"), - executable = True, - cfg = "host", - single_file = True, - ), - "_crosstool": attr.label( - default = Label("//tools/defaults:crosstool") - ), -} - -_rust_library_attrs = { - "crate_type": attr.string(), -} - -rust_library = rule( - _rust_library_impl, - attrs = dict(_rust_common_attrs.items() + - _rust_library_attrs.items() + - _rust_toolchain_attrs.items()), - fragments = ["cpp"], - outputs = { - "rust_lib": "lib%{name}.rlib", - }, -) -"""Builds a Rust library crate. - -Args: - name: This name will also be used as the name of the library crate built by - this rule. - srcs: List of Rust `.rs` source files used to build the library. - - If `srcs` contains more than one file, then there must be a file either - named `lib.rs`. Otherwise, `crate_root` must be set to the source file that - is the root of the crate to be passed to rustc to build this crate. - crate_root: The file that will be passed to `rustc` to be used for building - this crate. - - If `crate_root` is not set, then this rule will look for a `lib.rs` file or - the single file in `srcs` if `srcs` contains only one file. - deps: List of other libraries to be linked to this library target. - - These can be either other `rust_library` targets or `cc_library` targets if - linking a native library. - data: List of files used by this rule at runtime. 
- - This attribute can be used to specify any data files that are embedded into - the library, such as via the - [`include_str!`](https://doc.rust-lang.org/std/macro.include_str!.html) - macro. - crate_features: List of features to enable for this crate. - - Features are defined in the code using the `#[cfg(feature = "foo")]` - configuration option. The features listed here will be passed to `rustc` - with `--cfg feature="${feature_name}"` flags. - rustc_flags: List of compiler flags passed to `rustc`. - -Example: - Suppose you have the following directory structure for a simple Rust library - crate: - - ``` - [workspace]/ - WORKSPACE - hello_lib/ - BUILD - src/ - greeter.rs - lib.rs - ``` - - `hello_lib/src/greeter.rs`: - - ```rust - pub struct Greeter { - greeting: String, - } - - impl Greeter { - pub fn new(greeting: &str) -> Greeter { - Greeter { greeting: greeting.to_string(), } - } - - pub fn greet(&self, thing: &str) { - println!("{} {}", &self.greeting, thing); - } - } - ``` - - `hello_lib/src/lib.rs`: - - - ```rust - pub mod greeter; - ``` - - `hello_lib/BUILD`: - - ```python - package(default_visibility = ["//visibility:public"]) - - load("@io_bazel_rules_rust//rust:rust.bzl", "rust_library") - - rust_library( - name = "hello_lib", - srcs = [ - "src/greeter.rs", - "src/lib.rs", - ], - ) - ``` - - Build the library: - - ``` - $ bazel build //hello_lib - INFO: Found 1 target... - Target //examples/rust/hello_lib:hello_lib up-to-date: - bazel-bin/examples/rust/hello_lib/libhello_lib.rlib - INFO: Elapsed time: 1.245s, Critical Path: 1.01s - ``` -""" - -rust_binary = rule( - _rust_binary_impl, - attrs = dict(_rust_common_attrs.items() + _rust_toolchain_attrs.items()), - executable = True, - fragments = ["cpp"], -) -"""Builds a Rust binary crate. - -Args: - name: This name will also be used as the name of the binary crate built by - this rule. - srcs: List of Rust `.rs` source files used to build the library. - - If `srcs` contains more than one file, then there must be a file either - named `main.rs`. Otherwise, `crate_root` must be set to the source file that - is the root of the crate to be passed to rustc to build this crate. - crate_root: The file that will be passed to `rustc` to be used for building - this crate. - - If `crate_root` is not set, then this rule will look for a `bin.rs` file or - the single file in `srcs` if `srcs` contains only one file. - deps: List of other libraries to be linked to this library target. - - These must be `rust_library` targets. - data: List of files used by this rule at runtime. - - This attribute can be used to specify any data files that are embedded into - the library, such as via the - [`include_str!`](https://doc.rust-lang.org/std/macro.include_str!.html) - macro. - crate_features: List of features to enable for this crate. - - Features are defined in the code using the `#[cfg(feature = "foo")]` - configuration option. The features listed here will be passed to `rustc` - with `--cfg feature="${feature_name}"` flags. - rustc_flags: List of compiler flags passed to `rustc`. 
- -Example: - Suppose you have the following directory structure for a Rust project with a - library crate, `hello_lib`, and a binary crate, `hello_world` that uses the - `hello_lib` library: - - ``` - [workspace]/ - WORKSPACE - hello_lib/ - BUILD - src/ - lib.rs - hello_world/ - BUILD - src/ - main.rs - ``` - - `hello_lib/src/lib.rs`: - - ```rust - pub struct Greeter { - greeting: String, - } - - impl Greeter { - pub fn new(greeting: &str) -> Greeter { - Greeter { greeting: greeting.to_string(), } - } - - pub fn greet(&self, thing: &str) { - println!("{} {}", &self.greeting, thing); - } - } - ``` - - `hello_lib/BUILD`: - - ```python - package(default_visibility = ["//visibility:public"]) - - load("@io_bazel_rules_rust//rust:rust.bzl", "rust_library") - - rust_library( - name = "hello_lib", - srcs = ["src/lib.rs"], - ) - ``` - - `hello_world/src/main.rs`: - - ```rust - extern crate hello_lib; - - fn main() { - let hello = hello_lib::Greeter::new("Hello"); - hello.greet("world"); - } - ``` - - `hello_world/BUILD`: - - ```python - load("@io_bazel_rules_rust//rust:rust.bzl", "rust_binary") - - rust_binary( - name = "hello_world", - srcs = ["src/main.rs"], - deps = ["//hello_lib"], - ) - ``` - - Build and run `hello_world`: - - ``` - $ bazel run //hello_world - INFO: Found 1 target... - Target //examples/rust/hello_world:hello_world up-to-date: - bazel-bin/examples/rust/hello_world/hello_world - INFO: Elapsed time: 1.308s, Critical Path: 1.22s - - INFO: Running command line: bazel-bin/examples/rust/hello_world/hello_world - Hello world - ``` -""" - -rust_test = rule( - _rust_test_impl, - attrs = dict(_rust_common_attrs.items() + _rust_toolchain_attrs.items()), - executable = True, - fragments = ["cpp"], - test = True, -) -"""Builds a Rust test crate. - -Args: - name: This name will also be used as the name of the binary crate built by - this rule. - srcs: List of Rust `.rs` source files used to build the test. - - If `srcs` contains more than one file, then there must be a file either - named `lib.rs`. Otherwise, `crate_root` must be set to the source file that - is the root of the crate to be passed to rustc to build this crate. - crate_root: The file that will be passed to `rustc` to be used for building - this crate. - - If `crate_root` is not set, then this rule will look for a `lib.rs` file or - the single file in `srcs` if `srcs` contains only one file. - deps: List of other libraries to be linked to this library target. - - These must be `rust_library` targets. - data: List of files used by this rule at runtime. - - This attribute can be used to specify any data files that are embedded into - the library, such as via the - [`include_str!`](https://doc.rust-lang.org/std/macro.include_str!.html) - macro. - crate_features: List of features to enable for this crate. - - Features are defined in the code using the `#[cfg(feature = "foo")]` - configuration option. The features listed here will be passed to `rustc` - with `--cfg feature="${feature_name}"` flags. - rustc_flags: List of compiler flags passed to `rustc`. 
- -Examples: - Suppose you have the following directory structure for a Rust library crate - with unit test code in the library sources: - - ``` - [workspace]/ - WORKSPACE - hello_lib/ - BUILD - src/ - lib.rs - ``` - - `hello_lib/src/lib.rs`: - - ```rust - pub struct Greeter { - greeting: String, - } - - impl Greeter { - pub fn new(greeting: &str) -> Greeter { - Greeter { greeting: greeting.to_string(), } - } - - pub fn greet(&self, thing: &str) { - println!("{} {}", &self.greeting, thing); - } - } - - #[cfg(test)] - mod test { - use super::Greeter; - - #[test] - fn test_greeting() { - let hello = Greeter::new("Hi"); - assert_eq!("Hi Rust", hello.greeting("Rust")); - } - } - ``` - - To build and run the tests, simply add a `rust_test` rule with no `srcs` and - only depends on the `hello_lib` `rust_library` target: - - `hello_lib/BUILD`: - - ```python - package(default_visibility = ["//visibility:public"]) - - load("@io_bazel_rules_rust//rust:rust.bzl", "rust_library", "rust_test") - - rust_library( - name = "hello_lib", - srcs = ["src/lib.rs"], - ) - - rust_test( - name = "hello_lib_test", - deps = [":hello_lib"], - ) - ``` - - Run the test with `bazel build //hello_lib:hello_lib_test`. - - ### Example: `test` directory - - Integration tests that live in the [`tests` directory][int-tests], they are - essentially built as separate crates. Suppose you have the following directory - structure where `greeting.rs` is an integration test for the `hello_lib` - library crate: - - [int-tests]: http://doc.rust-lang.org/book/testing.html#the-tests-directory - - ``` - [workspace]/ - WORKSPACE - hello_lib/ - BUILD - src/ - lib.rs - tests/ - greeting.rs - ``` - - `hello_lib/tests/greeting.rs`: - - ```rust - extern crate hello_lib; - - use hello_lib; - - #[test] - fn test_greeting() { - let hello = greeter::Greeter::new("Hello"); - assert_eq!("Hello world", hello.greeting("world")); - } - ``` - - To build the `greeting.rs` integration test, simply add a `rust_test` target - with `greeting.rs` in `srcs` and a dependency on the `hello_lib` target: - - `hello_lib/BUILD`: - - ```python - package(default_visibility = ["//visibility:public"]) - - load("@io_bazel_rules_rust//rust:rust.bzl", "rust_library", "rust_test") - - rust_library( - name = "hello_lib", - srcs = ["src/lib.rs"], - ) - - rust_test( - name = "greeting_test", - srcs = ["tests/greeting.rs"], - deps = [":hello_lib"], - ) - ``` - - Run the test with `bazel build //hello_lib:hello_lib_test`. -""" - -rust_bench_test = rule( - _rust_bench_test_impl, - attrs = dict(_rust_common_attrs.items() + _rust_toolchain_attrs.items()), - executable = True, - fragments = ["cpp"], - test = True, -) -"""Builds a Rust benchmark test. - -**Warning**: This rule is currently experimental. [Rust Benchmark -tests][rust-bench] require the `Bencher` interface in the unstable `libtest` -crate, which is behind the `test` unstable feature gate. As a result, using -this rule would require using a nightly binary release of Rust. A -`rust_toolchain` rule will be added in the [near future](#roadmap) to make it -easy to use a custom Rust toolchain, such as a nightly release. - -[rust-bench]: https://doc.rust-lang.org/book/benchmark-tests.html - -Args: - name: This name will also be used as the name of the binary crate built by - this rule. - srcs: List of Rust `.rs` source files used to build the test. - - If `srcs` contains more than one file, then there must be a file either - named `lib.rs`. 
Otherwise, `crate_root` must be set to the source file that - is the root of the crate to be passed to rustc to build this crate. - crate_root: The file that will be passed to `rustc` to be used for building - this crate. - - If `crate_root` is not set, then this rule will look for a `lib.rs` file or - the single file in `srcs` if `srcs` contains only one file. - deps: List of other libraries to be linked to this library target. - - These must be `rust_library` targets. - data: List of files used by this rule at runtime. - - This attribute can be used to specify any data files that are embedded into - the library, such as via the - [`include_str!`](https://doc.rust-lang.org/std/macro.include_str!.html) - macro. - crate_features: List of features to enable for this crate. - - Features are defined in the code using the `#[cfg(feature = "foo")]` - configuration option. The features listed here will be passed to `rustc` - with `--cfg feature="${feature_name}"` flags. - rustc_flags: List of compiler flags passed to `rustc`. - -Example: - Suppose you have the following directory structure for a Rust project with a - library crate, `fibonacci` with benchmarks under the `benches/` directory: - - ``` - [workspace]/ - WORKSPACE - fibonacci/ - BUILD - src/ - lib.rs - benches/ - fibonacci_bench.rs - ``` - - `fibonacci/src/lib.rs`: - - ```rust - pub fn fibonacci(n: u64) -> u64 { - if n < 2 { - return n; - } - let mut n1: u64 = 0; - let mut n2: u64 = 1; - for _ in 1..n { - let sum = n1 + n2; - n1 = n2; - n2 = sum; - } - n2 - } - ``` - - `fibonacci/benches/fibonacci_bench.rs`: - - ```rust - #![feature(test)] - - extern crate test; - extern crate fibonacci; - - use test::Bencher; - - #[bench] - fn bench_fibonacci(b: &mut Bencher) { - b.iter(|| fibonacci::fibonacci(40)); - } - ``` - - To build the benchmark test, simply add a `rust_bench_test` target: - - `fibonacci/BUILD`: - - ```python - package(default_visibility = ["//visibility:public"]) - - load("@io_bazel_rules_rust//rust:rust.bzl", "rust_library", "rust_bench_test") - - rust_library( - name = "fibonacci", - srcs = ["src/lib.rs"], - ) - - rust_bench_test( - name = "fibonacci_bench", - srcs = ["benches/fibonacci_bench.rs"], - deps = [":fibonacci"], - ) - ``` - - Run the benchmark test using: `bazel build //fibonacci:fibonacci_bench`. -""" - -_rust_doc_common_attrs = { - "dep": attr.label(mandatory = True), -} - -_rust_doc_attrs = { - "markdown_css": attr.label_list(allow_files = CSS_FILETYPE), - "html_in_header": attr.label(allow_files = HTML_MD_FILETYPE), - "html_before_content": attr.label(allow_files = HTML_MD_FILETYPE), - "html_after_content": attr.label(allow_files = HTML_MD_FILETYPE), -} - -rust_doc = rule( - _rust_doc_impl, - attrs = dict(_rust_doc_common_attrs.items() + - _rust_doc_attrs.items() + - _rust_toolchain_attrs.items()), - outputs = { - "rust_doc_zip": "%{name}-docs.zip", - }, -) -"""Generates code documentation. - -Args: - name: A unique name for this rule. - dep: The label of the target to generate code documentation for. - - `rust_doc` can generate HTML code documentation for the source files of - `rust_library` or `rust_binary` targets. - markdown_css: CSS files to include via `` in a rendered - Markdown file. - html_in_header: File to add to ``. - html_before_content: File to add in ``, before content. - html_after_content: File to add in ``, after content. 
- -Example: - Suppose you have the following directory structure for a Rust library crate: - - ``` - [workspace]/ - WORKSPACE - hello_lib/ - BUILD - src/ - lib.rs - ``` - - To build [`rustdoc`][rustdoc] documentation for the `hello_lib` crate, define - a `rust_doc` rule that depends on the the `hello_lib` `rust_library` target: - - [rustdoc]: https://doc.rust-lang.org/book/documentation.html - - ```python - package(default_visibility = ["//visibility:public"]) - - load("@io_bazel_rules_rust//rust:rust.bzl", "rust_library", "rust_doc") - - rust_library( - name = "hello_lib", - srcs = ["src/lib.rs"], - ) - - rust_doc( - name = "hello_lib_doc", - dep = ":hello_lib", - ) - ``` - - Running `bazel build //hello_lib:hello_lib_doc` will build a zip file containing - the documentation for the `hello_lib` library crate generated by `rustdoc`. -""" - -rust_doc_test = rule( - _rust_doc_test_impl, - attrs = dict(_rust_doc_common_attrs.items() + - _rust_toolchain_attrs.items()), - executable = True, - test = True, -) -"""Runs Rust documentation tests. - -Args: - name: A unique name for this rule. - dep: The label of the target to run documentation tests for. - - `rust_doc_test` can run documentation tests for the source files of - `rust_library` or `rust_binary` targets. - -Example: - Suppose you have the following directory structure for a Rust library crate: - - ``` - [workspace]/ - WORKSPACE - hello_lib/ - BUILD - src/ - lib.rs - ``` - - To run [documentation tests][doc-test] for the `hello_lib` crate, define a - `rust_doc_test` target that depends on the `hello_lib` `rust_library` target: - - [doc-test]: https://doc.rust-lang.org/book/documentation.html#documentation-as-tests - - ```python - package(default_visibility = ["//visibility:public"]) - - load("@io_bazel_rules_rust//rust:rust.bzl", "rust_library", "rust_doc_test") - - rust_library( - name = "hello_lib", - srcs = ["src/lib.rs"], - ) - - rust_doc_test( - name = "hello_lib_doc_test", - dep = ":hello_lib", - ) - ``` - - Running `bazel test //hello_lib:hello_lib_doc_test` will run all documentation - tests for the `hello_lib` library crate. -""" diff --git a/starlark/src/syntax/testcases/self_extract_binary.bzl b/starlark/src/syntax/testcases/self_extract_binary.bzl deleted file mode 100644 index 75f19279..00000000 --- a/starlark/src/syntax/testcases/self_extract_binary.bzl +++ /dev/null @@ -1,95 +0,0 @@ -# Copyright 2015 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Self-extracting binary. 
- -Generate a binary suitable for self-extraction: - -self_extract_binary( - name = "install.sh", - launcher = "launcher.sh", - resources = ["path1/file1", "path2/file2"], - flatten_ressources = ["path3/file3"], -) - -will generate a file 'install.sh' with a header (launcher.sh) -and a ZIP footer with the following entries: - path1/ - path1/file1 - path2/ - path2/file2 - file3 - -""" - -def _self_extract_binary(ctx): - """Implementation for the self_extract_binary rule.""" - # This is a bit complex for stripping out timestamps - zip_artifact = ctx.new_file(ctx.label.name + ".zip") - touch_empty_files = [ - "mkdir -p $(dirname ${tmpdir}/%s); touch ${tmpdir}/%s" % (f, f) - for f in ctx.attr.empty_files - ] - cp_resources = [ - ("mkdir -p $(dirname ${tmpdir}/%s)\n" % r.short_path + - "cp %s ${tmpdir}/%s" % (r.path, r.short_path)) - for r in ctx.files.resources - ] - cp_flatten_resources = [ - "cp %s ${tmpdir}/%s" % (r.path, r.basename) - for r in ctx.files.flatten_resources - ] - ctx.action( - inputs = ctx.files.resources + ctx.files.flatten_resources, - outputs = [zip_artifact], - command = "\n".join([ - "tmpdir=$(mktemp -d ${TMPDIR:-/tmp}/tmp.XXXXXXXX)", - "trap \"rm -fr ${tmpdir}\" EXIT" - ] + touch_empty_files + cp_resources + cp_flatten_resources + [ - "find ${tmpdir} -exec touch -t 198001010000.00 '{}' ';'", - "(d=${PWD}; cd ${tmpdir}; zip -rq ${d}/%s *)" % zip_artifact.path, - ]), - mnemonic = "ZipBin", - ) - ctx.action( - inputs = [ctx.file.launcher, zip_artifact], - outputs = [ctx.outputs.executable], - command = "\n".join([ - "cat %s %s > %s" % (ctx.file.launcher.path, - zip_artifact.path, - ctx.outputs.executable.path), - "zip -qA %s" % ctx.outputs.executable.path - ]), - mnemonic = "BuildSelfExtractable", - ) - -self_extract_binary = rule( - _self_extract_binary, - attrs = { - "launcher": attr.label( - mandatory = True, - allow_files = True, - single_file = True, - ), - "empty_files": attr.string_list(default = []), - "resources": attr.label_list( - default = [], - allow_files = True, - ), - "flatten_resources": attr.label_list( - default = [], - allow_files = True, - ), - }, - executable = True, -) diff --git a/starlark/src/syntax/testcases/serialize.bzl b/starlark/src/syntax/testcases/serialize.bzl deleted file mode 100644 index 58fe1638..00000000 --- a/starlark/src/syntax/testcases/serialize.bzl +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Methods for serializing objects.""" - - -def dict_to_associative_list(dict_value): - """Serializes a dict to an associative list.""" - return ",".join(["%s=%s" % (k, dict_value[k]) for k in dict_value]) diff --git a/starlark/src/syntax/testcases/sets.bzl b/starlark/src/syntax/testcases/sets.bzl deleted file mode 100644 index edaf8275..00000000 --- a/starlark/src/syntax/testcases/sets.bzl +++ /dev/null @@ -1,145 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. 
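As a quick, hypothetical illustration of the `dict_to_associative_list` helper from `serialize.bzl` above (the load label and the dict contents are invented, not taken from the original sources):

```python
# Assuming serialize.bzl is loadable from the current package.
load(":serialize.bzl", "dict_to_associative_list")

# dict_to_associative_list({"name": "foo", "visibility": "public"})
# => "name=foo,visibility=public"
```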
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Skylib module containing common set algorithms. - -CAUTION: Operating on sets, particularly sets contained in providers, may -asymptotically slow down the analysis phase. While constructing large sets with -addition/union is fast (there is no linear-time copy involved), the -`difference` function and various comparison predicates involve linear-time -traversals. - -For convenience, the functions in this module can take either sets or lists as -inputs; operations that take lists treat them as if they were sets (i.e., -duplicate elements are ignored). Functions that return new sets always return -them as the `set` type, regardless of the types of the inputs. -""" - - -def _precondition_only_sets_or_lists(*args): - """Verifies that all arguments are either sets or lists. - - The build will fail if any of the arguments is neither a set nor a list. - - Args: - *args: A list of values that must be sets or lists. - """ - for a in args: - t = type(a) - if t not in("depset", "list"): - fail("Expected arguments to be depset or list, but found type %s: %r" % - (t, a)) - - -def _is_equal(a, b): - """Returns whether two sets are equal. - - Args: - a: A depset or a list. - b: A depset or a list. - Returns: - True if `a` is equal to `b`, False otherwise. - """ - _precondition_only_sets_or_lists(a, b) - return sorted(depset(a)) == sorted(depset(b)) - - -def _is_subset(a, b): - """Returns whether `a` is a subset of `b`. - - Args: - a: A depset or a list. - b: A depset or a list. - - Returns: - True if `a` is a subset of `b`, False otherwise. - """ - _precondition_only_sets_or_lists(a, b) - for e in a: - if e not in b: - return False - return True - - -def _disjoint(a, b): - """Returns whether two sets are disjoint. - - Two sets are disjoint if they have no elements in common. - - Args: - a: A set or list. - b: A set or list. - - Returns: - True if `a` and `b` are disjoint, False otherwise. - """ - _precondition_only_sets_or_lists(a, b) - for e in a: - if e in b: - return False - return True - - -def _intersection(a, b): - """Returns the intersection of two sets. - - Args: - a: A set or list. - b: A set or list. - - Returns: - A set containing the elements that are in both `a` and `b`. - """ - _precondition_only_sets_or_lists(a, b) - return depset([e for e in a if e in b]) - - -def _union(*args): - """Returns the union of several sets. - - Args: - *args: An arbitrary number of sets or lists. - - Returns: - The set union of all sets or lists in `*args`. - """ - _precondition_only_sets_or_lists(*args) - r = depset() - for a in args: - r += a - return r - - -def _difference(a, b): - """Returns the elements in `a` that are not in `b`. - - Args: - a: A set or list. - b: A set or list. - - Returns: - A set containing the elements that are in `a` but not in `b`. 
- """ - _precondition_only_sets_or_lists(a, b) - return depset([e for e in a if e not in b]) - - -sets = struct( - difference = _difference, - disjoint = _disjoint, - intersection = _intersection, - is_equal = _is_equal, - is_subset = _is_subset, - union = _union, -) diff --git a/starlark/src/syntax/testcases/shared.bzl b/starlark/src/syntax/testcases/shared.bzl deleted file mode 100644 index f4c97377..00000000 --- a/starlark/src/syntax/testcases/shared.bzl +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright 2016 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Common definitions for Apple rules.""" - -APPLE_SIMULATOR_ARCHITECTURES = ["i386", "x86_64"] -"""Architectures that are used by the simulator (iOS, tvOS and watchOS).""" - -IOS_DEVICE_ARCHITECTURES = ["armv7", "arm64"] -"""Architectures that are used by iOS devices.""" - -TVOS_DEVICE_ARCHITECTURES = ["arm64"] -"""Architectures that are used by tvOS devices.""" - -WATCHOS_DEVICE_ARCHITECTURES = ["armv7k"] -"""Architectures that are used by watchOS devices.""" - -APPLE_DEFAULT_ARCHITECTURES = (APPLE_SIMULATOR_ARCHITECTURES + - IOS_DEVICE_ARCHITECTURES + - WATCHOS_DEVICE_ARCHITECTURES) -"""Architectures commonly used for building/testing on simulators/devices.""" - -APPLE_FRAGMENTS = ["apple"] -"""Configuration fragments containing Apple specific information.""" - -DARWIN_EXECUTION_REQUIREMENTS = {"requires-darwin": ""} -"""Standard execution requirements to force building on Mac. - -See :func:`apple_action`.""" - -XCRUNWRAPPER_LABEL = "//external:xcrunwrapper" -"""The label for xcrunwrapper tool.""" - - -def label_scoped_path(ctx, path): - """Return the path scoped to target's label.""" - return ctx.label.name + "/" + path.lstrip("/") - - -def module_cache_path(ctx): - """Returns the Clang module cache path to use for this rule.""" - return ctx.genfiles_dir.path + "/_objc_module_cache" - - -def apple_action(ctx, **kw): - """Creates an action that only runs on MacOS/Darwin. - - Call it similar to how you would call ctx.action: - apple_action(ctx, outputs=[...], inputs=[...],...) - """ - execution_requirements = kw.get("execution_requirements", {}) - execution_requirements += DARWIN_EXECUTION_REQUIREMENTS - - no_sandbox = kw.pop("no_sandbox", False) - if no_sandbox: - execution_requirements["nosandbox"] = "1" - - kw["execution_requirements"] = execution_requirements - - ctx.action(**kw) - - -def xcrun_env(ctx): - """Returns the environment dictionary necessary to use xcrunwrapper.""" - platform = ctx.fragments.apple.single_arch_platform - - if hasattr(apple_common, "apple_host_system_env"): - xcode_config = ctx.attr._xcode_config[apple_common.XcodeVersionConfig] - return (apple_common.target_apple_env(xcode_config, platform) + - apple_common.apple_host_system_env(xcode_config)) - else: - return (ctx.fragments.apple.target_apple_env(platform) + - ctx.fragments.apple.apple_host_system_env()) - - -def xcrun_action(ctx, **kw): - """Creates an apple action that executes xcrunwrapper. 
- - args: - ctx: The context of the rule that owns this action. - - This method takes the same keyword arguments as ctx.action, however you don't - need to specify the executable. - """ - env = kw.get("env", {}) - kw["env"] = env + xcrun_env(ctx) - - apple_action(ctx, executable=ctx.executable._xcrunwrapper, **kw) diff --git a/starlark/src/syntax/testcases/shell.bzl b/starlark/src/syntax/testcases/shell.bzl deleted file mode 100644 index 4173fe7b..00000000 --- a/starlark/src/syntax/testcases/shell.bzl +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Skylib module containing shell utility functions.""" - - -def _array_literal(iterable): - """Creates a string from a sequence that can be used as a shell array. - - For example, `shell.array_literal(["a", "b", "c"])` would return the string - `("a" "b" "c")`, which can be used in a shell script wherever an array - literal is needed. - - Note that all elements in the array are quoted (using `shell.quote`) for - safety, even if they do not need to be. - - Args: - iterable: A sequence of elements. Elements that are not strings will be - converted to strings first, by calling `str()`. - Returns: - A string that represents the sequence as a shell array; that is, - parentheses containing the quoted elements. - """ - return "(" + " ".join([_quote(str(i)) for i in iterable]) + ")" - - -def _quote(s): - """Quotes the given string for use in a shell command. - - This function quotes the given string (in case it contains spaces or other - shell metacharacters.) - - Args: - s: The string to quote. - Returns: - A quoted version of the string that can be passed to a shell command. - """ - return "'" + s.replace("'", "'\\''") + "'" - - -shell = struct( - array_literal=_array_literal, - quote=_quote, -) diff --git a/starlark/src/syntax/testcases/single_output_test.bzl b/starlark/src/syntax/testcases/single_output_test.bzl deleted file mode 100644 index 3eb10b60..00000000 --- a/starlark/src/syntax/testcases/single_output_test.bzl +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2017 The Bazel Go Rules Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
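To make the `shell` helpers above concrete, a small hypothetical sketch (the load label and the file names are invented):

```python
load(":shell.bzl", "shell")

# shell.quote("it's a file.txt")
# => 'it'\''s a file.txt'        (the embedded quote is escaped for bash)
#
# shell.array_literal(["a b.txt", "c.txt"])
# => ('a b.txt' 'c.txt')         (usable wherever bash expects an array literal)

def _echo_command(msg):
    # Build a command that echoes an arbitrary, possibly hostile, string safely.
    return "echo %s" % shell.quote(msg)
```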
- -def _impl(ctx): - ctx.actions.write( - output = ctx.outputs.executable, - content = "", - is_executable = True, - ) - -single_output_test = rule( - implementation = _impl, - attrs = { - "dep": attr.label(allow_single_file = True), - }, - test = True, -) -"""Checks that a dependency produces a single output file. - -This test works by setting `allow_single_file = True` on the `dep` attribute. -If `dep` provides zero or multiple files in its `files` provider, Bazel will -fail to build this rule during analysis. The actual test does nothing.] - -Args: - dep: a label for the rule to check. -""" diff --git a/starlark/src/syntax/testcases/site.bzl b/starlark/src/syntax/testcases/site.bzl deleted file mode 100644 index 913bfe35..00000000 --- a/starlark/src/syntax/testcases/site.bzl +++ /dev/null @@ -1,109 +0,0 @@ -# Copyright 2016 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -_REDIRECTS_FILETYPE = [".redirects"] -_TAR_FILETYPE = [".tar"] - -def _gen_redirects_impl(ctx): - redirect_str = "" - redirects_map = ctx.attr.redirects - for path in redirects_map: - redirect_str += "%s\t%s\n" % (path, redirects_map[path]) - ctx.file_action(output=ctx.outputs.redirects, content=redirect_str) - -def _site_tar_impl(ctx): - ctx.action( - inputs = [ctx.file.src] + ( - [ctx.file.redirects_file] if ctx.file.redirects_file else []), - executable = ctx.executable.jekyll_tree, - arguments = [ - ctx.outputs.out.path, - ctx.file.src.path - ] + ([ctx.file.redirects_file.path] - if ctx.file.redirects_file - else []), - outputs = [ctx.outputs.out], - mnemonic = "SiteTar", - use_default_shell_env = True, - progress_message = "Generating site tarball.") - - -_gen_redirects = rule( - implementation = _gen_redirects_impl, - attrs = { - "redirects": attr.string_dict(mandatory=True, allow_empty=False), - }, - outputs = { - "redirects": "%{name}.redirects", - }, -) -"""Writes a tab-delimited file containing mapping of page path to redirect URL. - -Helper rule for `_site_tar`. Takes a string_dict containing a mapping of page -path to redirect URL as input and writes the map to a text file. Each line -represents a single redirect page, consists of the page path, a tab character, -and the redirect URL. - -Args: - redirects: String dict containing the redirect mapping. - -Outputs: - redirects: Text file containing the mapping. -""" - -_site_tar = rule( - implementation = _site_tar_impl, - attrs = { - "src": attr.label(mandatory=True, - allow_files=_TAR_FILETYPE, - single_file=True), - "redirects_file": attr.label(allow_files=_REDIRECTS_FILETYPE, - single_file=True), - "jekyll_tree": attr.label(default=Label("//:build-jekyll-tree"), - cfg="host", - executable=True), - }, - outputs = { - "out": "%{name}.tar", - }, -) -"""Generates redirects in the Jekyll tree archive. - -Args: - src: Label of the Jekyll tree archive. 
- redirects_file: File containing the mapping of page path to redirect URL as - generated by `_gen_redirects` - -Outputs: - out: Tar archive containing the Jekyll tree with the generated redirect pages. -""" - -def site_tar(name, src, redirects={}): - """Modifies the Jekyll tree, generating the specified redirect pages. - - Args: - name: A unique name for this rule. - src: The label of the Jekyll tree archive. - redirects: Dict mapping page path to redirect URL. - """ - _gen_redirects( - name = "%s_redirects" % name, - redirects = redirects, - ) - - _site_tar( - name = name, - src = src, - redirects_file = "%s_redirects" % name, - ) diff --git a/starlark/src/syntax/testcases/source.bzl b/starlark/src/syntax/testcases/source.bzl deleted file mode 100644 index c825631d..00000000 --- a/starlark/src/syntax/testcases/source.bzl +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# In Go, imports are always fully qualified with a URL, -# eg. github.com/user/project. Hence, a label //foo:bar from within a -# Bazel workspace must be referred to as -# "github.com/user/project/foo/bar". To make this work, each rule must -# know the repository's URL. This is achieved, by having all go rules -# depend on a globally unique target that has a "go_prefix" transitive -# info provider. - -load("@io_bazel_rules_go//go/private:providers.bzl", - "GoLibrary", - "GoSourceList", -) -load("@io_bazel_rules_go//go/private:rules/aspect.bzl", - "collect_src", -) - -def _go_source_impl(ctx): - """Implements the go_source() rule.""" - return [collect_src(ctx)] - -go_source = rule( - _go_source_impl, - attrs = { - "data": attr.label_list(allow_files = True, cfg = "data"), - "srcs": attr.label_list(allow_files = True), - "deps": attr.label_list(providers = [GoLibrary]), - "embed": attr.label_list(providers = [GoSourceList]), - "gc_goopts": attr.string_list(), - "_go_toolchain_flags": attr.label(default=Label("@io_bazel_rules_go//go/private:go_toolchain_flags")), - }, -) -"""See go/core.rst#go_source for full documentation.""" diff --git a/starlark/src/syntax/testcases/stdlib.bzl b/starlark/src/syntax/testcases/stdlib.bzl deleted file mode 100644 index 51c395e3..00000000 --- a/starlark/src/syntax/testcases/stdlib.bzl +++ /dev/null @@ -1,143 +0,0 @@ -# Copyright 2016 The Bazel Go Rules Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
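For the `site_tar` macro in `site.bzl` above, a hypothetical BUILD usage; the target names and the redirect mapping are invented for illustration:

```python
load(":site.bzl", "site_tar")

site_tar(
    name = "site",
    src = ":jekyll_tree.tar",
    redirects = {
        "docs/old-page.html": "https://example.com/docs/new-page.html",
    },
)
```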
- -load("@io_bazel_rules_go//go/private:providers.bzl", - "GoStdLib", -) - -_STDLIB_BUILD = """ -load("@io_bazel_rules_go//go/private:rules/stdlib.bzl", "stdlib") - -stdlib( - name = "{name}", - goos = "{goos}", - goarch = "{goarch}", - race = {race}, - cgo = {cgo}, - visibility = ["//visibility:public"], -) -""" - -def _stdlib_impl(ctx): - src = ctx.actions.declare_directory("src") - pkg = ctx.actions.declare_directory("pkg") - root_file = ctx.actions.declare_file("ROOT") - goroot = root_file.path[:-(len(root_file.basename)+1)] - sdk = "" - for f in ctx.files._host_sdk: - prefix, found, extension = f.path.partition("bin/go") - if found: - sdk = prefix - if not sdk: - fail("Could not find go executable in go_sdk") - go = ctx.actions.declare_file("bin/go" + extension) - files = [root_file, go, pkg] - cpp = ctx.fragments.cpp - features = ctx.features - options = (cpp.compiler_options(features) + - cpp.unfiltered_compiler_options(features) + - cpp.link_options + - cpp.mostly_static_link_options(features, False)) - linker_path, _ = cpp.ld_executable.rsplit("/", 1) - ctx.actions.write(root_file, "") - cc_path = cpp.compiler_executable - if not cpp.compiler_executable.startswith("/"): - cc_path = "$(pwd)/" + cc_path - env = { - "GOROOT": "$(pwd)/{}".format(goroot), - "GOOS": ctx.attr.goos, - "GOARCH": ctx.attr.goarch, - "CGO_ENABLED": "1" if ctx.attr.cgo else "0", - "CC": cc_path, - "CXX": cc_path, - "COMPILER_PATH": linker_path - } - inputs = ctx.files._host_sdk + [root_file] - inputs.extend(ctx.files._host_tools) - install_args = [] - if ctx.attr.race: - install_args.append("-race") - install_args = " ".join(install_args) - - ctx.actions.run_shell( - inputs = inputs, - outputs = [go, src, pkg], - mnemonic = "GoStdlib", - command = " && ".join([ - "export " + " ".join(["{}={}".format(key, value) for key, value in env.items()]), - "mkdir -p {}".format(src.path), - "mkdir -p {}".format(pkg.path), - "cp {}/bin/{} {}".format(sdk, go.basename, go.path), - "cp -rf {}/src/* {}/".format(sdk, src.path), - "cp -rf {}/pkg/tool {}/".format(sdk, pkg.path), - "cp -rf {}/pkg/include {}/".format(sdk, pkg.path), - "{} install {} std".format(go.path, install_args), - "{} install {} runtime/cgo".format(go.path, install_args), - ]) - ) - return [ - DefaultInfo( - files = depset([root_file, go, src, pkg]), - ), - GoStdLib( - go = go, - root_file = root_file, - goos = ctx.attr.goos, - goarch = ctx.attr.goarch, - race = ctx.attr.race, - pure = not ctx.attr.cgo, - libs = [pkg], - headers = [pkg], - files = files, - cgo_tools = struct( - compiler_executable = cpp.compiler_executable, - ld_executable = cpp.ld_executable, - options = options, - c_options = cpp.c_options, - ), - ), - ] - -stdlib = rule( - _stdlib_impl, - attrs = { - "goos": attr.string(mandatory = True), - "goarch": attr.string(mandatory = True), - "race": attr.bool(mandatory = True), - "cgo": attr.bool(mandatory = True), - "_host_sdk": attr.label(allow_files = True, default="@go_sdk//:host_sdk"), - "_host_tools": attr.label(allow_files = True, cfg="host", default="@go_sdk//:host_tools"), - }, - fragments = ["cpp"], -) - -def _go_stdlib_impl(ctx): - ctx.file("BUILD.bazel", _STDLIB_BUILD.format( - name = ctx.name, - goos = ctx.attr.goos, - goarch = ctx.attr.goarch, - race = ctx.attr.race, - cgo = ctx.attr.cgo, - )) - -go_stdlib = repository_rule( - implementation = _go_stdlib_impl, - attrs = { - "goos": attr.string(mandatory = True), - "goarch": attr.string(mandatory = True), - "race": attr.bool(mandatory = True), - "cgo": attr.bool(mandatory = True), - }, 
-) -"""See /go/toolchains.rst#go-sdk for full documentation.""" diff --git a/starlark/src/syntax/testcases/structs.bzl b/starlark/src/syntax/testcases/structs.bzl deleted file mode 100644 index 78715cca..00000000 --- a/starlark/src/syntax/testcases/structs.bzl +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Skylib module containing functions that operate on structs.""" - - -def _to_dict(s): - """Converts a `struct` to a `dict`. - - Args: - s: A `struct`. - Returns: - A `dict` whose keys and values are the same as the fields in `s`. The - transformation is only applied to the struct's fields and not to any - nested values. - """ - attributes = dir(s) - attributes.remove("to_json") - attributes.remove("to_proto") - return {key: getattr(s, key) for key in attributes} - - -structs = struct( - to_dict=_to_dict, -) diff --git a/starlark/src/syntax/testcases/templates.bzl b/starlark/src/syntax/testcases/templates.bzl deleted file mode 100644 index 8182fbd4..00000000 --- a/starlark/src/syntax/testcases/templates.bzl +++ /dev/null @@ -1,207 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
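A short sketch of `structs.to_dict` from `structs.bzl` above; the load label and the field names are arbitrary:

```python
load(":structs.bzl", "structs")

s = struct(kind = "library", deps = [":a", ":b"])
# structs.to_dict(s) => {"deps": [":a", ":b"], "kind": "library"}
# Only top-level fields are converted; nested structs are left untouched.
```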
- -# Rules for templating / files layout - -def _expand_template_impl(ctx): - """Simply spawn the template-engine in a rule.""" - variables = [ - "--variable=%s=%s" % (k, ctx.attr.substitutions[k]) - for k in ctx.attr.substitutions - ] - if not ctx.attr.escape_xml: - variables += ["--noescape_xml"] - d = {str(ctx.attr.deps[i].label): ctx.files.deps[i].path - for i in range(0, len(ctx.attr.deps))} - imports = ["--imports=%s=%s" % (k, d[k]) for k in d] - imports += ["--imports=%s=%s" % (k, d[str(ctx.label.relative(ctx.attr.deps_aliases[k]))]) - for k in ctx.attr.deps_aliases] - ctx.action( - executable = ctx.executable._engine, - arguments = [ - "--executable" if ctx.attr.executable else "--noexecutable", - "--template=%s" % ctx.file.template.path, - "--output=%s" % ctx.outputs.out.path, - ] + variables + imports, - inputs = [ctx.file.template] + ctx.files.deps, - outputs = [ctx.outputs.out], - ) - -expand_template = rule( - attrs = { - "template": attr.label( - mandatory = True, - allow_files = True, - single_file = True, - ), - "deps": attr.label_list(default = [], allow_files = True), - "deps_aliases": attr.string_dict(default = {}), - "substitutions": attr.string_dict(mandatory = True), - "out": attr.output(mandatory = True), - "executable": attr.bool(default = True), - "escape_xml": attr.bool(default = True), - "_engine": attr.label( - default = Label("//templating:template_engine"), - executable = True, - cfg="host"), - }, - implementation = _expand_template_impl, -) -"""Expand a jinja2 template file. - -This rules expands the file given in template, into the file given by out. - -Args: - template: The template file to expand. - deps: additional files to expand, they will be accessible as imports[label] - in the template environment. If a file ends with .tpl, it is considered - a template itself and will be expanded. - deps_aliases: a dictionary of name to label. Each label in that dictionary - should be present in the deps attribute, and will be make accessible as - imports[name] in the template environment. - substitutions: a dictionary of key => values that will appear as variables.key - in the template environment. - out: the name of the output file to generate. - executable: mark the result as excutable if set to True. 
-""" - -def strip_prefix(path, prefixes): - for prefix in prefixes: - if path.startswith(prefix): - return path[len(prefix):] - return path - -def strip_suffix(path, suffixes): - for suffix in suffixes: - if path.endswith(suffix): - return path[:-len(suffix)] - return path - -def _dest_path(f, strip_prefixes, strip_suffixes): - """Returns the short path of f, stripped of strip_prefixes and strip_suffixes.""" - return strip_suffix(strip_prefix(f.short_path, strip_prefixes), strip_suffixes) - -def _format_path(path_format, path): - dirsep = path.rfind("/") - dirname = path[:dirsep] if dirsep > 0 else "" - basename = path[dirsep+1:] if dirsep > 0 else path - extsep = basename.rfind(".") - extension = basename[extsep+1:] if extsep > 0 else "" - basename = basename[:extsep] if extsep > 0 else basename - return path_format.format( - path=path, - dirname=dirname, - basename=basename, - extension=extension - ) - -def _append_inputs(args, inputs, f, path, path_format): - args.append("--file=%s=%s" % ( - f.path, - _format_path(path_format, path) - )) - inputs.append(f) - -def _merge_files_impl(ctx): - """Merge a list of config files in a tar ball with the correct layout.""" - output = ctx.outputs.out - build_tar = ctx.executable._build_tar - inputs = [] - args = [ - "--output=" + output.path, - "--directory=" + ctx.attr.directory, - "--mode=0644", - ] - variables = [ - "--variable=%s=%s" % (k, ctx.attr.substitutions[k]) - for k in ctx.attr.substitutions - ] - for f in ctx.files.srcs: - path = _dest_path(f, ctx.attr.strip_prefixes, ctx.attr.strip_suffixes) - if path.endswith(ctx.attr.template_extension): - path = path[:-4] - f2 = ctx.new_file(ctx.label.name + "/" + path) - ctx.action( - executable = ctx.executable._engine, - arguments = [ - "--template=%s" % f.path, - "--output=%s" % f2.path, - "--noescape_xml", - ] + variables, - inputs = [f], - outputs = [f2], - ) - _append_inputs(args, inputs, f2, path, ctx.attr.path_format) - else: - _append_inputs(args, inputs, f, path, ctx.attr.path_format) - ctx.action( - executable = build_tar, - arguments = args, - inputs = inputs, - outputs = [output], - mnemonic="MergeFiles" - ) - -merge_files = rule( - attrs = { - "srcs": attr.label_list(allow_files=True), - "template_extension": attr.string(default=".tpl"), - "directory": attr.string(default="/"), - "strip_prefixes": attr.string_list(default=[]), - "strip_suffixes": attr.string_list(default=["-staging", "-test"]), - "substitutions": attr.string_dict(default={}), - "path_format": attr.string(default="{path}"), - "_build_tar": attr.label( - default=Label("@bazel_tools//tools/build_defs/pkg:build_tar"), - cfg="host", - executable=True, - allow_files=True), - "_engine": attr.label( - cfg="host", - default = Label("//templating:template_engine"), - executable = True), - }, - outputs = {"out": "%{name}.tar"}, - implementation = _merge_files_impl, -) -"""Merge a set of files in a single tarball. - -This rule merge a set of files into one tarball, each file will appear in the -tarball as a file determined by path_format, strip_prefixes and directory. - -Outputs: - .tar: the tarball containing all the files in srcs. - -Args: - srcs: The list of files to merge. If a file is ending with ".tpl" (see - template_extension), it will get expanded like a template passed to - expand_template. - template_extension: extension of files to be considered as template, ".tpl" - by default. - directory: base directory for all the files in the resulting tarball. 
- strip_prefixes: list of prefixes to strip from the path of the srcs to obtain - the final path (see path_format). - strip_suffixes: list of suffixes to strip from the path of the srcs to obtain - the final path (see path_format). - substitutions: map of substitutions to make available during template - expansion. Values of that map will be available as "variables.name" in - the template environment. - path_format: format of the final files. Each file will appear in the final - tarball under "{directory}/{path_format}" where the following string of - path_format are replaced: - {path}: path of the input file, removed from prefixes and suffixes, - {dirname}: directory name of path, - {basename}: base filename of path, - {extension}: extension of path -""" diff --git a/starlark/src/syntax/testcases/test.bzl b/starlark/src/syntax/testcases/test.bzl deleted file mode 100644 index da2075bf..00000000 --- a/starlark/src/syntax/testcases/test.bzl +++ /dev/null @@ -1,139 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load("@io_bazel_rules_go//go/private:common.bzl", - "go_filetype", - "go_importpath", - "split_srcs", - "pkg_dir", - "declare_file", -) -load("@io_bazel_rules_go//go/private:mode.bzl", - "get_mode", -) -load("@io_bazel_rules_go//go/private:rules/prefix.bzl", - "go_prefix_default", -) -load("@io_bazel_rules_go//go/private:rules/binary.bzl", "gc_linkopts") -load("@io_bazel_rules_go//go/private:providers.bzl", - "GoLibrary", - "sources", -) -load("@io_bazel_rules_go//go/private:actions/action.bzl", - "add_go_env", -) -load("@io_bazel_rules_go//go/private:rules/aspect.bzl", - "go_archive_aspect", - "get_archive", -) - -def _go_test_impl(ctx): - """go_test_impl implements go testing. 
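A hypothetical BUILD usage of the `expand_template` rule from `templates.bzl` above; the labels and substitution keys are invented, not taken from the original repository:

```python
load(":templates.bzl", "expand_template")

expand_template(
    name = "launcher",
    template = "launcher.sh.tpl",
    out = "launcher.sh",
    substitutions = {
        "version": "1.2.3",
        "main_class": "com.example.Main",
    },
    executable = True,
)
```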
- - It emits an action to run the test generator, and then compiles the - test into a binary.""" - - go_toolchain = ctx.toolchains["@io_bazel_rules_go//go:toolchain"] - mode = get_mode(ctx, ctx.attr._go_toolchain_flags) - stdlib = go_toolchain.stdlib.get(ctx, go_toolchain, mode) - archive = get_archive(ctx.attr.library) - - # now generate the main function - if ctx.attr.rundir: - if ctx.attr.rundir.startswith("/"): - run_dir = ctx.attr.rundir - else: - run_dir = pkg_dir(ctx.label.workspace_root, ctx.attr.rundir) - else: - run_dir = pkg_dir(ctx.label.workspace_root, ctx.label.package) - - main_go = declare_file(ctx, "testmain.go") - arguments = ctx.actions.args() - add_go_env(arguments, stdlib, mode) - arguments.add([ - '--package', - archive.data.importpath, - '--rundir', - run_dir, - '--output', - main_go, - ]) - for var in archive.cover_vars: - arguments.add(["-cover", var]) - arguments.add(archive.go_srcs) - ctx.actions.run( - inputs = archive.go_srcs, - outputs = [main_go], - mnemonic = "GoTestGenTest", - executable = go_toolchain.tools.test_generator, - arguments = [arguments], - env = { - "RUNDIR" : ctx.label.package, - }, - ) - - # Now compile the test binary itself - _, goarchive, executable = go_toolchain.actions.binary(ctx, go_toolchain, - name = ctx.label.name, - source = sources.new( - srcs = [main_go], - deps = [ctx.attr.library], - runfiles = ctx.runfiles(collect_data = True), - want_coverage = False, - ), - importpath = ctx.label.name + "~testmain~", - gc_linkopts = gc_linkopts(ctx), - x_defs=ctx.attr.x_defs, - ) - - # TODO(bazel-team): the Go tests should do a chdir to the directory - # holding the data files, so open-source go tests continue to work - # without code changes. - runfiles = goarchive.runfiles.merge(ctx.runfiles(files = [executable])) - return [ - DefaultInfo( - files = depset([executable]), - runfiles = runfiles, - executable = executable, - ), -] - -go_test = rule( - _go_test_impl, - attrs = { - "data": attr.label_list( - allow_files = True, - cfg = "data", - ), - "srcs": attr.label_list(allow_files = go_filetype), - "deps": attr.label_list(providers = [GoLibrary], aspects = [go_archive_aspect]), - "importpath": attr.string(), - "library": attr.label(providers = [GoLibrary], aspects = [go_archive_aspect]), - "pure": attr.string(values=["on", "off", "auto"], default="auto"), - "static": attr.string(values=["on", "off", "auto"], default="auto"), - "race": attr.string(values=["on", "off", "auto"], default="auto"), - "msan": attr.string(values=["on", "off", "auto"], default="auto"), - "gc_goopts": attr.string_list(), - "gc_linkopts": attr.string_list(), - "linkstamp": attr.string(), - "rundir": attr.string(), - "x_defs": attr.string_dict(), - "_go_prefix": attr.label(default = go_prefix_default), - "_go_toolchain_flags": attr.label(default=Label("@io_bazel_rules_go//go/private:go_toolchain_flags")), - }, - executable = True, - test = True, - toolchains = ["@io_bazel_rules_go//go:toolchain"], -) -"""See go/core.rst#go_test for full documentation.""" diff --git a/starlark/src/syntax/testcases/test_defs.bzl b/starlark/src/syntax/testcases/test_defs.bzl deleted file mode 100644 index 8ee1992f..00000000 --- a/starlark/src/syntax/testcases/test_defs.bzl +++ /dev/null @@ -1,187 +0,0 @@ -"""Custom rule for creating IntelliJ plugin tests. -""" - -load( - "//build_defs:build_defs.bzl", - "api_version_txt", -) - -def _generate_test_suite_impl(ctx): - """Generates a JUnit4 test suite pulling in all the referenced classes. 
- - Args: - ctx: the rule context - """ - suite_class_name = ctx.label.name - lines = [] - lines.append("package %s;" % ctx.attr.test_package_root) - lines.append("") - test_srcs = _get_test_srcs(ctx.attr.srcs) - test_classes = [_get_test_class(test_src, ctx.attr.test_package_root) for test_src in test_srcs] - class_rules = ctx.attr.class_rules - if (class_rules): - lines.append("import org.junit.ClassRule;") - lines.append("import org.junit.runner.RunWith;") - lines.append("import org.junit.runners.Suite;") - lines.append("") - for test_class in test_classes: - lines.append("import %s;" % test_class) - lines.append("") - lines.append("@RunWith(Suite.class)") - lines.append("@Suite.SuiteClasses({") - for test_class in test_classes: - lines.append(" %s.class," % test_class.split(".")[-1]) - lines.append("})") - lines.append("public class %s {" % suite_class_name) - lines.append("") - - i = 1 - for class_rule in class_rules: - lines.append("@ClassRule") - lines.append("public static %s setupRule_%d = new %s();" % (class_rule, i, class_rule)) - i += 1 - - lines.append("}") - - contents = "\n".join(lines) - ctx.file_action( - output = ctx.outputs.out, - content = contents, - ) - -_generate_test_suite = rule( - implementation = _generate_test_suite_impl, - attrs = { - # srcs for the test classes included in the suite (only keep those ending in Test.java) - "srcs": attr.label_list(allow_files=True, mandatory=True), - # the package string of the output test suite. - "test_package_root": attr.string(mandatory=True), - # optional list of classes to instantiate as a @ClassRule in the test suite. - "class_rules": attr.string_list() - }, - outputs={"out": "%{name}.java"}, -) - -def intellij_unit_test_suite(name, srcs, test_package_root, **kwargs): - """Creates a java_test rule comprising all valid test classes in the specified srcs. - - Only classes ending in "Test.java" will be recognized. - - Args: - name: name of this rule. - srcs: the test classes. - test_package_root: only tests under this package root will be run. - **kwargs: Any other args to be passed to the java_test. - """ - suite_class_name = name + "TestSuite" - suite_class = test_package_root + "." + suite_class_name - _generate_test_suite( - name = suite_class_name, - srcs = srcs, - test_package_root = test_package_root, - ) - native.java_test( - name = name, - srcs = srcs + [suite_class_name], - test_class = suite_class, - **kwargs) - -def intellij_integration_test_suite( - name, - srcs, - test_package_root, - deps, - size="medium", - shard_count=None, - jvm_flags = [], - runtime_deps = [], - platform_prefix="Idea", - required_plugins=None, - **kwargs): - """Creates a java_test rule comprising all valid test classes in the specified srcs. - - Only classes ending in "Test.java" will be recognized. - - All test classes must be located in the blaze package calling this function. - - Args: - name: name of this rule. - srcs: the test classes. - test_package_root: only tests under this package root will be run. - deps: the required deps. - size: the test size. - shard_count: the number of shards to use. - jvm_flags: extra flags to be passed to the test vm. - runtime_deps: the required runtime_deps. - platform_prefix: Specifies the JetBrains product these tests are run against. Examples are - 'Idea' (IJ CE), 'idea' (IJ UE), 'CLion', 'AndroidStudio'. See - com.intellij.util.PlatformUtils for other options. - required_plugins: optional comma-separated list of plugin IDs. Integration tests will fail if - these plugins aren't loaded at runtime. 
- **kwargs: Any other args to be passed to the java_test. - """ - suite_class_name = name + "TestSuite" - suite_class = test_package_root + "." + suite_class_name - _generate_test_suite( - name = suite_class_name, - srcs = srcs, - test_package_root = test_package_root, - class_rules = ["com.google.idea.testing.BlazeTestSystemPropertiesRule"], - ) - - api_version_txt_name = name + "_api_version" - api_version_txt(name = api_version_txt_name) - data = kwargs.pop("data", []) - data.append(api_version_txt_name) - - deps = list(deps) - deps.extend([ - "//testing:lib", - ]) - runtime_deps = list(runtime_deps) - runtime_deps.extend([ - "//intellij_platform_sdk:bundled_plugins", - "//third_party:jpda-jdi", - ]) - - jvm_flags = list(jvm_flags) - jvm_flags.extend([ - "-Didea.classpath.index.enabled=false", - "-Djava.awt.headless=true", - "-Didea.platform.prefix=" + platform_prefix, - "-Dblaze.idea.api.version.file=$(location %s)" % api_version_txt_name - ]) - - if required_plugins: - jvm_flags.append("-Didea.required.plugins.id=" + required_plugins) - - native.java_test( - name = name, - size = size, - srcs = srcs + [suite_class_name], - data = data, - shard_count = shard_count, - jvm_flags = jvm_flags, - test_class = suite_class, - runtime_deps = runtime_deps, - deps = deps, - **kwargs - ) - -def _get_test_class(test_src, test_package_root): - """Returns the package string of the test class, beginning with the given root.""" - test_path = test_src.short_path - temp = test_path[:-len(".java")] - temp = temp.replace("/", ".") - i = temp.rfind(test_package_root) - if i < 0: - fail("Test source '%s' not under package root '%s'" % (test_path, test_package_root)) - test_class = temp[i:] - return test_class - -def _get_test_srcs(targets): - """Returns all files of the given targets that end with Test.java.""" - files = set() - for target in targets: - files += target.files - return [f for f in files if f.basename.endswith("Test.java")] diff --git a/starlark/src/syntax/testcases/test_rules.bzl b/starlark/src/syntax/testcases/test_rules.bzl deleted file mode 100644 index ef93d62d..00000000 --- a/starlark/src/syntax/testcases/test_rules.bzl +++ /dev/null @@ -1,294 +0,0 @@ -"""Utilities for testing bazel.""" -# -# Copyright 2015 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -### First, trivial tests that either always pass, always fail, -### or sometimes pass depending on a trivial computation. - -def success_target(ctx, msg): - """Return a success for an analysis test. - - The test rule must have an executable output. - - Args: - ctx: the Bazel rule context - msg: an informative message to display - - Returns: - a suitable rule implementation struct(), - with actions that always succeed at execution time. 
- """ - exe = ctx.outputs.executable - dat = ctx.new_file(ctx.genfiles_dir, exe, ".dat") - ctx.file_action( - output=dat, - content=msg) - ctx.file_action( - output=exe, - content="cat " + dat.path + " ; echo", - executable=True) - return struct(runfiles=ctx.runfiles([exe, dat])) - -def _successful_test_impl(ctx): - return success_target(ctx, ctx.attr.msg) - -successful_test = rule( - attrs = {"msg": attr.string(mandatory = True)}, - executable = True, - test = True, - implementation = _successful_test_impl, -) - -def failure_target(ctx, msg): - """Return a failure for an analysis test. - - The test rule must have an executable output. - - Args: - ctx: the Bazel rule context - msg: an informative message to display - - Returns: - a suitable rule implementation struct(), - with actions that always fail at execution time. - """ - ### fail(msg) ### <--- This would fail at analysis time. - exe = ctx.outputs.executable - dat = ctx.new_file(ctx.genfiles_dir, exe, ".dat") - ctx.file_action( - output=dat, - content=msg) - ctx.file_action( - output=exe, - content="(cat " + dat.short_path + " ; echo ) >&2 ; exit 1", - executable=True) - return struct(runfiles=ctx.runfiles([exe, dat])) - -def _failed_test_impl(ctx): - return failure_target(ctx, ctx.attr.msg) - -failed_test = rule( - attrs = {"msg": attr.string(mandatory = True)}, - executable = True, - test = True, - implementation = _failed_test_impl, -) - -### Second, general purpose utilities - -def assert_(condition, string="assertion failed", *args): - """Trivial assertion mechanism. - - Args: - condition: a generalized boolean expected to be true - string: a format string for the error message should the assertion fail - *args: format arguments for the error message should the assertion fail - - Returns: - None. - - Raises: - an error if the condition isn't true. - """ - - if not condition: - fail(string % args) - -def strip_prefix(prefix, string): - assert_(string.startswith(prefix), - "%s does not start with %s", string, prefix) - return string[len(prefix):len(string)] - -def expectation_description(expect=None, expect_failure=None): - """Turn expectation of result or error into a string.""" - if expect_failure: - return "failure " + str(expect_failure) - else: - return "result " + repr(expect) - -def check_results(result, failure, expect, expect_failure): - """See if actual computation results match expectations. - - Args: - result: the result returned by the test if it ran to completion - failure: the failure message caught while testing, if any - expect: the expected result for a successful test, if no failure expected - expect_failure: the expected failure message for the test, if any - - Returns: - a pair (tuple) of a boolean (true if success) and a message (string). - """ - wanted = expectation_description(expect, expect_failure) - found = expectation_description(result, failure) - if wanted == found: - return (True, "successfully computed " + wanted) - else: - return (False, "expect " + wanted + " but found " + found) - -def load_results(name, result=None, failure=None, - expect=None, expect_failure=None): - """issue load-time results of a test. - - Args: - name: the name of the Bazel rule at load time. 
- result: the result returned by the test if it ran to completion - failure: the failure message caught while testing, if any - expect: the expected result for a successful test, if no failure expected - expect_failure: the expected failure message for the test, if any - - Returns: - None, after issuing a rule that will succeed at execution time if - expectations were met. - """ - (is_success, msg) = check_results(result, failure, expect, expect_failure) - this_test = successful_test if is_success else failed_test - return this_test(name=name, msg=msg) - -def analysis_results(ctx, result=None, failure=None, - expect=None, expect_failure=None): - """issue analysis-time results of a test. - - Args: - ctx: the Bazel rule context - result: the result returned by the test if it ran to completion - failure: the failure message caught while testing, if any - expect: the expected result for a successful test, if no failure expected - expect_failure: the expected failure message for the test, if any - - Returns: - a suitable rule implementation struct(), - with actions that succeed at execution time if expectation were met, - or fail at execution time if they didn't. - """ - (is_success, msg) = check_results(result, failure, expect, expect_failure) - this_test = success_target if is_success else failure_target - return this_test(ctx, msg) - -### Simple tests - -def _rule_test_impl(ctx): - """check that a rule generates the desired outputs and providers.""" - rule_ = ctx.attr.rule - rule_name = str(rule_.label) - exe = ctx.outputs.executable - if ctx.attr.generates: - # Generate the proper prefix to remove from generated files. - prefix_parts = [] - - if rule_.label.workspace_root: - # Create a prefix that is correctly relative to the output of this rule. - prefix_parts = ["..", strip_prefix("external/", rule_.label.workspace_root)] - - if rule_.label.package: - prefix_parts.append(rule_.label.package) - - prefix = "/".join(prefix_parts) - - if prefix: - # If the prefix isn't empty, it needs a trailing slash. - prefix = prefix + "/" - - # TODO(bazel-team): Use set() instead of sorted() once - # set comparison is implemented. - # TODO(bazel-team): Use a better way to determine if two paths refer to - # the same file. - generates = sorted(ctx.attr.generates) - generated = sorted([strip_prefix(prefix, f.short_path) - for f in rule_.files]) - if generates != generated: - fail("rule %s generates %s not %s" - % (rule_name, repr(generated), repr(generates))) - provides = ctx.attr.provides - if provides: - files = [] - commands = [] - for k in provides.keys(): - if hasattr(rule_, k): - v = repr(getattr(rule_, k)) - else: - fail(("rule %s doesn't provide attribute %s. " - + "Its list of attributes is: %s") - % (rule_name, k, dir(rule_))) - file_ = ctx.new_file(ctx.genfiles_dir, exe, "." + k) - files += [file_] - regexp = provides[k] - commands += [ - "if ! 
grep %s %s ; then echo 'bad %s:' ; cat %s ; echo ; exit 1 ; fi" - % (repr(regexp), file_.short_path, k, file_.short_path)] - ctx.file_action(output=file_, content=v) - script = "\n".join(commands + ["true"]) - ctx.file_action(output=exe, content=script, executable=True) - return struct(runfiles=ctx.runfiles([exe] + files)) - else: - return success_target(ctx, "success") - -rule_test = rule( - attrs = { - "rule": attr.label(mandatory = True), - "generates": attr.string_list(), - "provides": attr.string_dict(), - }, - executable = True, - test = True, - implementation = _rule_test_impl, -) - -def _file_test_impl(ctx): - """check that a file has a given content.""" - exe = ctx.outputs.executable - file_ = ctx.file.file - content = ctx.attr.content - regexp = ctx.attr.regexp - matches = ctx.attr.matches - if bool(content) == bool(regexp): - fail("Must specify one and only one of content or regexp") - if content and matches != -1: - fail("matches only makes sense with regexp") - if content: - dat = ctx.new_file(ctx.genfiles_dir, exe, ".dat") - ctx.file_action( - output=dat, - content=content) - ctx.file_action( - output=exe, - content="diff -u %s %s" % (dat.short_path, file_.short_path), - executable=True) - return struct(runfiles=ctx.runfiles([exe, dat, file_])) - if matches != -1: - script = "[ %s == $(grep -c %s %s) ]" % ( - matches, repr(regexp), file_.short_path) - else: - script = "grep %s %s" % (repr(regexp), file_.short_path) - ctx.file_action( - output=exe, - content=script, - executable=True) - return struct(runfiles=ctx.runfiles([exe, file_])) - -file_test = rule( - attrs = { - "file": attr.label( - mandatory = True, - allow_files = True, - single_file = True, - ), - "content": attr.string(default = ""), - "regexp": attr.string(default = ""), - "matches": attr.int(default = -1), - }, - executable = True, - test = True, - implementation = _file_test_impl, -) diff --git a/starlark/src/syntax/testcases/tests.bzl b/starlark/src/syntax/testcases/tests.bzl deleted file mode 100644 index 594e5839..00000000 --- a/starlark/src/syntax/testcases/tests.bzl +++ /dev/null @@ -1,21 +0,0 @@ -load("//tools/bzl:junit.bzl", "junit_tests") - -def acceptance_tests( - group, - deps = [], - labels = [], - vm_args = ['-Xmx256m'], - **kwargs): - junit_tests( - name = group, - deps = deps + [ - '//gerrit-acceptance-tests:lib', - ], - tags = labels + [ - 'acceptance', - 'slow', - ], - size = "large", - jvm_flags = vm_args, - **kwargs - ) diff --git a/starlark/src/syntax/testcases/toolchain.bzl b/starlark/src/syntax/testcases/toolchain.bzl deleted file mode 100644 index 2e74f024..00000000 --- a/starlark/src/syntax/testcases/toolchain.bzl +++ /dev/null @@ -1,148 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -load("@io_bazel_rules_go//go/private:common.bzl", "env_execute") - -def executable_extension(ctx): - extension = "" - if ctx.os.name.startswith('windows'): - extension = ".exe" - return extension - -def _go_host_sdk_impl(ctx): - path = _detect_host_sdk(ctx) - _sdk_build_file(ctx) - _local_sdk(ctx, path) - _prepare(ctx) - -go_host_sdk = repository_rule(_go_host_sdk_impl, environ = ["GOROOT"]) - -def _go_download_sdk_impl(ctx): - if ctx.os.name == 'linux': - res = ctx.execute(['uname', '-p']) - if res.return_code == 0 and res.stdout == 's390x': - host = "linux_s390x" - else: - # uname -p, -i, and -m can return wildly different values on different - # distributions and versions. Always default to amd64. - host = "linux_amd64" - elif ctx.os.name == 'mac os x': - host = "darwin_amd64" - elif ctx.os.name.startswith('windows'): - host = "windows_amd64" - else: - fail("Unsupported operating system: " + ctx.os.name) - sdks = ctx.attr.sdks - if host not in sdks: fail("Unsupported host {}".format(host)) - filename, sha256 = ctx.attr.sdks[host] - _sdk_build_file(ctx) - _remote_sdk(ctx, [url.format(filename) for url in ctx.attr.urls], ctx.attr.strip_prefix, sha256) - _prepare(ctx) - -go_download_sdk = repository_rule(_go_download_sdk_impl, - attrs = { - "sdks": attr.string_list_dict(), - "urls": attr.string_list(default=["https://storage.googleapis.com/golang/{}"]), - "strip_prefix": attr.string(default="go"), - }, -) - -def _go_local_sdk_impl(ctx): - _sdk_build_file(ctx) - _local_sdk(ctx, ctx.attr.path) - _prepare(ctx) - -go_local_sdk = repository_rule(_go_local_sdk_impl, - attrs = { - "path": attr.string(), - }, -) - -def _go_sdk_impl(ctx): - urls = ctx.attr.urls - if ctx.attr.url: - print("DEPRECATED: use urls instead of url on go_sdk, {}".format(ctx.attr.url)) - urls = [ctx.attr.url] + urls - if urls: - if ctx.attr.path: - fail("url and path cannot both be set on go_sdk, got {} and {}".format(urls, ctx.attr.path)) - _sdk_build_file(ctx) - _remote_sdk(ctx, urls, ctx.attr.strip_prefix, ctx.attr.sha256) - elif ctx.attr.path: - print("DEPRECATED: go_sdk with a path, please use go_local_sdk") - _sdk_build_file(ctx) - _local_sdk(ctx, ctx.attr.path) - else: - print("DEPRECATED: go_sdk without path or urls, please use go_host_sdk") - path = _detect_host_sdk(ctx) - _sdk_build_file(ctx) - _local_sdk(ctx, path) - _prepare(ctx) - - -def _prepare(ctx): - # Create a text file with a list of standard packages. - # OPT: just list directories under src instead of running "go list". No - # need to read all source files. We need a portable way to run code though. 
- result = env_execute(ctx, - arguments = ["bin/go", "list", "..."], - environment = {"GOROOT": str(ctx.path("."))}, - ) - if result.return_code != 0: - print(result.stderr) - fail("failed to list standard packages") - ctx.file("packages.txt", result.stdout) - -go_sdk = repository_rule( - implementation = _go_sdk_impl, - attrs = { - "path": attr.string(), - "url": attr.string(), - "urls": attr.string_list(), - "strip_prefix": attr.string(default="go"), - "sha256": attr.string(), - }, -) -"""See /go/toolchains.rst#go-sdk for full documentation.""" - -def _remote_sdk(ctx, urls, strip_prefix, sha256): - ctx.download_and_extract( - url = urls, - stripPrefix = strip_prefix, - sha256 = sha256, - ) - -def _local_sdk(ctx, path): - for entry in ["src", "pkg", "bin"]: - ctx.symlink(path+"/"+entry, entry) - -def _sdk_build_file(ctx): - ctx.file("ROOT") - ctx.template("BUILD.bazel", - Label("@io_bazel_rules_go//go/private:BUILD.sdk.bazel"), - executable = False, - ) - -def _detect_host_sdk(ctx): - root = "@invalid@" - if "GOROOT" in ctx.os.environ: - return ctx.os.environ["GOROOT"] - res = ctx.execute(["go"+executable_extension(ctx), "env", "GOROOT"]) - if res.return_code: - fail("Could not detect host go version") - root = res.stdout.strip() - if not root: - fail("host go version failed to report it's GOROOT") - return root - diff --git a/starlark/src/syntax/testcases/toolchain_utils.bzl b/starlark/src/syntax/testcases/toolchain_utils.bzl deleted file mode 100644 index 83d07074..00000000 --- a/starlark/src/syntax/testcases/toolchain_utils.bzl +++ /dev/null @@ -1,54 +0,0 @@ -# pylint: disable=g-bad-file-header -# Copyright 2016 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Finds the c++ toolchain if it is enabled. - -Otherwise, falls back to a toolchain constructed from the CppConfiguration. -""" - -def _get_cpp_toolchain_attr(ctx, attr): - if hasattr(ctx.attr._cc_toolchain[cc_common.CcToolchainInfo], attr): - return getattr(ctx.attr._cc_toolchain[cc_common.CcToolchainInfo], attr) - else: - return getattr(ctx.fragments.cpp, attr) - -def _make_legacy_toolchain(ctx): - return struct( - objcopy_executable = _get_cpp_toolchain_attr(ctx, "objcopy_executable"), - compiler_executable = _get_cpp_toolchain_attr(ctx, "compiler_executable"), - preprocessor_executable = _get_cpp_toolchain_attr(ctx, "preprocessor_executable"), - nm_executable = _get_cpp_toolchain_attr(ctx, "nm_executable"), - objdump_executable = _get_cpp_toolchain_attr(ctx, "objdump_executable"), - ar_executable = _get_cpp_toolchain_attr(ctx, "ar_executable"), - strip_executable = _get_cpp_toolchain_attr(ctx, "strip_executable"), - ld_executable = _get_cpp_toolchain_attr(ctx, "ld_executable"), - ) - -def find_cpp_toolchain(ctx): - """If the c++ toolchain is in use, returns it. - - Otherwise, returns a c++ toolchain derived from legacy toolchain selection. - - Args: - ctx: The rule context for which to find a toolchain. - - Returns: - A CcToolchainProvider. 
- """ - - if Label("@bazel_tools//tools/cpp:toolchain_type") in ctx.fragments.platform.enabled_toolchain_types: - return ctx.toolchains["@bazel_tools//tools/cpp:toolchain_type"] - else: - return _make_legacy_toolchain(ctx) diff --git a/starlark/src/syntax/testcases/toolchains.bzl b/starlark/src/syntax/testcases/toolchains.bzl deleted file mode 100644 index 6e6f9d6b..00000000 --- a/starlark/src/syntax/testcases/toolchains.bzl +++ /dev/null @@ -1,206 +0,0 @@ -load("//go/private:go_toolchain.bzl", - "go_toolchain", -) -load("//go/private:toolchain.bzl", - "go_download_sdk", - "go_host_sdk", -) -load("//go/platform:list.bzl", - "GOARCH", - "GOOS", - "GOOS_GOARCH", -) - -DEFAULT_VERSION = "1.9.2" - -SDK_REPOSITORIES = { - "1.9.2": { - "darwin_amd64": ("go1.9.2.darwin-amd64.tar.gz", "73fd5840d55f5566d8db6c0ffdd187577e8ebe650c783f68bd27cbf95bde6743"), - "linux_386": ("go1.9.2.linux-386.tar.gz", "574b2c4b1a248e58ef7d1f825beda15429610a2316d9cbd3096d8d3fa8c0bc1a"), - "linux_amd64": ("go1.9.2.linux-amd64.tar.gz", "de874549d9a8d8d8062be05808509c09a88a248e77ec14eb77453530829ac02b"), - "linux_armv6l": ("go1.9.2.linux-armv6l.tar.gz", "8a6758c8d390e28ef2bcea511f62dcb43056f38c1addc06a8bc996741987e7bb"), - "windows_386": ("go1.9.2.windows-386.zip", "35d3be5d7b97c6d11ffb76c1b19e20a824e427805ee918e82c08a2e5793eda20"), - "windows_amd64": ("go1.9.2.windows-amd64.zip", "682ec3626a9c45b657c2456e35cadad119057408d37f334c6c24d88389c2164c"), - "freebsd_386": ("go1.9.2.freebsd-386.tar.gz", "809dcb0a8457c8d0abf954f20311a1ee353486d0ae3f921e9478189721d37677"), - "freebsd_amd64": ("go1.9.2.freebsd-amd64.tar.gz", "8be985c3e251c8e007fa6ecd0189bc53e65cc519f4464ddf19fa11f7ed251134"), - "linux_arm64": ("go1.9.2.linux-arm64.tar.gz", "0016ac65ad8340c84f51bc11dbb24ee8265b0a4597dbfdf8d91776fc187456fa"), - "linux_ppc64le": ("go1.9.2.linux-ppc64le.tar.gz", "adb440b2b6ae9e448c253a20836d8e8aa4236f731d87717d9c7b241998dc7f9d"), - "linux_s390x": ("go1.9.2.linux-s390x.tar.gz", "a7137b4fbdec126823a12a4b696eeee2f04ec616e9fb8a54654c51d5884c1345"), - }, - "1.9.1": { - "darwin_amd64": ("go1.9.1.darwin-amd64.tar.gz", "59bc6deee2969dddc4490b684b15f63058177f5c7e27134c060288b7d76faab0"), - "linux_386": ("go1.9.1.linux-386.tar.gz", "2cea1ce9325cb40839601b566bc02b11c92b2942c21110b1b254c7e72e5581e7"), - "linux_amd64": ("go1.9.1.linux-amd64.tar.gz", "07d81c6b6b4c2dcf1b5ef7c27aaebd3691cdb40548500941f92b221147c5d9c7"), - "linux_armv6l": ("go1.9.1.linux-armv6l.tar.gz", "65a0495a50c7c240a6487b1170939586332f6c8f3526abdbb9140935b3cff14c"), - "windows_386": ("go1.9.1.windows-386.zip", "ea9c79c9e6214c9a78a107ef5a7bff775a281bffe8c2d50afa66d2d33998078a"), - "windows_amd64": ("go1.9.1.windows-amd64.zip", "8dc72a3881388e4e560c2e45f6be59860b623ad418e7da94e80fee012221cc81"), - "freebsd_386": ("go1.9.1.freebsd-386.tar.gz", "0da7ad96606a8ceea85652eb20816077769d51de9219d85b9b224a3390070c50"), - "freebsd_amd64": ("go1.9.1.freebsd-amd64.tar.gz", "c4eeacbb94821c5f252897a4d49c78293eaa97b29652d789dce9e79bc6aa6163"), - "linux_arm64": ("go1.9.1.linux-arm64.tar.gz", "d31ecae36efea5197af271ccce86ccc2baf10d2e04f20d0fb75556ecf0614dad"), - "linux_ppc64le": ("go1.9.1.linux-ppc64le.tar.gz", "de57b6439ce9d4dd8b528599317a35fa1e09d6aa93b0a80e3945018658d963b8"), - "linux_s390x": ("go1.9.1.linux-s390x.tar.gz", "9adf03574549db82a72e0d721ef2178ec5e51d1ce4f309b271a2bca4dcf206f6"), - }, - "1.9": { - "darwin_amd64": ("go1.9.darwin-amd64.tar.gz", "c2df361ec6c26fcf20d5569496182cb20728caa4d351bc430b2f0f1212cca3e0"), - "linux_386": ("go1.9.linux-386.tar.gz", 
"7cccff99dacf59162cd67f5b11070d667691397fd421b0a9ad287da019debc4f"), - "linux_amd64": ("go1.9.linux-amd64.tar.gz", "d70eadefce8e160638a9a6db97f7192d8463069ab33138893ad3bf31b0650a79"), - "linux_armv6l": ("go1.9.linux-armv6l.tar.gz", "f52ca5933f7a8de2daf7a3172b0406353622c6a39e67dd08bbbeb84c6496f487"), - "windows_386": ("go1.9.windows-386.zip", "ecfe6f5be56acedc56cd9ff735f239a12a7c94f40b0ea9753bbfd17396f5e4b9"), - "windows_amd64": ("go1.9.windows-amd64.zip", "874b144b994643cff1d3f5875369d65c01c216bb23b8edddf608facc43966c8b"), - "freebsd_386": ("go1.9.freebsd-386.tar.gz", "9e415e340eaea526170b0fd59aa55939ff4f76c126193002971e8c6799e2ed3a"), - "freebsd_amd64": ("go1.9.freebsd-amd64.tar.gz", "ba54efb2223fb4145604dcaf8605d519467f418ab02c081d3cd0632b6b43b6e7"), - "linux_ppc64le": ("go1.9.linux-ppc64le.tar.gz", "10b66dae326b32a56d4c295747df564616ec46ed0079553e88e39d4f1b2ae985"), - "linux_arm64": ("go1.9.linux-arm64.tar.gz", "0958dcf454f7f26d7acc1a4ddc34220d499df845bc2051c14ff8efdf1e3c29a6"), - "linux_s390x": ("go1.9.linux-s390x.tar.gz", "e06231e4918528e2eba1d3cff9bc4310b777971e5d8985f9772c6018694a3af8"), - }, - "1.8.5": { - "darwin_amd64": ("go1.8.5.darwin-amd64.tar.gz", "af5bd0c8e669a61f4b38fcce03bbf02f1ce672724a95c2ad61e89c6785f5c51e"), - "linux_386": ("go1.8.5.linux-386.tar.gz", "cf959b60b89acb588843ff985ecb47a7f6c37da6e4987739ab4aafad7211464f"), - "linux_amd64": ("go1.8.5.linux-amd64.tar.gz", "4f8aeea2033a2d731f2f75c4d0a4995b357b22af56ed69b3015f4291fca4d42d"), - "linux_armv6l": ("go1.8.5.linux-armv6l.tar.gz", "f5c58e7fd6cdfcc40b94c6655cf159b25836dffe13431f683b51705b8a67d608"), - "windows_386": ("go1.8.5.windows-386.zip", "c14d800bb79bf38a945f83cf37005609b719466c0051d20a5fc59d6efdd6fc66"), - "windows_amd64": ("go1.8.5.windows-amd64.zip", "137827cabff27cc36cbe13018f629a6418c2a6af85adde1b1bfb8d000c9fc1ae"), - "freebsd_386": ("go1.8.5.freebsd-386.tar.gz", "b7e246c9ec1b68e481abe6190caf79cc7179b9308c30076081a9dc90b3a12f99"), - "freebsd_amd64": ("go1.8.5.freebsd-amd64.tar.gz", "8a025284c1911aba8d133e9fcadd6a6dcf5dc78b0d8139be88747cea09773407"), - "linux_ppc64le": ("go1.8.5.linux-ppc64le.tar.gz", "1ee0874ce8c8625e14b4457a4861777be78f30067d914bcb264f7e0331d087de"), - "linux_s390x": ("go1.8.5.linux-s390x.tar.gz", "e978a56842297dc8924555540314ff09128e9a62da9881c3a26771ddd5d7ebc2"), - }, - "1.8.4": { - "darwin_amd64": ("go1.8.4.darwin-amd64.tar.gz", "cf803053aec24425d7be986af6dff0051bb48527bcdfa5b9ffeb4d40701ab54e"), - "linux_386": ("go1.8.4.linux-386.tar.gz", "00354388d5f7d21b69c62361e73250d2633124e8599386f704f6dd676a2f82ac"), - "linux_amd64": ("go1.8.4.linux-amd64.tar.gz", "0ef737a0aff9742af0f63ac13c97ce36f0bbc8b67385169e41e395f34170944f"), - "linux_armv6l": ("go1.8.4.linux-armv6l.tar.gz", "76329898bb9f2be0f86b07f05a6336818cb12f3a416ab3061aa0d5f2ea5c6ff0"), - "windows_386": ("go1.8.4.windows-386.zip", "c0f949174332e5b9d4f025c84338bbec1c94b436f249c20aade04a024537f0be"), - "windows_amd64": ("go1.8.4.windows-amd64.zip", "2ddfea037fd5e2eeb0cb854c095f6e44aaec27e8bbf76dca9a11a88e3a49bbf7"), - "freebsd_386": ("go1.8.4.freebsd-386.tar.gz", "4764920bc94cc9723e7a9a65ae7764922e0ab6148e1cf206bbf37062997fdf4c"), - "freebsd_amd64": ("go1.8.4.freebsd-amd64.tar.gz", "21dd9899b91f4aaeeb85c7bb7db6cd4b44be089b2a7397ea8f9f2e3397a0b5c6"), - "linux_ppc64le": ("go1.8.4.linux-ppc64le.tar.gz", "0f043568d65fd8121af6b35a39f4f20d292a03372b6531e80b743ee0689eb717"), - "linux_s390x": ("go1.8.4.linux-s390x.tar.gz", "aa998b7ac8882c549f7017d2e9722a3102cb9e6b92010baf5153a6dcf98205b1"), - }, - "1.8.3": { - "darwin_amd64": 
("go1.8.3.darwin-amd64.tar.gz", "f20b92bc7d4ab22aa18270087c478a74463bd64a893a94264434a38a4b167c05"), - "linux_386": ("go1.8.3.linux-386.tar.gz", "ff4895eb68fb1daaec41c540602e8bb4c1e8bb2f0e7017367171913fc9995ed2"), - "linux_amd64": ("go1.8.3.linux-amd64.tar.gz", "1862f4c3d3907e59b04a757cfda0ea7aa9ef39274af99a784f5be843c80c6772"), - "linux_armv6l": ("go1.8.3.linux-armv6l.tar.gz", "3c30a3e24736ca776fc6314e5092fb8584bd3a4a2c2fa7307ae779ba2735e668"), - "windows_386": ("go1.8.3.windows-386.zip", "9e2bfcb8110a3c56f23b91f859963269bc29fd114190fecfd0a539395272a1c7"), - "windows_amd64": ("go1.8.3.windows-amd64.zip", "de026caef4c5b4a74f359737dcb2d14c67ca45c45093755d3b0d2e0ee3aafd96"), - "freebsd_386": ("go1.8.3.freebsd-386.tar.gz", "d301cc7c2b8b0ccb384ac564531beee8220727fd27ca190b92031a2e3e230224"), - "freebsd_amd64": ("go1.8.3.freebsd-amd64.tar.gz", "1bf5f076d48609012fe01b95e2a58e71e56719a04d576fe3484a216ad4b9c495"), - "linux_ppc64le": ("go1.8.3.linux-ppc64le.tar.gz", "e5fb00adfc7291e657f1f3d31c09e74890b5328e6f991a3f395ca72a8c4dc0b3"), - "linux_s390x": ("go1.8.3.linux-s390x.tar.gz", "e2ec3e7c293701b57ca1f32b37977ac9968f57b3df034f2cc2d531e80671e6c8"), - }, - "1.8.2": { - "linux_amd64": ("go1.8.2.linux-amd64.tar.gz", "5477d6c9a4f96fa120847fafa88319d7b56b5d5068e41c3587eebe248b939be7"), - "darwin_amd64": ("go1.8.2.darwin-amd64.tar.gz", "3f783c33686e6d74f6c811725eb3775c6cf80b9761fa6d4cebc06d6d291be137"), - }, - "1.8.1": { - "linux_amd64": ("go1.8.1.linux-amd64.tar.gz", "a579ab19d5237e263254f1eac5352efcf1d70b9dacadb6d6bb12b0911ede8994"), - "darwin_amd64": ("go1.8.1.darwin-amd64.tar.gz", "25b026fe2f4de7c80b227f69588b06b93787f5b5f134fbf2d652926c08c04bcd"), - }, - "1.8": { - "linux_amd64": ("go1.8.linux-amd64.tar.gz", "3ab94104ee3923e228a2cb2116e5e462ad3ebaeea06ff04463479d7f12d27ca"), - "darwin_amd64": ("go1.8.darwin-amd64.tar.gz", "fdc9f98b76a28655a8770a1fc8197acd8ef746dd4d8a60589ce19604ba2a120"), - }, -} - -def _generate_toolchains(): - # Use all the above information to generate all the possible toolchains we might support - toolchains = [] - for host_goos, host_goarch in GOOS_GOARCH: - host = "{}_{}".format(host_goos, host_goarch) - for target_goos, target_goarch in GOOS_GOARCH: - target = "{}_{}".format(target_goos, target_goarch) - toolchain_name = "go_{}".format(host) - if host != target: - toolchain_name += "_cross_" + target - link_flags = [] - cgo_link_flags = [] - if "darwin" in host: - # workaround for a bug in ld(1) on Mac OS X. - # http://lists.apple.com/archives/Darwin-dev/2006/Sep/msg00084.html - # TODO(yugui) Remove this workaround once rules_go stops supporting XCode 7.2 - # or earlier. 
- link_flags.append("-s") - cgo_link_flags.extend(["-shared", "-Wl,-all_load"]) - if "linux" in host: - cgo_link_flags.append("-Wl,-whole-archive") - # Add the primary toolchain - toolchains.append(dict( - name = toolchain_name, - host = host, - target = target, - link_flags = link_flags, - cgo_link_flags = cgo_link_flags, - )) - return toolchains - -_toolchains = _generate_toolchains() -_label_prefix = "@io_bazel_rules_go//go/toolchain:" - -def go_register_toolchains(go_version=DEFAULT_VERSION): - """See /go/toolchains.rst#go-register-toolchains for full documentation.""" - if "go_sdk" not in native.existing_rules(): - if go_version in SDK_REPOSITORIES: - go_download_sdk( - name = "go_sdk", - sdks = SDK_REPOSITORIES[go_version], - ) - elif go_version == "host": - go_host_sdk( - name = "go_sdk" - ) - else: - fail("Unknown go version {}".format(go_version)) - - # Use the final dictionaries to register all the toolchains - for toolchain in _toolchains: - name = _label_prefix + toolchain["name"] - native.register_toolchains(name) - if toolchain["host"] == toolchain["target"]: - name = name + "-bootstrap" - native.register_toolchains(name) - -def declare_constraints(): - for goos, constraint in GOOS.items(): - if constraint: - native.alias( - name = goos, - actual = constraint, - ) - else: - native.constraint_value( - name = goos, - constraint_setting = "@bazel_tools//platforms:os", - ) - for goarch, constraint in GOARCH.items(): - if constraint: - native.alias( - name = goarch, - actual = constraint, - ) - else: - native.constraint_value( - name = goarch, - constraint_setting = "@bazel_tools//platforms:cpu", - ) - for goos, goarch in GOOS_GOARCH: - native.platform( - name = goos + "_" + goarch, - constraint_values = [ - ":" + goos, - ":" + goarch, - ], - ) - -def declare_toolchains(): - # Use the final dictionaries to create all the toolchains - for toolchain in _toolchains: - go_toolchain( - # Required fields - name = toolchain["name"], - host = toolchain["host"], - target = toolchain["target"], - # Optional fields - link_flags = toolchain["link_flags"], - cgo_link_flags = toolchain["cgo_link_flags"], - ) diff --git a/starlark/src/syntax/testcases/transitive_maven_jar.bzl b/starlark/src/syntax/testcases/transitive_maven_jar.bzl deleted file mode 100644 index d215083d..00000000 --- a/starlark/src/syntax/testcases/transitive_maven_jar.bzl +++ /dev/null @@ -1,43 +0,0 @@ -MAX_TIMEOUT = 0x7FFFFFFF - -def _validate_coordinate_length(coordinate): - parts = coordinate.split(":") - return len(parts) >= 2 and len(parts) <= 5 - -def _validate_coordinates(rctx): - coordinates = rctx.attr.artifacts - for coord in coordinates: - if _validate_coordinate_length(coord) == False: - fail("Invalid coordinate %s. 
Generally formatted as \"group:artifact:version\"" % coord) - return True - -def _create_arguments(rctx): - arguments = ['--artifact ' + artifact for artifact in rctx.attr.artifacts] - return ' '.join(arguments) - -def _execute(rctx, command_string, quiet): - return rctx.execute(["bash", "-c", command_string], timeout = rctx.attr._timeout, quiet = quiet) - -def _transitive_maven_jar_impl(rctx): - _validate_coordinates(rctx) - arguments = _create_arguments(rctx) - quiet = rctx.attr.quiet - - jar_path = rctx.path(rctx.attr._generate_workspace_tool) - - # execute the command - result = _execute(rctx, "java -jar %s %s" % (jar_path, arguments), quiet) - rctx.file('%s/BUILD' % rctx.path(''), '', False) - -transitive_maven_jar = repository_rule( - implementation = _transitive_maven_jar_impl, - attrs = { - "artifacts" : attr.string_list(default = [], mandatory = True), - "quiet" : attr.bool(default = False, mandatory = False), - "_timeout" : attr.int(default = MAX_TIMEOUT), - "_generate_workspace_tool" : attr.label(executable = True, allow_files = True, cfg = "host", default = Label("//transitive_maven_jar:generate_workspace_deploy.jar")) - #TODO(petros): add support for private repositories. - }, - local = False, -) - diff --git a/starlark/src/syntax/testcases/unix_cc_configure.bzl b/starlark/src/syntax/testcases/unix_cc_configure.bzl deleted file mode 100644 index 9814831d..00000000 --- a/starlark/src/syntax/testcases/unix_cc_configure.bzl +++ /dev/null @@ -1,401 +0,0 @@ -# pylint: disable=g-bad-file-header -# Copyright 2016 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Configuring the C++ toolchain on Unix platforms.""" - - -load( - "@bazel_tools//tools/cpp:lib_cc_configure.bzl", - "escape_string", - "get_env_var", - "which", - "tpl", -) - - -def _get_value(it): - """Convert `it` in serialized protobuf format.""" - if type(it) == "int": - return str(it) - elif type(it) == "bool": - return "true" if it else "false" - else: - return "\"%s\"" % it - - -def _build_crosstool(d, prefix=" "): - """Convert `d` to a string version of a CROSSTOOL file content.""" - lines = [] - for k in d: - if type(d[k]) == "list": - for it in d[k]: - lines.append("%s%s: %s" % (prefix, k, _get_value(it))) - else: - lines.append("%s%s: %s" % (prefix, k, _get_value(d[k]))) - return "\n".join(lines) - - -def _build_tool_path(d): - """Build the list of %-escaped tool_path for the CROSSTOOL file.""" - lines = [] - for k in d: - lines.append(" tool_path {name: \"%s\" path: \"%s\" }" % (k, escape_string(d[k]))) - return "\n".join(lines) - - -def _get_tool_paths(repository_ctx, darwin, cc): - """Compute the path to the various tools. 
Doesn't %-escape the result!""" - return {k: which(repository_ctx, k, "/usr/bin/" + k) - for k in [ - "ld", - "cpp", - "dwp", - "gcov", - "nm", - "objcopy", - "objdump", - "strip", - ]} + { - "gcc": cc, - "ar": "/usr/bin/libtool" - if darwin else which(repository_ctx, "ar", "/usr/bin/ar") - } - - -def _escaped_cplus_include_paths(repository_ctx): - """Use ${CPLUS_INCLUDE_PATH} to compute the %-escaped list of flags for cxxflag.""" - if "CPLUS_INCLUDE_PATH" in repository_ctx.os.environ: - result = [] - for p in repository_ctx.os.environ["CPLUS_INCLUDE_PATH"].split(":"): - p = escape_string(str(repository_ctx.path(p))) # Normalize the path - result.append("-I" + p) - return result - else: - return [] - - -_INC_DIR_MARKER_BEGIN = "#include <...>" - -# OSX add " (framework directory)" at the end of line, strip it. -_OSX_FRAMEWORK_SUFFIX = " (framework directory)" -_OSX_FRAMEWORK_SUFFIX_LEN = len(_OSX_FRAMEWORK_SUFFIX) - -def _cxx_inc_convert(path): - """Convert path returned by cc -E xc++ in a complete path. Doesn't %-escape the path!""" - path = path.strip() - if path.endswith(_OSX_FRAMEWORK_SUFFIX): - path = path[:-_OSX_FRAMEWORK_SUFFIX_LEN].strip() - return path - - -def get_escaped_cxx_inc_directories(repository_ctx, cc): - """Compute the list of default %-escaped C++ include directories.""" - result = repository_ctx.execute([cc, "-E", "-xc++", "-", "-v"]) - index1 = result.stderr.find(_INC_DIR_MARKER_BEGIN) - if index1 == -1: - return [] - index1 = result.stderr.find("\n", index1) - if index1 == -1: - return [] - index2 = result.stderr.rfind("\n ") - if index2 == -1 or index2 < index1: - return [] - index2 = result.stderr.find("\n", index2 + 1) - if index2 == -1: - inc_dirs = result.stderr[index1 + 1:] - else: - inc_dirs = result.stderr[index1 + 1:index2].strip() - - return [escape_string(repository_ctx.path(_cxx_inc_convert(p))) - for p in inc_dirs.split("\n")] - - -def _add_option_if_supported(repository_ctx, cc, option): - """Checks that `option` is supported by the C compiler. Doesn't %-escape the option.""" - result = repository_ctx.execute([ - cc, - option, - "-o", - "/dev/null", - "-c", - str(repository_ctx.path("tools/cpp/empty.cc")) - ]) - return [option] if result.stderr.find(option) == -1 else [] - - -def _is_gold_supported(repository_ctx, cc): - """Checks that `gold` is supported by the C compiler.""" - result = repository_ctx.execute([ - cc, - "-fuse-ld=gold", - "-o", - "/dev/null", - # Some macos clang versions don't fail when setting -fuse-ld=gold, adding - # these lines to force it to. This also means that we will not detect - # gold when only a very old (year 2010 and older) is present. 
- "-Wl,--start-lib", - "-Wl,--end-lib", - str(repository_ctx.path("tools/cpp/empty.cc")) - ]) - return result.return_code == 0 - - -def _crosstool_content(repository_ctx, cc, cpu_value, darwin): - """Return the content for the CROSSTOOL file, in a dictionary.""" - supports_gold_linker = _is_gold_supported(repository_ctx, cc) - return { - "abi_version": escape_string(get_env_var(repository_ctx, "ABI_VERSION", "local", False)), - "abi_libc_version": escape_string(get_env_var(repository_ctx, "ABI_LIBC_VERSION", "local", False)), - "builtin_sysroot": "", - "compiler": escape_string(get_env_var(repository_ctx, "BAZEL_COMPILER", "compiler", False)), - "host_system_name": escape_string(get_env_var(repository_ctx, "BAZEL_HOST_SYSTEM", "local", False)), - "needsPic": True, - "supports_gold_linker": supports_gold_linker, - "supports_incremental_linker": False, - "supports_fission": False, - "supports_interface_shared_objects": False, - "supports_normalizing_ar": False, - "supports_start_end_lib": supports_gold_linker, - "target_libc": "macosx" if darwin else escape_string(get_env_var(repository_ctx, "BAZEL_TARGET_LIBC", "local", False)), - "target_cpu": escape_string(get_env_var(repository_ctx, "BAZEL_TARGET_CPU", cpu_value, False)), - "target_system_name": escape_string(get_env_var(repository_ctx, "BAZEL_TARGET_SYSTEM", "local", False)), - "cxx_flag": [ - "-std=c++0x", - ] + _escaped_cplus_include_paths(repository_ctx), - "linker_flag": [ - "-lstdc++", - "-lm", # Some systems expect -lm in addition to -lstdc++ - # Anticipated future default. - ] + ( - ["-fuse-ld=gold"] if supports_gold_linker else [] - ) + _add_option_if_supported( - repository_ctx, cc, "-Wl,-no-as-needed" - ) + _add_option_if_supported( - repository_ctx, cc, "-Wl,-z,relro,-z,now" - ) + ([ - "-undefined", - "dynamic_lookup", - "-headerpad_max_install_names", - ] if darwin else [ - "-B" + str(repository_ctx.path(cc).dirname), - # Always have -B/usr/bin, see https://github.com/bazelbuild/bazel/issues/760. - "-B/usr/bin", - # Gold linker only? Can we enable this by default? - # "-Wl,--warn-execstack", - # "-Wl,--detect-odr-violations" - ] + _add_option_if_supported( - # Have gcc return the exit code from ld. - repository_ctx, cc, "-pass-exit-codes") - ), - "cxx_builtin_include_directory": get_escaped_cxx_inc_directories(repository_ctx, cc), - "objcopy_embed_flag": ["-I", "binary"], - "unfiltered_cxx_flag": - # If the compiler sometimes rewrites paths in the .d files without symlinks - # (ie when they're shorter), it confuses Bazel's logic for verifying all - # #included header files are listed as inputs to the action. - _add_option_if_supported(repository_ctx, cc, "-fno-canonical-system-headers") + [ - # Make C++ compilation deterministic. Use linkstamping instead of these - # compiler symbols. - "-Wno-builtin-macro-redefined", - "-D__DATE__=\\\"redacted\\\"", - "-D__TIMESTAMP__=\\\"redacted\\\"", - "-D__TIME__=\\\"redacted\\\"" - ], - "compiler_flag": [ - # Security hardening requires optimization. - # We need to undef it as some distributions now have it enabled by default. - "-U_FORTIFY_SOURCE", - "-fstack-protector", - # All warnings are enabled. Maybe enable -Werror as well? - "-Wall", - # Enable a few more warnings that aren't part of -Wall. - ] + (["-Wthread-safety", "-Wself-assign"] if darwin else [ - "-B" + escape_string(str(repository_ctx.path(cc).dirname)), - # Always have -B/usr/bin, see https://github.com/bazelbuild/bazel/issues/760. - "-B/usr/bin", - ]) + ( - # Disable problematic warnings. 
- _add_option_if_supported(repository_ctx, cc, "-Wunused-but-set-parameter") + - # has false positives - _add_option_if_supported(repository_ctx, cc, "-Wno-free-nonheap-object") + - # Enable coloring even if there's no attached terminal. Bazel removes the - # escape sequences if --nocolor is specified. - _add_option_if_supported(repository_ctx, cc, "-fcolor-diagnostics")) + [ - # Keep stack frames for debugging, even in opt mode. - "-fno-omit-frame-pointer", - ], - } - - -def _opt_content(darwin): - """Return the content of the opt specific section of the CROSSTOOL file.""" - return { - "compiler_flag": [ - # No debug symbols. - # Maybe we should enable https://gcc.gnu.org/wiki/DebugFission for opt or - # even generally? However, that can't happen here, as it requires special - # handling in Bazel. - "-g0", - - # Conservative choice for -O - # -O3 can increase binary size and even slow down the resulting binaries. - # Profile first and / or use FDO if you need better performance than this. - "-O2", - - # Security hardening on by default. - # Conservative choice; -D_FORTIFY_SOURCE=2 may be unsafe in some cases. - "-D_FORTIFY_SOURCE=1", - - # Disable assertions - "-DNDEBUG", - - # Removal of unused code and data at link time (can this increase binary size in some cases?). - "-ffunction-sections", - "-fdata-sections" - ], - "linker_flag": [] if darwin else ["-Wl,--gc-sections"] - } - - -def _dbg_content(): - """Return the content of the dbg specific section of the CROSSTOOL file.""" - # Enable debug symbols - return {"compiler_flag": "-g"} - - -def get_env(repository_ctx): - """Convert the environment in a list of export if in Homebrew. Doesn't %-escape the result!""" - env = repository_ctx.os.environ - if "HOMEBREW_RUBY_PATH" in env: - return "\n".join([ - "export %s='%s'" % (k, env[k].replace("'", "'\\''")) - for k in env - if k != "_" and k.find(".") == -1 - ]) - else: - return "" - - -def _coverage_feature(darwin): - if darwin: - compile_flags = """flag_group { - flag: '-fprofile-instr-generate' - flag: '-fcoverage-mapping' - }""" - link_flags = """flag_group { - flag: '-fprofile-instr-generate' - }""" - else: - compile_flags = """flag_group { - flag: '-fprofile-arcs' - flag: '-ftest-coverage' - }""" - link_flags = """flag_group { - flag: '-lgcov' - }""" - return """ - feature { - name: 'coverage' - provides: 'profile' - flag_set { - action: 'preprocess-assemble' - action: 'c-compile' - action: 'c++-compile' - action: 'c++-header-parsing' - action: 'c++-header-preprocessing' - action: 'c++-module-compile' - """ + compile_flags + """ - - - - } - flag_set { - action: 'c++-link-interface-dynamic-library' - action: 'c++-link-dynamic-library' - action: 'c++-link-executable' - """ + link_flags + """ - } - } - """ - - -def find_cc(repository_ctx): - """Find the C++ compiler. Doesn't %-escape the result.""" - cc_name = "gcc" - cc_environ = repository_ctx.os.environ.get("CC") - cc_paren = "" - if cc_environ != None: - cc_environ = cc_environ.strip() - if cc_environ: - cc_name = cc_environ - cc_paren = " (%s)" % cc_environ - if cc_name.startswith("/"): - # Absolute path, maybe we should make this suported by our which function. 
- return cc_name - cc = repository_ctx.which(cc_name) - if cc == None: - fail( - ("Cannot find gcc or CC%s, either correct your path or set the CC" - + " environment variable") % cc_paren) - return cc - - -def configure_unix_toolchain(repository_ctx, cpu_value): - """Configure C++ toolchain on Unix platforms.""" - repository_ctx.file("tools/cpp/empty.cc", "int main() {}") - darwin = cpu_value == "darwin" - cc = find_cc(repository_ctx) - tool_paths = _get_tool_paths(repository_ctx, darwin, - "cc_wrapper.sh" if darwin else str(cc)) - crosstool_content = _crosstool_content(repository_ctx, cc, cpu_value, darwin) - opt_content = _opt_content(darwin) - dbg_content = _dbg_content() - tpl(repository_ctx, "BUILD", { - "%{name}": cpu_value, - "%{supports_param_files}": "0" if darwin else "1", - "%{cc_compiler_deps}": ":cc_wrapper" if darwin else ":empty", - "%{compiler}": get_env_var(repository_ctx, "BAZEL_COMPILER", "compiler", False), - }) - tpl(repository_ctx, - "osx_cc_wrapper.sh" if darwin else "linux_cc_wrapper.sh", - {"%{cc}": escape_string(str(cc)), - "%{env}": escape_string(get_env(repository_ctx))}, - "cc_wrapper.sh") - tpl(repository_ctx, "CROSSTOOL", { - "%{cpu}": escape_string(cpu_value), - "%{default_toolchain_name}": escape_string( - get_env_var(repository_ctx, - "CC_TOOLCHAIN_NAME", - "local", - False)), - "%{toolchain_name}": escape_string( - get_env_var(repository_ctx, "CC_TOOLCHAIN_NAME", "local", False)), - "%{content}": _build_crosstool(crosstool_content) + "\n" + - _build_tool_path(tool_paths), - "%{opt_content}": _build_crosstool(opt_content, " "), - "%{dbg_content}": _build_crosstool(dbg_content, " "), - "%{cxx_builtin_include_directory}": "", - "%{coverage}": _coverage_feature(darwin), - "%{msvc_env_tmp}": "", - "%{msvc_env_path}": "", - "%{msvc_env_include}": "", - "%{msvc_env_lib}": "", - "%{msvc_cl_path}": "", - "%{msvc_ml_path}": "", - "%{msvc_link_path}": "", - "%{msvc_lib_path}": "", - "%{dbg_mode_debug}": "", - "%{fastbuild_mode_debug}": "", - "%{compilation_mode_content}": "", - }) diff --git a/starlark/src/syntax/testcases/utilities.bzl b/starlark/src/syntax/testcases/utilities.bzl deleted file mode 100644 index 5574c631..00000000 --- a/starlark/src/syntax/testcases/utilities.bzl +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2017 Google Inc. All Rights Reserved. -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""This rule exposes the source jar of a java_*_library rule as a label.""" - -def _java_library_srcs_impl(ctx): - if len(ctx.attr.deps) != 1: - fail("Only one deps value supported", "deps") - dep = ctx.attr.deps[0] - return [DefaultInfo(files=depset(dep.java.source_jars))] - - -_java_library_srcs = rule( - implementation=_java_library_srcs_impl, - attrs={ - "deps": - attr.label_list( - mandatory=True, - non_empty=True, - providers=["java"],) - }) - - -def java_library_srcs(name, deps, visibility=None, **kwargs): - """Provides the source jars generated by a java_*_library rule.""" - _java_library_srcs(name=name, deps=deps, visibility=visibility, **kwargs) diff --git a/starlark/src/syntax/testcases/vars.bzl b/starlark/src/syntax/testcases/vars.bzl deleted file mode 100644 index 3892447a..00000000 --- a/starlark/src/syntax/testcases/vars.bzl +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Global constants for jenkins jobs substitutions - -MAIL_SUBSTITUTIONS = { - "BAZEL_BUILD_RECIPIENT": "bazel-ci@googlegroups.com", - "BAZEL_RELEASE_RECIPIENT": "bazel-discuss+release@googlegroups.com", - "SENDER_EMAIL": "noreply@bazel.io", -} diff --git a/starlark/src/syntax/testcases/version.bzl b/starlark/src/syntax/testcases/version.bzl deleted file mode 100644 index 667e07a1..00000000 --- a/starlark/src/syntax/testcases/version.bzl +++ /dev/null @@ -1,3 +0,0 @@ -"""Version of the blaze plugin.""" - -VERSION = "2017.05.17.1" diff --git a/starlark/src/syntax/testcases/vet.bzl b/starlark/src/syntax/testcases/vet.bzl deleted file mode 100644 index 7170c68a..00000000 --- a/starlark/src/syntax/testcases/vet.bzl +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -load("@io_bazel_rules_go//go/private:providers.bzl", "GoPath") - -load("@io_bazel_rules_go//go/private:mode.bzl", - "get_mode", -) -load("@io_bazel_rules_go//go/private:common.bzl", - "declare_file", -) - -def _go_vet_generate_impl(ctx): - print(""" -EXPERIMENTAL: the go_vet_test rule is still very experimental -Please do not rely on it for production use, but feel free to use it and file issues -""") - go_toolchain = ctx.toolchains["@io_bazel_rules_go//go:toolchain"] - mode = get_mode(ctx, ctx.attr._go_toolchain_flags) - stdlib = go_toolchain.stdlib.get(ctx, go_toolchain, mode) - script_file = declare_file(ctx, ext=".bash") - gopath = [] - files = ctx.files.data + stdlib.files - gopath = [] - packages = [] - for data in ctx.attr.data: - entry = data[GoPath] - gopath += [entry.gopath] - packages += [package.dir for package in entry.packages] - ctx.actions.write(output=script_file, is_executable=True, content=""" -export GOPATH="{gopath}" -{go} tool vet {packages} -""".format( - go=stdlib.go.short_path, - gopath=":".join(['$(pwd)/{})'.format(entry) for entry in gopath]), - packages=" ".join(packages), - )) - return struct( - files = depset([script_file]), - runfiles = ctx.runfiles(files, collect_data = True), - ) - -_go_vet_generate = rule( - _go_vet_generate_impl, - attrs = { - "data": attr.label_list(providers=[GoPath], cfg = "data"), - "_go_toolchain_flags": attr.label(default=Label("@io_bazel_rules_go//go/private:go_toolchain_flags")), - }, - toolchains = ["@io_bazel_rules_go//go:toolchain"], -) - -def go_vet_test(name, data, **kwargs): - script_name = "generate_"+name - _go_vet_generate( - name=script_name, - data=data, - tags = ["manual"], - ) - native.sh_test( - name=name, - srcs=[script_name], - data=data, - **kwargs - ) \ No newline at end of file diff --git a/starlark/src/syntax/testcases/win_rules.bzl b/starlark/src/syntax/testcases/win_rules.bzl deleted file mode 100644 index 093d8448..00000000 --- a/starlark/src/syntax/testcases/win_rules.bzl +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# This is a quick and dirty rule to make Bazel compile itself. It -# only supports Java. 
- -def cc_library(srcs=[], hdrs=[], **kwargs): - """Replace srcs and hdrs with a dummy.cc on non-Windows platforms.""" - native.cc_library( - srcs = select({ - "//conditions:default": ["dummy.cc"], - "//src/conditions:windows": srcs, - }), - hdrs = select({ - "//conditions:default": [], - "//src/conditions:windows": hdrs, - }), - **kwargs) - -def cc_binary(srcs=[], **kwargs): - """Replace srcs with a dummy.cc on non-Windows platforms.""" - native.cc_binary( - srcs = select({ - "//conditions:default": ["dummy.cc"], - "//src/conditions:windows": srcs, - }), - **kwargs) - -def cc_test(srcs=[], **kwargs): - """Replace srcs with a dummy.cc on non-Windows platforms.""" - native.cc_test( - srcs = select({ - "//conditions:default": ["dummy.cc"], - "//src/conditions:windows": srcs, - }), - **kwargs) diff --git a/starlark/src/syntax/testcases/windows_cc_configure.bzl b/starlark/src/syntax/testcases/windows_cc_configure.bzl deleted file mode 100644 index 66659262..00000000 --- a/starlark/src/syntax/testcases/windows_cc_configure.bzl +++ /dev/null @@ -1,394 +0,0 @@ -# pylint: disable=g-bad-file-header -# Copyright 2016 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Configuring the C++ toolchain on Windows.""" - -load( - "@bazel_tools//tools/cpp:lib_cc_configure.bzl", - "escape_string", - "auto_configure_fail", - "auto_configure_warning", - "get_env_var", - "which", - "which_cmd", - "execute", - "tpl", - "is_cc_configure_debug", -) - - -# TODO(pcloudy): Remove this after MSVC CROSSTOOL becomes default on Windows -def _get_escaped_windows_msys_crosstool_content(repository_ctx): - """Return the content of msys crosstool which is still the default CROSSTOOL on Windows.""" - bazel_sh = get_env_var(repository_ctx, "BAZEL_SH").replace("\\", "/").lower() - tokens = bazel_sh.rsplit("/", 1) - msys_root = None - if tokens[0].endswith("/usr/bin"): - msys_root = tokens[0][:len(tokens[0]) - len("usr/bin")] - elif tokens[0].endswith("/bin"): - msys_root = tokens[0][:len(tokens[0]) - len("bin")] - if not msys_root: - auto_configure_fail( - "Could not determine MSYS/Cygwin root from BAZEL_SH (%s)" % bazel_sh) - escaped_msys_root = escape_string(msys_root) - return ( - ' abi_version: "local"\n' + - ' abi_libc_version: "local"\n' + - ' builtin_sysroot: ""\n' + - ' compiler: "windows_msys64"\n' + - ' host_system_name: "local"\n' + - " needsPic: false\n" + - ' target_libc: "local"\n' + - ' target_cpu: "x64_windows_msys"\n' + - ' target_system_name: "local"\n' + - ' tool_path { name: "ar" path: "%susr/bin/ar" }\n' % escaped_msys_root + - ' tool_path { name: "compat-ld" path: "%susr/bin/ld" }\n' % escaped_msys_root + - ' tool_path { name: "cpp" path: "%susr/bin/cpp" }\n' % escaped_msys_root + - ' tool_path { name: "dwp" path: "%susr/bin/dwp" }\n' % escaped_msys_root + - ' tool_path { name: "gcc" path: "%susr/bin/gcc" }\n' % escaped_msys_root + - ' cxx_flag: "-std=gnu++0x"\n' + - ' linker_flag: "-lstdc++"\n' + - ' cxx_builtin_include_directory: "%s"\n' % escaped_msys_root + - ' 
cxx_builtin_include_directory: "/usr/"\n' + - ' tool_path { name: "gcov" path: "%susr/bin/gcov" }\n' % escaped_msys_root + - ' tool_path { name: "ld" path: "%susr/bin/ld" }\n' % escaped_msys_root + - ' tool_path { name: "nm" path: "%susr/bin/nm" }\n' % escaped_msys_root + - ' tool_path { name: "objcopy" path: "%susr/bin/objcopy" }\n' % escaped_msys_root + - ' objcopy_embed_flag: "-I"\n' + - ' objcopy_embed_flag: "binary"\n' + - ' tool_path { name: "objdump" path: "%susr/bin/objdump" }\n' % escaped_msys_root + - ' tool_path { name: "strip" path: "%susr/bin/strip" }'% escaped_msys_root ) - - -def _get_system_root(repository_ctx): - r"""Get System root path on Windows, default is C:\\Windows. Doesn't %-escape the result.""" - if "SYSTEMROOT" in repository_ctx.os.environ: - return escape_string(repository_ctx.os.environ["SYSTEMROOT"]) - auto_configure_warning("SYSTEMROOT is not set, using default SYSTEMROOT=C:\\Windows") - return "C:\\Windows" - - -def _find_cuda(repository_ctx): - """Find out if and where cuda is installed. Doesn't %-escape the result.""" - if "CUDA_PATH" in repository_ctx.os.environ: - return repository_ctx.os.environ["CUDA_PATH"] - nvcc = which(repository_ctx, "nvcc.exe") - if nvcc: - return nvcc[:-len("/bin/nvcc.exe")] - return None - - -def _find_python(repository_ctx): - """Find where is python on Windows. Doesn't %-escape the result.""" - if "BAZEL_PYTHON" in repository_ctx.os.environ: - python_binary = repository_ctx.os.environ["BAZEL_PYTHON"] - if not python_binary.endswith(".exe"): - python_binary = python_binary + ".exe" - return python_binary - auto_configure_warning("'BAZEL_PYTHON' is not set, start looking for python in PATH.") - python_binary = which_cmd(repository_ctx, "python.exe") - auto_configure_warning("Python found at %s" % python_binary) - return python_binary - - -def _add_system_root(repository_ctx, env): - r"""Running VCVARSALL.BAT and VCVARSQUERYREGISTRY.BAT need %SYSTEMROOT%\\system32 in PATH.""" - if "PATH" not in env: - env["PATH"] = "" - env["PATH"] = env["PATH"] + ";" + _get_system_root(repository_ctx) + "\\system32" - return env - - -def find_vc_path(repository_ctx): - """Find Visual C++ build tools install path. Doesn't %-escape the result.""" - # 1. Check if BAZEL_VC or BAZEL_VS is already set by user. - if "BAZEL_VC" in repository_ctx.os.environ: - return repository_ctx.os.environ["BAZEL_VC"] - - if "BAZEL_VS" in repository_ctx.os.environ: - return repository_ctx.os.environ["BAZEL_VS"] + "\\VC\\" - auto_configure_warning("'BAZEL_VC' is not set, " + - "start looking for the latest Visual C++ installed.") - - # 2. Check if VS%VS_VERSION%COMNTOOLS is set, if true then try to find and use - # vcvarsqueryregistry.bat to detect VC++. - auto_configure_warning("Looking for VS%VERSION%COMNTOOLS environment variables," + - "eg. 
VS140COMNTOOLS") - for vscommontools_env in ["VS140COMNTOOLS", "VS120COMNTOOLS", - "VS110COMNTOOLS", "VS100COMNTOOLS", "VS90COMNTOOLS"]: - if vscommontools_env not in repository_ctx.os.environ: - continue - vcvarsqueryregistry = repository_ctx.os.environ[vscommontools_env] + "\\vcvarsqueryregistry.bat" - if not repository_ctx.path(vcvarsqueryregistry).exists: - continue - repository_ctx.file("get_vc_dir.bat", - "@echo off\n" + - "call \"" + vcvarsqueryregistry + "\"\n" + - "echo %VCINSTALLDIR%", True) - env = _add_system_root(repository_ctx, repository_ctx.os.environ) - vc_dir = execute(repository_ctx, ["./get_vc_dir.bat"], environment=env) - - auto_configure_warning("Visual C++ build tools found at %s" % vc_dir) - return vc_dir - - # 3. User might clean up all environment variables, if so looking for Visual C++ through registry. - # Works for all VS versions, including Visual Studio 2017. - auto_configure_warning("Looking for Visual C++ through registry") - reg_binary = _get_system_root(repository_ctx) + "\\system32\\reg.exe" - vc_dir = None - for key, suffix in (("VC7", ""), ("VS7", "\\VC")): - for version in ["15.0", "14.0", "12.0", "11.0", "10.0", "9.0", "8.0"]: - if vc_dir: - break - result = repository_ctx.execute([reg_binary, "query", "HKEY_LOCAL_MACHINE\\SOFTWARE\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\" + key, "/v", version]) - if is_cc_configure_debug(repository_ctx): - auto_configure_warning("registry query result for VC %s:\n\nSTDOUT(start)\n%s\nSTDOUT(end)\nSTDERR(start):\n%s\nSTDERR(end)\n" % - (version, result.stdout, result.stderr)) - if not result.stderr: - for line in result.stdout.split("\n"): - line = line.strip() - if line.startswith(version) and line.find("REG_SZ") != -1: - vc_dir = line[line.find("REG_SZ") + len("REG_SZ"):].strip() + suffix - - if not vc_dir: - return "visual-studio-not-found" - auto_configure_warning("Visual C++ build tools found at %s" % vc_dir) - return vc_dir - - -def _is_vs_2017(vc_path): - """Check if the installed VS version is Visual Studio 2017.""" - # In VS 2017, the location of VC is like: - # C:\Program Files (x86)\Microsoft Visual Studio\2017\BuildTools\VC\ - # In VS 2015 or older version, it is like: - # C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\ - return vc_path.find("2017") != -1 - - -def _find_vcvarsall_bat_script(repository_ctx, vc_path): - """Find vcvarsall.bat script. Doesn't %-escape the result.""" - if _is_vs_2017(vc_path): - vcvarsall = vc_path + "\\Auxiliary\\Build\\VCVARSALL.BAT" - else: - vcvarsall = vc_path + "\\VCVARSALL.BAT" - - if not repository_ctx.path(vcvarsall).exists: - auto_configure_fail(vcvarsall + " doesn't exist, please check your VC++ installation") - return vcvarsall - - -def _find_env_vars(repository_ctx, vc_path): - """Get environment variables set by VCVARSALL.BAT. Doesn't %-escape the result!""" - vcvarsall = _find_vcvarsall_bat_script(repository_ctx, vc_path) - repository_ctx.file("get_env.bat", - "@echo off\n" + - "call \"" + vcvarsall + "\" amd64 > NUL \n" + - "echo PATH=%PATH%,INCLUDE=%INCLUDE%,LIB=%LIB% \n", True) - env = _add_system_root(repository_ctx, - {"PATH": "", "INCLUDE": "", "LIB": ""}) - envs = execute(repository_ctx, ["./get_env.bat"], environment=env).split(",") - env_map = {} - for env in envs: - key, value = env.split("=", 1) - env_map[key] = escape_string(value.replace("\\", "\\\\")) - return env_map - - -def find_msvc_tool(repository_ctx, vc_path, tool): - """Find the exact path of a specific build tool in MSVC. 
Doesn't %-escape the result.""" - tool_path = "" - if _is_vs_2017(vc_path): - # For VS 2017, the tools are under a directory like: - # C:\Program Files (x86)\Microsoft Visual Studio\2017\BuildTools\VC\Tools\MSVC\14.10.24930\bin\HostX64\x64 - dirs = repository_ctx.path(vc_path + "\\Tools\\MSVC").readdir() - if len(dirs) < 1: - auto_configure_fail("VC++ build tools directory not found under " + vc_path + "\\Tools\\MSVC") - # Normally there should be only one child directory under %VC_PATH%\TOOLS\MSVC, - # but iterate every directory to be more robust. - for path in dirs: - tool_path = str(path) + "\\bin\\HostX64\\x64\\" + tool - if repository_ctx.path(tool_path).exists: - break - else: - # For VS 2015 and older version, the tools are under: - # C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\bin\amd64 - tool_path = vc_path + "\\bin\\amd64\\" + tool - - if not repository_ctx.path(tool_path).exists: - auto_configure_fail(tool_path + " not found, please check your VC++ installation.") - return tool_path - - -def _is_support_whole_archive(repository_ctx, vc_path): - """Run MSVC linker alone to see if it supports /WHOLEARCHIVE.""" - env = repository_ctx.os.environ - if "NO_WHOLE_ARCHIVE_OPTION" in env and env["NO_WHOLE_ARCHIVE_OPTION"] == "1": - return False - linker = find_msvc_tool(repository_ctx, vc_path, "link.exe") - result = execute(repository_ctx, [linker], expect_failure = True) - return result.find("/WHOLEARCHIVE") != -1 - - -def _is_support_debug_fastlink(repository_ctx, vc_path): - """Run MSVC linker alone to see if it supports /DEBUG:FASTLINK.""" - linker = find_msvc_tool(repository_ctx, vc_path, "link.exe") - result = execute(repository_ctx, [linker], expect_failure = True) - return result.find("/DEBUG[:{FASTLINK|FULL|NONE}]") != -1 - - -def _is_use_msvc_wrapper(repository_ctx): - """Returns True if USE_MSVC_WRAPPER is set to 1.""" - env = repository_ctx.os.environ - return "USE_MSVC_WRAPPER" in env and env["USE_MSVC_WRAPPER"] == "1" - - -def _get_compilation_mode_content(): - """Return the content for adding flags for different compilation modes when using MSVC wrapper.""" - return "\n".join([ - " compilation_mode_flags {", - " mode: DBG", - " compiler_flag: '-Xcompilation-mode=dbg'", - " linker_flag: '-Xcompilation-mode=dbg'", - " }", - " compilation_mode_flags {", - " mode: FASTBUILD", - " compiler_flag: '-Xcompilation-mode=fastbuild'", - " linker_flag: '-Xcompilation-mode=fastbuild'", - " }", - " compilation_mode_flags {", - " mode: OPT", - " compiler_flag: '-Xcompilation-mode=opt'", - " linker_flag: '-Xcompilation-mode=opt'", - " }"]) - - -def _escaped_cuda_compute_capabilities(repository_ctx): - """Returns a %-escaped list of strings representing cuda compute capabilities.""" - - if "CUDA_COMPUTE_CAPABILITIES" not in repository_ctx.os.environ: - return ["3.5", "5.2"] - capabilities_str = escape_string(repository_ctx.os.environ["CUDA_COMPUTE_CAPABILITIES"]) - capabilities = capabilities_str.split(",") - for capability in capabilities: - # Workaround for Skylark's lack of support for regex. 
This check should - # be equivalent to checking: - # if re.match("[0-9]+.[0-9]+", capability) == None: - parts = capability.split(".") - if len(parts) != 2 or not parts[0].isdigit() or not parts[1].isdigit(): - auto_configure_fail("Invalid compute capability: %s" % capability) - return capabilities - - -def configure_windows_toolchain(repository_ctx): - """Configure C++ toolchain on Windows.""" - repository_ctx.symlink(Label("@bazel_tools//tools/cpp:BUILD.static"), "BUILD") - - vc_path = find_vc_path(repository_ctx) - if vc_path == "visual-studio-not-found": - vc_path_error_script = "vc_path_not_found.bat" - repository_ctx.symlink(Label("@bazel_tools//tools/cpp:vc_path_not_found.bat"), vc_path_error_script) - tpl(repository_ctx, "CROSSTOOL", { - "%{cpu}": "x64_windows", - "%{default_toolchain_name}": "msvc_x64", - "%{toolchain_name}": "msys_x64", - "%{msvc_env_tmp}": "", - "%{msvc_env_path}": "", - "%{msvc_env_include}": "", - "%{msvc_env_lib}": "", - "%{msvc_cl_path}": vc_path_error_script, - "%{msvc_link_path}": vc_path_error_script, - "%{msvc_lib_path}": vc_path_error_script, - "%{compilation_mode_content}": "", - "%{content}": _get_escaped_windows_msys_crosstool_content(repository_ctx), - "%{opt_content}": "", - "%{dbg_content}": "", - "%{cxx_builtin_include_directory}": "", - "%{coverage}": "", - }) - return - - env = _find_env_vars(repository_ctx, vc_path) - escaped_paths = escape_string(env["PATH"]) - escaped_include_paths = escape_string(env["INCLUDE"]) - escaped_lib_paths = escape_string(env["LIB"]) - escaped_tmp_dir = escape_string( - get_env_var(repository_ctx, "TMP", "C:\\Windows\\Temp").replace("\\", "\\\\")) - msvc_cl_path = find_msvc_tool(repository_ctx, vc_path, "cl.exe").replace("\\", "/") - msvc_ml_path = find_msvc_tool(repository_ctx, vc_path, "ml64.exe").replace("\\", "/") - msvc_link_path = find_msvc_tool(repository_ctx, vc_path, "link.exe").replace("\\", "/") - msvc_lib_path = find_msvc_tool(repository_ctx, vc_path, "lib.exe").replace("\\", "/") - escaped_cxx_include_directories = [] - compilation_mode_content = "" - - if _is_use_msvc_wrapper(repository_ctx): - if _is_support_whole_archive(repository_ctx, vc_path): - support_whole_archive = "True" - else: - support_whole_archive = "False" - nvcc_tmp_dir_name = escaped_tmp_dir + "\\\\nvcc_inter_files_tmp_dir" - # Make sure nvcc.exe is in PATH - cuda_path = _find_cuda(repository_ctx) - if cuda_path: - escaped_paths = escape_string(cuda_path.replace("\\", "\\\\") + "/bin;") + escaped_paths - escaped_compute_capabilities = _escaped_cuda_compute_capabilities(repository_ctx) - tpl(repository_ctx, "wrapper/bin/pydir/msvc_tools.py", { - "%{lib_tool}": escape_string(msvc_lib_path), - "%{support_whole_archive}": support_whole_archive, - "%{cuda_compute_capabilities}": ", ".join( - ["\"%s\"" % c for c in escaped_compute_capabilities]), - "%{nvcc_tmp_dir_name}": nvcc_tmp_dir_name, - }) - # nvcc will generate some source files under %{nvcc_tmp_dir_name} - # The generated files are guranteed to have unique name, so they can share the same tmp directory - escaped_cxx_include_directories += [ "cxx_builtin_include_directory: \"%s\"" % nvcc_tmp_dir_name ] - msvc_wrapper = repository_ctx.path(Label("@bazel_tools//tools/cpp:CROSSTOOL")).dirname.get_child("wrapper").get_child("bin") - for f in ["msvc_cl.bat", "msvc_link.bat", "msvc_nop.bat"]: - repository_ctx.symlink(msvc_wrapper.get_child(f), "wrapper/bin/" + f) - msvc_wrapper = msvc_wrapper.get_child("pydir") - for f in ["msvc_cl.py", "msvc_link.py"]: - 
repository_ctx.symlink(msvc_wrapper.get_child(f), "wrapper/bin/pydir/" + f) - python_binary = _find_python(repository_ctx) - tpl(repository_ctx, "wrapper/bin/call_python.bat", {"%{python_binary}": escape_string(python_binary)}) - msvc_cl_path = "wrapper/bin/msvc_cl.bat" - msvc_link_path = "wrapper/bin/msvc_link.bat" - msvc_lib_path = "wrapper/bin/msvc_link.bat" - compilation_mode_content = _get_compilation_mode_content() - - for path in escaped_include_paths.split(";"): - if path: - escaped_cxx_include_directories.append("cxx_builtin_include_directory: \"%s\"" % path) - - support_debug_fastlink = _is_support_debug_fastlink(repository_ctx, vc_path) - - tpl(repository_ctx, "CROSSTOOL", { - "%{cpu}": "x64_windows", - "%{default_toolchain_name}": "msvc_x64", - "%{toolchain_name}": "msys_x64", - "%{msvc_env_tmp}": escaped_tmp_dir, - "%{msvc_env_path}": escaped_paths, - "%{msvc_env_include}": escaped_include_paths, - "%{msvc_env_lib}": escaped_lib_paths, - "%{msvc_cl_path}": msvc_cl_path, - "%{msvc_ml_path}": msvc_ml_path, - "%{msvc_link_path}": msvc_link_path, - "%{msvc_lib_path}": msvc_lib_path, - "%{dbg_mode_debug}": "/DEBUG:FULL" if support_debug_fastlink else "/DEBUG", - "%{fastbuild_mode_debug}": "/DEBUG:FASTLINK" if support_debug_fastlink else "/DEBUG", - "%{compilation_mode_content}": compilation_mode_content, - "%{content}": _get_escaped_windows_msys_crosstool_content(repository_ctx), - "%{opt_content}": "", - "%{dbg_content}": "", - "%{cxx_builtin_include_directory}": "\n".join(escaped_cxx_include_directories), - "%{coverage}": "", - }) diff --git a/starlark/src/syntax/testcases/with-defaults.bzl b/starlark/src/syntax/testcases/with-defaults.bzl deleted file mode 100644 index 0670b405..00000000 --- a/starlark/src/syntax/testcases/with-defaults.bzl +++ /dev/null @@ -1,85 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""This defines a repository rule for configuring the rules' defaults. - -For now, this is limited to "push", where the default can be -specified as follows: - -```python - === WORKSPACE === - load( - "@io_bazel_rules_docker//docker/contrib:with-defaults.bzl", - docker_defaults="defaults", - ) - docker_defaults( - name = "my_defaults", - registry = "us.gcr.io", - tag = "{BUILD_USER}" - ) - - === BUILD === - load("@my_defaults//:defaults.bzl", "docker_push") -``` - -Any of "registry", "repository" or "tag" may be given a new default. 
-""" - -def _impl(repository_ctx): - """Core implementation of docker_default.""" - - repository_ctx.file("BUILD", "") - - repository_ctx.file("defaults.bzl", """ -load( - "@io_bazel_rules_docker//container:push.bzl", - _container_push="container_push" -) - -def container_push(**kwargs): - if "registry" not in kwargs: - kwargs["registry"] = "{registry}" or None - if "repository" not in kwargs: - kwargs["repository"] = "{repository}" or None - if "tag" not in kwargs: - kwargs["tag"] = "{tag}" or None - - _container_push(**kwargs) - -def docker_push(*args, **kwargs): - if "format" in kwargs: - fail("Cannot override 'format' attribute on docker_push", - attr="format") - kwargs["format"] = "Docker" - container_push(*args, **kwargs) - -def oci_push(*args, **kwargs): - if "format" in kwargs: - fail("Cannot override 'format' attribute on oci_push", - attr="format") - kwargs["format"] = "OCI" - container_push(*args, **kwargs) -""".format( - registry=repository_ctx.attr.registry or "", - repository=repository_ctx.attr.repository or "", - tag=repository_ctx.attr.tag or "", -)) - -defaults = repository_rule( - attrs = { - "registry": attr.string(), - "repository": attr.string(), - "tag": attr.string(), - }, - implementation = _impl, -) diff --git a/starlark/src/syntax/testcases/with-tag.bzl b/starlark/src/syntax/testcases/with-tag.bzl deleted file mode 100644 index 395ddbba..00000000 --- a/starlark/src/syntax/testcases/with-tag.bzl +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -load( - "//container:container.bzl", - "container_bundle", - _container_image = "container_image", -) - -def container_image(name=None, tag=None, **kwargs): - _container_image(name=name + '-internal', **kwargs) - container_bundle(name=name, images={ - tag: ':' + name + '-internal' - }) - -docker_build = container_image - -docker_image = container_image - -oci_image = container_image diff --git a/starlark/src/syntax/testcases/workspace.bzl b/starlark/src/syntax/testcases/workspace.bzl deleted file mode 100644 index 7711ecdc..00000000 --- a/starlark/src/syntax/testcases/workspace.bzl +++ /dev/null @@ -1,132 +0,0 @@ -def maven_dependencies(callback): - callback({"artifact": "antlr:antlr:2.7.6", "lang": "java", "sha1": "cf4f67dae5df4f9932ae7810f4548ef3e14dd35e", "repository": "https://repo.maven.apache.org/maven2/", "name": "antlr_antlr", "actual": "@antlr_antlr//jar", "bind": "jar/antlr/antlr"}) - callback({"artifact": "aopalliance:aopalliance:1.0", "lang": "java", "sha1": "0235ba8b489512805ac13a8f9ea77a1ca5ebe3e8", "repository": "https://repo.maven.apache.org/maven2/", "name": "aopalliance_aopalliance", "actual": "@aopalliance_aopalliance//jar", "bind": "jar/aopalliance/aopalliance"}) - callback({"artifact": "args4j:args4j:2.0.31", "lang": "java", "sha1": "6b870d81551ce93c5c776c3046299db8ad6c39d2", "repository": "https://repo.maven.apache.org/maven2/", "name": "args4j_args4j", "actual": "@args4j_args4j//jar", "bind": "jar/args4j/args4j"}) - callback({"artifact": "com.cloudbees:groovy-cps:1.12", "lang": "java", "sha1": "d766273a59e0b954c016e805779106bca22764b9", "repository": "https://repo.maven.apache.org/maven2/", "name": "com_cloudbees_groovy_cps", "actual": "@com_cloudbees_groovy_cps//jar", "bind": "jar/com/cloudbees/groovy_cps"}) - callback({"artifact": "com.github.jnr:jffi:1.2.15", "lang": "java", "sha1": "f480f0234dd8f053da2421e60574cfbd9d85e1f5", "repository": "https://repo.maven.apache.org/maven2/", "name": "com_github_jnr_jffi", "actual": "@com_github_jnr_jffi//jar", "bind": "jar/com/github/jnr/jffi"}) - callback({"artifact": "com.github.jnr:jnr-constants:0.9.8", "lang": "java", "sha1": "478036404879bd582be79e9a7939f3a161601c8b", "repository": "https://repo.maven.apache.org/maven2/", "name": "com_github_jnr_jnr_constants", "actual": "@com_github_jnr_jnr_constants//jar", "bind": "jar/com/github/jnr/jnr_constants"}) - callback({"artifact": "com.github.jnr:jnr-ffi:2.1.4", "lang": "java", "sha1": "0a63bbd4af5cee55d820ef40dc5347d45765b788", "repository": "https://repo.maven.apache.org/maven2/", "name": "com_github_jnr_jnr_ffi", "actual": "@com_github_jnr_jnr_ffi//jar", "bind": "jar/com/github/jnr/jnr_ffi"}) - callback({"artifact": "com.github.jnr:jnr-posix:3.0.41", "lang": "java", "sha1": "36eff018149e53ed814a340ddb7de73ceb66bf96", "repository": "https://repo.maven.apache.org/maven2/", "name": "com_github_jnr_jnr_posix", "actual": "@com_github_jnr_jnr_posix//jar", "bind": "jar/com/github/jnr/jnr_posix"}) - callback({"artifact": "com.github.jnr:jnr-x86asm:1.0.2", "lang": "java", "sha1": "006936bbd6c5b235665d87bd450f5e13b52d4b48", "repository": "https://repo.maven.apache.org/maven2/", "name": "com_github_jnr_jnr_x86asm", "actual": "@com_github_jnr_jnr_x86asm//jar", "bind": "jar/com/github/jnr/jnr_x86asm"}) - callback({"artifact": "com.google.code.findbugs:jsr305:1.3.9", "lang": "java", "sha1": "40719ea6961c0cb6afaeb6a921eaa1f6afd4cfdf", "repository": "https://repo.maven.apache.org/maven2/", "name": "com_google_code_findbugs_jsr305", "actual": "@com_google_code_findbugs_jsr305//jar", 
"bind": "jar/com/google/code/findbugs/jsr305"}) - callback({"artifact": "com.google.guava:guava:11.0.1", "lang": "java", "sha1": "57b40a943725d43610c898ac0169adf1b2d55742", "repository": "https://repo.maven.apache.org/maven2/", "name": "com_google_guava_guava", "actual": "@com_google_guava_guava//jar", "bind": "jar/com/google/guava/guava"}) - callback({"artifact": "com.google.inject:guice:4.0", "lang": "java", "sha1": "0f990a43d3725781b6db7cd0acf0a8b62dfd1649", "repository": "https://repo.maven.apache.org/maven2/", "name": "com_google_inject_guice", "actual": "@com_google_inject_guice//jar", "bind": "jar/com/google/inject/guice"}) - callback({"artifact": "com.infradna.tool:bridge-method-annotation:1.13", "lang": "java", "sha1": "18cdce50cde6f54ee5390d0907384f72183ff0fe", "repository": "https://repo.maven.apache.org/maven2/", "name": "com_infradna_tool_bridge_method_annotation", "actual": "@com_infradna_tool_bridge_method_annotation//jar", "bind": "jar/com/infradna/tool/bridge_method_annotation"}) - callback({"artifact": "com.jcraft:jzlib:1.1.3-kohsuke-1", "lang": "java", "sha1": "af5d27e1de29df05db95da5d76b546d075bc1bc5", "repository": "http://repo.jenkins-ci.org/public/", "name": "com_jcraft_jzlib", "actual": "@com_jcraft_jzlib//jar", "bind": "jar/com/jcraft/jzlib"}) - callback({"artifact": "com.lesfurets:jenkins-pipeline-unit:1.0", "lang": "java", "sha1": "3aa90c606c541e88c268df3cc9e87306af69b29f", "repository": "https://repo.maven.apache.org/maven2/", "name": "com_lesfurets_jenkins_pipeline_unit", "actual": "@com_lesfurets_jenkins_pipeline_unit//jar", "bind": "jar/com/lesfurets/jenkins_pipeline_unit"}) - callback({"artifact": "com.sun.solaris:embedded_su4j:1.1", "lang": "java", "sha1": "9404130cc4e60670429f1ab8dbf94d669012725d", "repository": "https://repo.maven.apache.org/maven2/", "name": "com_sun_solaris_embedded_su4j", "actual": "@com_sun_solaris_embedded_su4j//jar", "bind": "jar/com/sun/solaris/embedded_su4j"}) - callback({"artifact": "com.sun.xml.txw2:txw2:20110809", "lang": "java", "sha1": "46afa3f3c468680875adb8f2a26086a126c89902", "repository": "https://repo.maven.apache.org/maven2/", "name": "com_sun_xml_txw2_txw2", "actual": "@com_sun_xml_txw2_txw2//jar", "bind": "jar/com/sun/xml/txw2/txw2"}) - callback({"artifact": "commons-beanutils:commons-beanutils:1.8.3", "lang": "java", "sha1": "686ef3410bcf4ab8ce7fd0b899e832aaba5facf7", "repository": "https://repo.maven.apache.org/maven2/", "name": "commons_beanutils_commons_beanutils", "actual": "@commons_beanutils_commons_beanutils//jar", "bind": "jar/commons_beanutils/commons_beanutils"}) - callback({"artifact": "commons-codec:commons-codec:1.8", "lang": "java", "sha1": "af3be3f74d25fc5163b54f56a0d394b462dafafd", "repository": "https://repo.maven.apache.org/maven2/", "name": "commons_codec_commons_codec", "actual": "@commons_codec_commons_codec//jar", "bind": "jar/commons_codec/commons_codec"}) - callback({"artifact": "commons-collections:commons-collections:3.2.2", "lang": "java", "sha1": "8ad72fe39fa8c91eaaf12aadb21e0c3661fe26d5", "repository": "https://repo.maven.apache.org/maven2/", "name": "commons_collections_commons_collections", "actual": "@commons_collections_commons_collections//jar", "bind": "jar/commons_collections/commons_collections"}) - callback({"artifact": "commons-digester:commons-digester:2.1", "lang": "java", "sha1": "73a8001e7a54a255eef0f03521ec1805dc738ca0", "repository": "https://repo.maven.apache.org/maven2/", "name": "commons_digester_commons_digester", "actual": "@commons_digester_commons_digester//jar", 
"bind": "jar/commons_digester/commons_digester"}) - callback({"artifact": "commons-discovery:commons-discovery:0.4", "lang": "java", "sha1": "9e3417d3866d9f71e83b959b229b35dc723c7bea", "repository": "https://repo.maven.apache.org/maven2/", "name": "commons_discovery_commons_discovery", "actual": "@commons_discovery_commons_discovery//jar", "bind": "jar/commons_discovery/commons_discovery"}) - callback({"artifact": "commons-fileupload:commons-fileupload:1.3.1-jenkins-1", "lang": "java", "sha1": "5d0270b78ad9d5344ce4a8e35482ad8802526aca", "repository": "http://repo.jenkins-ci.org/public/", "name": "commons_fileupload_commons_fileupload", "actual": "@commons_fileupload_commons_fileupload//jar", "bind": "jar/commons_fileupload/commons_fileupload"}) - callback({"artifact": "commons-httpclient:commons-httpclient:3.1", "lang": "java", "sha1": "964cd74171f427720480efdec40a7c7f6e58426a", "repository": "https://repo.maven.apache.org/maven2/", "name": "commons_httpclient_commons_httpclient", "actual": "@commons_httpclient_commons_httpclient//jar", "bind": "jar/commons_httpclient/commons_httpclient"}) -# duplicates in commons-io:commons-io promoted to 2.5. Versions: 2.4 2.5 - callback({"artifact": "commons-io:commons-io:2.5", "lang": "java", "sha1": "2852e6e05fbb95076fc091f6d1780f1f8fe35e0f", "repository": "https://repo.maven.apache.org/maven2/", "name": "commons_io_commons_io", "actual": "@commons_io_commons_io//jar", "bind": "jar/commons_io/commons_io"}) - callback({"artifact": "commons-jelly:commons-jelly-tags-fmt:1.0", "lang": "java", "sha1": "2107da38fdd287ab78a4fa65c1300b5ad9999274", "repository": "https://repo.maven.apache.org/maven2/", "name": "commons_jelly_commons_jelly_tags_fmt", "actual": "@commons_jelly_commons_jelly_tags_fmt//jar", "bind": "jar/commons_jelly/commons_jelly_tags_fmt"}) - callback({"artifact": "commons-jelly:commons-jelly-tags-xml:1.1", "lang": "java", "sha1": "cc0efc2ae0ff81ef7737afc786a0ce16a8540efc", "repository": "https://repo.maven.apache.org/maven2/", "name": "commons_jelly_commons_jelly_tags_xml", "actual": "@commons_jelly_commons_jelly_tags_xml//jar", "bind": "jar/commons_jelly/commons_jelly_tags_xml"}) - callback({"artifact": "commons-lang:commons-lang:2.6", "lang": "java", "sha1": "0ce1edb914c94ebc388f086c6827e8bdeec71ac2", "repository": "https://repo.maven.apache.org/maven2/", "name": "commons_lang_commons_lang", "actual": "@commons_lang_commons_lang//jar", "bind": "jar/commons_lang/commons_lang"}) - callback({"artifact": "javax.annotation:javax.annotation-api:1.2", "lang": "java", "sha1": "479c1e06db31c432330183f5cae684163f186146", "repository": "https://repo.maven.apache.org/maven2/", "name": "javax_annotation_javax_annotation_api", "actual": "@javax_annotation_javax_annotation_api//jar", "bind": "jar/javax/annotation/javax_annotation_api"}) - callback({"artifact": "javax.inject:javax.inject:1", "lang": "java", "sha1": "6975da39a7040257bd51d21a231b76c915872d38", "repository": "https://repo.maven.apache.org/maven2/", "name": "javax_inject_javax_inject", "actual": "@javax_inject_javax_inject//jar", "bind": "jar/javax/inject/javax_inject"}) - callback({"artifact": "javax.mail:mail:1.4.4", "lang": "java", "sha1": "b907ef0a02ff6e809392b1e7149198497fcc8e49", "repository": "https://repo.maven.apache.org/maven2/", "name": "javax_mail_mail", "actual": "@javax_mail_mail//jar", "bind": "jar/javax/mail/mail"}) - callback({"artifact": "javax.servlet:jstl:1.1.0", "lang": "java", "sha1": "bca201e52333629c59e459e874e5ecd8f9899e15", "repository": 
"https://repo.maven.apache.org/maven2/", "name": "javax_servlet_jstl", "actual": "@javax_servlet_jstl//jar", "bind": "jar/javax/servlet/jstl"}) - callback({"artifact": "javax.xml.stream:stax-api:1.0-2", "lang": "java", "sha1": "d6337b0de8b25e53e81b922352fbea9f9f57ba0b", "repository": "https://repo.maven.apache.org/maven2/", "name": "javax_xml_stream_stax_api", "actual": "@javax_xml_stream_stax_api//jar", "bind": "jar/javax/xml/stream/stax_api"}) - callback({"artifact": "jaxen:jaxen:1.1-beta-11", "lang": "java", "sha1": "81e32b8bafcc778e5deea4e784670299f1c26b96", "repository": "https://repo.maven.apache.org/maven2/", "name": "jaxen_jaxen", "actual": "@jaxen_jaxen//jar", "bind": "jar/jaxen/jaxen"}) - callback({"artifact": "jfree:jcommon:1.0.12", "lang": "java", "sha1": "737f02607d2f45bb1a589a85c63b4cd907e5e634", "repository": "https://repo.maven.apache.org/maven2/", "name": "jfree_jcommon", "actual": "@jfree_jcommon//jar", "bind": "jar/jfree/jcommon"}) - callback({"artifact": "jfree:jfreechart:1.0.9", "lang": "java", "sha1": "6e522aa603bf7ac69da59edcf519b335490e93a6", "repository": "https://repo.maven.apache.org/maven2/", "name": "jfree_jfreechart", "actual": "@jfree_jfreechart//jar", "bind": "jar/jfree/jfreechart"}) - callback({"artifact": "jline:jline:2.12", "lang": "java", "sha1": "ce9062c6a125e0f9ad766032573c041ae8ecc986", "repository": "https://repo.maven.apache.org/maven2/", "name": "jline_jline", "actual": "@jline_jline//jar", "bind": "jar/jline/jline"}) - callback({"artifact": "junit:junit:4.12", "lang": "java", "sha1": "2973d150c0dc1fefe998f834810d68f278ea58ec", "repository": "https://repo.maven.apache.org/maven2/", "name": "junit_junit", "actual": "@junit_junit//jar", "bind": "jar/junit/junit"}) - callback({"artifact": "net.i2p.crypto:eddsa:0.2.0", "lang": "java", "sha1": "0856a92559c4daf744cb27c93cd8b7eb1f8c4780", "repository": "https://repo.maven.apache.org/maven2/", "name": "net_i2p_crypto_eddsa", "actual": "@net_i2p_crypto_eddsa//jar", "bind": "jar/net/i2p/crypto/eddsa"}) - callback({"artifact": "net.java.dev.jna:jna:4.2.1", "lang": "java", "sha1": "fcc5b10cb812c41b00708e7b57baccc3aee5567c", "repository": "https://repo.maven.apache.org/maven2/", "name": "net_java_dev_jna_jna", "actual": "@net_java_dev_jna_jna//jar", "bind": "jar/net/java/dev/jna/jna"}) - callback({"artifact": "net.java.sezpoz:sezpoz:1.12", "lang": "java", "sha1": "01f7e4a04e06fdbc91d66ddf80c443c3f7c6503c", "repository": "https://repo.maven.apache.org/maven2/", "name": "net_java_sezpoz_sezpoz", "actual": "@net_java_sezpoz_sezpoz//jar", "bind": "jar/net/java/sezpoz/sezpoz"}) - callback({"artifact": "net.sf.ezmorph:ezmorph:1.0.6", "lang": "java", "sha1": "01e55d2a0253ea37745d33062852fd2c90027432", "repository": "https://repo.maven.apache.org/maven2/", "name": "net_sf_ezmorph_ezmorph", "actual": "@net_sf_ezmorph_ezmorph//jar", "bind": "jar/net/sf/ezmorph/ezmorph"}) - callback({"artifact": "org.acegisecurity:acegi-security:1.0.7", "lang": "java", "sha1": "72901120d299e0c6ed2f6a23dd37f9186eeb8cc3", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_acegisecurity_acegi_security", "actual": "@org_acegisecurity_acegi_security//jar", "bind": "jar/org/acegisecurity/acegi_security"}) - callback({"artifact": "org.apache.ant:ant-launcher:1.8.4", "lang": "java", "sha1": "22f1e0c32a2bfc8edd45520db176bac98cebbbfe", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_apache_ant_ant_launcher", "actual": "@org_apache_ant_ant_launcher//jar", "bind": "jar/org/apache/ant/ant_launcher"}) - 
callback({"artifact": "org.apache.ant:ant:1.8.4", "lang": "java", "sha1": "8acff3fb57e74bc062d4675d9dcfaffa0d524972", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_apache_ant_ant", "actual": "@org_apache_ant_ant//jar", "bind": "jar/org/apache/ant/ant"}) - callback({"artifact": "org.apache.commons:commons-compress:1.10", "lang": "java", "sha1": "5eeb27c57eece1faf2d837868aeccc94d84dcc9a", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_apache_commons_commons_compress", "actual": "@org_apache_commons_commons_compress//jar", "bind": "jar/org/apache/commons/commons_compress"}) - callback({"artifact": "org.apache.ivy:ivy:2.4.0", "lang": "java", "sha1": "5abe4c24bbe992a9ac07ca563d5bd3e8d569e9ed", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_apache_ivy_ivy", "actual": "@org_apache_ivy_ivy//jar", "bind": "jar/org/apache/ivy/ivy"}) -# duplicates in org.codehaus.groovy:groovy-all fixed to 2.4.6. Versions: 2.4.6 2.4.11 - callback({"artifact": "org.codehaus.groovy:groovy-all:2.4.6", "lang": "java", "sha1": "478feadca929a946b2f1fb962bb2179264759821", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_codehaus_groovy_groovy_all", "actual": "@org_codehaus_groovy_groovy_all//jar", "bind": "jar/org/codehaus/groovy/groovy_all"}) - callback({"artifact": "org.codehaus.woodstox:wstx-asl:3.2.9", "lang": "java", "sha1": "c82b6e8f225bb799540e558b10ee24d268035597", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_codehaus_woodstox_wstx_asl", "actual": "@org_codehaus_woodstox_wstx_asl//jar", "bind": "jar/org/codehaus/woodstox/wstx_asl"}) - callback({"artifact": "org.connectbot.jbcrypt:jbcrypt:1.0.0", "lang": "java", "sha1": "f37bba2b8b78fcc8111bb932318b621dcc6c5194", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_connectbot_jbcrypt_jbcrypt", "actual": "@org_connectbot_jbcrypt_jbcrypt//jar", "bind": "jar/org/connectbot/jbcrypt/jbcrypt"}) - callback({"artifact": "org.fusesource.jansi:jansi:1.11", "lang": "java", "sha1": "655c643309c2f45a56a747fda70e3fadf57e9f11", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_fusesource_jansi_jansi", "actual": "@org_fusesource_jansi_jansi//jar", "bind": "jar/org/fusesource/jansi/jansi"}) - callback({"artifact": "org.hamcrest:hamcrest-all:1.3", "lang": "java", "sha1": "63a21ebc981131004ad02e0434e799fd7f3a8d5a", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_hamcrest_hamcrest_all", "actual": "@org_hamcrest_hamcrest_all//jar", "bind": "jar/org/hamcrest/hamcrest_all"}) - callback({"artifact": "org.hamcrest:hamcrest-core:1.3", "lang": "java", "sha1": "42a25dc3219429f0e5d060061f71acb49bf010a0", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_hamcrest_hamcrest_core", "actual": "@org_hamcrest_hamcrest_core//jar", "bind": "jar/org/hamcrest/hamcrest_core"}) - callback({"artifact": "org.jboss.marshalling:jboss-marshalling-river:1.4.9.Final", "lang": "java", "sha1": "d41e3e1ed9cf4afd97d19df8ecc7f2120effeeb4", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_jboss_marshalling_jboss_marshalling_river", "actual": "@org_jboss_marshalling_jboss_marshalling_river//jar", "bind": "jar/org/jboss/marshalling/jboss_marshalling_river"}) - callback({"artifact": "org.jboss.marshalling:jboss-marshalling:1.4.9.Final", "lang": "java", "sha1": "8fd342ee3dde0448c7600275a936ea1b17deb494", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_jboss_marshalling_jboss_marshalling", "actual": 
"@org_jboss_marshalling_jboss_marshalling//jar", "bind": "jar/org/jboss/marshalling/jboss_marshalling"}) - callback({"artifact": "org.jenkins-ci.dom4j:dom4j:1.6.1-jenkins-4", "lang": "java", "sha1": "9a370b2010b5a1223c7a43dae6c05226918e17b1", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_dom4j_dom4j", "actual": "@org_jenkins_ci_dom4j_dom4j//jar", "bind": "jar/org/jenkins_ci/dom4j/dom4j"}) - callback({"artifact": "org.jenkins-ci.main:cli:2.73.1", "lang": "java", "sha1": "03ae1decd36ee069108e66e70cd6ffcdd4320aec", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_main_cli", "actual": "@org_jenkins_ci_main_cli//jar", "bind": "jar/org/jenkins_ci/main/cli"}) - callback({"artifact": "org.jenkins-ci.main:jenkins-core:2.73.1", "lang": "java", "sha1": "30c9e7029d46fd18a8720f9a491bf41ab8f2bdb2", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_main_jenkins_core", "actual": "@org_jenkins_ci_main_jenkins_core//jar", "bind": "jar/org/jenkins_ci/main/jenkins_core"}) - callback({"artifact": "org.jenkins-ci.main:remoting:3.10", "lang": "java", "sha1": "19905fa1550ab34a33bb92a5e27e2a86733c9d15", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_main_remoting", "actual": "@org_jenkins_ci_main_remoting//jar", "bind": "jar/org/jenkins_ci/main/remoting"}) - callback({"artifact": "org.jenkins-ci.plugins.icon-shim:icon-set:1.0.5", "lang": "java", "sha1": "dedc76ac61797dafc66f31e8507d65b98c9e57df", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_plugins_icon_shim_icon_set", "actual": "@org_jenkins_ci_plugins_icon_shim_icon_set//jar", "bind": "jar/org/jenkins_ci/plugins/icon_shim/icon_set"}) - callback({"artifact": "org.jenkins-ci.plugins.workflow:workflow-api:2.11", "lang": "java", "sha1": "3a8a6e221a8b32fd9faabb33939c28f79fd961d7", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_plugins_workflow_workflow_api", "actual": "@org_jenkins_ci_plugins_workflow_workflow_api//jar", "bind": "jar/org/jenkins_ci/plugins/workflow/workflow_api"}) - callback({"artifact": "org.jenkins-ci.plugins.workflow:workflow-step-api:2.9", "lang": "java", "sha1": "7d1ad140c092cf4a68a7763db9eac459b5ed86ff", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_plugins_workflow_workflow_step_api", "actual": "@org_jenkins_ci_plugins_workflow_workflow_step_api//jar", "bind": "jar/org/jenkins_ci/plugins/workflow/workflow_step_api"}) - callback({"artifact": "org.jenkins-ci.plugins.workflow:workflow-support:2.14", "lang": "java", "sha1": "cd5f68c533ddd46fea3332ce788dffc80707ddb5", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_plugins_workflow_workflow_support", "actual": "@org_jenkins_ci_plugins_workflow_workflow_support//jar", "bind": "jar/org/jenkins_ci/plugins/workflow/workflow_support"}) - callback({"artifact": "org.jenkins-ci.plugins:script-security:1.26", "lang": "java", "sha1": "44aacd104c0d5c8fe5d0f93e4a4001cae0e48c2b", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_plugins_script_security", "actual": "@org_jenkins_ci_plugins_script_security//jar", "bind": "jar/org/jenkins_ci/plugins/script_security"}) - callback({"artifact": "org.jenkins-ci.plugins:structs:1.5", "lang": "java", "sha1": "72d429f749151f1c983c1fadcb348895cc6da20e", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_plugins_structs", "actual": "@org_jenkins_ci_plugins_structs//jar", "bind": 
"jar/org/jenkins_ci/plugins/structs"}) -# duplicates in org.jenkins-ci:annotation-indexer promoted to 1.12. Versions: 1.9 1.12 - callback({"artifact": "org.jenkins-ci:annotation-indexer:1.12", "lang": "java", "sha1": "8f6ee0cd64c305dcca29e2f5b46631d50890208f", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_annotation_indexer", "actual": "@org_jenkins_ci_annotation_indexer//jar", "bind": "jar/org/jenkins_ci/annotation_indexer"}) - callback({"artifact": "org.jenkins-ci:bytecode-compatibility-transformer:1.8", "lang": "java", "sha1": "aded88ffe12f1904758397f96f16957e97b88e6e", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_bytecode_compatibility_transformer", "actual": "@org_jenkins_ci_bytecode_compatibility_transformer//jar", "bind": "jar/org/jenkins_ci/bytecode_compatibility_transformer"}) - callback({"artifact": "org.jenkins-ci:commons-jelly:1.1-jenkins-20120928", "lang": "java", "sha1": "2720a0d54b7f32479b08970d7738041362e1f410", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_commons_jelly", "actual": "@org_jenkins_ci_commons_jelly//jar", "bind": "jar/org/jenkins_ci/commons_jelly"}) - callback({"artifact": "org.jenkins-ci:commons-jexl:1.1-jenkins-20111212", "lang": "java", "sha1": "0a990a77bea8c5a400d58a6f5d98122236300f7d", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_commons_jexl", "actual": "@org_jenkins_ci_commons_jexl//jar", "bind": "jar/org/jenkins_ci/commons_jexl"}) - callback({"artifact": "org.jenkins-ci:constant-pool-scanner:1.2", "lang": "java", "sha1": "e5e0b7c7fcb67767dbd195e0ca1f0ee9406dd423", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_jenkins_ci_constant_pool_scanner", "actual": "@org_jenkins_ci_constant_pool_scanner//jar", "bind": "jar/org/jenkins_ci/constant_pool_scanner"}) - callback({"artifact": "org.jenkins-ci:crypto-util:1.1", "lang": "java", "sha1": "3a199a4c3748012b9dbbf3080097dc9f302493d8", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_crypto_util", "actual": "@org_jenkins_ci_crypto_util//jar", "bind": "jar/org/jenkins_ci/crypto_util"}) - callback({"artifact": "org.jenkins-ci:jmdns:3.4.0-jenkins-3", "lang": "java", "sha1": "264d0c402b48c365f34d072b864ed57f25e92e63", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_jmdns", "actual": "@org_jenkins_ci_jmdns//jar", "bind": "jar/org/jenkins_ci/jmdns"}) - callback({"artifact": "org.jenkins-ci:memory-monitor:1.9", "lang": "java", "sha1": "1935bfb46474e3043ee2310a9bb790d42dde2ed7", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_memory_monitor", "actual": "@org_jenkins_ci_memory_monitor//jar", "bind": "jar/org/jenkins_ci/memory_monitor"}) -# duplicates in org.jenkins-ci:symbol-annotation promoted to 1.5. 
Versions: 1.1 1.5 - callback({"artifact": "org.jenkins-ci:symbol-annotation:1.5", "lang": "java", "sha1": "17694feb24cb69793914d0c1c11ff479ee4c1b38", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_symbol_annotation", "actual": "@org_jenkins_ci_symbol_annotation//jar", "bind": "jar/org/jenkins_ci/symbol_annotation"}) - callback({"artifact": "org.jenkins-ci:task-reactor:1.4", "lang": "java", "sha1": "b89e501a3bc64fe9f28cb91efe75ed8745974ef8", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_task_reactor", "actual": "@org_jenkins_ci_task_reactor//jar", "bind": "jar/org/jenkins_ci/task_reactor"}) - callback({"artifact": "org.jenkins-ci:trilead-ssh2:build-217-jenkins-11", "lang": "java", "sha1": "f10f4dd4121cc233cac229c51adb4775960fee0a", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_trilead_ssh2", "actual": "@org_jenkins_ci_trilead_ssh2//jar", "bind": "jar/org/jenkins_ci/trilead_ssh2"}) - callback({"artifact": "org.jenkins-ci:version-number:1.4", "lang": "java", "sha1": "5d0f2ea16514c0ec8de86c102ce61a7837e45eb8", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jenkins_ci_version_number", "actual": "@org_jenkins_ci_version_number//jar", "bind": "jar/org/jenkins_ci/version_number"}) - callback({"artifact": "org.jruby.ext.posix:jna-posix:1.0.3-jenkins-1", "lang": "java", "sha1": "fb1148cc8192614ec1418d414f7b6026cc0ec71b", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jruby_ext_posix_jna_posix", "actual": "@org_jruby_ext_posix_jna_posix//jar", "bind": "jar/org/jruby/ext/posix/jna_posix"}) - callback({"artifact": "org.jvnet.hudson:activation:1.1.1-hudson-1", "lang": "java", "sha1": "7957d80444223277f84676aabd5b0421b65888c4", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_jvnet_hudson_activation", "actual": "@org_jvnet_hudson_activation//jar", "bind": "jar/org/jvnet/hudson/activation"}) - callback({"artifact": "org.jvnet.hudson:commons-jelly-tags-define:1.0.1-hudson-20071021", "lang": "java", "sha1": "8b952d0e504ee505d234853119e5648441894234", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_jvnet_hudson_commons_jelly_tags_define", "actual": "@org_jvnet_hudson_commons_jelly_tags_define//jar", "bind": "jar/org/jvnet/hudson/commons_jelly_tags_define"}) - callback({"artifact": "org.jvnet.hudson:jtidy:4aug2000r7-dev-hudson-1", "lang": "java", "sha1": "ad8553d0acfa6e741d21d5b2c2beb737972ab7c7", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_jvnet_hudson_jtidy", "actual": "@org_jvnet_hudson_jtidy//jar", "bind": "jar/org/jvnet/hudson/jtidy"}) - callback({"artifact": "org.jvnet.hudson:xstream:1.4.7-jenkins-1", "lang": "java", "sha1": "161ed1603117c2d37b864f81a0d62f36cf7e958a", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jvnet_hudson_xstream", "actual": "@org_jvnet_hudson_xstream//jar", "bind": "jar/org/jvnet/hudson/xstream"}) - callback({"artifact": "org.jvnet.localizer:localizer:1.24", "lang": "java", "sha1": "e20e7668dbf36e8d354dab922b89adb6273b703f", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jvnet_localizer_localizer", "actual": "@org_jvnet_localizer_localizer//jar", "bind": "jar/org/jvnet/localizer/localizer"}) - callback({"artifact": "org.jvnet.robust-http-client:robust-http-client:1.2", "lang": "java", "sha1": "dee9fda92ad39a94a77ec6cf88300d4dd6db8a4d", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jvnet_robust_http_client_robust_http_client", "actual": 
"@org_jvnet_robust_http_client_robust_http_client//jar", "bind": "jar/org/jvnet/robust_http_client/robust_http_client"}) - callback({"artifact": "org.jvnet.winp:winp:1.25", "lang": "java", "sha1": "1c88889f80c0e03a7fb62c26b706d68813f8e657", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_jvnet_winp_winp", "actual": "@org_jvnet_winp_winp//jar", "bind": "jar/org/jvnet/winp/winp"}) - callback({"artifact": "org.jvnet:tiger-types:2.2", "lang": "java", "sha1": "7ddc6bbc8ca59be8879d3a943bf77517ec190f39", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_jvnet_tiger_types", "actual": "@org_jvnet_tiger_types//jar", "bind": "jar/org/jvnet/tiger_types"}) - callback({"artifact": "org.kohsuke.jinterop:j-interop:2.0.6-kohsuke-1", "lang": "java", "sha1": "b2e243227608c1424ab0084564dc71659d273007", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_kohsuke_jinterop_j_interop", "actual": "@org_kohsuke_jinterop_j_interop//jar", "bind": "jar/org/kohsuke/jinterop/j_interop"}) - callback({"artifact": "org.kohsuke.jinterop:j-interopdeps:2.0.6-kohsuke-1", "lang": "java", "sha1": "778400517a3419ce8c361498c194036534851736", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_kohsuke_jinterop_j_interopdeps", "actual": "@org_kohsuke_jinterop_j_interopdeps//jar", "bind": "jar/org/kohsuke/jinterop/j_interopdeps"}) - callback({"artifact": "org.kohsuke.stapler:json-lib:2.4-jenkins-2", "lang": "java", "sha1": "7f4f9016d8c8b316ecbe68afe7c26df06d301366", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_kohsuke_stapler_json_lib", "actual": "@org_kohsuke_stapler_json_lib//jar", "bind": "jar/org/kohsuke/stapler/json_lib"}) - callback({"artifact": "org.kohsuke.stapler:stapler-adjunct-codemirror:1.3", "lang": "java", "sha1": "fd1d45544400d2a4da6dfee9e60edd4ec3368806", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_kohsuke_stapler_stapler_adjunct_codemirror", "actual": "@org_kohsuke_stapler_stapler_adjunct_codemirror//jar", "bind": "jar/org/kohsuke/stapler/stapler_adjunct_codemirror"}) - callback({"artifact": "org.kohsuke.stapler:stapler-adjunct-timeline:1.5", "lang": "java", "sha1": "3fa806cbb94679ceab9c1ecaaf5fea8207390cb7", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_kohsuke_stapler_stapler_adjunct_timeline", "actual": "@org_kohsuke_stapler_stapler_adjunct_timeline//jar", "bind": "jar/org/kohsuke/stapler/stapler_adjunct_timeline"}) - callback({"artifact": "org.kohsuke.stapler:stapler-adjunct-zeroclipboard:1.3.5-1", "lang": "java", "sha1": "20184ea79888b55b6629e4479615b52f88b55173", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_kohsuke_stapler_stapler_adjunct_zeroclipboard", "actual": "@org_kohsuke_stapler_stapler_adjunct_zeroclipboard//jar", "bind": "jar/org/kohsuke/stapler/stapler_adjunct_zeroclipboard"}) - callback({"artifact": "org.kohsuke.stapler:stapler-groovy:1.250", "lang": "java", "sha1": "a8b910923b8eef79dd99c8aa6418d8ada0de4c86", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_kohsuke_stapler_stapler_groovy", "actual": "@org_kohsuke_stapler_stapler_groovy//jar", "bind": "jar/org/kohsuke/stapler/stapler_groovy"}) - callback({"artifact": "org.kohsuke.stapler:stapler-jelly:1.250", "lang": "java", "sha1": "6ac2202bf40e48a63623803697cd1801ee716273", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_kohsuke_stapler_stapler_jelly", "actual": "@org_kohsuke_stapler_stapler_jelly//jar", "bind": "jar/org/kohsuke/stapler/stapler_jelly"}) - 
callback({"artifact": "org.kohsuke.stapler:stapler-jrebel:1.250", "lang": "java", "sha1": "b6f10cb14cf3462f5a51d03a7a00337052355c8c", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_kohsuke_stapler_stapler_jrebel", "actual": "@org_kohsuke_stapler_stapler_jrebel//jar", "bind": "jar/org/kohsuke/stapler/stapler_jrebel"}) - callback({"artifact": "org.kohsuke.stapler:stapler:1.250", "lang": "java", "sha1": "d5afb2c46a2919d22e5bc3adccf5f09fbb0fb4e3", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_kohsuke_stapler_stapler", "actual": "@org_kohsuke_stapler_stapler//jar", "bind": "jar/org/kohsuke/stapler/stapler"}) - callback({"artifact": "org.kohsuke:access-modifier-annotation:1.11", "lang": "java", "sha1": "d1ca3a10d8be91d1525f51dbc6a3c7644e0fc6ea", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_kohsuke_access_modifier_annotation", "actual": "@org_kohsuke_access_modifier_annotation//jar", "bind": "jar/org/kohsuke/access_modifier_annotation"}) - callback({"artifact": "org.kohsuke:akuma:1.10", "lang": "java", "sha1": "0e2c6a1f79f17e3fab13332ab8e9b9016eeab0b6", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_kohsuke_akuma", "actual": "@org_kohsuke_akuma//jar", "bind": "jar/org/kohsuke/akuma"}) - callback({"artifact": "org.kohsuke:asm5:5.0.1", "lang": "java", "sha1": "71ab0620a41ed37f626b96d80c2a7c58165550df", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_kohsuke_asm5", "actual": "@org_kohsuke_asm5//jar", "bind": "jar/org/kohsuke/asm5"}) - callback({"artifact": "org.kohsuke:groovy-sandbox:1.10", "lang": "java", "sha1": "f4f33a2122cca74ce8beaaf6a3c5ab9c8644d977", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_kohsuke_groovy_sandbox", "actual": "@org_kohsuke_groovy_sandbox//jar", "bind": "jar/org/kohsuke/groovy_sandbox"}) - callback({"artifact": "org.kohsuke:libpam4j:1.8", "lang": "java", "sha1": "548d4a1177adad8242fe03a6930c335669d669ad", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_kohsuke_libpam4j", "actual": "@org_kohsuke_libpam4j//jar", "bind": "jar/org/kohsuke/libpam4j"}) - callback({"artifact": "org.kohsuke:libzfs:0.8", "lang": "java", "sha1": "5bb311276283921f7e1082c348c0253b17922dcc", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_kohsuke_libzfs", "actual": "@org_kohsuke_libzfs//jar", "bind": "jar/org/kohsuke/libzfs"}) - callback({"artifact": "org.kohsuke:trilead-putty-extension:1.2", "lang": "java", "sha1": "0f2f41517e1f73be8e319da27a69e0dc0c524bf6", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_kohsuke_trilead_putty_extension", "actual": "@org_kohsuke_trilead_putty_extension//jar", "bind": "jar/org/kohsuke/trilead_putty_extension"}) - callback({"artifact": "org.kohsuke:windows-package-checker:1.2", "lang": "java", "sha1": "86b5d2f9023633808d65dbcfdfd50dc5ad3ca31f", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_kohsuke_windows_package_checker", "actual": "@org_kohsuke_windows_package_checker//jar", "bind": "jar/org/kohsuke/windows_package_checker"}) - callback({"artifact": "org.mindrot:jbcrypt:0.4", "lang": "java", "sha1": "af7e61017f73abb18ac4e036954f9f28c6366c07", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_mindrot_jbcrypt", "actual": "@org_mindrot_jbcrypt//jar", "bind": "jar/org/mindrot/jbcrypt"}) - callback({"artifact": "org.ow2.asm:asm-analysis:5.0.3", "lang": "java", "sha1": "c7126aded0e8e13fed5f913559a0dd7b770a10f3", "repository": 
"https://repo.maven.apache.org/maven2/", "name": "org_ow2_asm_asm_analysis", "actual": "@org_ow2_asm_asm_analysis//jar", "bind": "jar/org/ow2/asm/asm_analysis"}) - callback({"artifact": "org.ow2.asm:asm-commons:5.0.3", "lang": "java", "sha1": "a7111830132c7f87d08fe48cb0ca07630f8cb91c", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_ow2_asm_asm_commons", "actual": "@org_ow2_asm_asm_commons//jar", "bind": "jar/org/ow2/asm/asm_commons"}) - callback({"artifact": "org.ow2.asm:asm-tree:5.0.3", "lang": "java", "sha1": "287749b48ba7162fb67c93a026d690b29f410bed", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_ow2_asm_asm_tree", "actual": "@org_ow2_asm_asm_tree//jar", "bind": "jar/org/ow2/asm/asm_tree"}) - callback({"artifact": "org.ow2.asm:asm-util:5.0.3", "lang": "java", "sha1": "1512e5571325854b05fb1efce1db75fcced54389", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_ow2_asm_asm_util", "actual": "@org_ow2_asm_asm_util//jar", "bind": "jar/org/ow2/asm/asm_util"}) - callback({"artifact": "org.ow2.asm:asm:5.0.3", "lang": "java", "sha1": "dcc2193db20e19e1feca8b1240dbbc4e190824fa", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_ow2_asm_asm", "actual": "@org_ow2_asm_asm//jar", "bind": "jar/org/ow2/asm/asm"}) - callback({"artifact": "org.samba.jcifs:jcifs:1.3.17-kohsuke-1", "lang": "java", "sha1": "6c9114dc4075277d829ea09e15d6ffab52f2d0c0", "repository": "http://repo.jenkins-ci.org/public/", "name": "org_samba_jcifs_jcifs", "actual": "@org_samba_jcifs_jcifs//jar", "bind": "jar/org/samba/jcifs/jcifs"}) - callback({"artifact": "org.slf4j:jcl-over-slf4j:1.7.7", "lang": "java", "sha1": "56003dcd0a31deea6391b9e2ef2f2dc90b205a92", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_slf4j_jcl_over_slf4j", "actual": "@org_slf4j_jcl_over_slf4j//jar", "bind": "jar/org/slf4j/jcl_over_slf4j"}) - callback({"artifact": "org.slf4j:log4j-over-slf4j:1.7.7", "lang": "java", "sha1": "d521cb26a9c4407caafcec302e7804b048b07cea", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_slf4j_log4j_over_slf4j", "actual": "@org_slf4j_log4j_over_slf4j//jar", "bind": "jar/org/slf4j/log4j_over_slf4j"}) - callback({"artifact": "org.slf4j:slf4j-api:1.7.7", "lang": "java", "sha1": "2b8019b6249bb05d81d3a3094e468753e2b21311", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_slf4j_slf4j_api", "actual": "@org_slf4j_slf4j_api//jar", "bind": "jar/org/slf4j/slf4j_api"}) - callback({"artifact": "org.springframework:spring-aop:2.5.6.SEC03", "lang": "java", "sha1": "6468695557500723a18630b712ce112ec58827c1", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_springframework_spring_aop", "actual": "@org_springframework_spring_aop//jar", "bind": "jar/org/springframework/spring_aop"}) - callback({"artifact": "org.springframework:spring-beans:2.5.6.SEC03", "lang": "java", "sha1": "79b2c86ff12c21b2420b4c46dca51f0e58762aae", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_springframework_spring_beans", "actual": "@org_springframework_spring_beans//jar", "bind": "jar/org/springframework/spring_beans"}) - callback({"artifact": "org.springframework:spring-context-support:2.5.6.SEC03", "lang": "java", "sha1": "edf496f4ce066edc6b212e0e5521cb11ff97d55e", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_springframework_spring_context_support", "actual": "@org_springframework_spring_context_support//jar", "bind": "jar/org/springframework/spring_context_support"}) - 
callback({"artifact": "org.springframework:spring-context:2.5.6.SEC03", "lang": "java", "sha1": "5f1c24b26308afedc48a90a1fe2ed334a6475921", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_springframework_spring_context", "actual": "@org_springframework_spring_context//jar", "bind": "jar/org/springframework/spring_context"}) - callback({"artifact": "org.springframework:spring-core:2.5.6.SEC03", "lang": "java", "sha1": "644a23805a7ea29903bde0ccc1cd1a8b5f0432d6", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_springframework_spring_core", "actual": "@org_springframework_spring_core//jar", "bind": "jar/org/springframework/spring_core"}) - callback({"artifact": "org.springframework:spring-dao:1.2.9", "lang": "java", "sha1": "6f90baf86fc833cac3c677a8f35d3333ed86baea", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_springframework_spring_dao", "actual": "@org_springframework_spring_dao//jar", "bind": "jar/org/springframework/spring_dao"}) - callback({"artifact": "org.springframework:spring-jdbc:1.2.9", "lang": "java", "sha1": "8a81d42995e61e2deac49c2bc75cfacbb28e7218", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_springframework_spring_jdbc", "actual": "@org_springframework_spring_jdbc//jar", "bind": "jar/org/springframework/spring_jdbc"}) - callback({"artifact": "org.springframework:spring-web:2.5.6.SEC03", "lang": "java", "sha1": "699f171339f20126f1d09dde2dd17d6db2943fce", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_springframework_spring_web", "actual": "@org_springframework_spring_web//jar", "bind": "jar/org/springframework/spring_web"}) - callback({"artifact": "org.springframework:spring-webmvc:2.5.6.SEC03", "lang": "java", "sha1": "275c5ac6ade12819f49e984c8e06b114a4e23458", "repository": "https://repo.maven.apache.org/maven2/", "name": "org_springframework_spring_webmvc", "actual": "@org_springframework_spring_webmvc//jar", "bind": "jar/org/springframework/spring_webmvc"}) - callback({"artifact": "oro:oro:2.0.8", "lang": "java", "sha1": "5592374f834645c4ae250f4c9fbb314c9369d698", "repository": "https://repo.maven.apache.org/maven2/", "name": "oro_oro", "actual": "@oro_oro//jar", "bind": "jar/oro/oro"}) - callback({"artifact": "relaxngDatatype:relaxngDatatype:20020414", "lang": "java", "sha1": "de7952cecd05b65e0e4370cc93fc03035175eef5", "repository": "https://repo.maven.apache.org/maven2/", "name": "relaxngDatatype_relaxngDatatype", "actual": "@relaxngDatatype_relaxngDatatype//jar", "bind": "jar/relaxngDatatype/relaxngDatatype"}) - callback({"artifact": "stax:stax-api:1.0.1", "lang": "java", "sha1": "49c100caf72d658aca8e58bd74a4ba90fa2b0d70", "repository": "https://repo.maven.apache.org/maven2/", "name": "stax_stax_api", "actual": "@stax_stax_api//jar", "bind": "jar/stax/stax_api"}) - callback({"artifact": "xpp3:xpp3:1.1.4c", "lang": "java", "sha1": "9b988ea84b9e4e9f1874e390ce099b8ac12cfff5", "repository": "https://repo.maven.apache.org/maven2/", "name": "xpp3_xpp3", "actual": "@xpp3_xpp3//jar", "bind": "jar/xpp3/xpp3"}) diff --git a/starlark/src/syntax/testcases/wrappers.bzl b/starlark/src/syntax/testcases/wrappers.bzl deleted file mode 100644 index 8469d530..00000000 --- a/starlark/src/syntax/testcases/wrappers.bzl +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright 2014 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load("@io_bazel_rules_go//go/private:rules/binary.bzl", "go_binary") -load("@io_bazel_rules_go//go/private:rules/library.bzl", "go_library") -load("@io_bazel_rules_go//go/private:rules/test.bzl", "go_test") -load("@io_bazel_rules_go//go/private:rules/cgo.bzl", "setup_cgo_library") - -def go_library_macro(name, srcs=None, embed=[], cgo=False, cdeps=[], copts=[], clinkopts=[], library=None, **kwargs): - """See go/core.rst#go_library for full documentation.""" - if library: - #TODO: print("DEPRECATED: {}//{}:{} : the library attribute is deprecated. Please migrate to embed.".format(native.repository_name(), native.package_name(), name)) - embed = embed + [library] - - if cgo: - cgo_embed = setup_cgo_library( - name = name, - srcs = srcs, - cdeps = cdeps, - copts = copts, - clinkopts = clinkopts, - ) - embed = embed + [cgo_embed] - srcs = [] - go_library( - name = name, - srcs = srcs, - embed = embed, - **kwargs - ) - -def go_binary_macro(name, srcs=None, embed=[], cgo=False, cdeps=[], copts=[], clinkopts=[], library=None, **kwargs): - """See go/core.rst#go_binary for full documentation.""" - if library: - #TODO: print("DEPRECATED: {}//{}:{} : the library attribute is deprecated. Please migrate to embed.".format(native.repository_name(), native.package_name(), name)) - embed = embed + [library] - - if cgo: - cgo_embed = setup_cgo_library( - name = name, - srcs = srcs, - cdeps = cdeps, - copts = copts, - clinkopts = clinkopts, - ) - embed = embed + [cgo_embed] - srcs = [] - go_binary( - name = name, - srcs = srcs, - embed = embed, - **kwargs - ) - -def go_test_macro(name, srcs=None, deps=None, importpath="", library=None, embed=[], gc_goopts=[], cgo=False, cdeps=[], copts=[], clinkopts=[], **kwargs): - """See go/core.rst#go_test for full documentation.""" - if library: - #TODO: print("DEPRECATED: {}//{}:{} : the library attribute is deprecated. Please migrate to embed.".format(native.repository_name(), native.package_name(), name)) - embed = embed + [library] - - library_name = name + "~library~" - go_library_macro( - name = library_name, - visibility = ["//visibility:private"], - srcs = srcs, - deps = deps, - importpath = importpath, - embed = embed, - gc_goopts = gc_goopts, - testonly = True, - tags = ["manual"], - cgo = False, - cdeps = cdeps, - copts = copts, - clinkopts = clinkopts, - ) - go_test( - name = name, - library = library_name, - importpath = importpath, - gc_goopts = gc_goopts, - **kwargs - ) diff --git a/starlark/src/syntax/testcases/xcode_configure.bzl b/starlark/src/syntax/testcases/xcode_configure.bzl deleted file mode 100644 index 960754bb..00000000 --- a/starlark/src/syntax/testcases/xcode_configure.bzl +++ /dev/null @@ -1,237 +0,0 @@ -# Copyright 2016 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Repository rule to generate host xcode_config and xcode_version targets. - - The xcode_config and xcode_version targets are configured for xcodes/SDKs - installed on the local host. -""" - - -def _search_string(fullstring, prefix, suffix): - """Returns the substring between two given substrings of a larger string. - - Args: - fullstring: The larger string to search. - prefix: The substring that should occur directly before the returned string. - suffix: The substring that should occur direclty after the returned string. - Returns: - A string occurring in fullstring exactly prefixed by prefix, and exactly - terminated by suffix. For example, ("hello goodbye", "lo ", " bye") will - return "good". If there is no such string, returns the empty string. - """ - - prefix_index = fullstring.find(prefix) - if (prefix_index < 0): - return "" - result_start_index = prefix_index + len(prefix) - suffix_index = fullstring.find(suffix, result_start_index) - if (suffix_index < 0): - return "" - return fullstring[result_start_index:suffix_index] - - -def _search_sdk_output(output, sdkname): - """Returns the sdk version given xcodebuild stdout and an sdkname.""" - return _search_string(output, "(%s" % sdkname, ")") - - -def _xcode_version_output(repository_ctx, name, version, aliases, developer_dir): - """Returns a string containing an xcode_version build target.""" - build_contents = "" - decorated_aliases = [] - error_msg = "" - for alias in aliases: - decorated_aliases.append("'%s'" % alias) - xcodebuild_result = repository_ctx.execute(["xcrun", "xcodebuild", "-version", "-sdk"], 30, - {"DEVELOPER_DIR": developer_dir}) - if (xcodebuild_result.return_code != 0): - error_msg = ( - "Invoking xcodebuild failed, developer dir: {devdir} ," + - "return code {code}, stderr: {err}, stdout: {out}").format( - devdir=developer_dir, - code=xcodebuild_result.return_code, - err=xcodebuild_result.stderr, - out=xcodebuild_result.stdout) - ios_sdk_version = _search_sdk_output(xcodebuild_result.stdout, "iphoneos") - tvos_sdk_version = _search_sdk_output(xcodebuild_result.stdout, "appletvos") - macos_sdk_version = _search_sdk_output(xcodebuild_result.stdout, "macosx") - watchos_sdk_version = _search_sdk_output(xcodebuild_result.stdout, "watchos") - build_contents += "xcode_version(\n name = '%s'," % name - build_contents += "\n version = '%s'," % version - if aliases: - build_contents += "\n aliases = [%s]," % " ,".join(decorated_aliases) - if ios_sdk_version: - build_contents += "\n default_ios_sdk_version = '%s'," % ios_sdk_version - if tvos_sdk_version: - build_contents += "\n default_tvos_sdk_version = '%s'," % tvos_sdk_version - if macos_sdk_version: - build_contents += "\n default_macos_sdk_version = '%s'," % macos_sdk_version - if watchos_sdk_version: - build_contents += "\n default_watchos_sdk_version = '%s'," % watchos_sdk_version - build_contents += "\n)\n" - if error_msg: - build_contents += "\n# Error: " + error_msg.replace("\n", " ") + "\n" - print(error_msg) - return build_contents - - -VERSION_CONFIG_STUB = "xcode_config(name = 'host_xcodes')" - - -def run_xcode_locator(repository_ctx, 
xcode_locator_src_label): - """Generates xcode-locator from source and runs it. - - Builds xcode-locator in the current repository directory. - Returns the standard output of running xcode-locator with -v, which will - return information about locally installed Xcode toolchains and the versions - they are associated with. - - This should only be invoked on a darwin OS, as xcode-locator cannot be built - otherwise. - - Args: - repository_ctx: The repository context. - xcode_locator_src_label: The label of the source file for xcode-locator. - Returns: - A 2-tuple containing: - output: A list representing installed xcode toolchain information. Each - element of the list is a struct containing information for one installed - toolchain. This is an empty list if there was an error building or - running xcode-locator. - err: An error string describing the error that occurred when attempting - to build and run xcode-locator, or None if the run was successful. - """ - xcodeloc_src_path = str(repository_ctx.path(xcode_locator_src_label)) - xcrun_result = repository_ctx.execute(["env", "-i", "xcrun", "clang", "-fobjc-arc", "-framework", - "CoreServices", "-framework", "Foundation", "-o", - "xcode-locator-bin", xcodeloc_src_path], 30) - - if (xcrun_result.return_code != 0): - suggestion = "" - if "Agreeing to the Xcode/iOS license" in xcrun_result.stderr: - suggestion = ("(You may need to sign the xcode license." + - " Try running 'sudo xcodebuild -license')") - error_msg = ( - "Generating xcode-locator-bin failed. {suggestion} " + - "return code {code}, stderr: {err}, stdout: {out}").format( - suggestion=suggestion, - code=xcrun_result.return_code, - err=xcrun_result.stderr, - out=xcrun_result.stdout) - return ([], error_msg.replace("\n", " ")) - - xcode_locator_result = repository_ctx.execute(["./xcode-locator-bin", "-v"], 30) - if (xcode_locator_result.return_code != 0): - error_msg = ( - "Invoking xcode-locator failed, " + - "return code {code}, stderr: {err}, stdout: {out}").format( - code=xcode_locator_result.return_code, - err=xcode_locator_result.stderr, - out=xcode_locator_result.stdout) - return ([], error_msg.replace("\n", " ")) - xcode_toolchains = [] - # xcode_dump is comprised of newlines with different installed xcode versions, - # each line of the form ::. - xcode_dump = xcode_locator_result.stdout - for xcodeversion in xcode_dump.split("\n"): - if ":" in xcodeversion: - infosplit = xcodeversion.split(":") - toolchain = struct( - version = infosplit[0], - aliases = infosplit[1].split(","), - developer_dir = infosplit[2] - ) - xcode_toolchains.append(toolchain) - return (xcode_toolchains, None) - - -def _darwin_build_file(repository_ctx): - """Evaluates local system state to create xcode_config and xcode_version targets.""" - xcodebuild_result = repository_ctx.execute(["env", "-i", "xcrun", "xcodebuild", "-version"], 30) - # "xcodebuild -version" failing may be indicative of no versions of xcode - # installed, which is an acceptable machine configuration to have for using - # bazel. Thus no print warning should be emitted here. 
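The `_search_string` helper removed above drives all of the SDK-version parsing in this test case: it returns the substring of `fullstring` that sits strictly between `prefix` and `suffix`, or an empty string when either marker is missing. A rough standalone Rust restatement of that logic (illustrative only; `search_string` is not an API of this crate):

```rust
/// Substring of `full` strictly between `prefix` and `suffix`,
/// or "" when either marker is absent, mirroring _search_string above.
fn search_string(full: &str, prefix: &str, suffix: &str) -> String {
    let start = match full.find(prefix) {
        Some(i) => i + prefix.len(),
        None => return String::new(),
    };
    match full[start..].find(suffix) {
        Some(len) => full[start..start + len].to_string(),
        None => String::new(),
    }
}

fn main() {
    // Similar to the docstring example: the text between "lo " and "bye".
    assert_eq!(search_string("hello goodbye", "lo ", "bye"), "good");
    // Missing prefix or suffix yields an empty string, as in the Starlark version.
    assert_eq!(search_string("hello goodbye", "xyz", "bye"), "");
}
```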
- if (xcodebuild_result.return_code != 0): - error_msg = ( - "Running xcodebuild -version failed, " + - "return code {code}, stderr: {err}, stdout: {out}").format( - code=xcodebuild_result.return_code, - err=xcodebuild_result.stderr, - out=xcodebuild_result.stdout) - return VERSION_CONFIG_STUB + "\n# Error: " + error_msg.replace("\n", " ") + "\n" - - (toolchains, xcodeloc_err) = run_xcode_locator(repository_ctx, - Label(repository_ctx.attr.xcode_locator)) - - if xcodeloc_err: - return VERSION_CONFIG_STUB + "\n# Error: " + xcodeloc_err + "\n" - - default_xcode_version = _search_string(xcodebuild_result.stdout, "Xcode ", "\n") - default_xcode_target = "" - target_names = [] - buildcontents = "" - - for toolchain in toolchains: - version = toolchain.version - aliases = toolchain.aliases - developer_dir = toolchain.developer_dir - target_name = "version%s" % version.replace(".", "_") - buildcontents += _xcode_version_output(repository_ctx, target_name, version, aliases, developer_dir) - target_names.append("':%s'" % target_name) - if (version == default_xcode_version or default_xcode_version in aliases): - default_xcode_target = target_name - buildcontents += "xcode_config(name = 'host_xcodes'," - if target_names: - buildcontents += "\n versions = [%s]," % ", ".join(target_names) - if default_xcode_target: - buildcontents += "\n default = ':%s'," % default_xcode_target - buildcontents += "\n)\n" - return buildcontents - - -def _impl(repository_ctx): - """Implementation for the local_config_xcode repository rule. - - Generates a BUILD file containing a root xcode_config target named 'host_xcodes', - which points to an xcode_version target for each version of xcode installed on - the local host machine. If no versions of xcode are present on the machine - (for instance, if this is a non-darwin OS), creates a stub target. - - Args: - repository_ctx: The repository context. - """ - - os_name = repository_ctx.os.name.lower() - build_contents = "package(default_visibility = ['//visibility:public'])\n\n" - if (os_name.startswith("mac os")): - build_contents += _darwin_build_file(repository_ctx) - else: - build_contents += VERSION_CONFIG_STUB - repository_ctx.file("BUILD", build_contents) - -xcode_autoconf = repository_rule( - implementation=_impl, - local=True, - attrs={ - "xcode_locator": attr.string(), - } -) - - -def xcode_configure(xcode_locator_label): - """Generates a repository containing host xcode version information.""" - xcode_autoconf( - name="local_config_xcode", - xcode_locator=xcode_locator_label - ) diff --git a/starlark/src/syntax/testcases/xcode_version_flag.bzl b/starlark/src/syntax/testcases/xcode_version_flag.bzl deleted file mode 100644 index 4630e6e9..00000000 --- a/starlark/src/syntax/testcases/xcode_version_flag.bzl +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
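`_darwin_build_file` above picks the default `xcode_version` target by comparing each discovered toolchain's version, and its aliases, against the version reported by `xcodebuild -version`. A minimal sketch of that selection rule (the `Toolchain` type here is hypothetical; the original keeps scanning and lets a later match win, while this sketch simply takes the first):

```rust
struct Toolchain {
    version: String,
    aliases: Vec<String>,
}

/// A toolchain is the default when its version equals the system default
/// version, or when that default appears among its aliases.
fn pick_default<'a>(toolchains: &'a [Toolchain], default_version: &str) -> Option<&'a Toolchain> {
    toolchains.iter().find(|t| {
        t.version == default_version || t.aliases.iter().any(|a| a == default_version)
    })
}

fn main() {
    let toolchains = vec![
        Toolchain { version: "9.2".into(), aliases: vec!["9".into()] },
        Toolchain { version: "10.1".into(), aliases: vec!["10".into()] },
    ];
    assert_eq!(pick_default(&toolchains, "10").unwrap().version, "10.1");
}
```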
- -"""Rules that allows select() to differentiate between Apple OS versions.""" - -def _strip_version(version): - """Strip trailing characters that aren't digits or '.' from version names. - - Some OS versions look like "9.0gm", which is not useful for select() - statements. Thus, we strip the trailing "gm" part. - - Args: - version: the version string - - Returns: - The version with trailing letters stripped. - """ - result = "" - for ch in str(version): - if not ch.isdigit() and ch != ".": - break - - result += ch - - return result - - -def _xcode_version_flag_impl(ctx): - """A rule that allows select() to differentiate between Xcode versions.""" - xcode_config = ctx.attr._xcode_config[apple_common.XcodeVersionConfig] - return struct(providers = [ - config_common.FeatureFlagInfo(value = _strip_version( - xcode_config.xcode_version()))]) - - -def _ios_sdk_version_flag_impl(ctx): - """A rule that allows select() to select based on the iOS SDK version.""" - xcode_config = ctx.attr._xcode_config[apple_common.XcodeVersionConfig] - - return struct(providers = [ - config_common.FeatureFlagInfo(value = _strip_version( - xcode_config.sdk_version_for_platform( - apple_common.platform.ios_device)))]) - - -def _tvos_sdk_version_flag_impl(ctx): - """A rule that allows select() to select based on the tvOS SDK version.""" - xcode_config = ctx.attr._xcode_config[apple_common.XcodeVersionConfig] - - return struct(providers = [ - config_common.FeatureFlagInfo(value = _strip_version( - xcode_config.sdk_version_for_platform( - apple_common.platform.tvos_device)))]) - - -def _watchos_sdk_version_flag_impl(ctx): - """A rule that allows select() to select based on the watchOS SDK version.""" - xcode_config = ctx.attr._xcode_config[apple_common.XcodeVersionConfig] - - return struct(providers = [ - config_common.FeatureFlagInfo(value = _strip_version( - xcode_config.sdk_version_for_platform( - apple_common.platform.watchos_device)))]) - - -def _macos_sdk_version_flag_impl(ctx): - """A rule that allows select() to select based on the macOS SDK version.""" - xcode_config = ctx.attr._xcode_config[apple_common.XcodeVersionConfig] - - return struct(providers = [ - config_common.FeatureFlagInfo(value = _strip_version( - xcode_config.sdk_version_for_platform( - apple_common.platform.macos)))]) - - -xcode_version_flag = rule( - implementation = _xcode_version_flag_impl, - attrs = { - "_xcode_config": attr.label(default=Label("//tools/osx:current_xcode_config")), - }) - -ios_sdk_version_flag = rule( - implementation = _ios_sdk_version_flag_impl, - attrs = { - "_xcode_config": attr.label(default=Label("//tools/osx:current_xcode_config")), - }) - -tvos_sdk_version_flag = rule( - implementation = _tvos_sdk_version_flag_impl, - attrs = { - "_xcode_config": attr.label(default=Label("//tools/osx:current_xcode_config")), - }) - -watchos_sdk_version_flag = rule( - implementation = _watchos_sdk_version_flag_impl, - attrs = { - "_xcode_config": attr.label(default=Label("//tools/osx:current_xcode_config")), - }) - -macos_sdk_version_flag = rule( - implementation = _macos_sdk_version_flag_impl, - attrs = { - "_xcode_config": attr.label(default=Label("//tools/osx:current_xcode_config")), - }) diff --git a/starlark/src/syntax/testcases/zip.bzl b/starlark/src/syntax/testcases/zip.bzl deleted file mode 100644 index 03e20973..00000000 --- a/starlark/src/syntax/testcases/zip.bzl +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2017 The Bazel Authors. All rights reserved. 
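`_strip_version` in the xcode_version_flag.bzl test case above trims trailing non-numeric characters so that values like "9.0gm" can be used in `select()`; the loop stops at the first character that is neither a digit nor a dot. The same idea in a few lines of standalone Rust (illustrative only, not part of the crate):

```rust
/// Keep only the leading run of digits and dots, e.g. "9.0gm" -> "9.0".
fn strip_version(version: &str) -> String {
    version
        .chars()
        .take_while(|c| c.is_ascii_digit() || *c == '.')
        .collect()
}

fn main() {
    assert_eq!(strip_version("9.0gm"), "9.0");
    assert_eq!(strip_version("11.4"), "11.4");
}
```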
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Functions for producing the gzip of an artifact.""" - -def gzip(ctx, artifact): - """Create an action to compute the gzipped artifact.""" - out = ctx.new_file(artifact.basename + ".gz") - ctx.action( - command = 'gzip -n < %s > %s' % (artifact.path, out.path), - inputs = [artifact], - outputs = [out], - mnemonic = "GZIP") - return out - -def gunzip(ctx, artifact): - """Create an action to compute the gunzipped artifact.""" - out = ctx.new_file(artifact.basename + ".nogz") - ctx.action( - command = 'gunzip < %s > %s' % (artifact.path, out.path), - inputs = [artifact], - outputs = [out], - mnemonic = "GUNZIP") - return out - -tools = {} diff --git a/starlark/src/syntax/testutil.rs b/starlark/src/syntax/testutil.rs deleted file mode 100644 index dcac295d..00000000 --- a/starlark/src/syntax/testutil.rs +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -macro_rules! assert_diagnostics { - ($e:expr, $m:expr) => { - if !$e.is_empty() { - let nb_errors = $e.len(); - let locked = $m.lock(); - let codemap = locked.unwrap(); - let mut emitter = codemap_diagnostic::Emitter::stderr( - codemap_diagnostic::ColorConfig::Always, - Some(&codemap), - ); - emitter.emit(&$e); - panic!("There was {} parse errors", nb_errors); - } - }; -} diff --git a/starlark/src/testutil.rs b/starlark/src/testutil.rs deleted file mode 100644 index bebab9c9..00000000 --- a/starlark/src/testutil.rs +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Macro to test starlark code execution -use crate::environment; -use crate::environment::TypeValues; -use crate::eval; -use crate::syntax::dialect::Dialect; -use codemap::CodeMap; -use codemap_diagnostic::Diagnostic; -use std::sync; - -/// Execute a starlark snippet with the passed environment. 
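The `gzip`/`gunzip` helpers in zip.bzl above just shell out to `gzip -n < in > out`; the `-n` flag omits the embedded file name and timestamp so the compressed output is reproducible across builds. A standalone sketch of the same command, assuming a POSIX shell and `gzip` on `PATH` (the `gzip_file` helper is hypothetical, not part of the crate):

```rust
use std::process::Command;

/// Rough equivalent of the gzip() action above: compress `input` into
/// `output` with `gzip -n` so the result carries no name or timestamp.
fn gzip_file(input: &str, output: &str) -> std::io::Result<std::process::ExitStatus> {
    Command::new("sh")
        .arg("-c")
        .arg(format!("gzip -n < {} > {}", input, output))
        .status()
}

fn main() -> std::io::Result<()> {
    std::fs::write("example.txt", b"hello")?;
    let status = gzip_file("example.txt", "example.txt.gz")?;
    assert!(status.success());
    Ok(())
}
```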
-pub fn starlark_no_diagnostic( - env: &mut environment::Environment, - snippet: &str, - type_values: &TypeValues, -) -> Result { - let map = sync::Arc::new(sync::Mutex::new(CodeMap::new())); - Ok(eval::noload::eval(&map, "", snippet, Dialect::Bzl, env, type_values)?.to_bool()) -} - -/// A simple macro to execute a Starlark snippet and fails if the last statement is false. -macro_rules! starlark_ok_fn { - ($fn:path, $t:expr) => { - assert!($fn($t).unwrap()); - }; - ($fn:path, $t1:expr, $t2:expr) => { - assert!($fn(&format!("{}{}", $t1, $t2)).unwrap()); - }; -} - -/// Test that the execution of a starlark code raise an error -macro_rules! starlark_fail_fn { - ($fn:path, $t:expr) => { - assert!($fn($t).is_err()); - }; - ($fn:path, $t:expr, $c:expr) => { - assert_eq!($c, $fn($t).err().unwrap().code.unwrap()); - }; - ($fn:path, $t1:expr, $t2:expr, $c:expr) => { - assert_eq!( - $c, - $fn(&format!("{}{}", $t1, $t2)).err().unwrap().code.unwrap() - ); - }; -} - -/// A simple macro to execute a Starlark snippet and fails if the last statement is false. -macro_rules! starlark_ok { - ($($t:expr),+) => (starlark_ok_fn!($crate::stdlib::starlark_default, $($t),+)) -} - -/// Test that the execution of a starlark code raise an error -macro_rules! starlark_fail { - ($($t:expr),+) => (starlark_fail_fn!($crate::stdlib::tests::starlark_default_fail, $($t),+)) -} diff --git a/starlark/src/values/boolean.rs b/starlark/src/values/boolean.rs deleted file mode 100644 index f58df16d..00000000 --- a/starlark/src/values/boolean.rs +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Define the bool type for Starlark. 
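The impl that follows maps Starlark booleans onto Rust `bool`: the repr is `True`/`False`, `int()` gives 1/0, and the hash is just that integer. A standalone restatement of those conversions (plain functions for illustration, not the crate's API):

```rust
// Conversions provided by the bool TypedValue impl below, restated standalone.
fn starlark_repr(b: bool) -> &'static str {
    if b { "True" } else { "False" }
}

fn starlark_int(b: bool) -> i64 {
    if b { 1 } else { 0 }
}

fn starlark_hash(b: bool) -> u64 {
    starlark_int(b) as u64
}

fn main() {
    assert_eq!(starlark_repr(true), "True");
    assert_eq!(starlark_int(false), 0);
    assert_eq!(starlark_hash(true), 1);
}
```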
- -use crate::values::error::ValueError; -use crate::values::frozen::FrozenOnCreation; -use crate::values::*; -use std::cmp::Ordering; -use std::fmt; -use std::iter; - -impl From for Value { - fn from(b: bool) -> Self { - Value::new(b) - } -} - -/// Define the bool type -impl TypedValue for bool { - type Holder = Immutable; - const TYPE: &'static str = "bool"; - - const INLINE: bool = true; - - fn new_value(self) -> Value { - Value(ValueInner::Bool(self)) - } - - fn to_repr_impl(&self, buf: &mut String) -> fmt::Result { - write!(buf, "{}", if *self { "True" } else { "False" }) - } - fn to_int(&self) -> Result { - Ok(if *self { 1 } else { 0 }) - } - fn to_bool(&self) -> bool { - *self - } - fn get_hash(&self) -> Result { - Ok(self.to_int().unwrap() as u64) - } - - fn values_for_descendant_check_and_freeze<'a>( - &'a self, - ) -> Box + 'a> { - Box::new(iter::empty()) - } - - fn equals(&self, other: &bool) -> Result { - Ok(self == other) - } - fn compare(&self, other: &bool) -> Result { - Ok(self.cmp(other)) - } -} - -impl FrozenOnCreation for bool {} diff --git a/starlark/src/values/cell/error.rs b/starlark/src/values/cell/error.rs deleted file mode 100644 index c6274563..00000000 --- a/starlark/src/values/cell/error.rs +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Cell-related errors. - -use std::fmt; - -/// Error when borrow failed. -#[derive(Debug, Clone, PartialEq)] -pub(crate) enum ObjectBorrowError { - /// Can only fail if object is mutably borrowed - BorrowedMut, -} - -/// Object cannot be mutably borrowed. -#[derive(Debug, Clone, PartialEq)] -pub enum ObjectBorrowMutError { - /// Object is immutable - Immutable, - /// Object is frozen - Frozen, - /// Object is frozen for iteration - FrozenForIteration, - /// Object is already mutably borrowed - BorrowedMut, - /// Object is borrowed - Borrowed, -} - -impl fmt::Display for ObjectBorrowMutError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - ObjectBorrowMutError::Immutable => write!(f, "Immutable"), - ObjectBorrowMutError::Frozen => write!(f, "Frozen"), - ObjectBorrowMutError::FrozenForIteration => { - write!(f, "Cannot mutate an iterable while iterating") - } - ObjectBorrowMutError::BorrowedMut => write!(f, "Borrowed mutably"), - ObjectBorrowMutError::Borrowed => write!(f, "Borrowed"), - } - } -} diff --git a/starlark/src/values/cell/header.rs b/starlark/src/values/cell/header.rs deleted file mode 100644 index 99022053..00000000 --- a/starlark/src/values/cell/header.rs +++ /dev/null @@ -1,346 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Object header - -use crate::values::cell::error::ObjectBorrowError; -use crate::values::cell::error::ObjectBorrowMutError; -use std::cell::Cell; -use std::fmt; - -/// Object mutability state. -#[derive(PartialEq, Debug)] -enum ObjectState { - /// Object type is immutable, not yet frozen (e. g. just constructed tuple) - Immutable, - /// Object type is immutable, and object is frozen - ImmutableFrozen, - /// Object type is mutable, but object is frozen - MutableFrozen, - /// Borrowed mutably - BorrowedMut, - /// Borrowed - // borrowed count, borrowed for iteration - Borrowed(usize, bool), -} - -const LARGEST_PO2: usize = (usize::max_value() >> 1) + 1; -const IMMUTABLE_FLAG: usize = LARGEST_PO2 >> 0; -const FROZEN_FLAG: usize = LARGEST_PO2 >> 1; -const FOR_ITER_FLAG: usize = LARGEST_PO2 >> 2; -const BORROWED_MUT: usize = FOR_ITER_FLAG - 1; - -impl ObjectState { - fn encode(&self) -> usize { - match self { - ObjectState::Immutable => IMMUTABLE_FLAG, - ObjectState::ImmutableFrozen => IMMUTABLE_FLAG | FROZEN_FLAG, - ObjectState::MutableFrozen => FROZEN_FLAG, - ObjectState::BorrowedMut => BORROWED_MUT, - ObjectState::Borrowed(count, for_iter) => { - assert!(*count < BORROWED_MUT); - let mut r = *count; - if *for_iter { - debug_assert!(*count != 0); - r = r | FOR_ITER_FLAG; - } - r - } - } - } - - fn decode(state: usize) -> ObjectState { - if state == IMMUTABLE_FLAG { - ObjectState::Immutable - } else if state == FROZEN_FLAG { - ObjectState::MutableFrozen - } else if state == FROZEN_FLAG | IMMUTABLE_FLAG { - ObjectState::ImmutableFrozen - } else if state == BORROWED_MUT { - ObjectState::BorrowedMut - } else { - let for_iter = (state & FOR_ITER_FLAG) != 0; - let count = state & !FOR_ITER_FLAG; - debug_assert!(count < BORROWED_MUT); - if for_iter { - debug_assert!(count != 0); - } - ObjectState::Borrowed(count, for_iter) - } - } -} - -pub(crate) struct ObjectBorrowRef<'b> { - header: &'b ObjectHeader, - was_for_iter: bool, -} - -impl ObjectBorrowRef<'_> { - /// Immutable frozen object borrow. - pub fn immutable_frozen() -> ObjectBorrowRef<'static> { - ObjectBorrowRef { - // Note returned object is no-op on drop, - // so that's fine to use a reference to a static variable. - header: ObjectHeader::immutable_frozen_static(), - was_for_iter: false, - } - } -} - -pub(crate) struct ObjectBorrowRefMut<'b> { - header: &'b ObjectHeader, -} - -impl Drop for ObjectBorrowRef<'_> { - fn drop(&mut self) { - // If `` is immutable, it's possible to implement - // a micropimization here: static no-op. - self.header.unborrow(self.was_for_iter); - } -} - -impl Drop for ObjectBorrowRefMut<'_> { - fn drop(&mut self) { - self.header.unborrow_mut(); - } -} - -impl fmt::Debug for ObjectBorrowRef<'_> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("ObjectBorrowRef").field("_", &()).finish() - } -} - -impl fmt::Debug for ObjectBorrowRefMut<'_> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("ObjectBorrowRefMut") - .field("_", &()) - .finish() - } -} - -/// Struct to declare unsync `ObjectHeader` in static non-mut field. 
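The `encode`/`decode` pair above packs the whole borrow state into a single `usize`: the two top bits flag "immutable type" and "frozen", the next bit flags "borrowed for iteration", everything below that is the shared-borrow count, and the all-bits-below-that sentinel means "borrowed mutably". A small self-contained illustration of that layout (constants mirror the deleted header.rs, assuming a 64-bit `usize`):

```rust
const LARGEST_PO2: usize = (usize::MAX >> 1) + 1;
const IMMUTABLE_FLAG: usize = LARGEST_PO2;      // object type is immutable
const FROZEN_FLAG: usize = LARGEST_PO2 >> 1;    // object is frozen
const FOR_ITER_FLAG: usize = LARGEST_PO2 >> 2;  // a shared borrow was taken for iteration
const BORROWED_MUT: usize = FOR_ITER_FLAG - 1;  // sentinel: exclusively borrowed

fn main() {
    // Borrowed(count = 2, for_iter = true) is stored as the count with the
    // FOR_ITER bit or-ed in; decoding just masks the bit back out.
    let encoded = 2usize | FOR_ITER_FLAG;
    assert!(encoded & FOR_ITER_FLAG != 0);
    assert_eq!(encoded & !FOR_ITER_FLAG, 2);

    // Legal borrow counts stay strictly below BORROWED_MUT, so the flag-only
    // states (Immutable, Frozen, BorrowedMut) can never collide with a count.
    assert!(2 < BORROWED_MUT);
    assert!(IMMUTABLE_FLAG != FROZEN_FLAG && FROZEN_FLAG != BORROWED_MUT);
}
```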
-struct ObjectHeaderInStaticField(ObjectHeader); -unsafe impl Sync for ObjectHeaderInStaticField {} -static IMMUTABLE_FROZEN_OBJECT_HEADER: ObjectHeaderInStaticField = - ObjectHeaderInStaticField(ObjectHeader { - state: Cell::new(IMMUTABLE_FLAG | FROZEN_FLAG), - }); - -#[derive(Clone)] -pub(crate) struct ObjectHeader { - state: Cell, -} - -impl ObjectHeader { - fn get_decoded(&self) -> ObjectState { - ObjectState::decode(self.state.get()) - } - - fn set_decoded(&self, state: ObjectState) { - self.state.set(state.encode()); - } - - /// True iff object was mutable and now is frozen. - /// (Return `false` for immutable). - pub fn is_mutable_frozen(&self) -> bool { - match self.get_decoded() { - ObjectState::MutableFrozen => true, - _ => false, - } - } - - /// True iff the object is frozen. - pub fn is_frozen(&self) -> bool { - match self.get_decoded() { - ObjectState::MutableFrozen | ObjectState::ImmutableFrozen => true, - _ => false, - } - } - - /// Create new object header for mutable object - pub fn mutable() -> ObjectHeader { - ObjectHeader { - state: Cell::new(ObjectState::Borrowed(0, false).encode()), - } - } - - /// Create new object header for immutable object - pub fn immutable() -> ObjectHeader { - ObjectHeader { - state: Cell::new(ObjectState::Immutable.encode()), - } - } - - /// Create new object header for immutable frozen object - pub fn immutable_frozen() -> ObjectHeader { - ObjectHeader { - state: Cell::new(ObjectState::ImmutableFrozen.encode()), - } - } - - /// Get a header pointer for immutable object. - /// Note all operations like `freeze` or `borrow` do not change - /// bits of the state, so it's safe to pass a pointer to global immutable value. - pub fn immutable_frozen_static() -> &'static ObjectHeader { - &IMMUTABLE_FROZEN_OBJECT_HEADER.0 - } - - /// Freeze the object. - pub fn freeze(&self) -> bool { - match self.get_decoded() { - ObjectState::ImmutableFrozen => false, - ObjectState::MutableFrozen => false, - ObjectState::Immutable => { - self.set_decoded(ObjectState::ImmutableFrozen); - true - } - ObjectState::Borrowed(0, _) => { - self.set_decoded(ObjectState::MutableFrozen); - true - } - ObjectState::Borrowed(..) 
=> panic!("cannot freeze, because it is borrowed"), - ObjectState::BorrowedMut => panic!("cannot freeze, because it is borrowed mutably"), - } - } - - pub fn try_borrow(&self, for_iter: bool) -> Result<ObjectBorrowRef, ObjectBorrowError> { - Ok(match self.get_decoded() { - ObjectState::ImmutableFrozen | ObjectState::MutableFrozen | ObjectState::Immutable => { - ObjectBorrowRef { - header: self, - was_for_iter: false, - } - } - ObjectState::Borrowed(count, was_for_iter) => { - self.set_decoded(ObjectState::Borrowed(count + 1, for_iter || was_for_iter)); - ObjectBorrowRef { - header: self, - was_for_iter, - } - } - ObjectState::BorrowedMut => { - return Err(ObjectBorrowError::BorrowedMut); - } - }) - } - - fn unborrow(&self, was_for_iter: bool) { - match self.get_decoded() { - ObjectState::Immutable => {} - ObjectState::ImmutableFrozen => {} - ObjectState::MutableFrozen => {} - ObjectState::Borrowed(count, _) => { - assert!(count > 0); - self.set_decoded(ObjectState::Borrowed(count - 1, was_for_iter)); - } - ObjectState::BorrowedMut => { - panic!("unborrow when borrowed mutably"); - } - } - } - - pub fn try_borrow_mut(&self) -> Result<ObjectBorrowRefMut, ObjectBorrowMutError> { - Err(match self.get_decoded() { - ObjectState::Immutable | ObjectState::ImmutableFrozen => { - ObjectBorrowMutError::Immutable - } - ObjectState::MutableFrozen => ObjectBorrowMutError::Frozen, - ObjectState::BorrowedMut => ObjectBorrowMutError::BorrowedMut, - ObjectState::Borrowed(0, _) => { - self.set_decoded(ObjectState::BorrowedMut); - return Ok(ObjectBorrowRefMut { header: self }); - } - ObjectState::Borrowed(_, true) => ObjectBorrowMutError::FrozenForIteration, - ObjectState::Borrowed(_, false) => ObjectBorrowMutError::Borrowed, - }) - } - - fn unborrow_mut(&self) { - match self.get_decoded() { - ObjectState::Immutable => unreachable!(), - ObjectState::ImmutableFrozen => unreachable!(), - ObjectState::MutableFrozen => unreachable!(), - ObjectState::Borrowed(..) 
=> unreachable!(), - ObjectState::BorrowedMut => { - self.set_decoded(ObjectState::Borrowed(0, false)); - } - } - } -} - -impl fmt::Debug for ObjectHeader { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Debug::fmt(&self.get_decoded(), f) - } -} - -#[cfg(test)] -mod test { - use super::*; - - #[test] - fn immutable_static() { - let h = ObjectHeader::immutable_frozen_static(); - let b = h.try_borrow(true).unwrap(); - assert_eq!(false, b.was_for_iter); - assert_eq!(false, h.try_borrow(true).unwrap().was_for_iter); - drop(b); - h.freeze(); - assert_eq!( - ObjectBorrowMutError::Immutable, - h.try_borrow_mut().unwrap_err() - ); - } - - #[test] - fn frozen() { - let h = ObjectHeader::mutable(); - h.freeze(); - let b = h.try_borrow(false).unwrap(); - assert_eq!( - ObjectBorrowMutError::Frozen, - h.try_borrow_mut().unwrap_err() - ); - assert_eq!(false, h.try_borrow(true).unwrap().was_for_iter); - drop(b); - assert_eq!( - ObjectBorrowMutError::Frozen, - h.try_borrow_mut().unwrap_err() - ); - } - - #[test] - fn mutable_recursive_borrow() { - let h = ObjectHeader::mutable(); - let b1 = h.try_borrow(true).unwrap(); - let b2 = h.try_borrow(false).unwrap(); - assert_eq!( - ObjectBorrowMutError::FrozenForIteration, - h.try_borrow_mut().unwrap_err() - ); - drop(b2); - assert_eq!( - ObjectBorrowMutError::FrozenForIteration, - h.try_borrow_mut().unwrap_err() - ); - drop(b1); - let bm = h.try_borrow_mut().unwrap(); - assert_eq!( - ObjectBorrowError::BorrowedMut, - h.try_borrow(true).unwrap_err() - ); - drop(bm); - } -} diff --git a/starlark/src/values/cell/mod.rs b/starlark/src/values/cell/mod.rs deleted file mode 100644 index 526bda52..00000000 --- a/starlark/src/values/cell/mod.rs +++ /dev/null @@ -1,246 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! [`Ref`](std::cell::Ref) and [`RefMut`](std::cell::RefMut)-like objects -//! used in implementation of [`Value`](crate::values::Value). - -use crate::values::cell::error::ObjectBorrowError; -use crate::values::cell::error::ObjectBorrowMutError; -use crate::values::cell::header::ObjectBorrowRef; -use crate::values::cell::header::ObjectBorrowRefMut; -use crate::values::cell::header::ObjectHeader; -use std::cell::UnsafeCell; -use std::fmt; -use std::ops::Deref; -use std::ops::DerefMut; - -pub mod error; -pub(crate) mod header; - -/// [`Ref`](std::cell::Ref)-like object for [`ObjectCell`], -/// and it also works as a reference wrapper for immutable objects. 
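The cell module starting here gives `Value` a `RefCell`-style interior-mutability wrapper, extended with freezing and a "frozen for iteration" state. For comparison, this is the standard-library behaviour it builds on (std only, nothing from the crate):

```rust
use std::cell::RefCell;

fn main() {
    let cell = RefCell::new(vec![1, 2, 3]);

    // A live shared borrow blocks mutable borrows, matching the
    // ObjectBorrowMutError::Borrowed case of ObjectCell.
    let shared = cell.borrow();
    assert!(cell.try_borrow_mut().is_err());
    drop(shared);

    // Once every shared borrow is gone, mutation succeeds again.
    cell.borrow_mut().push(4);
    assert_eq!(*cell.borrow(), vec![1, 2, 3, 4]);

    // ObjectCell layers two extra states on top of this: after freeze(),
    // try_borrow_mut() keeps failing (Frozen), and a borrow taken for
    // iteration fails mutation with FrozenForIteration instead.
}
```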
-pub struct ObjectRef<'b, T: ?Sized + 'b> { - value: &'b T, - borrow: ObjectBorrowRef<'b>, -} - -impl<'b, T: ?Sized + 'b> ObjectRef<'b, T> { - unsafe fn new(value: &'b UnsafeCell, borrow: ObjectBorrowRef<'b>) -> ObjectRef<'b, T> { - ObjectRef { - value: &*value.get(), - borrow, - } - } - - /// A reference to immutable frozen value - pub fn immutable_frozen(value: &T) -> ObjectRef { - ObjectRef { - value, - borrow: ObjectBorrowRef::immutable_frozen(), - } - } - - /// A raw pointer to the referenced value - pub fn as_ptr(&self) -> *mut T { - self.value as *const T as *mut T - } - - /// Convert ref to another type - pub fn map(orig: ObjectRef<'b, T>, f: F) -> ObjectRef<'b, U> - where - F: FnOnce(&T) -> &U, - { - ObjectRef { - value: f(orig.value), - borrow: orig.borrow, - } - } - - /// Convert ref to another type - pub fn flat_map(orig: ObjectRef<'b, T>, f: F) -> Option> - where - F: FnOnce(&T) -> Option<&U>, - { - f(orig.value).map(|value| ObjectRef { - value, - borrow: orig.borrow, - }) - } -} - -impl Deref for ObjectRef<'_, T> { - type Target = T; - - #[inline] - fn deref(&self) -> &T { - self.value - } -} - -/// [`RefMut`](std::cell::RefMut)-like -pub struct ObjectRefMut<'b, T: ?Sized + 'b> { - value: &'b mut T, - borrow: ObjectBorrowRefMut<'b>, -} - -/// [`RefMut`](std::cell::RefMut)-like -impl<'b, T: ?Sized + 'b> ObjectRefMut<'b, T> { - pub fn map(orig: ObjectRefMut<'b, T>, f: F) -> ObjectRefMut<'b, U> - where - F: FnOnce(&mut T) -> &mut U, - { - let ObjectRefMut { value, borrow } = orig; - ObjectRefMut { - value: f(value), - borrow, - } - } - - pub fn flat_map(orig: ObjectRefMut<'b, T>, f: F) -> Option> - where - F: FnOnce(&mut T) -> Option<&mut U>, - { - let ObjectRefMut { value, borrow } = orig; - f(value).map(|value| ObjectRefMut { value, borrow }) - } -} - -impl<'b, T: ?Sized + 'b> ObjectRefMut<'b, T> { - unsafe fn new(value: &'b UnsafeCell, borrow: ObjectBorrowRefMut<'b>) -> ObjectRefMut<'b, T> { - ObjectRefMut { - value: &mut *value.get(), - borrow, - } - } -} - -impl Deref for ObjectRefMut<'_, T> { - type Target = T; - - #[inline] - fn deref(&self) -> &T { - self.value - } -} - -impl DerefMut for ObjectRefMut<'_, T> { - #[inline] - fn deref_mut(&mut self) -> &mut T { - self.value - } -} - -/// [`RefCell`](std::cell::RefCell)-like object. 
-/// -/// In addition to `borrow` and `borrow_mut` operation, it also support: -/// * "borrowed for iteration" flag to provide better messages for this Starlark use case -/// * freezing -pub(crate) struct ObjectCell { - header: ObjectHeader, - value: UnsafeCell, -} - -impl ObjectCell { - pub fn new_mutable(value: T) -> ObjectCell { - ObjectCell { - header: ObjectHeader::mutable(), - value: UnsafeCell::new(value), - } - } - - pub fn new_immutable(value: T) -> ObjectCell { - ObjectCell { - header: ObjectHeader::immutable(), - value: UnsafeCell::new(value), - } - } - - pub fn _new_immutable_frozen(value: T) -> ObjectCell { - ObjectCell { - header: ObjectHeader::immutable_frozen(), - value: UnsafeCell::new(value), - } - } -} - -impl ObjectCell { - pub fn try_borrow(&self, for_iter: bool) -> Result, ObjectBorrowError> { - let borrow = self.header.try_borrow(for_iter)?; - Ok(unsafe { ObjectRef::new(&self.value, borrow) }) - } - - pub fn try_borrow_mut(&self) -> Result, ObjectBorrowMutError> { - let borrow = self.header.try_borrow_mut()?; - Ok(unsafe { ObjectRefMut::new(&self.value, borrow) }) - } - - pub fn borrow(&self) -> ObjectRef { - self.try_borrow(false).unwrap() - } - - pub fn borrow_mut(&self) -> ObjectRefMut { - self.try_borrow_mut().unwrap() - } - - pub fn get_ptr(&self) -> *const T { - self.value.get() as *const T - } - - /// Get a copy of object header. - /// - /// Cannot return the header reference because it's not safe. - pub fn get_header_copy(&self) -> ObjectHeader { - self.header.clone() - } - - /// Mark value as frozen. - /// - /// Return `true` if the object was not frozen before. - /// - /// # Panics - /// - /// If value is borrowed. - pub fn freeze(&self) -> bool { - self.header.freeze() - } - - /// Get a copy of object header. - pub fn get_header(&self) -> ObjectHeader { - self.header.clone() - } -} - -impl fmt::Debug for ObjectRef<'_, T> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Debug::fmt(self.value, f) - } -} - -impl fmt::Debug for ObjectRefMut<'_, T> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Debug::fmt(self.value, f) - } -} - -impl fmt::Debug for ObjectCell { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.try_borrow(false) { - Ok(v) => f.debug_struct("ObjectCell").field("value", &v).finish(), - Err(e) => f - .debug_struct("ObjectCell") - .field("borrow_error", &e) - .finish(), - } - } -} diff --git a/starlark/src/values/context.rs b/starlark/src/values/context.rs deleted file mode 100644 index cbbf9ea5..00000000 --- a/starlark/src/values/context.rs +++ /dev/null @@ -1,228 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
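The evaluation-context code that follows resolves variable names to integer slots during an analysis phase and then reads them by index, caching globals on first access. A simplified model of that slot-caching idea (`SlotCache` and its fields are illustrative names, not the crate's types):

```rust
use std::collections::HashMap;

/// Simplified model of IndexedGlobals: names are resolved to slots once,
/// then reads go through a cheap index lookup and only fall back to the
/// name-keyed environment on the first access.
struct SlotCache {
    env: HashMap<String, i64>, // stand-in for the Environment
    slots: Vec<Option<i64>>,   // stand-in for the cached slot array
}

impl SlotCache {
    fn new(env: HashMap<String, i64>, slot_count: usize) -> Self {
        SlotCache { env, slots: vec![None; slot_count] }
    }

    fn get(&mut self, slot: usize, name: &str) -> Option<i64> {
        if let Some(v) = self.slots[slot] {
            return Some(v); // fast path: already cached by index
        }
        let v = *self.env.get(name)?;
        self.slots[slot] = Some(v); // cache for subsequent index-only reads
        Some(v)
    }
}

fn main() {
    let mut env = HashMap::new();
    env.insert("x".to_string(), 42);
    let mut cache = SlotCache::new(env, 1);
    assert_eq!(cache.get(0, "x"), Some(42)); // first read resolves by name
    assert_eq!(cache.get(0, "x"), Some(42)); // later reads hit the slot
}
```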
- -use crate::environment::Environment; -use crate::environment::EnvironmentError; -use crate::environment::TypeValues; -use crate::eval::call_stack::CallStack; -use crate::eval::expr::GlobalOrSlot; -use crate::eval::globals::Globals; -use crate::eval::locals::Locals; -use crate::eval::FileLoader; -use crate::values::Value; -use codemap::CodeMap; -use std::sync::Arc; -use std::sync::Mutex; - -/// A structure holding all the data about the evaluation context -/// (scope, load statement resolver, ...) -pub(crate) struct EvaluationContext<'a, E: EvaluationContextEnvironment> { - // Locals and captured context. - pub(crate) env: E, - // Globals used to resolve type values, provided by the caller. - pub(crate) type_values: &'a TypeValues, - pub(crate) call_stack: &'a mut CallStack, - pub(crate) map: Arc>, -} - -/// Module-level or function environments are quite different, -/// this trait describes the differences. -pub(crate) trait EvaluationContextEnvironment { - /// Get the (global) environment - fn env(&self) -> &Environment; - - /// Get global variable by name - fn get_global(&mut self, slot: usize, name: &str) -> Result; - - /// Panic if this environment is local - fn assert_module_env(&self) -> &EvaluationContextEnvironmentModule; - - /// Set local variable - fn set_local(&mut self, slot: usize, name: &str, value: Value); - - /// Get local variable - fn get_local(&mut self, slot: usize, name: &str) -> Result; - - fn get(&mut self, name_slot: &GlobalOrSlot) -> Result { - let GlobalOrSlot { name, local, slot } = name_slot; - match local { - true => self.get_local(*slot, name), - false => self.get_global(*slot, name), - } - } - - fn set_global(&mut self, slot: usize, name: &str, value: Value) - -> Result<(), EnvironmentError>; - - fn set(&mut self, name_slot: &GlobalOrSlot, value: Value) -> Result<(), EnvironmentError> { - let GlobalOrSlot { name, local, slot } = name_slot; - match local { - true => Ok(self.set_local(*slot, name, value)), - false => self.set_global(*slot, name, value), - } - } - - fn top_level_local_to_slot(&self, name: &str) -> usize; -} - -pub(crate) struct EvaluationContextEnvironmentModule<'a> { - pub env: Environment, - pub globals: IndexedGlobals<'a>, - pub loader: &'a dyn FileLoader, -} - -pub(crate) struct EvaluationContextEnvironmentLocal<'a> { - pub globals: IndexedGlobals<'a>, - pub locals: IndexedLocals<'a>, -} - -impl<'a> EvaluationContextEnvironment for EvaluationContextEnvironmentModule<'a> { - fn env(&self) -> &Environment { - &self.env - } - - fn get_global(&mut self, slot: usize, name: &str) -> Result { - self.globals.get_slot(slot, name) - } - - fn assert_module_env(&self) -> &EvaluationContextEnvironmentModule { - self - } - - fn set_local(&mut self, _slot: usize, _name: &str, _value: Value) { - unreachable!("not a local env") - } - - fn get_local(&mut self, _slot: usize, _name: &str) -> Result { - unreachable!("not a local env") - } - - fn set_global( - &mut self, - slot: usize, - name: &str, - value: Value, - ) -> Result<(), EnvironmentError> { - self.globals.set_slot(slot, name, value) - } - - fn top_level_local_to_slot(&self, _name: &str) -> usize { - unreachable!("not a local env") - } -} - -impl<'a> EvaluationContextEnvironment for EvaluationContextEnvironmentLocal<'a> { - fn env(&self) -> &Environment { - &self.globals.env - } - - fn get_global(&mut self, slot: usize, name: &str) -> Result { - self.globals.get_slot(slot, name) - } - - fn assert_module_env(&self) -> &EvaluationContextEnvironmentModule { - unreachable!("not a module env") - } - - fn 
set_local(&mut self, slot: usize, name: &str, value: Value) { - self.locals.set_slot(slot, name, value) - } - - fn get_local(&mut self, slot: usize, name: &str) -> Result { - self.locals.get_slot(slot, name) - } - - fn set_global( - &mut self, - _slot: usize, - _name: &str, - _value: Value, - ) -> Result<(), EnvironmentError> { - unreachable!("assign to global in local environment") - } - - fn top_level_local_to_slot(&self, name: &str) -> usize { - self.locals.local_defs.top_level_name_to_slot(name).unwrap() - } -} - -/// Starlark `def` or comprehension local variables -pub(crate) struct IndexedLocals<'a> { - // This field is not used at runtime, but could be used for debugging or - // for better diagnostics in the future - pub local_defs: &'a Locals, - /// Local variables are stored in this array. Names to slots are mapped - /// during analysis phase. Note access by index is much faster than by name. - locals: Box<[Option]>, -} - -impl<'a> IndexedLocals<'a> { - pub fn new(local_defs: &'a Locals) -> IndexedLocals<'a> { - IndexedLocals { - local_defs, - locals: vec![None; local_defs.len()].into_boxed_slice(), - } - } - - pub fn get_slot(&self, slot: usize, name: &str) -> Result { - match self.locals[slot].clone() { - Some(value) => Ok(value), - None => Err(EnvironmentError::LocalVariableReferencedBeforeAssignment( - name.to_owned(), - )), - } - } - - pub fn set_slot(&mut self, slot: usize, _name: &str, value: Value) { - self.locals[slot] = Some(value); - } -} - -pub(crate) struct IndexedGlobals<'a> { - // This field is not used at runtime, but could be used for debugging or - // for better diagnostics in the future. - _global_defs: &'a Globals, - /// Global variables are cached in this array. Names to slots are mapped - /// during analysis phase. Note access by index is much faster than by name. - globals: Box<[Option]>, - /// Actual storage of variables. - env: Environment, -} - -impl<'a> IndexedGlobals<'a> { - pub fn new(global_defs: &'a Globals, env: Environment) -> IndexedGlobals<'a> { - IndexedGlobals { - _global_defs: global_defs, - globals: vec![None; global_defs.len()].into_boxed_slice(), - env, - } - } - - fn get_slot(&mut self, slot: usize, name: &str) -> Result { - match &mut self.globals[slot] { - Some(value) => Ok(value.clone()), - o @ None => { - let value = self.env.get(name)?; - *o = Some(value.clone()); - Ok(value) - } - } - } - - fn set_slot(&mut self, slot: usize, name: &str, value: Value) -> Result<(), EnvironmentError> { - self.env.set(name, value.clone())?; - self.globals[slot] = Some(value); - Ok(()) - } -} diff --git a/starlark/src/values/dict.rs b/starlark/src/values/dict.rs deleted file mode 100644 index 40ee34bd..00000000 --- a/starlark/src/values/dict.rs +++ /dev/null @@ -1,299 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! 
Module define the Starlark type Dictionary -use crate::values::error::ValueError; -use crate::values::hashed_value::HashedValue; -use crate::values::iter::TypedIterable; -use crate::values::*; -use linked_hash_map::LinkedHashMap; // To preserve insertion order -use std::collections::HashMap; -use std::convert::TryFrom; -use std::fmt; -use std::hash::Hash; - -/// The Dictionary type -#[derive(Default)] -pub struct Dictionary { - content: LinkedHashMap, -} - -impl Dictionary { - pub(crate) fn new_typed() -> Dictionary { - Dictionary { - content: LinkedHashMap::new(), - } - } - - pub(crate) fn new() -> ValueOther { - ValueOther::default() - } - - pub fn get_content(&self) -> &LinkedHashMap { - &self.content - } - - pub fn get(&self, key: &Value) -> Result, ValueError> { - Ok(self.get_hashed(&HashedValue::new(key.clone())?)) - } - - pub fn clear(&mut self) { - self.content.clear(); - } - - pub fn remove(&mut self, key: &Value) -> Result, ValueError> { - Ok(self.remove_hashed(&HashedValue::new(key.clone())?)) - } - - pub fn pop_front(&mut self) -> Option<(HashedValue, Value)> { - self.content.pop_front() - } - - pub fn items(&self) -> Vec<(Value, Value)> { - self.content - .iter() - .map(|(k, v)| (k.get_value().clone(), v.clone())) - .collect() - } - - pub fn values(&self) -> Vec { - self.content.values().cloned().collect() - } - - pub fn get_hashed(&self, key: &HashedValue) -> Option<&Value> { - self.content.get(key) - } - - pub fn insert(&mut self, key: Value, value: Value) -> Result<(), ValueError> { - let key = key.clone_for_container(self)?; - let key = HashedValue::new(key)?; - let value = value.clone_for_container(self)?; - self.content.insert(key, value); - Ok(()) - } - - pub fn remove_hashed(&mut self, key: &HashedValue) -> Option { - self.content.remove(key) - } - - pub fn keys(&self) -> Vec { - self.content.keys().map(|k| k.get_value().clone()).collect() - } -} - -impl + Hash + Eq + Clone, T2: Into + Eq + Clone> TryFrom> - for Dictionary -{ - type Error = ValueError; - - fn try_from(a: HashMap) -> Result { - let mut result = Dictionary { - content: LinkedHashMap::new(), - }; - for (k, v) in a.iter() { - result - .content - .insert(HashedValue::new(k.clone().into())?, v.clone().into()); - } - Ok(result) - } -} - -impl + Hash + Eq + Clone, T2: Into + Eq + Clone> - TryFrom> for Dictionary -{ - type Error = ValueError; - - fn try_from(a: LinkedHashMap) -> Result { - let mut result = Dictionary { - content: LinkedHashMap::new(), - }; - for (k, v) in a.iter() { - result - .content - .insert(HashedValue::new(k.clone().into())?, v.clone().into()); - } - Ok(result) - } -} - -/// Define the Dictionary type -impl TypedValue for Dictionary { - type Holder = Mutable; - - fn values_for_descendant_check_and_freeze<'a>( - &'a self, - ) -> Box + 'a> { - // XXX: We cannot freeze the key because they are immutable in rust, is it important? 
- Box::new( - self.content - .iter() - .flat_map(|(k, v)| vec![k.get_value().clone(), v.clone()].into_iter()), - ) - } - - fn to_repr_impl(&self, buf: &mut String) -> fmt::Result { - write!(buf, "{{")?; - for (index, (k, v)) in self.content.iter().enumerate() { - if index != 0 { - write!(buf, ", ")?; - } - k.get_value().to_repr_impl(buf)?; - write!(buf, ": ")?; - v.to_repr_impl(buf)?; - } - write!(buf, "}}")?; - Ok(()) - } - - const TYPE: &'static str = "dict"; - fn to_bool(&self) -> bool { - !self.content.is_empty() - } - - fn equals(&self, other: &Dictionary) -> Result { - if self.content.len() != other.content.len() { - return Ok(false); - } - - for (k, v) in &self.content { - match other.content.get(k) { - None => return Ok(false), - Some(w) => { - if !v.equals(w)? { - return Ok(false); - } - } - } - } - - Ok(true) - } - - fn at(&self, index: Value) -> ValueResult { - match self.content.get(&HashedValue::new(index.clone())?) { - Some(v) => Ok(v.clone()), - None => Err(ValueError::KeyNotFound(index)), - } - } - - fn length(&self) -> Result { - Ok(self.content.len() as i64) - } - - fn contains(&self, other: &Value) -> Result { - Ok(self.content.contains_key(&HashedValue::new(other.clone())?)) - } - - fn iter(&self) -> Result<&dyn TypedIterable, ValueError> { - Ok(self) - } - - fn set_at(&mut self, index: Value, new_value: Value) -> Result<(), ValueError> { - let index_key = HashedValue::new(index)?; - let new_value = new_value.clone_for_container(self)?; - { - if let Some(x) = self.content.get_mut(&index_key) { - *x = new_value; - return Ok(()); - } - } - self.content.insert(index_key, new_value); - Ok(()) - } - - fn add(&self, other: &Dictionary) -> Result { - let mut result = Dictionary { - content: LinkedHashMap::new(), - }; - for (k, v) in &self.content { - result.content.insert(k.clone(), v.clone()); - } - for (k, v) in &other.content { - result.content.insert(k.clone(), v.clone()); - } - Ok(result) - } -} - -impl TypedIterable for Dictionary { - fn to_iter<'a>(&'a self) -> Box + 'a> { - Box::new(self.content.keys().map(|x| x.get_value().clone())) - } - - fn to_vec(&self) -> Vec { - self.content.keys().map(|x| x.get_value().clone()).collect() - } -} - -impl + Eq + Hash + Clone, T2: Into + Eq + Clone> TryFrom> - for Value -{ - type Error = ValueError; - - fn try_from(a: HashMap) -> Result { - Ok(Value::new(dict::Dictionary::try_from(a)?)) - } -} - -impl + Eq + Hash + Clone, T2: Into + Eq + Clone> - TryFrom> for Value -{ - type Error = ValueError; - - fn try_from(a: LinkedHashMap) -> Result { - Ok(Value::new(dict::Dictionary::try_from(a)?)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_mutate_dict() { - let mut map = LinkedHashMap::::new(); - map.insert(HashedValue::new(Value::from(1)).unwrap(), Value::from(2)); - map.insert(HashedValue::new(Value::from(2)).unwrap(), Value::from(4)); - let mut d = Value::try_from(map).unwrap(); - assert_eq!("{1: 2, 2: 4}", d.to_str()); - d.set_at(Value::from(2), Value::from(3)).unwrap(); - assert_eq!("{1: 2, 2: 3}", d.to_str()); - d.set_at(Value::from((3, 4)), Value::from(5)).unwrap(); - assert_eq!("{1: 2, 2: 3, (3, 4): 5}", d.to_str()); - } - - #[test] - fn test_is_descendant() { - let mut map = LinkedHashMap::::new(); - map.insert(HashedValue::new(Value::from(1)).unwrap(), Value::from(2)); - map.insert(HashedValue::new(Value::from(2)).unwrap(), Value::from(4)); - let v1 = Value::try_from(map.clone()).unwrap(); - map.insert(HashedValue::new(Value::from(3)).unwrap(), v1.clone()); - let v2 = 
Value::try_from(map.clone()).unwrap(); - map.insert(HashedValue::new(Value::from(3)).unwrap(), v2.clone()); - let v3 = Value::try_from(map).unwrap(); - assert!(v3.is_descendant_value(&v2)); - assert!(v3.is_descendant_value(&v1)); - assert!(v3.is_descendant_value(&v3)); - - assert!(v2.is_descendant_value(&v1)); - assert!(v2.is_descendant_value(&v2)); - assert!(!v2.is_descendant_value(&v3)); - - assert!(v1.is_descendant_value(&v1)); - assert!(!v1.is_descendant_value(&v2)); - assert!(!v1.is_descendant_value(&v3)); - } -} diff --git a/starlark/src/values/error.rs b/starlark/src/values/error.rs deleted file mode 100644 index 38e78a30..00000000 --- a/starlark/src/values/error.rs +++ /dev/null @@ -1,306 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Module define the common engine error. - -use crate::syntax::errors::SyntaxError; -use crate::values::string::interpolation::StringInterpolationError; -use crate::values::*; -use codemap::Span; -use codemap_diagnostic::{Diagnostic, SpanLabel, SpanStyle}; -use std::fmt; - -// TODO: move that code in some common error code list? -// CV prefix = Critical Value expression -pub const NOT_SUPPORTED_ERROR_CODE: &str = "CV00"; -pub const BORROW_MUT_ERROR_CODE: &str = "CV01"; -pub const INCORRECT_PARAMETER_TYPE_ERROR_CODE: &str = "CV02"; -pub const OUT_OF_BOUND_ERROR_CODE: &str = "CV03"; -pub const NOT_HASHABLE_VALUE_ERROR_CODE: &str = "CV04"; -pub const KEY_NOT_FOUND_ERROR_CODE: &str = "CV05"; -pub const INTERPOLATION_OUT_OF_UTF8_RANGE_ERROR_CODE: &str = "CV07"; -pub const DIVISION_BY_ZERO_ERROR_CODE: &str = "CV08"; -pub const INTERPOLATION_TOO_MANY_PARAMS_ERROR_CODE: &str = "CV09"; -pub const INTERPOLATION_NOT_ENOUGH_PARAMS_ERROR_CODE: &str = "CV10"; -pub const INTERPOLATION_VALUE_IS_NOT_CHAR_ERROR_CODE: &str = "CV12"; -pub const TOO_MANY_RECURSION_LEVEL_ERROR_CODE: &str = "CV13"; -pub const UNSUPPORTED_RECURSIVE_DATA_STRUCTURE_ERROR_CODE: &str = "CV14"; -const __RESERVED_CV15: &str = "CV15"; -pub const INTEGER_OVERFLOW_ERROR_CODE: &str = "CV16"; -pub const INTERPOLATION_UNEXPECTED_EOF_CLOSING_PAREN: &str = "CV17"; -pub const INTERPOLATION_UNEXPECTED_EOF_PERCENT: &str = "CV18"; -pub const INTERPOLATION_UNKNOWN_SPECIFIER: &str = "CV19"; - -/// Value used in diagnostics -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum UnsupportedOperation { - Compare, - ToInt, - Call, - At, - SetAt, - Slice, - Len, - GetAttr(String), - HasAttr, - SetAttr(String), - Dir, - In, - Plus, - Minus, - Mul, - Div, - FloorDiv, - Percent, - Pipe, -} - -impl fmt::Display for UnsupportedOperation { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - UnsupportedOperation::Compare => write!(f, "compare"), - UnsupportedOperation::ToInt => write!(f, "int()"), - UnsupportedOperation::Call => write!(f, "call()"), - UnsupportedOperation::At => write!(f, "[]"), - UnsupportedOperation::SetAt => write!(f, "[] ="), - UnsupportedOperation::Slice => write!(f, "[::]"), - UnsupportedOperation::Len => 
write!(f, "len()"), - UnsupportedOperation::GetAttr(attr) => write!(f, ".{}", attr), - UnsupportedOperation::HasAttr => write!(f, "has_attr()"), - UnsupportedOperation::SetAttr(attr) => write!(f, ".{} =", attr), - UnsupportedOperation::Dir => write!(f, "dir()"), - UnsupportedOperation::In => write!(f, "in"), - UnsupportedOperation::Plus => write!(f, "+"), - UnsupportedOperation::Minus => write!(f, "-"), - UnsupportedOperation::Mul => write!(f, "*"), - UnsupportedOperation::Div => write!(f, "/"), - UnsupportedOperation::FloorDiv => write!(f, "//"), - UnsupportedOperation::Percent => write!(f, "%"), - UnsupportedOperation::Pipe => write!(f, "|"), - } - } -} - -/// Error that can be returned by function from the `TypedValue` trait, -#[derive(Clone, Debug)] -pub enum ValueError { - /// The operation is not supported for this type. - OperationNotSupported { - op: UnsupportedOperation, - left: String, - right: Option, - }, - /// The operation is not supported for this type because type is not of a certain category. - TypeNotX { object_type: String, op: String }, - /// Division by 0 - DivisionByZero, - /// Arithmetic operation results in integer overflow. - IntegerOverflow, - /// Trying to apply incorrect parameter type, e.g. for slicing. - IncorrectParameterType, - /// Trying to apply incorrect parameter type, e.g. for slicing. - IncorrectParameterTypeNamed(&'static str), - /// Trying to access an index outside of the value range, - IndexOutOfBound(i64), - /// The value is not hashable but was requested for a hash structure (e.g. dictionary). - NotHashableValue, - /// The key was not found in the collection - KeyNotFound(Value), - /// Wrapper around runtime errors to be bubbled up. - Runtime(RuntimeError), - /// Wrapper around diagnosed errors to be bubbled up. - DiagnosedError(Diagnostic), - /// String interpolation errors - StringInterpolation(StringInterpolationError), - /// Operation required mutable value - ObjectBorrowMutError(ObjectBorrowMutError), - /// Too many recursion in internal operation - TooManyRecursionLevel, - /// Recursive data structure are not allowed because they would allow infinite loop. - UnsupportedRecursiveDataStructure, - /// A type was used which isn't supported with the current feature set. Wraps the type name. 
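As a rough illustration of how these error variants surface in practice, failed operations come back as `ValueError` values rather than panics; the sketch below uses the integer arithmetic defined later in this diff, and the `Value` method names are taken from the crate's own `int_op!` tests, so they should be read as an assumption about the public surface.

```rust
use starlark::values::error::ValueError;
use starlark::values::Value;

fn main() {
    // Division by zero is reported as a ValueError, not a panic.
    let err = Value::from(1).div(Value::from(0)).unwrap_err();
    assert!(matches!(err, ValueError::DivisionByZero));

    // Arithmetic that would wrap is rejected with IntegerOverflow.
    let err = Value::from(i64::max_value()).add(Value::from(1)).unwrap_err();
    assert!(matches!(err, ValueError::IntegerOverflow));
}
```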
- TypeNotSupported(String), -} - -/// A simpler error format to return as a ValueError -#[derive(Clone, Debug)] -pub struct RuntimeError { - pub code: &'static str, - pub message: String, - pub label: String, -} - -impl> SyntaxError for T { - fn to_diagnostic(self, file_span: Span) -> Diagnostic { - ValueError::Runtime(self.into()).to_diagnostic(file_span) - } -} - -impl From for ValueError { - fn from(e: RuntimeError) -> Self { - ValueError::Runtime(e) - } -} - -impl From for ValueError { - fn from(e: StringInterpolationError) -> Self { - ValueError::StringInterpolation(e) - } -} - -impl From for ValueError { - fn from(e: ObjectBorrowMutError) -> Self { - ValueError::ObjectBorrowMutError(e) - } -} - -impl SyntaxError for ValueError { - fn to_diagnostic(self, file_span: Span) -> Diagnostic { - match self { - ValueError::DiagnosedError(d) => d, - ValueError::StringInterpolation(e) => e.to_diagnostic(file_span), - _ => { - let sl = SpanLabel { - span: file_span, - style: SpanStyle::Primary, - label: Some(match self { - ValueError::Runtime(ref e) => e.label.clone(), - ValueError::OperationNotSupported { - ref op, - ref left, - right: Some(ref right), - } => format!("{} not supported for types {} and {}", op, left, right), - ValueError::OperationNotSupported { - ref op, - ref left, - right: None, - } => format!("{} not supported for type {}", op, left), - ValueError::TypeNotX { - ref object_type, - ref op, - } => format!("The type '{}' is not {}", object_type, op), - ValueError::DivisionByZero => "Division by zero".to_owned(), - ValueError::IntegerOverflow => "Integer overflow".to_owned(), - ValueError::ObjectBorrowMutError(_) => "Cannot mutate value".to_owned(), - ValueError::IncorrectParameterType => { - "Type of parameters mismatch".to_owned() - } - ValueError::IncorrectParameterTypeNamed(_) => { - "Type of parameters mismatch".to_owned() - } - ValueError::IndexOutOfBound(..) => "Index out of bound".to_owned(), - ValueError::NotHashableValue => "Value is not hashable".to_owned(), - ValueError::KeyNotFound(..) => "Key not found".to_owned(), - ValueError::TooManyRecursionLevel => "Too many recursion".to_owned(), - ValueError::UnsupportedRecursiveDataStructure => { - "Unsupported recursive data structure".to_owned() - } - ValueError::TypeNotSupported(ref t) => { - format!("Attempt to construct unsupported type ({})", t) - } - // handled above - ValueError::DiagnosedError(..) | ValueError::StringInterpolation(..) 
=> { - unreachable!() - } - }), - }; - Diagnostic { - level: Level::Error, - message: match self { - ValueError::Runtime(ref e) => e.message.clone(), - ValueError::OperationNotSupported { - ref op, - ref left, - right: Some(ref right), - } => format!("Cannot {} types {} and {}", op, left, right), - ValueError::OperationNotSupported { - ref op, - ref left, - right: None, - } => format!("Cannot {} on type {}", op, left), - ValueError::TypeNotX { - ref object_type, - ref op, - } => format!("The type '{}' is not {}", object_type, op), - ValueError::DivisionByZero => "Cannot divide by zero".to_owned(), - ValueError::IntegerOverflow => "Integer overflow".to_owned(), - ValueError::IncorrectParameterType => { - "Type of parameters mismatch".to_owned() - } - ValueError::IncorrectParameterTypeNamed(name) => { - format!("Type of parameters {} mismatch", name) - } - ValueError::IndexOutOfBound(ref b) => { - format!("Index {} is out of bound", b) - } - ValueError::NotHashableValue => "Value is not hashable".to_owned(), - ValueError::KeyNotFound(ref k) => format!("Key '{}' was not found", k), - ValueError::TooManyRecursionLevel => "Too many recursion levels".to_owned(), - ValueError::UnsupportedRecursiveDataStructure => concat!( - "This operation create a recursive data structure. Recursive data", - "structure are disallowed because infinite loops are disallowed in Starlark." - ).to_owned(), - ValueError::TypeNotSupported(ref t) => { - format!("Type `{}` is not supported. Perhaps you need to enable some crate feature?", t) - } - // handled above - ValueError::DiagnosedError(..) | ValueError::StringInterpolation(..) => unreachable!(), - ValueError::ObjectBorrowMutError(ref e) => format!("{}", e), - }, - code: Some( - match self { - ValueError::OperationNotSupported { .. } | ValueError::TypeNotSupported(..) => NOT_SUPPORTED_ERROR_CODE, - ValueError::TypeNotX { .. } => NOT_SUPPORTED_ERROR_CODE, - ValueError::DivisionByZero => DIVISION_BY_ZERO_ERROR_CODE, - ValueError::IntegerOverflow => INTEGER_OVERFLOW_ERROR_CODE, - ValueError::ObjectBorrowMutError(_) => BORROW_MUT_ERROR_CODE, - ValueError::IncorrectParameterType | ValueError::IncorrectParameterTypeNamed(..) => { - INCORRECT_PARAMETER_TYPE_ERROR_CODE - } - ValueError::IndexOutOfBound(..) => OUT_OF_BOUND_ERROR_CODE, - ValueError::NotHashableValue => NOT_HASHABLE_VALUE_ERROR_CODE, - ValueError::KeyNotFound(..) => KEY_NOT_FOUND_ERROR_CODE, - ValueError::Runtime(e) => e.code, - ValueError::TooManyRecursionLevel => { - TOO_MANY_RECURSION_LEVEL_ERROR_CODE - } - ValueError::UnsupportedRecursiveDataStructure => { - UNSUPPORTED_RECURSIVE_DATA_STRUCTURE_ERROR_CODE - } - // handled above - ValueError::DiagnosedError(..) | ValueError::StringInterpolation(..) => unreachable!(), - }.to_owned(), - ), - spans: vec![sl], - } - } - } - } -} - -impl PartialEq for ValueError { - fn eq(&self, other: &ValueError) -> bool { - match (self, other) { - (ValueError::ObjectBorrowMutError(l), ValueError::ObjectBorrowMutError(r)) => l == r, - (&ValueError::IncorrectParameterType, &ValueError::IncorrectParameterType) => true, - ( - &ValueError::OperationNotSupported { op: ref x, .. }, - &ValueError::OperationNotSupported { op: ref y, .. 
}, - ) if x == y => true, - (&ValueError::IndexOutOfBound(x), &ValueError::IndexOutOfBound(y)) if x == y => true, - (&ValueError::IntegerOverflow, &ValueError::IntegerOverflow) => true, - _ => false, - } - } -} diff --git a/starlark/src/values/frozen.rs b/starlark/src/values/frozen.rs deleted file mode 100644 index 4c46d9c0..00000000 --- a/starlark/src/values/frozen.rs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Frozen value-related utilities. - -use crate::values::TypedValue; -use crate::values::Value; - -/// Marker trait for types which are frozen on creation. -/// -/// All types which are immutable and do not reference other objects -/// are frozen on creation. -/// -/// `int`, `NoneType`, `str` are permanently frozen. -/// -/// But tuple is not frozen on creation. -pub trait FrozenOnCreation {} - -/// [`Value`] wrapper which asserts the value is frozen. -#[derive(Clone, Debug)] -pub(crate) struct FrozenValue(Value); - -impl FrozenValue { - pub fn new(value: Value) -> Result { - if value.is_frozen() { - Ok(FrozenValue(value)) - } else { - Err(()) - } - } -} - -impl From for Value { - fn from(v: FrozenValue) -> Self { - v.0 - } -} - -impl From for FrozenValue -where - T: TypedValue + FrozenOnCreation, -{ - fn from(t: T) -> Self { - FrozenValue::new(Value::new(t)).unwrap() - } -} diff --git a/starlark/src/values/function.rs b/starlark/src/values/function.rs deleted file mode 100644 index ead834fa..00000000 --- a/starlark/src/values/function.rs +++ /dev/null @@ -1,690 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! 
Function as a TypedValue -use super::*; -use crate::stdlib::macros::param::TryParamConvertFromValue; -use crate::values::error::RuntimeError; -use crate::values::none::NoneType; -use std::convert::TryInto; -use std::iter; -use std::mem; -use std::vec; - -#[derive(Debug, Clone)] -#[doc(hidden)] -pub enum FunctionParameter { - Normal(RcString), - Optional(RcString), - WithDefaultValue(RcString, Value), - ArgsArray(RcString), - KWArgsDict(RcString), -} - -#[derive(Debug, Clone)] -#[doc(hidden)] -pub struct FunctionSignature { - params: Vec, - /// Number of leading positional-only parameters - positional_count: usize, -} - -impl FunctionSignature { - pub(crate) fn new( - parameters: Vec, - positional_count: usize, - ) -> FunctionSignature { - FunctionSignature { - params: parameters, - positional_count, - } - } - - pub(crate) fn iter<'a>(&'a self) -> impl Iterator + 'a { - let positional_count = self.positional_count; - self.params - .iter() - .enumerate() - .map(move |(i, p)| (p, i < positional_count)) - } -} - -#[derive(Debug, Clone)] -#[doc(hidden)] -pub enum FunctionType { - Native(RcString), - Def(RcString, RcString), -} - -#[derive(Debug, Clone)] -pub enum FunctionArg { - Normal(Value), - Optional(Option), - ArgsArray(Vec), - KWArgsDict(LinkedHashMap), -} - -impl FunctionArg { - pub fn into_normal( - self, - param_name: &'static str, - ) -> Result { - match self { - FunctionArg::Normal(v) => { - T::try_from(v).map_err(|_| ValueError::IncorrectParameterTypeNamed(param_name)) - } - _ => Err(ValueError::IncorrectParameterType), - } - } - - pub fn into_optional( - self, - param_name: &'static str, - ) -> Result, ValueError> { - match self { - FunctionArg::Optional(Some(v)) => { - Ok(Some(T::try_from(v).map_err(|_| { - ValueError::IncorrectParameterTypeNamed(param_name) - })?)) - } - FunctionArg::Optional(None) => Ok(None), - _ => Err(ValueError::IncorrectParameterType), - } - } - - pub fn into_args_array( - self, - param_name: &'static str, - ) -> Result, ValueError> { - match self { - FunctionArg::ArgsArray(v) => Ok(v - .into_iter() - .map(T::try_from) - .collect::, _>>() - .map_err(|_| ValueError::IncorrectParameterTypeNamed(param_name))?), - _ => Err(ValueError::IncorrectParameterType), - } - } - - pub fn into_kw_args_dict( - self, - param_name: &'static str, - ) -> Result, ValueError> { - match self { - FunctionArg::KWArgsDict(dict) => Ok({ - let mut r = LinkedHashMap::new(); - for (k, v) in dict { - r.insert( - k, - T::try_from(v) - .map_err(|_| ValueError::IncorrectParameterTypeNamed(param_name))?, - ); - } - r - }), - _ => Err(ValueError::IncorrectParameterType), - } - } -} - -impl From for Value { - fn from(a: FunctionArg) -> Value { - match a { - FunctionArg::Normal(v) => v, - FunctionArg::ArgsArray(v) => v.into(), - FunctionArg::Optional(v) => match v { - Some(v) => v, - None => Value::new(NoneType::None), - }, - FunctionArg::KWArgsDict(v) => { - // `unwrap` does not panic, because key is a string which hashable - v.try_into().unwrap() - } - } - } -} - -pub type StarlarkFunctionPrototype = - dyn Fn(&CallStack, TypeValues, Vec) -> ValueResult; - -/// Function implementation for native (written in Rust) functions. -/// -/// Public to be referenced in macros. -#[doc(hidden)] -pub struct NativeFunction { - /// Pointer to a native function. - /// Note it is a function pointer, not `Box` - /// to avoid generic instantiation and allocation for each native function. 
- function: fn(&mut CallStack, &TypeValues, ParameterParser) -> ValueResult, - signature: FunctionSignature, - function_type: FunctionType, -} - -// Wrapper for method that have been affected the self object -pub(crate) struct WrappedMethod { - method: Value, - self_obj: Value, -} - -// TODO: move that code in some common error code list? -// CV prefix = Critical Function call -const NOT_ENOUGH_PARAMS_ERROR_CODE: &str = "CF00"; -const WRONG_ARGS_IDENT_ERROR_CODE: &str = "CF01"; -const ARGS_NOT_ITERABLE_ERROR_CODE: &str = "CF02"; -const KWARGS_NOT_MAPPABLE_ERROR_CODE: &str = "CF03"; -// Not an error: const KWARGS_KEY_IDENT_ERROR_CODE: &str = "CF04"; -const EXTRA_PARAMETER_ERROR_CODE: &str = "CF05"; - -#[derive(Debug, Clone)] -pub enum FunctionError { - NotEnoughParameter { - missing: String, - function_type: FunctionType, - signature: FunctionSignature, - }, - ArgsValueIsNotString, - ArgsArrayIsNotIterable, - KWArgsDictIsNotMappable, - ExtraParameter, -} - -impl Into for FunctionError { - fn into(self) -> RuntimeError { - RuntimeError { - code: match self { - FunctionError::NotEnoughParameter { .. } => NOT_ENOUGH_PARAMS_ERROR_CODE, - FunctionError::ArgsValueIsNotString => WRONG_ARGS_IDENT_ERROR_CODE, - FunctionError::ArgsArrayIsNotIterable => ARGS_NOT_ITERABLE_ERROR_CODE, - FunctionError::KWArgsDictIsNotMappable => KWARGS_NOT_MAPPABLE_ERROR_CODE, - FunctionError::ExtraParameter => EXTRA_PARAMETER_ERROR_CODE, - }, - label: match self { - FunctionError::NotEnoughParameter { .. } => { - "Not enough parameters in function call".to_owned() - } - FunctionError::ArgsValueIsNotString => "not an identifier for *args".to_owned(), - FunctionError::ArgsArrayIsNotIterable => "*args is not iterable".to_owned(), - FunctionError::KWArgsDictIsNotMappable => "**kwargs is not mappable".to_owned(), - FunctionError::ExtraParameter => "Extraneous parameter in function call".to_owned(), - }, - message: match self { - FunctionError::NotEnoughParameter { - missing, - function_type, - signature, - } => format!( - "Missing parameter {} for call to {}", - missing.trim_start_matches('$'), - repr(&function_type, &signature) - ), - FunctionError::ArgsValueIsNotString => { - "The argument provided for *args is not an identifier".to_owned() - } - FunctionError::ArgsArrayIsNotIterable => { - "The argument provided for *args is not iterable".to_owned() - } - FunctionError::KWArgsDictIsNotMappable => { - "The argument provided for **kwargs is not mappable".to_owned() - } - FunctionError::ExtraParameter => { - "Extraneous parameter passed to function call".to_owned() - } - }, - } - } -} - -impl From for ValueError { - fn from(e: FunctionError) -> Self { - ValueError::Runtime(e.into()) - } -} - -impl NativeFunction { - pub fn new( - name: RcString, - function: fn(&mut CallStack, &TypeValues, ParameterParser) -> ValueResult, - signature: FunctionSignature, - ) -> Value { - Value::new(NativeFunction { - function, - signature, - function_type: FunctionType::Native(name), - }) - } -} - -impl WrappedMethod { - pub fn new(self_obj: Value, method: Value) -> Value { - Value::new(WrappedMethod { method, self_obj }) - } -} - -impl FunctionType { - fn to_str(&self) -> String { - match self { - FunctionType::Native(ref name) => name.as_string().clone(), - FunctionType::Def(ref name, ..) => name.as_string().clone(), - } - } - - fn to_repr(&self) -> String { - match self { - FunctionType::Native(ref name) => format!("", name), - FunctionType::Def(ref name, ref module, ..) 
=> { - format!("", name, module) - } - } - } -} - -pub(crate) enum StrOrRepr { - Str, - Repr, -} - -pub(crate) fn str_impl( - buf: &mut String, - function_type: &FunctionType, - signature: &FunctionSignature, - str_or_repr: StrOrRepr, -) -> fmt::Result { - write!( - buf, - "{}", - match str_or_repr { - StrOrRepr::Str => function_type.to_str(), - StrOrRepr::Repr => function_type.to_repr(), - } - )?; - write!(buf, "(")?; - - for (i, x) in signature.params.iter().enumerate() { - if i != 0 && i == signature.positional_count { - write!(buf, ", /")?; - } - - if i != 0 { - write!(buf, ", ")?; - } - - match x { - FunctionParameter::Normal(ref name) => write!(buf, "{}", name)?, - FunctionParameter::Optional(ref name) => write!(buf, "?{}", name)?, - FunctionParameter::WithDefaultValue(ref name, ref value) => { - write!(buf, "{} = {}", name, value.to_repr())?; - } - FunctionParameter::ArgsArray(ref name) => write!(buf, "*{}", name)?, - FunctionParameter::KWArgsDict(ref name) => write!(buf, "**{}", name)?, - } - } - - if signature.positional_count != 0 && signature.positional_count == signature.params.len() { - write!(buf, ", /")?; - } - - write!(buf, ")")?; - Ok(()) -} - -pub(crate) fn repr(function_type: &FunctionType, signature: &FunctionSignature) -> String { - let mut buf = String::new(); - str_impl(&mut buf, function_type, signature, StrOrRepr::Repr).unwrap(); - buf -} - -#[doc(hidden)] -pub struct ParameterParser<'a> { - signature: &'a FunctionSignature, - // current parameter index in function signature - index: usize, - function_type: &'a FunctionType, - positional: vec::IntoIter, - kwargs: LinkedHashMap, -} - -impl<'a> ParameterParser<'a> { - pub fn new( - signature: &'a FunctionSignature, - function_type: &'a FunctionType, - positional: Vec, - named: LinkedHashMap, - args: Option, - kwargs_arg: Option, - ) -> Result, ValueError> { - // Collect args - let mut av = positional; - if let Some(x) = args { - match x.iter() { - Ok(y) => av.extend(y.iter()), - Err(..) => return Err(FunctionError::ArgsArrayIsNotIterable.into()), - } - }; - let positional = av.into_iter(); - // Collect kwargs - let mut kwargs = named; - if let Some(x) = kwargs_arg { - match x.iter() { - Ok(y) => { - for n in &y { - if let Some(k) = n.downcast_rc_str().cloned() { - if let Ok(v) = x.at(n) { - kwargs.insert(k, v); - } else { - return Err(FunctionError::KWArgsDictIsNotMappable.into()); - } - } else { - return Err(FunctionError::ArgsValueIsNotString.into()); - } - } - } - Err(..) 
=> return Err(FunctionError::KWArgsDictIsNotMappable.into()), - } - } - - Ok(ParameterParser { - signature, - index: 0, - function_type, - positional, - kwargs, - }) - } - - pub fn next_normal(&mut self, name: &str, positional_only: bool) -> Result { - if let Some(x) = self.positional.next() { - self.index += 1; - return Ok(x); - } - - if !positional_only { - if let Some(ref r) = self.kwargs.remove(name) { - self.index += 1; - return Ok(r.clone()); - } - } - - Err(FunctionError::NotEnoughParameter { - missing: name.to_string(), - function_type: self.function_type.clone(), - signature: self.signature.clone(), - } - .into()) - } - - pub fn next_optional(&mut self, name: &str, positional_only: bool) -> Option { - self.index += 1; - if let Some(x) = self.positional.next() { - return Some(x); - } - - if !positional_only { - if let Some(ref r) = self.kwargs.remove(name) { - return Some(r.clone()); - } - } - - None - } - - pub fn next_with_default_value( - &mut self, - name: &str, - positional_only: bool, - default_value: &Value, - ) -> Value { - self.next_optional(name, positional_only) - .unwrap_or_else(|| default_value.clone()) - } - - pub fn next_args_array(&mut self) -> Vec { - self.index += 1; - mem::replace(&mut self.positional, Vec::new().into_iter()).collect() - } - - pub fn next_kwargs_dict(&mut self) -> LinkedHashMap { - self.index += 1; - mem::replace(&mut self.kwargs, Default::default()) - } - - pub fn check_no_more_args(&mut self) -> Result<(), ValueError> { - if self.positional.next().is_some() || !self.kwargs.is_empty() { - return Err(FunctionError::ExtraParameter.into()); - } - debug_assert_eq!(self.index, self.signature.params.len()); - Ok(()) - } - - /// This function is only called from macros - pub fn next_arg(&mut self) -> Result { - // Macros call this function exactly once for each signature item. - // So it's safe to panic here. - assert!(self.index != self.signature.params.len()); - let positional_only = self.index < self.signature.positional_count; - Ok(match &self.signature.params[self.index] { - FunctionParameter::Normal(ref name) => { - FunctionArg::Normal(self.next_normal(name, positional_only)?) - } - FunctionParameter::Optional(ref name) => { - FunctionArg::Optional(self.next_optional(name, positional_only)) - } - FunctionParameter::WithDefaultValue(ref name, ref value) => { - FunctionArg::Normal(self.next_with_default_value(name, positional_only, value)) - } - FunctionParameter::ArgsArray(..) => FunctionArg::ArgsArray(self.next_args_array()), - FunctionParameter::KWArgsDict(..) 
=> FunctionArg::KWArgsDict(self.next_kwargs_dict()), - }) - } -} - -/// Define the function type -impl TypedValue for NativeFunction { - type Holder = Immutable; - - fn values_for_descendant_check_and_freeze<'a>( - &'a self, - ) -> Box + 'a> { - Box::new(iter::empty()) - } - - fn to_str_impl(&self, buf: &mut String) -> fmt::Result { - str_impl(buf, &self.function_type, &self.signature, StrOrRepr::Str) - } - fn to_repr_impl(&self, buf: &mut String) -> fmt::Result { - str_impl(buf, &self.function_type, &self.signature, StrOrRepr::Repr) - } - - const TYPE: &'static str = "function"; - - fn call( - &self, - call_stack: &mut CallStack, - type_values: &TypeValues, - positional: Vec, - named: LinkedHashMap, - args: Option, - kwargs: Option, - ) -> ValueResult { - let parser = ParameterParser::new( - &self.signature, - &self.function_type, - positional, - named, - args, - kwargs, - )?; - - (self.function)(call_stack, type_values, parser) - } -} - -impl TypedValue for WrappedMethod { - type Holder = Immutable; - - fn values_for_descendant_check_and_freeze<'a>( - &'a self, - ) -> Box + 'a> { - Box::new(vec![self.method.clone(), self.self_obj.clone()].into_iter()) - } - - fn function_id(&self) -> Option { - Some(FunctionId(self.method.data_ptr())) - } - - fn to_str_impl(&self, buf: &mut String) -> fmt::Result { - self.method.to_str_impl(buf) - } - fn to_repr_impl(&self, buf: &mut String) -> fmt::Result { - self.method.to_repr_impl(buf) - } - const TYPE: &'static str = "function"; - - fn call( - &self, - call_stack: &mut CallStack, - type_values: &TypeValues, - positional: Vec, - named: LinkedHashMap, - args: Option, - kwargs: Option, - ) -> ValueResult { - // The only thing that this wrapper does is insert self at the beginning of the positional - // vector - let positional: Vec = Some(self.self_obj.clone()) - .into_iter() - .chain(positional.into_iter()) - .collect(); - self.method - .call(call_stack, type_values, positional, named, args, kwargs) - } -} - -#[cfg(test)] -mod test { - use super::*; - use crate::values::function::{FunctionParameter, FunctionSignature, FunctionType}; - - #[test] - fn fmt_signature_positional() { - assert_eq!( - "()", - repr( - &FunctionType::Native("f".into()), - &FunctionSignature::new(vec![], 0) - ) - ); - assert_eq!( - "(a)", - repr( - &FunctionType::Native("f".into()), - &FunctionSignature::new(vec![FunctionParameter::Normal("a".into())], 0) - ) - ); - assert_eq!( - "(a, /)", - repr( - &FunctionType::Native("f".into()), - &FunctionSignature::new(vec![FunctionParameter::Normal("a".into())], 1) - ) - ); - assert_eq!( - "(a, b)", - repr( - &FunctionType::Native("f".into()), - &FunctionSignature::new( - vec![ - FunctionParameter::Normal("a".into()), - FunctionParameter::Normal("b".into()), - ], - 0, - ) - ) - ); - assert_eq!( - "(a, /, b)", - repr( - &FunctionType::Native("f".into()), - &FunctionSignature::new( - vec![ - FunctionParameter::Normal("a".into()), - FunctionParameter::Normal("b".into()), - ], - 1, - ) - ) - ); - assert_eq!( - "(a, b, /)", - repr( - &FunctionType::Native("f".into()), - &FunctionSignature::new( - vec![ - FunctionParameter::Normal("a".into()), - FunctionParameter::Normal("b".into()), - ], - 2, - ) - ) - ); - assert_eq!( - "(a, b, /, **k)", - repr( - &FunctionType::Native("f".into()), - &FunctionSignature::new( - vec![ - FunctionParameter::Normal("a".into()), - FunctionParameter::Normal("b".into()), - FunctionParameter::KWArgsDict("k".into()), - ], - 2, - ) - ) - ); - } - - #[test] - fn fmt_signature_optional() { - assert_eq!( - "(?a)", - repr( 
- &FunctionType::Native("f".into()), - &FunctionSignature::new(vec![FunctionParameter::Optional("a".into())], 0) - ) - ); - assert_eq!( - "(?a, /)", - repr( - &FunctionType::Native("f".into()), - &FunctionSignature::new(vec![FunctionParameter::Optional("a".into())], 1) - ) - ); - } - - #[test] - fn fmt_signature_with_default_value() { - assert_eq!( - "(a = 10)", - repr( - &FunctionType::Native("f".into()), - &FunctionSignature::new( - vec![FunctionParameter::WithDefaultValue( - "a".into(), - Value::new(10), - )], - 0, - ) - ) - ); - } -} diff --git a/starlark/src/values/hashed_value.rs b/starlark/src/values/hashed_value.rs deleted file mode 100644 index c4a176ac..00000000 --- a/starlark/src/values/hashed_value.rs +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Error-safe value and hash pair. - -use crate::values::error::ValueError; -use crate::values::Value; -use core::borrow::BorrowMut; -use std::hash::{Hash, Hasher}; - -/// A pair of value and cached value hash. -/// -/// This struct contains both value and a precomputed hash of that value. -/// If a value is not hashable, this error is raised at `DictionaryKey` construction. -/// So the implementation of `Hash` for this struct does not need to handle `hash` errors. -#[derive(Eq, Clone)] -pub struct HashedValue { - hash: u64, - value: Value, -} - -impl From for Value { - fn from(key: HashedValue) -> Value { - key.value - } -} - -impl HashedValue { - /// Returns error if the value is non hashable. - pub fn new(value: Value) -> Result { - let hash = value.get_hash()?; - Ok(HashedValue { hash, value }) - } - - /// Get precomputed hash. - pub fn get_hash(&self) -> u64 { - self.hash - } - - /// Get contained value. - pub fn get_value(&self) -> &Value { - &self.value - } - - /// Freeze the value, should be no-op since only immutable values can be hashed. - pub fn freeze(&mut self) { - self.value.borrow_mut().freeze(); - } -} - -impl PartialEq for HashedValue { - fn eq(&self, other: &HashedValue) -> bool { - self.hash == other.hash && self.value == other.value - } -} - -impl Hash for HashedValue { - fn hash(&self, state: &mut H) { - state.write_u64(self.hash) - } -} diff --git a/starlark/src/values/inspect.rs b/starlark/src/values/inspect.rs deleted file mode 100644 index 62db7a46..00000000 --- a/starlark/src/values/inspect.rs +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -//! Utility for easier implementation of `inspect`. - -use crate::values::dict::Dictionary; -use crate::values::none::NoneType; -use crate::values::Value; -use codemap::Spanned; -use std::collections::HashMap; - -/// Convert "inspectable" to a Starlark value. -/// -/// Somewhat similar to `Value::from`, but also works with other types -/// which are not supposed to converted to `Value` implicitly. -pub(crate) trait Inspectable { - fn inspect(&self) -> Value; -} - -impl Inspectable for &'_ A { - fn inspect(&self) -> Value { - (**self).inspect() - } -} - -impl Inspectable for usize { - fn inspect(&self) -> Value { - Value::from(*self) - } -} - -impl Inspectable for String { - fn inspect(&self) -> Value { - Value::from(self.as_str()) - } -} - -impl Inspectable for Box { - fn inspect(&self) -> Value { - (**self).inspect() - } -} - -impl Inspectable for Option { - fn inspect(&self) -> Value { - match self { - None => Value::new(NoneType::None), - Some(v) => (v.inspect(),).into(), - } - } -} - -impl Inspectable for Vec { - fn inspect(&self) -> Value { - Value::from(self.iter().map(V::inspect).collect::>()) - } -} - -impl Inspectable for (A, B) { - fn inspect(&self) -> Value { - Value::from((self.0.inspect(), self.1.inspect())) - } -} - -impl Inspectable for (A, B, C) { - fn inspect(&self) -> Value { - Value::from((self.0.inspect(), self.1.inspect(), self.2.inspect())) - } -} - -impl Inspectable for (A, B, C, D) { - fn inspect(&self) -> Value { - Value::from(( - self.0.inspect(), - self.1.inspect(), - self.2.inspect(), - self.3.inspect(), - )) - } -} - -impl Inspectable - for (A, B, C, D, E) -{ - fn inspect(&self) -> Value { - Value::from(( - self.0.inspect(), - self.1.inspect(), - self.2.inspect(), - self.3.inspect(), - self.4.inspect(), - )) - } -} - -impl Inspectable for HashMap { - fn inspect(&self) -> Value { - let mut map = Dictionary::new_typed(); - for (k, v) in self { - map.insert(k.inspect(), v.inspect()).unwrap(); - } - Value::new(map) - } -} - -impl Inspectable for Spanned { - fn inspect(&self) -> Value { - self.node.inspect() - } -} diff --git a/starlark/src/values/int.rs b/starlark/src/values/int.rs deleted file mode 100644 index a95f67e2..00000000 --- a/starlark/src/values/int.rs +++ /dev/null @@ -1,199 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Define the int type for Starlark. - -use crate::values::error::UnsupportedOperation; -use crate::values::error::ValueError; -use crate::values::frozen::FrozenOnCreation; -use crate::values::*; -use std::cmp::Ordering; -use std::fmt; -use std::iter; - -// A convenient macro for testing and documentation. -#[macro_export] -#[doc(hidden)] -macro_rules! int_op { - ($v1:tt. $op:ident($v2:expr)) => { - $crate::values::Value::new($v1) - .$op($crate::values::Value::new($v2)) - .unwrap() - .to_int() - .unwrap() - }; - ($v1:tt. 
$op:ident()) => { - $crate::values::Value::new($v1) - .$op() - .unwrap() - .to_int() - .unwrap() - }; -} - -macro_rules! from_int { - ($x: ty, $y: tt) => { - impl From<$x> for Value { - fn from(a: $x) -> Value { - Value::new(a as $y) - } - } - }; -} - -from_int!(i8, i64); -from_int!(i16, i64); -from_int!(i32, i64); -from_int!(u8, i64); -from_int!(u16, i64); -from_int!(u32, i64); -// TODO: check for overflow -from_int!(u64, i64); -// TODO: check for overflow -from_int!(usize, i64); -from_int!(isize, i64); - -impl From for Value { - fn from(v: i64) -> Self { - Value::new(v) - } -} - -fn i64_arith_bin_op(left: i64, right: Value, op: UnsupportedOperation, f: F) -> ValueResult -where - F: FnOnce(i64, i64) -> Result, -{ - match right.downcast_ref::() { - Some(right) => Ok(Value::new(f(left, *right)?)), - None => Err(ValueError::OperationNotSupported { - op, - left: i64::TYPE.to_owned(), - right: Some(right.get_type().to_owned()), - }), - } -} - -/// Define the int type -impl TypedValue for i64 { - type Holder = Immutable; - const TYPE: &'static str = "int"; - - const INLINE: bool = true; - - fn new_value(self) -> Value { - Value(ValueInner::Int(self)) - } - - fn equals(&self, other: &i64) -> Result { - Ok(self == other) - } - fn to_str_impl(&self, buf: &mut String) -> fmt::Result { - write!(buf, "{}", self) - } - fn to_repr_impl(&self, buf: &mut String) -> fmt::Result { - write!(buf, "{}", self) - } - fn to_int(&self) -> Result { - Ok(*self) - } - fn to_bool(&self) -> bool { - *self != 0 - } - fn get_hash(&self) -> Result { - Ok(*self as u64) - } - fn plus(&self) -> Result { - Ok(*self) - } - fn minus(&self) -> Result { - self.checked_neg().ok_or(ValueError::IntegerOverflow) - } - fn add(&self, other: &i64) -> Result { - self.checked_add(*other).ok_or(ValueError::IntegerOverflow) - } - fn sub(&self, other: &i64) -> Result { - self.checked_sub(*other).ok_or(ValueError::IntegerOverflow) - } - fn mul(&self, other: Value) -> ValueResult { - match other.downcast_ref::() { - Some(other) => self - .checked_mul(*other) - .ok_or(ValueError::IntegerOverflow) - .map(Value::new), - None => other.mul(Value::new(*self)), - } - } - fn percent(&self, other: Value) -> ValueResult { - i64_arith_bin_op(*self, other, UnsupportedOperation::Percent, |a, b| { - if b == 0 { - return Err(ValueError::DivisionByZero); - } - // In Rust `i64::min_value() % -1` is overflow, but we should eval it to zero. 
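The remainder logic here, together with `floor_div` just below, implements floored semantics: `//` rounds toward negative infinity and `%` takes the sign of the divisor. A small sketch of the observable behaviour, assuming the arithmetic methods on `Value` that the `int_op!` tests below exercise:

```rust
use starlark::values::Value;

fn main() {
    // Floored division rounds toward negative infinity...
    let q = Value::from(-7).floor_div(Value::from(2)).unwrap();
    assert_eq!(-4, q.to_int().unwrap());

    // ...and the remainder always follows the sign of the divisor.
    let r = Value::from(-7).percent(Value::from(2)).unwrap();
    assert_eq!(1, r.to_int().unwrap());
    let r = Value::from(7).percent(Value::from(-2)).unwrap();
    assert_eq!(-1, r.to_int().unwrap());
}
```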
- if *self == i64::min_value() && b == -1 { - return Ok(0); - } - let r = a % b; - if r == 0 { - Ok(0) - } else { - Ok(if b.signum() != r.signum() { r + b } else { r }) - } - }) - } - fn div(&self, other: Value) -> ValueResult { - self.floor_div(other) - } - fn floor_div(&self, other: Value) -> ValueResult { - i64_arith_bin_op(*self, other, UnsupportedOperation::FloorDiv, |a, b| { - if b == 0 { - return Err(ValueError::DivisionByZero); - } - let sig = b.signum() * a.signum(); - let offset = if sig < 0 && a % b != 0 { 1 } else { 0 }; - match a.checked_div(b) { - Some(div) => Ok(div - offset), - None => Err(ValueError::IntegerOverflow), - } - }) - } - - fn values_for_descendant_check_and_freeze<'a>( - &'a self, - ) -> Box + 'a> { - Box::new(iter::empty()) - } - - fn compare(&self, other: &i64) -> Result { - Ok(self.cmp(other)) - } -} - -impl FrozenOnCreation for i64 {} - -#[cfg(test)] -mod test { - use crate::int_op; - - #[test] - fn test_arithmetic_operators() { - assert_eq!(1, int_op!(1.plus())); // 1.plus() = +1 = 1 - assert_eq!(-1, int_op!(1.minus())); // 1.minus() = -1 - assert_eq!(3, int_op!(1.add(2))); // 1.add(2) = 1 + 2 = 3 - assert_eq!(-1, int_op!(1.sub(2))); // 1.sub(2) = 1 - 2 = -1 - assert_eq!(6, int_op!(2.mul(3))); // 2.mul(3) = 2 * 3 = 6 - // Remainder of the floored division: 5.percent(3) = 5 % 3 = 2 - assert_eq!(2, int_op!(5.percent(3))); - assert_eq!(3, int_op!(7.div(2))); // 7.div(2) = 7 / 2 = 3 - } -} diff --git a/starlark/src/values/iter.rs b/starlark/src/values/iter.rs deleted file mode 100644 index 817db48f..00000000 --- a/starlark/src/values/iter.rs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Iterable for Starlark objects. - -use crate::values::cell::ObjectRef; -use crate::values::Value; - -/// Type to be implemented by types which are iterable. -pub trait TypedIterable: 'static { - /// Make an iterator. - fn to_iter<'a>(&'a self) -> Box + 'a>; - - /// Specialized faster version of iteration when results as vec is needed - fn to_vec(&self) -> Vec { - self.to_iter().into_iter().collect() - } -} - -/// Iterable which contains borrowed reference to a sequence. -pub struct RefIterable<'a> { - r: ObjectRef<'a, dyn TypedIterable>, -} - -impl<'a> RefIterable<'a> { - pub fn new(r: ObjectRef<'a, dyn TypedIterable>) -> RefIterable<'a> { - RefIterable { r } - } - - pub fn iter(&'a self) -> Box + 'a> { - self.r.to_iter() - } - - pub fn to_vec(&self) -> Vec { - self.r.to_vec() - } -} - -impl<'a> IntoIterator for &'a RefIterable<'a> { - type Item = Value; - type IntoIter = Box + 'a>; - - fn into_iter(self) -> Self::IntoIter { - self.iter() - } -} - -/// Fake iterable needed to be able to do `Ref::map` with error. 
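To show how `TypedIterable` is consumed from the embedding side, here is a rough sketch that assumes `Value::iter` returns the `RefIterable` wrapper above, as the internal callers in `list.rs` later in this diff suggest:

```rust
use starlark::values::Value;

fn main() {
    // Borrow the list as an iterable and copy its elements out.
    let list = Value::from(vec![1, 2, 3]);
    let iterable = list.iter().expect("lists are iterable");
    let doubled: Vec<i64> = iterable
        .to_vec()
        .into_iter()
        .map(|v| v.to_int().unwrap() * 2)
        .collect();
    assert_eq!(vec![2, 4, 6], doubled);

    // Non-iterable values report an error instead of panicking.
    assert!(Value::from(1).iter().is_err());
}
```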
-pub(crate) struct FakeTypedIterable; - -impl TypedIterable for FakeTypedIterable { - fn to_iter<'a>(&'a self) -> Box + 'a> { - unreachable!() - } -} diff --git a/starlark/src/values/list.rs b/starlark/src/values/list.rs deleted file mode 100644 index 37431d32..00000000 --- a/starlark/src/values/list.rs +++ /dev/null @@ -1,365 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Define the list type of Starlark -use crate::stdlib::list::LIST_REMOVE_ELEMENT_NOT_FOUND_ERROR_CODE; -use crate::values::error::{RuntimeError, ValueError}; -use crate::values::iter::TypedIterable; -use crate::values::slice_indices::convert_slice_indices; -use crate::values::*; -use std::cmp::Ordering; -use std::fmt; - -#[derive(Clone, Default)] -pub struct List { - content: Vec, -} - -impl> From> for List { - fn from(a: Vec) -> List { - List { - content: a.into_iter().map(Into::into).collect(), - } - } -} - -impl> From> for Value { - fn from(a: Vec) -> Value { - Value::new(List::from(a)) - } -} - -impl List { - pub fn push(&mut self, value: Value) -> Result<(), ValueError> { - let value = value.clone_for_container(self)?; - self.content.push(value); - Ok(()) - } - - pub fn extend(&mut self, other: Value) -> Result<(), ValueError> { - let other: Vec = other - .iter()? 
- .iter() - .map(|v| v.clone_for_container(self)) - .collect::>()?; - self.content.extend(other); - Ok(()) - } - - pub fn clear(&mut self) { - self.content.clear(); - } - - pub fn insert(&mut self, index: usize, value: Value) -> Result<(), ValueError> { - let value = value.clone_for_container(self)?; - self.content.insert(index, value); - Ok(()) - } - - pub fn pop(&mut self, index: i64) -> Result { - if index < 0 || index >= self.content.len() as i64 { - return Err(ValueError::IndexOutOfBound(index)); - } - Ok(self.content.remove(index as usize)) - } - - pub fn remove(&mut self, needle: Value) -> Result<(), ValueError> { - let position = match self.content.iter().position(|v| v == &needle) { - Some(position) => position, - None => { - return Err(RuntimeError { - code: LIST_REMOVE_ELEMENT_NOT_FOUND_ERROR_CODE, - message: format!("Element '{}' not found in '{}'", needle, self.to_str()), - label: "not found".to_owned(), - } - .into()); - } - }; - self.content.remove(position); - Ok(()) - } - - pub fn remove_at(&mut self, index: usize) -> Value { - self.content.remove(index) - } -} - -impl TypedValue for List { - type Holder = Mutable; - - fn values_for_descendant_check_and_freeze<'a>( - &'a self, - ) -> Box + 'a> { - Box::new(self.content.iter().cloned()) - } - - /// Returns a string representation for the list - /// - /// # Examples: - /// ``` - /// # use starlark::values::*; - /// # use starlark::values::list::List; - /// assert_eq!("[1, 2, 3]", Value::from(vec![1, 2, 3]).to_str()); - /// assert_eq!("[1, [2, 3]]", - /// Value::from(vec![Value::from(1), Value::from(vec![2, 3])]).to_str()); - /// assert_eq!("[1]", Value::from(vec![1]).to_str()); - /// assert_eq!("[]", Value::from(Vec::::new()).to_str()); - /// ``` - fn to_repr_impl(&self, buf: &mut String) -> fmt::Result { - write!(buf, "[")?; - for (i, v) in self.content.iter().enumerate() { - if i != 0 { - write!(buf, ", ")?; - } - v.to_repr_impl(buf)?; - } - write!(buf, "]")?; - Ok(()) - } - - const TYPE: &'static str = "list"; - fn to_bool(&self) -> bool { - !self.content.is_empty() - } - - fn equals(&self, other: &List) -> Result { - if self.content.len() != other.content.len() { - return Ok(false); - } - - let mut self_iter = self.content.iter(); - let mut other_iter = other.content.iter(); - - loop { - match (self_iter.next(), other_iter.next()) { - (Some(a), Some(b)) => { - if !a.equals(b)? { - return Ok(false); - } - } - (None, None) => { - return Ok(true); - } - _ => unreachable!(), - } - } - } - - fn compare(&self, other: &List) -> Result { - let mut iter1 = self.content.iter(); - let mut iter2 = other.content.iter(); - loop { - match (iter1.next(), iter2.next()) { - (None, None) => return Ok(Ordering::Equal), - (None, Some(..)) => return Ok(Ordering::Less), - (Some(..), None) => return Ok(Ordering::Greater), - (Some(v1), Some(v2)) => { - let r = v1.compare(&v2)?; - if r != Ordering::Equal { - return Ok(r); - } - } - } - } - } - - fn at(&self, index: Value) -> ValueResult { - let i = index.convert_index(self.length()?)? as usize; - Ok(self.content[i].clone()) - } - - fn length(&self) -> Result { - Ok(self.content.len() as i64) - } - - fn contains(&self, other: &Value) -> Result { - for x in self.content.iter() { - if x.equals(other)? 
{ - return Ok(true); - } - } - Ok(false) - } - - fn slice( - &self, - start: Option, - stop: Option, - stride: Option, - ) -> ValueResult { - let (start, stop, stride) = convert_slice_indices(self.length()?, start, stop, stride)?; - Ok(Value::from(tuple::slice_vector( - start, - stop, - stride, - self.content.iter(), - ))) - } - - fn iter(&self) -> Result<&dyn TypedIterable, ValueError> { - Ok(self) - } - - /// Concatenate `other` to the current value. - /// - /// `other` has to be a list. - /// - /// # Example - /// - /// ```rust - /// # use starlark::values::*; - /// # use starlark::values::list::List; - /// # assert!( - /// // [1, 2, 3] + [2, 3] == [1, 2, 3, 2, 3] - /// Value::from(vec![1,2,3]).add(Value::from(vec![2,3])).unwrap() - /// == Value::from(vec![1, 2, 3, 2, 3]) - /// # ); - /// ``` - fn add(&self, other: &List) -> Result { - let mut result = List { - content: Vec::new(), - }; - for x in &self.content { - result.content.push(x.clone()); - } - for x in &other.content { - result.content.push(x.clone()); - } - Ok(result) - } - - /// Repeat `other` times this tuple. - /// - /// `other` has to be an int or a boolean. - /// - /// # Example - /// - /// ```rust - /// # use starlark::values::*; - /// # use starlark::values::list::List; - /// # assert!( - /// // [1, 2, 3] * 3 == [1, 2, 3, 1, 2, 3, 1, 2, 3] - /// Value::from(vec![1,2,3]).mul(Value::from(3)).unwrap() - /// == Value::from(vec![1, 2, 3, 1, 2, 3, 1, 2, 3]) - /// # ); - /// ``` - fn mul(&self, other: Value) -> ValueResult { - match other.downcast_ref::() { - Some(l) => { - let mut result = List { - content: Vec::new(), - }; - for _i in 0..*l { - result.content.extend(self.content.iter().cloned()); - } - Ok(Value::new(result)) - } - None => Err(ValueError::IncorrectParameterType), - } - } - - /// Set the value at `index` to `new_value` - /// - /// # Example - /// ``` - /// # use starlark::values::*; - /// # use starlark::values::list::List; - /// let mut v = Value::from(vec![1, 2, 3]); - /// v.set_at(Value::from(1), Value::from(1)).unwrap(); - /// v.set_at(Value::from(2), Value::from(vec![2, 3])).unwrap(); - /// assert_eq!(&v.to_repr(), "[1, 1, [2, 3]]"); - /// ``` - fn set_at(&mut self, index: Value, new_value: Value) -> Result<(), ValueError> { - let i = index.convert_index(self.length()?)? 
as usize; - self.content[i] = new_value.clone_for_container(self)?; - Ok(()) - } -} - -impl TypedIterable for List { - fn to_iter<'a>(&'a self) -> Box + 'a> { - Box::new(self.content.iter().cloned()) - } - - fn to_vec(&self) -> Vec { - self.content.clone() - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_to_str() { - assert_eq!("[1, 2, 3]", Value::from(vec![1, 2, 3]).to_str()); - assert_eq!( - "[1, [2, 3]]", - Value::from(vec![Value::from(1), Value::from(vec![2, 3])]).to_str() - ); - assert_eq!("[1]", Value::from(vec![1]).to_str()); - assert_eq!("[]", Value::from(Vec::::new()).to_str()); - } - - #[test] - fn test_mutate_list() { - let mut v = Value::from(vec![1, 2, 3]); - v.set_at(Value::from(1), Value::from(1)).unwrap(); - v.set_at(Value::from(2), Value::from(vec![2, 3])).unwrap(); - assert_eq!(&v.to_repr(), "[1, 1, [2, 3]]"); - } - - #[test] - fn test_arithmetic_on_list() { - // [1, 2, 3] + [2, 3] == [1, 2, 3, 2, 3] - assert_eq!( - Value::from(vec![1, 2, 3]) - .add(Value::from(vec![2, 3])) - .unwrap(), - Value::from(vec![1, 2, 3, 2, 3]) - ); - // [1, 2, 3] * 3 == [1, 2, 3, 1, 2, 3, 1, 2, 3] - assert_eq!( - Value::from(vec![1, 2, 3]).mul(Value::from(3)).unwrap(), - Value::from(vec![1, 2, 3, 1, 2, 3, 1, 2, 3]) - ); - } - - #[test] - fn test_value_alias() { - let v1 = Value::from(vec![1, 2, 3]); - let mut v2 = v1.clone(); - v2.set_at(Value::from(2), Value::from(4)).unwrap(); - assert_eq!(v2.to_str(), "[1, 2, 4]"); - assert_eq!(v1.to_str(), "[1, 2, 4]"); - } - - #[test] - fn test_is_descendant() { - let v1 = Value::from(vec![1, 2, 3]); - let v2 = Value::from(vec![Value::new(1), Value::new(2), v1.clone()]); - let v3 = Value::from(vec![Value::new(1), Value::new(2), v2.clone()]); - assert!(v3.is_descendant_value(&v2)); - assert!(v3.is_descendant_value(&v1)); - assert!(v3.is_descendant_value(&v3)); - - assert!(v2.is_descendant_value(&v1)); - assert!(v2.is_descendant_value(&v2)); - assert!(!v2.is_descendant_value(&v3)); - - assert!(v1.is_descendant_value(&v1)); - assert!(!v1.is_descendant_value(&v2)); - assert!(!v1.is_descendant_value(&v3)); - } -} diff --git a/starlark/src/values/mod.rs b/starlark/src/values/mod.rs deleted file mode 100644 index 8a35d23e..00000000 --- a/starlark/src/values/mod.rs +++ /dev/null @@ -1,1444 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! The values module define a trait `TypedValue` that defines the attribute of -//! any value in Starlark and a few macro to help implementing this trait. -//! The `Value` struct defines the actual structure holding a TypedValue. It is mostly used to -//! enable mutable and Rc behavior over a TypedValue. -//! This modules also defines this traits for the basic immutable values: int, bool and NoneType. -//! Sub-modules implement other common types of all Starlark dialect. -//! -//! __Note__: we use _sequence_, _iterable_ and _indexable_ according to the -//! definition in the [Starlark specification]( -//! 
https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#sequence-types). -//! We also use the term _container_ for denoting any of those type that can hold several values. -//! -//! -//! # Defining a new type -//! -//! Defining a new Starlark type can be done by implenting the [`TypedValue`](crate::values::TypedValue) -//! trait. All method of that trait are operation needed by Starlark interpreter to understand the -//! type. Most of `TypedValue` methods are optional with default implementations returning error. -//! -//! For example the `NoneType` trait implementation is the following: -//! -//! ```rust -//! # use starlark::values::{TypedValue, Value, Immutable}; -//! # use starlark::values::error::ValueError; -//! # use std::cmp::Ordering; -//! # use std::iter; -//! # use std::fmt; -//! # use std::fmt::Write as _; -//! -//! /// Define the NoneType type -//! pub enum NoneType { -//! None -//! } -//! -//! impl TypedValue for NoneType { -//! type Holder = Immutable; -//! const TYPE: &'static str = "NoneType"; -//! -//! fn compare(&self, _other: &NoneType) -> Result { -//! Ok(Ordering::Equal) -//! } -//! fn equals(&self, _other: &NoneType) -> Result { -//! Ok(true) -//! } -//! fn values_for_descendant_check_and_freeze<'a>(&'a self) -> Box + 'a> { -//! Box::new(iter::empty()) -//! } -//! fn to_repr_impl(&self, buf: &mut String) -> fmt::Result { -//! write!(buf, "None") -//! } -//! fn to_bool(&self) -> bool { -//! false -//! } -//! // just took the result of hash(None) in macos python 2.7.10 interpreter. -//! fn get_hash(&self) -> Result { -//! Ok(9_223_380_832_852_120_682) -//! } -//! } -//! ``` -//! -//! In addition to the `TypedValue` trait, it is recommended to implement the `From` trait -//! for all type that can convert to the added type but parameterized it with the `Into` -//! type. For example the unary tuple `From` trait is defined as followed: -//! -//! ```rust,ignore -//! impl> From<(T,)> for Tuple { -//! fn from(a: (T,)) -> Tuple { -//! Tuple { content: vec![a.0.into()] } -//! } -//! } -//! ``` -use crate::environment::TypeValues; -use crate::eval::call_stack; -use crate::eval::call_stack::CallStack; -use crate::values::error::UnsupportedOperation; -use crate::values::error::ValueError; -use crate::values::iter::{FakeTypedIterable, RefIterable, TypedIterable}; -use codemap_diagnostic::Level; -use linked_hash_map::LinkedHashMap; -use std::any::type_name; -use std::any::Any; -use std::cmp::Ordering; -use std::fmt; -use std::fmt::Write as _; -use std::marker; -use std::rc::Rc; -use std::usize; - -/// Similar to [`Value`], but for specific type. -pub(crate) struct ValueOther(Rc>); - -impl Default for ValueOther { - fn default() -> Self { - ValueOther::new(T::default()) - } -} - -impl Clone for ValueOther { - fn clone(&self) -> Self { - ValueOther(self.0.clone()) - } -} - -impl ValueOther { - pub fn new(v: T) -> ValueOther { - assert!(!T::INLINE); - - ValueOther(Rc::new(ValueHolder { - value: if T::Holder::MUTABLE { - ObjectCell::new_mutable(v) - } else { - ObjectCell::new_immutable(v) - }, - })) - } - - pub fn borrow_mut(&self) -> ObjectRefMut { - self.0.value.borrow_mut() - } -} - -impl From> for Value { - fn from(v: ValueOther) -> Self { - Value(ValueInner::Other(v.0)) - } -} - -impl fmt::Debug for ValueOther { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let value: Value = self.clone().into(); - fmt::Debug::fmt(&value, f) - } -} - -/// ValueInner wraps the actual value or a memory pointer -/// to the actual value for complex type. 
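Because `Value` is a cheap `Rc`-backed handle as described above, clones alias the same underlying object; this sketch mirrors the `test_value_alias` case from `list.rs`:

```rust
use starlark::values::Value;

fn main() {
    // Cloning a Value copies the handle, not the list: both names observe the mutation.
    let v1 = Value::from(vec![1, 2, 3]);
    let mut v2 = v1.clone();
    v2.set_at(Value::from(2), Value::from(4)).unwrap();
    assert_eq!("[1, 2, 4]", v2.to_str());
    assert_eq!("[1, 2, 4]", v1.to_str());
}
```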
-#[derive(Clone)] -enum ValueInner { - None(NoneType), - Bool(bool), - Int(i64), - String(RcString), - Other(Rc>), -} - -/// A value in Starlark. -/// -/// This is a wrapper around a [TypedValue] which is cheap to clone and safe to pass around. -#[derive(Clone)] -pub struct Value(ValueInner); - -pub type ValueResult = Result; - -impl Value { - /// Create a new `Value` from a static value. - pub fn new(t: T) -> Value { - t.new_value() - } - - fn try_value_holder( - &self, - for_iter: bool, - ) -> Result, ObjectBorrowError> { - match &self.0 { - ValueInner::None(n) => Ok(ObjectRef::immutable_frozen(n)), - ValueInner::Int(i) => Ok(ObjectRef::immutable_frozen(i)), - ValueInner::Bool(b) => Ok(ObjectRef::immutable_frozen(b)), - ValueInner::String(s) => Ok(ObjectRef::immutable_frozen(s.as_string())), - ValueInner::Other(rc) => rc.value.try_borrow(for_iter), - } - } - - /// Get a copy of the object header - fn object_header_copy(&self) -> ObjectHeader { - match &self.0 { - ValueInner::None(..) - | ValueInner::Int(..) - | ValueInner::Bool(..) - | ValueInner::String(..) => ObjectHeader::immutable_frozen(), - ValueInner::Other(rc) => rc.value.get_header_copy(), - } - } - - fn value_holder(&self) -> ObjectRef { - self.try_value_holder(false).unwrap() - } - - fn try_value_holder_mut( - &self, - ) -> Result, ObjectBorrowMutError> { - match &self.0 { - ValueInner::Other(rc) => rc.value.try_borrow_mut(), - _ => Err(ObjectBorrowMutError::Immutable), - } - } - - /// Clone for inserting into the other container, using weak reference if we do a - /// recursive insertion. - pub fn clone_for_container(&self, container: &T) -> Result { - if self.is_descendant(DataPtr::from(container)) { - Err(ValueError::UnsupportedRecursiveDataStructure) - } else { - Ok(self.clone()) - } - } - - pub fn clone_for_container_value(&self, other: &Value) -> Result { - if self.is_descendant_value(other) { - Err(ValueError::UnsupportedRecursiveDataStructure) - } else { - Ok(self.clone()) - } - } - - /// Check is the object is frozen. - pub(crate) fn is_frozen(&self) -> bool { - self.object_header_copy().is_frozen() - } - - /// Determine if the value pointed by other is a descendant of self - pub fn is_descendant_value(&self, other: &Value) -> bool { - self.is_descendant(other.data_ptr()) - } - - pub fn is_descendant(&self, other: DataPtr) -> bool { - match self.try_value_holder(false) { - Ok(v) => v.is_descendant_dyn(other), - Err(..) => { - // We have already borrowed mutably this value, - // which means we are trying to mutate it, assigning other to it. - true - } - } - } - - /// Object data pointer. - pub fn data_ptr(&self) -> DataPtr { - match &self.0 { - ValueInner::None(n) => DataPtr::from(n), - ValueInner::Int(i) => DataPtr::from(i), - ValueInner::Bool(b) => DataPtr::from(b), - ValueInner::String(b) => DataPtr::from(b.as_string()), - ValueInner::Other(rc) => rc.data_ptr(), - } - } - - /// Function id used to detect recursion. - pub fn function_id(&self) -> FunctionId { - self.value_holder().function_id_dyn() - } - - pub(crate) fn inspect(&self) -> StarlarkStruct { - let header = match &self.0 { - ValueInner::Bool(..) - | ValueInner::Int(..) - | ValueInner::None(..) - | ValueInner::String(..) 
=> format!("{:?}", ObjectHeader::immutable_frozen_static()), - ValueInner::Other(rc) => format!("{:?}", rc.value.get_header()), - }; - - let mut fields = LinkedHashMap::new(); - fields.insert("data_ptr".into(), Value::from(self.data_ptr().0 as usize)); - let Inspect { - rust_type_name, - custom, - } = self.value_holder().inspect_dyn(); - fields.insert("rust_type_name".into(), Value::from(rust_type_name)); - fields.insert("header".into(), Value::from(header)); - fields.insert("custom".into(), custom); - StarlarkStruct::new(fields) - } -} - -pub trait Mutability { - type Content: TypedValue; - - /// This type is mutable or immutable. - const MUTABLE: bool; -} - -/// Type parameter for immutable types. -pub struct Immutable(marker::PhantomData); -/// Type parameter for mutable types. -pub struct Mutable(marker::PhantomData); - -impl Mutability for Mutable { - type Content = T; - const MUTABLE: bool = true; -} - -impl Mutability for Immutable { - type Content = T; - const MUTABLE: bool = false; -} - -/// Pointer to data, used for cycle checks. -#[derive(Copy, Clone, Debug, Eq)] -pub struct DataPtr(pub(crate) *const ()); - -impl From<*const T> for DataPtr { - fn from(p: *const T) -> Self { - DataPtr(p as *const ()) - } -} - -impl From<*mut T> for DataPtr { - fn from(p: *mut T) -> Self { - DataPtr::from(p as *const T) - } -} - -impl From<&'_ T> for DataPtr { - fn from(p: &T) -> Self { - DataPtr::from(p as *const T) - } -} - -impl From<&'_ dyn TypedValueDyn> for DataPtr { - fn from(p: &'_ dyn TypedValueDyn) -> Self { - DataPtr(p as *const dyn TypedValueDyn as *const ()) - } -} - -impl From for DataPtr { - fn from(v: Value) -> Self { - v.data_ptr() - } -} - -impl PartialEq for DataPtr { - fn eq(&self, other: &DataPtr) -> bool { - self.0 == other.0 - } -} - -/// Function identity to detect recursion. -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct FunctionId(pub DataPtr); - -impl TypedValueDyn for T { - fn as_any_ref(&self) -> &dyn Any { - self as &dyn Any - } - - fn as_any_mut(&mut self) -> &mut dyn Any { - self as &mut dyn Any - } - - fn function_id_dyn(&self) -> FunctionId { - self.function_id() - .unwrap_or(FunctionId(DataPtr::from(self))) - } - - /// Freezes the current value. 
- fn freeze_dyn(&self) { - for mut value in self.values_for_descendant_check_and_freeze() { - value.freeze(); - } - } - - fn to_str_impl_dyn(&self, buf: &mut String) -> fmt::Result { - self.to_str_impl(buf) - } - - fn to_repr_impl_dyn(&self, buf: &mut String) -> fmt::Result { - self.to_repr_impl(buf) - } - - fn get_type_dyn(&self) -> &'static str { - T::TYPE - } - - fn to_bool_dyn(&self) -> bool { - self.to_bool() - } - - fn to_int_dyn(&self) -> Result { - self.to_int() - } - - fn get_hash_dyn(&self) -> Result { - self.get_hash() - } - - fn is_descendant_dyn(&self, other: DataPtr) -> bool { - if DataPtr::from(self) == other { - return true; - } - self.values_for_descendant_check_and_freeze() - .any(|x| x.is_descendant(other)) - } - - fn equals_dyn(&self, other: &Value) -> Result { - let _stack_depth_guard = call_stack::try_inc()?; - - match other.downcast_ref::() { - Some(other) => self.equals(&*other), - None => Ok(false), - } - } - - fn compare_dyn(&self, other: &Value) -> Result { - let _stack_depth_guard = call_stack::try_inc()?; - - match other.downcast_ref::() { - Some(other) => self.compare(&*other), - None => Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::Compare, - left: self.get_type_dyn().to_owned(), - right: Some(other.get_type().to_owned()), - }), - } - } - - fn call_dyn( - &self, - call_stack: &mut CallStack, - type_values: &TypeValues, - positional: Vec, - named: LinkedHashMap, - args: Option, - kwargs: Option, - ) -> ValueResult { - self.call(call_stack, type_values, positional, named, args, kwargs) - } - - fn at_dyn(&self, index: Value) -> Result { - self.at(index) - } - - fn set_at_dyn(&mut self, index: Value, new_value: Value) -> Result<(), ValueError> { - self.set_at(index, new_value) - } - - fn slice_dyn( - &self, - start: Option, - stop: Option, - stride: Option, - ) -> Result { - self.slice(start, stop, stride) - } - - fn iter_dyn<'a>(&'a self) -> Result<&'a dyn TypedIterable, ValueError> { - self.iter() - } - - fn length_dyn(&self) -> Result { - self.length() - } - - fn get_attr_dyn(&self, attribute: &str) -> Result { - self.get_attr(attribute) - } - - fn has_attr_dyn(&self, attribute: &str) -> Result { - self.has_attr(attribute) - } - - fn set_attr_dyn(&mut self, attribute: &str, new_value: Value) -> Result<(), ValueError> { - self.set_attr(attribute, new_value) - } - - fn dir_attr_dyn(&self) -> Result, ValueError> { - self.dir_attr() - } - - fn contains_dyn(&self, other: &Value) -> Result { - self.contains(other) - } - - fn plus_dyn(&self) -> Result { - self.plus().map(Value::new) - } - - fn minus_dyn(&self) -> Result { - self.minus().map(Value::new) - } - - fn add_dyn(&self, other: Value) -> Result { - match other.downcast_ref::() { - Some(other) => self.add(&*other).map(Value::new), - None => Err(ValueError::IncorrectParameterType), - } - } - - fn sub_dyn(&self, other: Value) -> Result { - match other.downcast_ref() { - Some(other) => self.sub(&*other).map(Value::new), - None => Err(ValueError::IncorrectParameterType), - } - } - - fn mul_dyn(&self, other: Value) -> Result { - self.mul(other) - } - - fn percent_dyn(&self, other: Value) -> Result { - self.percent(other) - } - - fn div_dyn(&self, other: Value) -> Result { - self.div(other) - } - - fn floor_div_dyn(&self, other: Value) -> Result { - self.floor_div(other) - } - - fn pipe_dyn(&self, other: Value) -> Result { - self.pipe(other) - } - - fn inspect_dyn(&self) -> Inspect { - Inspect { - rust_type_name: type_name::(), - custom: self.inspect_custom(), - } - } -} - -/// Used in `inspect` 
function implementation. -pub(crate) struct Inspect { - rust_type_name: &'static str, - custom: Value, -} - -struct ValueHolder { - value: ObjectCell, -} - -impl ValueHolder { - /// Pointer to `TypedValue` object, used for cycle checks. - fn data_ptr(&self) -> DataPtr { - DataPtr(self.value.get_ptr() as *const ()) - } -} - -/// Dynamically-dispatched version of [`ValueHolder`]. -pub(crate) trait TypedValueDyn: 'static { - fn as_any_ref(&self) -> &dyn Any; - - fn as_any_mut(&mut self) -> &mut dyn Any; - - /// Id used to detect recursion (which is prohibited in Starlark) - fn function_id_dyn(&self) -> FunctionId; - - fn freeze_dyn(&self); - - fn to_str_impl_dyn(&self, buf: &mut String) -> fmt::Result; - - fn to_repr_impl_dyn(&self, buf: &mut String) -> fmt::Result; - - fn get_type_dyn(&self) -> &'static str; - - fn to_bool_dyn(&self) -> bool; - - fn to_int_dyn(&self) -> Result; - - fn get_hash_dyn(&self) -> Result; - - fn is_descendant_dyn(&self, other: DataPtr) -> bool; - - fn equals_dyn(&self, other: &Value) -> Result; - fn compare_dyn(&self, other: &Value) -> Result; - - fn call_dyn( - &self, - call_stack: &mut CallStack, - type_values: &TypeValues, - positional: Vec, - named: LinkedHashMap, - args: Option, - kwargs: Option, - ) -> ValueResult; - - fn at_dyn(&self, index: Value) -> ValueResult; - - fn set_at_dyn(&mut self, index: Value, new_value: Value) -> Result<(), ValueError>; - fn slice_dyn( - &self, - start: Option, - stop: Option, - stride: Option, - ) -> ValueResult; - - fn iter_dyn(&self) -> Result<&dyn TypedIterable, ValueError>; - - fn length_dyn(&self) -> Result; - - fn get_attr_dyn(&self, attribute: &str) -> ValueResult; - - fn has_attr_dyn(&self, _attribute: &str) -> Result; - - fn set_attr_dyn(&mut self, attribute: &str, _new_value: Value) -> Result<(), ValueError>; - - fn dir_attr_dyn(&self) -> Result, ValueError>; - - fn contains_dyn(&self, other: &Value) -> Result; - - fn plus_dyn(&self) -> ValueResult; - - fn minus_dyn(&self) -> ValueResult; - - fn add_dyn(&self, other: Value) -> ValueResult; - - fn sub_dyn(&self, other: Value) -> ValueResult; - - fn mul_dyn(&self, other: Value) -> ValueResult; - - fn percent_dyn(&self, other: Value) -> ValueResult; - - fn div_dyn(&self, other: Value) -> ValueResult; - - fn floor_div_dyn(&self, other: Value) -> ValueResult; - - fn pipe_dyn(&self, other: Value) -> ValueResult; - - fn inspect_dyn(&self) -> Inspect; -} - -/// A trait for a value with a type that all variable container -/// will implement. -pub trait TypedValue: Sized + 'static { - /// Must be either `MutableHolder` or `ImmutableHolder` - type Holder: Mutability; - - /// Return a string describing the type of self, as returned by the type() function. - const TYPE: &'static str; - - /// True iff value is stored inline in [`Value`] (instead of in [`Rc`]). - #[doc(hidden)] - const INLINE: bool = false; - - /// Create a value for `TypedValue`. - /// - /// This function should be overridden only by builtin types. - #[doc(hidden)] - fn new_value(self) -> Value { - ValueOther::new(self).into() - } - - /// Return a list of values to be used in freeze or descendant check operations. 
- /// - /// Objects which do not contain references to other Starlark objects typically - /// implement it by returning an empty iterator: - /// - /// ``` - /// # use starlark::values::*; - /// # use std::iter; - /// # struct MyType; - /// - /// # impl TypedValue for MyType { - /// # type Holder = Immutable; - /// # const TYPE: &'static str = "MyType"; - /// # - /// fn values_for_descendant_check_and_freeze<'a>(&'a self) -> Box + 'a> { - /// Box::new(iter::empty()) - /// } - /// # - /// # } - /// ``` - fn values_for_descendant_check_and_freeze<'a>(&'a self) - -> Box + 'a>; - - /// Return function id to detect recursion. - /// - /// If `None` is returned, object id is used. - fn function_id(&self) -> Option { - None - } - - /// Return a string describing of self, as returned by the str() function. - fn to_str(&self) -> String { - let mut buf = String::new(); - self.to_str_impl(&mut buf).unwrap(); - buf - } - - /// The implementation of `to_str`, more efficient for nested objects - fn to_str_impl(&self, buf: &mut String) -> fmt::Result { - self.to_repr_impl(buf) - } - - /// Return a string representation of self, as returned by the repr() function. - fn to_repr(&self) -> String { - let mut buf = String::new(); - self.to_repr_impl(&mut buf).unwrap(); - buf - } - - /// The implementation of `to_repr`, more efficient for nested objects - fn to_repr_impl(&self, buf: &mut String) -> fmt::Result { - write!(buf, "<{}>", Self::TYPE) - } - - /// Convert self to a Boolean truth value, as returned by the bool() function. - fn to_bool(&self) -> bool { - // Return `true` by default, because this is default when implementing - // custom types in Python: https://docs.python.org/release/2.5.2/lib/truth.html - true - } - - /// Convert self to a integer value, as returned by the int() function if the type is numeric - /// (not for string). - fn to_int(&self) -> Result { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::ToInt, - left: Self::TYPE.to_owned(), - right: None, - }) - } - - /// Return a hash code for self, as returned by the hash() function, or - /// OperationNotSupported if there is no hash for this value (e.g. list). - fn get_hash(&self) -> Result { - Err(ValueError::NotHashableValue) - } - - /// Compare `self` with `other` for equality. - /// - /// `other` parameter is of type `Self` so it is safe to downcast it. - /// - /// Default implementation does pointer (id) comparison. - /// - /// Note: `==` in Starlark should work for arbitary objects, - /// so implementation should avoid returning errors except for - // unrecoverable runtime errors. - fn equals(&self, other: &Self) -> Result { - let self_ptr = self as *const Self as *const (); - let other_ptr = other as *const Self as *const (); - Ok(self_ptr == other_ptr) - } - - /// Compare `self` with `other`. - /// - /// This method returns a result of type [`Ordering`]. - /// - /// `other` parameter is of type `Self` so it is safe to downcast it. - /// - /// Default implementation returns error. - /// - /// __Note__: This does not use the [`PartialOrd`] trait as - /// the trait needs to know the actual type of the value we compare. - fn compare(&self, _other: &Self) -> Result { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::Compare, - left: Self::TYPE.to_owned(), - right: Some(Self::TYPE.to_owned()), - }) - } - - /// Perform a call on the object, only meaningfull for function object. - /// - /// For instance, if this object is a callable (i.e. 
a function or a method) that adds 2 - /// integers then `self.call(vec![Value::new(1), Value::new(2)], HashMap::new(), - /// None, None)` would return `Ok(Value::new(3))`. - /// - /// # Parameters - /// - /// * call_stack: the calling stack, to detect recursion - /// * type_values: environment used to resolve type fields. - /// * positional: the list of arguments passed positionally. - /// * named: the list of argument that were named. - /// * args: if provided, the `*args` argument. - /// * kwargs: if provided, the `**kwargs` argument. - fn call( - &self, - _call_stack: &mut CallStack, - _type_values: &TypeValues, - _positional: Vec, - _named: LinkedHashMap, - _args: Option, - _kwargs: Option, - ) -> ValueResult { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::Call, - left: Self::TYPE.to_owned(), - right: None, - }) - } - - /// Perform an array or dictionary indirection. - /// - /// This returns the result of `a[index]` if `a` is indexable. - fn at(&self, index: Value) -> ValueResult { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::At, - left: Self::TYPE.to_owned(), - right: Some(index.get_type().to_owned()), - }) - } - - /// Set the value at `index` with `new_value`. - /// - /// This method should error with `ValueError::CannotMutateImmutableValue` if the value was - /// frozen (but with `ValueError::OperationNotSupported` if the operation is not supported - /// on this value, even if the value is immutable, e.g. for numbers). - fn set_at(&mut self, index: Value, _new_value: Value) -> Result<(), ValueError> { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::SetAt, - left: Self::TYPE.to_owned(), - right: Some(index.get_type().to_owned()), - }) - } - - /// Extract a slice of the underlying object if the object is indexable. The result will be - /// object between `start` and `stop` (both of them are added length() if negative and then - /// clamped between 0 and length()). `stride` indicates the direction. - /// - /// # Parameters - /// - /// * start: the start of the slice. - /// * stop: the end of the slice. - /// * stride: the direction of slice, - /// - /// # Examples - /// - /// ```rust - /// # use starlark::values::*; - /// # use starlark::values::string; - /// # assert!( - /// // Remove the first element: "abc"[1:] == "bc". - /// Value::from("abc").slice(Some(Value::from(1)), None, None).unwrap() == Value::from("bc") - /// # ); - /// # assert!( - /// // Remove the last element: "abc"[:-1] == "ab". - /// Value::from("abc").slice(None, Some(Value::from(-1)), None).unwrap() - /// == Value::from("ab") - /// # ); - /// # assert!( - /// // Remove the first and the last element: "abc"[1:-1] == "b". - /// Value::from("abc").slice(Some(Value::from(1)), Some(Value::from(-1)), None).unwrap() - /// == Value::from("b") - /// # ); - /// # assert!( - /// // Select one element out of 2, skipping the first: "banana"[1::2] == "aaa". 
- /// Value::from("banana").slice(Some(Value::from(1)), None, Some(Value::from(2))).unwrap() - /// == Value::from("aaa") - /// # ); - /// # assert!( - /// // Select one element out of 2 in reverse order, starting at index 4: - /// // "banana"[4::-2] = "nnb" - /// Value::from("banana").slice(Some(Value::from(4)), None, Some(Value::from(-2))).unwrap() - /// == Value::from("nnb") - /// # ); - /// ``` - fn slice( - &self, - _start: Option, - _stop: Option, - _stride: Option, - ) -> ValueResult { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::Slice, - left: Self::TYPE.to_owned(), - right: None, - }) - } - - /// Returns an iterable over the value of this container if this value hold an iterable - /// container. - fn iter(&self) -> Result<&dyn TypedIterable, ValueError> { - Err(ValueError::TypeNotX { - object_type: Self::TYPE.to_owned(), - op: "iterable".to_owned(), - }) - } - - /// Returns the length of the value, if this value is a sequence. - fn length(&self) -> Result { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::Len, - left: Self::TYPE.to_owned(), - right: None, - }) - } - - /// Get an attribute for the current value as would be returned by dotted expression (i.e. - /// `a.attribute`). - /// - /// __Note__: this does not handle native methods which are handled through universe. - fn get_attr(&self, attribute: &str) -> ValueResult { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::GetAttr(attribute.to_owned()), - left: Self::TYPE.to_owned(), - right: None, - }) - } - - /// Return true if an attribute of name `attribute` exists for the current value. - /// - /// __Note__: this does not handle native methods which are handled through universe. - fn has_attr(&self, _attribute: &str) -> Result { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::HasAttr, - left: Self::TYPE.to_owned(), - right: None, - }) - } - - /// Set the attribute named `attribute` of the current value to `new_value` (e.g. - /// `a.attribute = new_value`). - /// - /// This method should error with `ValueError::CannotMutateImmutableValue` if the value was - /// frozen or the attribute is immutable (but with `ValueError::OperationNotSupported` - /// if the operation is not supported on this value, even if the self is immutable, - /// e.g. for numbers). - fn set_attr(&mut self, attribute: &str, _new_value: Value) -> Result<(), ValueError> { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::SetAttr(attribute.to_owned()), - left: Self::TYPE.to_owned(), - right: None, - }) - } - - /// Return a vector of string listing all attribute of the current value, excluding native - /// methods. - fn dir_attr(&self) -> Result, ValueError> { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::Dir, - left: Self::TYPE.to_owned(), - right: None, - }) - } - - /// Tell wether `other` is in the current value, if it is a container. - /// - /// Non container value should return an error `ValueError::OperationNotSupported`. 
- /// - /// # Examples - /// - /// ```rust - /// # use starlark::values::*; - /// # use starlark::values::string; - /// // "a" in "abc" == True - /// assert!(Value::from("abc").contains(&Value::from("a")).unwrap().to_bool()); - /// // "b" in "abc" == True - /// assert!(Value::from("abc").contains(&Value::from("b")).unwrap().to_bool()); - /// // "z" in "abc" == False - /// assert!(!Value::from("abc").contains(&Value::from("z")).unwrap().to_bool()); - /// ``` - fn contains(&self, other: &Value) -> Result { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::In, - left: other.get_type().to_owned(), - right: Some(Self::TYPE.to_owned()), - }) - } - - /// Apply the `+` unary operator to the current value. - /// - /// # Examples - /// - /// ```rust - /// # #[macro_use] extern crate starlark; - /// # use starlark::values::*; - /// # fn main() { - /// assert_eq!(1, int_op!(1.plus())); // 1.plus() = +1 = 1 - /// # } - /// ``` - fn plus(&self) -> Result { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::Plus, - left: Self::TYPE.to_owned(), - right: None, - }) - } - - /// Apply the `-` unary operator to the current value. - /// - /// # Examples - /// - /// ```rust - /// # #[macro_use] extern crate starlark; - /// # use starlark::values::*; - /// # fn main() { - /// assert_eq!(-1, int_op!(1.minus())); // 1.minus() = -1 - /// # } - /// ``` - fn minus(&self) -> Result { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::Minus, - left: Self::TYPE.to_owned(), - right: None, - }) - } - - /// Add `other` to the current value. - /// - /// # Examples - /// - /// ```rust - /// # #[macro_use] extern crate starlark; - /// # use starlark::values::*; - /// # fn main() { - /// assert_eq!(3, int_op!(1.add(2))); // 1.add(2) = 1 + 2 = 3 - /// # } - /// ``` - fn add(&self, _other: &Self) -> Result { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::Plus, - left: Self::TYPE.to_owned(), - right: Some(Self::TYPE.to_owned()), - }) - } - - /// Substract `other` from the current value. - /// - /// # Examples - /// - /// ```rust - /// # #[macro_use] extern crate starlark; - /// # use starlark::values::*; - /// # fn main() { - /// assert_eq!(-1, int_op!(1.sub(2))); // 1.sub(2) = 1 - 2 = -1 - /// # } - /// ``` - fn sub(&self, _other: &Self) -> Result { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::Minus, - left: Self::TYPE.to_owned(), - right: Some(Self::TYPE.to_owned()), - }) - } - - /// Multiply the current value with `other`. - /// - /// # Examples - /// - /// ```rust - /// # #[macro_use] extern crate starlark; - /// # use starlark::values::*; - /// # fn main() { - /// assert_eq!(6, int_op!(2.mul(3))); // 2.mul(3) = 2 * 3 = 6 - /// # } - /// ``` - fn mul(&self, other: Value) -> ValueResult { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::Mul, - left: Self::TYPE.to_owned(), - right: Some(other.get_type().to_owned()), - }) - } - - /// Apply the percent operator between the current value and `other`. 
- /// - /// # Examples - /// - /// ```rust - /// # #[macro_use] extern crate starlark; - /// # use starlark::values::*; - /// # use starlark::values::string; - /// # fn main() { - /// // Remainder of the floored division: 5.percent(3) = 5 % 3 = 2 - /// assert_eq!(2, int_op!(5.percent(3))); - /// // String formatting: "a {} c" % 3 == "a 3 c" - /// assert_eq!(Value::from("a 3 c"), Value::from("a %s c").percent(Value::from(3)).unwrap()); - /// # } - /// ``` - fn percent(&self, other: Value) -> ValueResult { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::Percent, - left: Self::TYPE.to_owned(), - right: Some(other.get_type().to_owned()), - }) - } - - /// Divide the current value with `other`. division. - /// - /// # Examples - /// - /// ```rust - /// # #[macro_use] extern crate starlark; - /// # use starlark::values::*; - /// # fn main() { - /// assert_eq!(3, int_op!(7.div(2))); // 7.div(2) = 7 / 2 = 3 - /// # } - /// ``` - fn div(&self, other: Value) -> ValueResult { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::Div, - left: Self::TYPE.to_owned(), - right: Some(other.get_type().to_owned()), - }) - } - - /// Floor division between the current value and `other`. - /// - /// # Examples - /// - /// ```rust - /// # #[macro_use] extern crate starlark; - /// # use starlark::values::*; - /// # fn main() { - /// assert_eq!(3, int_op!(7.floor_div(2))); // 7.div(2) = 7 / 2 = 3 - /// # } - /// ``` - fn floor_div(&self, other: Value) -> ValueResult { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::FloorDiv, - left: Self::TYPE.to_owned(), - right: Some(other.get_type().to_owned()), - }) - } - - /// Apply the operator pipe to the current value and `other`. - /// - /// This is usually the union on set. 
- fn pipe(&self, other: Value) -> ValueResult { - Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::Pipe, - left: Self::TYPE.to_owned(), - right: Some(other.get_type().to_owned()), - }) - } - - /// Provide custom fields for `inspect` function - #[doc(hidden)] - fn inspect_custom(&self) -> Value { - Value::new(NoneType::None) - } -} - -impl fmt::Debug for Value { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "Value[{}]({})", self.get_type(), self.to_repr()) - } -} - -impl Value { - pub fn freeze(&mut self) { - match &self.0 { - ValueInner::Other(rc) => { - if rc.value.freeze() { - // Only freeze content if the object was not frozen earlier - self.value_holder().freeze_dyn(); - } - } - _ => { - // `None`, `bool`, `int` are frozen at construction - } - } - } - pub fn to_str_impl(&self, buf: &mut String) -> fmt::Result { - self.value_holder().to_str_impl_dyn(buf) - } - pub fn to_str(&self) -> String { - let mut buf = String::new(); - self.to_str_impl(&mut buf).unwrap(); - buf - } - pub fn to_rc_string(&self) -> RcString { - if let ValueInner::String(s) = &self.0 { - return s.clone(); - } - RcString::from(self.to_str()) - } - pub fn to_repr_impl(&self, buf: &mut String) -> fmt::Result { - self.value_holder().to_repr_impl_dyn(buf) - } - pub fn to_repr(&self) -> String { - let mut buf = String::new(); - self.to_repr_impl(&mut buf).unwrap(); - buf - } - pub fn get_type(&self) -> &'static str { - self.value_holder().get_type_dyn() - } - pub fn to_bool(&self) -> bool { - self.value_holder().to_bool_dyn() - } - pub fn to_int(&self) -> Result { - self.value_holder().to_int_dyn() - } - pub fn get_hash(&self) -> Result { - self.value_holder().get_hash_dyn() - } - pub fn equals(&self, other: &Value) -> Result { - self.value_holder().equals_dyn(other) - } - pub fn compare(&self, other: &Value) -> Result { - self.value_holder().compare_dyn(other) - } - - pub fn call( - &self, - call_stack: &mut CallStack, - type_values: &TypeValues, - positional: Vec, - named: LinkedHashMap, - args: Option, - kwargs: Option, - ) -> ValueResult { - self.value_holder() - .call_dyn(call_stack, type_values, positional, named, args, kwargs) - } - - pub fn at(&self, index: Value) -> ValueResult { - self.value_holder().at_dyn(index) - } - - pub fn set_at(&mut self, index: Value, new_value: Value) -> Result<(), ValueError> { - match self.try_value_holder_mut() { - Err(ObjectBorrowMutError::Immutable) => { - return Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::SetAt, - left: self.get_type().to_owned(), - right: Some(index.get_type().to_owned()), - }); - } - Err(e) => Err(e.into()), - Ok(mut v) => v.set_at_dyn(index, new_value), - } - } - pub fn slice( - &self, - start: Option, - stop: Option, - stride: Option, - ) -> ValueResult { - self.value_holder().slice_dyn(start, stop, stride) - } - pub fn iter(&self) -> Result { - let borrowed: ObjectRef = self.try_value_holder(true).unwrap(); - let mut err = Ok(()); - let typed_into_iter = ObjectRef::map(borrowed, |t| match t.iter_dyn() { - Ok(r) => r, - Err(e) => { - err = Err(e); - &FakeTypedIterable - } - }); - err?; - Ok(RefIterable::new(typed_into_iter)) - } - pub fn to_vec(&self) -> Result, ValueError> { - Ok(self.iter()?.to_vec()) - } - pub fn length(&self) -> Result { - self.value_holder().length_dyn() - } - pub fn get_attr(&self, attribute: &str) -> ValueResult { - self.value_holder().get_attr_dyn(attribute) - } - pub fn has_attr(&self, attribute: &str) -> Result { - self.value_holder().has_attr_dyn(attribute) - } - pub 
fn set_attr(&mut self, attribute: &str, new_value: Value) -> Result<(), ValueError> { - match self.try_value_holder_mut() { - Err(ObjectBorrowMutError::Immutable) => { - return Err(ValueError::OperationNotSupported { - op: UnsupportedOperation::SetAttr(attribute.to_owned()), - left: self.get_type().to_owned(), - right: None, - }); - } - Err(e) => Err(e.into()), - Ok(mut v) => v.set_attr_dyn(attribute, new_value), - } - } - pub fn dir_attr(&self) -> Result, ValueError> { - self.value_holder().dir_attr_dyn() - } - pub fn contains(&self, other: &Value) -> Result { - self.value_holder().contains_dyn(other) - } - pub fn plus(&self) -> ValueResult { - self.value_holder().plus_dyn() - } - pub fn minus(&self) -> ValueResult { - self.value_holder().minus_dyn() - } - pub fn add(&self, other: Value) -> ValueResult { - self.value_holder().add_dyn(other) - } - pub fn sub(&self, other: Value) -> ValueResult { - self.value_holder().sub_dyn(other) - } - pub fn mul(&self, other: Value) -> ValueResult { - self.value_holder().mul_dyn(other) - } - pub fn percent(&self, other: Value) -> ValueResult { - self.value_holder().percent_dyn(other) - } - pub fn div(&self, other: Value) -> ValueResult { - self.value_holder().div_dyn(other) - } - pub fn floor_div(&self, other: Value) -> ValueResult { - self.value_holder().floor_div_dyn(other) - } - pub fn pipe(&self, other: Value) -> ValueResult { - self.value_holder().pipe_dyn(other) - } -} - -impl fmt::Display for Value { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { - write!(f, "{}", self.to_str()) - } -} - -impl PartialEq for Value { - fn eq(&self, other: &Value) -> bool { - self.equals(other) == Ok(true) - } -} -impl Eq for Value {} - -impl Value { - /// Get a reference to underlying data or `None` - /// if contained object has different type than requested. - /// - /// This function panics if the `Value` is borrowed mutably. - pub fn downcast_ref(&self) -> Option> { - let object_ref = self.value_holder(); - let any = ObjectRef::map(object_ref, |o| o.as_any_ref()); - ObjectRef::flat_map(any, |any| any.downcast_ref()) - } - - /// Get a mutable reference to underlying data or `None` - /// if contained object has different type than requested. - /// - /// This function panics if the `Value` is borrowed. - /// - /// Error is returned if the value is frozen or frozen for iteration. 
- pub fn downcast_mut>>( - &self, - ) -> Result>, ValueError> { - let object_ref = match self.try_value_holder_mut() { - Err(e @ ObjectBorrowMutError::Frozen) - | Err(e @ ObjectBorrowMutError::FrozenForIteration) => return Err(e.into()), - Err(e) => panic!("already borrowed: {:?}", e), - Ok(v) => v, - }; - let any = ObjectRefMut::map(object_ref, |o| o.as_any_mut()); - Ok(ObjectRefMut::flat_map(any, |any| any.downcast_mut())) - } - - /// `downcast_ref` cannot be used, because we are obtaining `RcString`, not `String` - pub fn downcast_rc_str(&self) -> Option<&RcString> { - if let ValueInner::String(s) = &self.0 { - Some(s) - } else { - None - } - } -} - -// Submodules -pub mod boolean; -pub(crate) mod cell; -pub mod context; -pub mod dict; -pub mod error; -pub(crate) mod frozen; -pub mod function; -pub mod hashed_value; -pub(crate) mod inspect; -pub mod int; -pub mod iter; -pub mod list; -pub mod none; -pub mod range; -pub mod slice_indices; -pub mod string; -pub mod tuple; - -use crate::stdlib::structs::StarlarkStruct; -use crate::values::cell::error::ObjectBorrowError; -use crate::values::cell::error::ObjectBorrowMutError; -use crate::values::cell::header::ObjectHeader; -use crate::values::cell::ObjectCell; -use crate::values::cell::ObjectRef; -use crate::values::cell::ObjectRefMut; -use crate::values::none::NoneType; -use crate::values::string::rc::RcString; - -#[cfg(test)] -mod tests { - use super::*; - use std::iter; - - #[test] - fn can_implement_compare() { - #[derive(Debug, PartialEq, Eq, Ord, PartialOrd)] - struct WrappedNumber(u64); - - /// Define the NoneType type - impl TypedValue for WrappedNumber { - fn to_repr_impl(&self, buf: &mut String) -> fmt::Result { - write!(buf, "{:?}", self) - } - const TYPE: &'static str = "WrappedNumber"; - fn to_bool(&self) -> bool { - false - } - fn get_hash(&self) -> Result { - Ok(self.0) - } - fn compare(&self, other: &WrappedNumber) -> Result { - Ok(std::cmp::Ord::cmp(self, other)) - } - - type Holder = Immutable; - - fn values_for_descendant_check_and_freeze<'a>( - &'a self, - ) -> Box + 'a> { - Box::new(iter::empty()) - } - } - - let one = Value::new(WrappedNumber(1)); - let another_one = Value::new(WrappedNumber(1)); - let two = Value::new(WrappedNumber(2)); - - use std::cmp::Ordering::*; - - assert_eq!(one.compare(&one), Ok(Equal)); - assert_eq!(one.compare(&another_one), Ok(Equal)); - assert_eq!(one.compare(&two), Ok(Less)); - assert_eq!(two.compare(&one), Ok(Greater)); - } - - #[test] - fn compare_between_different_types() { - assert!(Value::new(1).compare(&Value::new(false)).is_err()); - } -} diff --git a/starlark/src/values/none.rs b/starlark/src/values/none.rs deleted file mode 100644 index e5cff1a1..00000000 --- a/starlark/src/values/none.rs +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Define the None type for Starlark. 
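Before the `NoneType` implementation that follows, here is a small usage sketch of the `downcast_ref` accessor documented at the end of the `Value` impl above. It is a minimal sketch, not part of the deleted sources: it only uses calls shown in this module, and it assumes the built-in `i64` int implementation (as used by the string `mul` code below); the concrete assertions are illustrative.

```rust
use starlark::values::Value;

fn main() {
    // Small integers are wrapped directly into a `Value` (stored as `i64`).
    let v = Value::from(2);

    // `downcast_ref` yields a typed reference when the requested Rust type matches...
    assert_eq!(2, *v.downcast_ref::<i64>().unwrap());

    // ...and `None` when it does not (the value holds an int, not a string).
    assert!(v.downcast_ref::<String>().is_none());
}
```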
- -use crate::values::error::ValueError; -use crate::values::frozen::FrozenOnCreation; -use crate::values::*; -use std::cmp::Ordering; -use std::fmt; -use std::iter; - -/// Define the NoneType type -#[derive(Debug, Clone, Copy)] -pub enum NoneType { - None, -} - -/// Define the NoneType type -impl TypedValue for NoneType { - type Holder = Immutable; - const TYPE: &'static str = "NoneType"; - - const INLINE: bool = true; - - fn new_value(self) -> Value { - Value(ValueInner::None(self)) - } - - fn equals(&self, _other: &NoneType) -> Result { - Ok(true) - } - fn compare(&self, _other: &NoneType) -> Result { - Ok(Ordering::Equal) - } - - fn values_for_descendant_check_and_freeze<'a>( - &'a self, - ) -> Box + 'a> { - Box::new(iter::empty()) - } - - fn to_repr_impl(&self, buf: &mut String) -> fmt::Result { - write!(buf, "None") - } - fn to_bool(&self) -> bool { - false - } - // just took the result of hash(None) in macos python 2.7.10 interpreter. - fn get_hash(&self) -> Result { - Ok(9_223_380_832_852_120_682) - } -} - -impl FrozenOnCreation for NoneType {} - -impl From for Value { - fn from(NoneType::None: NoneType) -> Self { - Value::new(NoneType::None) - } -} diff --git a/starlark/src/values/range.rs b/starlark/src/values/range.rs deleted file mode 100644 index 73d3f297..00000000 --- a/starlark/src/values/range.rs +++ /dev/null @@ -1,284 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! `range()` builtin implementation - -use crate::values::frozen::FrozenOnCreation; -use crate::values::iter::TypedIterable; -use crate::values::slice_indices::convert_slice_indices; -use crate::values::{Immutable, TypedValue, Value, ValueError}; -use std::fmt; -use std::fmt::Write as _; -use std::iter; -use std::mem; -use std::num::NonZeroI64; - -/// Representation of `range()` type. -#[derive(Clone, Debug)] -pub struct Range { - start: i64, - stop: i64, - step: NonZeroI64, -} - -impl Range { - pub fn new(start: i64, stop: i64, step: NonZeroI64) -> Range { - Range { start, stop, step } - } -} - -/// Implementation of iterator over range. 
-struct RangeIterator(Range); - -impl Iterator for RangeIterator { - type Item = Value; - - fn next(&mut self) -> Option { - if !self.0.to_bool() { - return None; - } - - let new_start = self.0.start.saturating_add(self.0.step.get()); - Some(Value::new(mem::replace(&mut self.0.start, new_start))) - } -} - -impl TypedValue for Range { - const TYPE: &'static str = "range"; - - fn to_repr_impl(&self, buf: &mut String) -> fmt::Result { - if self.step.get() != 1 { - write!(buf, "range({}, {}, {})", self.start, self.stop, self.step) - } else if self.start != 0 { - write!(buf, "range({}, {})", self.start, self.stop) - } else { - write!(buf, "range({})", self.stop) - } - } - - fn to_bool(&self) -> bool { - (self.start < self.stop && self.step.get() > 0) - || (self.start > self.stop && self.step.get() < 0) - } - - fn length(&self) -> Result { - if self.start == self.stop { - return Ok(0); - } - - // If step is into opposite direction of stop, then length is zero. - if (self.stop >= self.start) != (self.step.get() > 0) { - return Ok(0); - } - - // Convert range and step to `u64` - let (dist, step) = if self.step.get() >= 0 { - ( - self.stop.wrapping_sub(self.start) as u64, - self.step.get() as u64, - ) - } else { - ( - self.start.wrapping_sub(self.stop) as u64, - self.step.get().wrapping_neg() as u64, - ) - }; - let i = ((dist - 1) / step + 1) as i64; - if i >= 0 { - Ok(i) - } else { - Err(ValueError::IntegerOverflow) - } - } - - fn at(&self, index: Value) -> Result { - let index = index.convert_index(self.length()?)?; - // Must not overflow if `length` is computed correctly - Ok(Value::new(self.start + self.step.get() * index)) - } - - fn equals(&self, other: &Range) -> Result { - let self_length = self.length()?; - let other_length = other.length()?; - if self_length == 0 || other_length == 0 { - return Ok(self_length == other_length); - } - if self.start != other.start { - return Ok(false); - } - if self_length == 1 || other_length == 1 { - return Ok(self_length == other_length); - } - debug_assert!(self_length > 1); - debug_assert!(other_length > 1); - if self.step.get() == other.step.get() { - return Ok(self_length == other_length); - } else { - return Ok(false); - } - } - - fn slice( - &self, - start: Option, - stop: Option, - stride: Option, - ) -> Result { - let (start, stop, step) = convert_slice_indices(self.length()?, start, stop, stride)?; - return Ok(Value::new(Range { - start: self - .start - .checked_add( - start - .checked_mul(self.step.get()) - .ok_or(ValueError::IntegerOverflow)?, - ) - .ok_or(ValueError::IntegerOverflow)?, - stop: self - .start - .checked_add( - stop.checked_mul(self.step.get()) - .ok_or(ValueError::IntegerOverflow)?, - ) - .ok_or(ValueError::IntegerOverflow)?, - step: NonZeroI64::new( - step.checked_mul(self.step.get()) - .ok_or(ValueError::IntegerOverflow)?, - ) - .unwrap(), - })); - } - - fn iter(&self) -> Result<&dyn TypedIterable, ValueError> { - Ok(self) - } - - fn contains(&self, other: &Value) -> Result { - let other = match other.downcast_ref::() { - Some(other) => *other, - None => { - // Go implementation errors here, - // Python3 returns `False`. 
- // ``` - // "a" in range(3) - // ``` - return Ok(false); - } - }; - if !self.to_bool() { - return Ok(false); - } - if self.start == other { - return Ok(true); - } - if self.step.get() > 0 { - if other < self.start || other >= self.stop { - return Ok(false); - } - Ok((other.wrapping_sub(self.start) as u64) % (self.step.get() as u64) == 0) - } else { - if other > self.start || other <= self.stop { - return Ok(false); - } - Ok( - (self.start.wrapping_sub(other) as u64) % (self.step.get().wrapping_neg() as u64) - == 0, - ) - } - } - - type Holder = Immutable; - - fn values_for_descendant_check_and_freeze(&self) -> Box> { - Box::new(iter::empty()) - } -} - -impl FrozenOnCreation for Range {} - -/// For tests -impl PartialEq for Range { - fn eq(&self, other: &Range) -> bool { - self.equals(other).unwrap() - } -} - -impl TypedIterable for Range { - fn to_iter(&self) -> Box> { - Box::new(RangeIterator(self.clone())) - } - - fn to_vec(&self) -> Vec { - RangeIterator(self.clone()).collect() - } -} - -#[cfg(test)] -mod test { - use crate::values::range::Range; - use crate::values::{TypedValue, ValueError}; - use std::i64; - use std::num::NonZeroI64; - - fn range(start: i64, stop: i64, range: i64) -> Range { - Range { - start, - stop, - step: NonZeroI64::new(range).unwrap(), - } - } - - fn range_start_stop(start: i64, stop: i64) -> Range { - range(start, stop, 1) - } - - fn range_stop(stop: i64) -> Range { - range_start_stop(0, stop) - } - - #[test] - fn length_stop() { - assert_eq!(Ok(0), range_stop(0).length()); - assert_eq!(Ok(17), range_stop(17).length()); - } - - #[test] - fn length_start_stop() { - assert_eq!(Ok(20), range_start_stop(10, 30).length()); - assert_eq!(Ok(0), range_start_stop(10, -30).length()); - assert_eq!( - Ok(i64::max_value()), - range_start_stop(0, i64::max_value()).length() - ); - assert_eq!( - Err(ValueError::IntegerOverflow), - range_start_stop(-1, i64::max_value()).length() - ); - } - - #[test] - fn length_start_stop_step() { - assert_eq!(Ok(5), range(0, 10, 2).length()); - assert_eq!(Ok(5), range(0, 9, 2).length()); - assert_eq!(Ok(0), range(0, 10, -2).length()); - assert_eq!(Ok(5), range(10, 0, -2).length()); - assert_eq!(Ok(5), range(9, 0, -2).length()); - assert_eq!(Ok(1), range(4, 14, 10).length()); - } - - #[test] - fn eq() { - assert_eq!(range_stop(0), range(2, 1, 3)); - } -} diff --git a/starlark/src/values/slice_indices.rs b/starlark/src/values/slice_indices.rs deleted file mode 100644 index 4cce280c..00000000 --- a/starlark/src/values/slice_indices.rs +++ /dev/null @@ -1,195 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Slice indices utils live here. - -use crate::values::error::ValueError; -use crate::values::Value; - -impl Value { - /// Macro to parse the index for at/set_at methods. - /// - /// Return an `i64` from self corresponding to the index recenterd between 0 and len. - /// Raise the correct errors if the value is not numeric or the index is out of bound. 
- /// - /// # Examples - /// - /// ```rust - /// # use starlark::values::*; - /// # assert!( - /// Value::new(6).convert_index(7).unwrap() == 6 - /// # ); - /// # assert!( - /// Value::new(-1).convert_index(7).unwrap() == 6 - /// # ); - /// ``` - /// - /// The following examples would return an error - /// ```rust - /// # use starlark::values::*; - /// # use starlark::values::error::*; - /// # use starlark::values::string; - /// # assert!( - /// Value::from("a").convert_index(7) == Err(ValueError::IncorrectParameterType) - /// # ); - /// # assert!( - /// Value::new(8).convert_index(7) == Err(ValueError::IndexOutOfBound(8)) // 8 > 7 = len - /// # ); - /// # assert!( - /// Value::new(-8).convert_index(7) == Err(ValueError::IndexOutOfBound(-1)) // -8 + 7 = -1 < 0 - /// # ); - /// ``` - pub fn convert_index(&self, len: i64) -> Result { - match self.value_holder().to_int_dyn() { - Ok(x) => { - let i = if x < 0 { - len.checked_add(x).ok_or(ValueError::IntegerOverflow)? - } else { - x - }; - if i < 0 || i >= len { - Err(ValueError::IndexOutOfBound(i)) - } else { - Ok(i) - } - } - Err(..) => Err(ValueError::IncorrectParameterType), - } - } -} - -// To be called by convert_slice_indices only -fn convert_index_aux( - len: i64, - v1: Option, - default: i64, - min: i64, - max: i64, -) -> Result { - if let Some(v) = v1 { - if v.get_type() == "NoneType" { - Ok(default) - } else { - match v.to_int() { - Ok(x) => { - let i = if x < 0 { len + x } else { x }; - if i < min { - Ok(min) - } else if i > max { - Ok(max) - } else { - Ok(i) - } - } - Err(..) => Err(ValueError::IncorrectParameterType), - } - } - } else { - Ok(default) - } -} - -/// Parse indices for slicing. -/// -/// Takes the object length and 3 optional values and returns `(i64, i64, i64)` -/// with those index correctly converted in range of length. -/// Return the correct errors if the values are not numeric or the stride is 0. -/// -/// # Examples -/// -/// ```rust -/// # use starlark::values::*; -/// # use starlark::values::slice_indices::convert_slice_indices; -/// let six = Some(Value::new(6)); -/// let minusone = Some(Value::new(-1)); -/// let ten = Some(Value::new(10)); -/// -/// # assert!( -/// convert_slice_indices(7, six, None, None).unwrap() == (6, 7, 1) -/// # ); -/// # assert!( -/// convert_slice_indices(7, minusone.clone(), None, minusone.clone()).unwrap() -/// == (6, -1, -1) -/// # ); -/// # assert!( -/// convert_slice_indices(7, minusone, ten, None).unwrap() == (6, 7, 1) -/// # ); -/// ``` -pub fn convert_slice_indices( - len: i64, - start: Option, - stop: Option, - stride: Option, -) -> Result<(i64, i64, i64), ValueError> { - let stride = stride.unwrap_or_else(|| Value::new(1)); - let stride = if stride.get_type() == "NoneType" { - Ok(1) - } else { - stride.to_int() - }; - match stride { - Ok(0) => Err(ValueError::IndexOutOfBound(0)), - Ok(stride) => { - let def_start = if stride < 0 { len - 1 } else { 0 }; - let def_end = if stride < 0 { -1 } else { len }; - let clamp = if stride < 0 { -1 } else { 0 }; - let start = convert_index_aux(len, start, def_start, clamp, len + clamp); - let stop = convert_index_aux(len, stop, def_end, clamp, len + clamp); - match (start, stop) { - (Ok(s1), Ok(s2)) => Ok((s1, s2, stride)), - (Err(x), ..) 
=> Err(x), - (Ok(..), Err(x)) => Err(x), - } - } - _ => Err(ValueError::IncorrectParameterType), - } -} - -#[cfg(test)] -mod test { - use crate::values::error::ValueError; - use crate::values::slice_indices::convert_slice_indices; - use crate::values::Value; - - #[test] - fn test_convert_slice_indices() { - assert_eq!(Ok(6), Value::new(6).convert_index(7)); - assert_eq!(Ok(6), Value::new(-1).convert_index(7)); - assert_eq!( - Ok((6, 7, 1)), - convert_slice_indices(7, Some(Value::new(6)), None, None) - ); - assert_eq!( - Ok((6, -1, -1)), - convert_slice_indices(7, Some(Value::new(-1)), None, Some(Value::new(-1))) - ); - assert_eq!( - Ok((6, 7, 1)), - convert_slice_indices(7, Some(Value::new(-1)), Some(Value::new(10)), None) - ); - // Errors - assert_eq!( - Err(ValueError::IncorrectParameterType), - Value::from("a").convert_index(7) - ); - assert_eq!( - Err(ValueError::IndexOutOfBound(8)), - Value::new(8).convert_index(7) - ); - assert_eq!( - Err(ValueError::IndexOutOfBound(-1)), - Value::new(-8).convert_index(7) - ); - } -} diff --git a/starlark/src/values/string/interpolation.rs b/starlark/src/values/string/interpolation.rs deleted file mode 100644 index f2e25594..00000000 --- a/starlark/src/values/string/interpolation.rs +++ /dev/null @@ -1,399 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! String interpolation-related code. - -use crate::syntax::errors::SyntaxError; -use crate::values::error::*; -use crate::values::Value; -use codemap::Span; -use codemap_diagnostic::{Diagnostic, Level, SpanLabel, SpanStyle}; -use std::convert::TryFrom; -use std::iter; - -/// Operator `%` format or evaluation errors -#[derive(Clone, Debug)] -pub enum StringInterpolationError { - /// `)` is not found when parsing `%(name)` expression. - UnexpectedEOFClosingParen, - /// `%` must be followed by specifier. - UnexpectedEOFPercent, - /// `%z` where `z` is unknown specifier. - UnknownSpecifier(char), - /// Trying to interpolate with %c an integer that is not in the UTF-8 range. - ValueNotInUTFRange(i64), - /// Interpolation parameter is too big for the format string. - TooManyParameters, - /// Interpolation parameter is too small for the format string. 
- NotEnoughParameters, - /// Value for `%s` is required to be a char - ValueNotChar, -} - -impl SyntaxError for StringInterpolationError { - fn to_diagnostic(self, file_span: Span) -> Diagnostic { - let (label, message, code) = match self { - StringInterpolationError::UnexpectedEOFClosingParen => ( - "Unexpected EOF in format string when looking for closing paren".to_owned(), - "Could not found ')' when parsing '%(name)f' expression".to_owned(), - INTERPOLATION_UNEXPECTED_EOF_CLOSING_PAREN, - ), - StringInterpolationError::UnexpectedEOFPercent => ( - "End of string while expecting format specifier".to_owned(), - concat!( - "Interpolation string format is incorrect:", - " '%' must be followed by an optional name and a specifier ", - "('s', 'r', 'd', 'i', 'o', 'x', 'X', 'c') or '%'", - ) - .to_owned(), - INTERPOLATION_UNEXPECTED_EOF_PERCENT, - ), - StringInterpolationError::UnknownSpecifier(c) => ( - format!("Unknown format string specifier '{}'", c.escape_default()), - concat!( - "Interpolation string format is incorrect:", - " '%' must be followed by an optional name and a specifier ", - "('s', 'r', 'd', 'i', 'o', 'x', 'X', 'c') or '%'", - ) - .to_owned(), - INTERPOLATION_UNKNOWN_SPECIFIER, - ), - StringInterpolationError::ValueNotInUTFRange(ref c) => ( - format!("Invalid codepoint 0x{:x}", c), - format!( - concat!( - "Value 0x{:x} passed for %c formatter is not a valid", - " UTF-8 codepoint" - ), - c - ), - INTERPOLATION_OUT_OF_UTF8_RANGE_ERROR_CODE, - ), - StringInterpolationError::TooManyParameters => ( - "Too many arguments for format string".to_owned(), - "Too many arguments for format string".to_owned(), - INTERPOLATION_TOO_MANY_PARAMS_ERROR_CODE, - ), - StringInterpolationError::NotEnoughParameters => ( - "Not enough arguments for format string".to_owned(), - "Not enough arguments for format string".to_owned(), - INTERPOLATION_NOT_ENOUGH_PARAMS_ERROR_CODE, - ), - StringInterpolationError::ValueNotChar => ( - "'%c' formatter requires a single-character string".to_owned(), - "'%c' formatter requires a single-character string".to_owned(), - INTERPOLATION_VALUE_IS_NOT_CHAR_ERROR_CODE, - ), - }; - let sl = SpanLabel { - span: file_span, - style: SpanStyle::Primary, - label: Some(label), - }; - Diagnostic { - level: Level::Error, - message, - code: Some(code.to_owned()), - spans: vec![sl], - } - } -} - -/// Format char -pub(crate) enum ArgFormat { - // str(x) - Str, - // repr(x) - Repr, - // signed integer decimal - Dec, - // signed octal - Oct, - // signed hexadecimal, lowercase - HexLower, - // signed hexadecimal, uppercase - HexUpper, - // x for string, chr(x) for int - Char, - // `%` sign - Percent, -} - -impl ArgFormat { - fn format_arg(&self, out: &mut String, arg: Value) -> Result<(), ValueError> { - use std::fmt::Write; - - match self { - ArgFormat::Str => write!(out, "{}", arg.to_str()).unwrap(), - ArgFormat::Repr => write!(out, "{}", arg.to_repr()).unwrap(), - ArgFormat::Dec => write!(out, "{}", arg.to_int()?).unwrap(), - ArgFormat::Oct => { - let v = arg.to_int()?; - write!( - out, - "{}{:o}", - if v < 0 { "-" } else { "" }, - v.wrapping_abs() as u64 - ) - .unwrap(); - } - ArgFormat::HexLower => { - let v = arg.to_int()?; - write!( - out, - "{}{:x}", - if v < 0 { "-" } else { "" }, - v.wrapping_abs() as u64 - ) - .unwrap(); - } - ArgFormat::HexUpper => { - let v = arg.to_int()?; - write!( - out, - "{}{:X}", - if v < 0 { "-" } else { "" }, - v.wrapping_abs() as u64 - ) - .unwrap(); - } - ArgFormat::Char => match arg.get_type() { - "string" => { - if arg.length()? 
!= 1 { - return Err(StringInterpolationError::ValueNotChar.into()); - } else { - write!(out, "{}", arg.to_str()).unwrap(); - } - } - _ => { - let i = arg.to_int()?; - let codepoint = match u32::try_from(i) { - Ok(codepoint) => codepoint, - Err(_) => { - return Err(StringInterpolationError::ValueNotInUTFRange(i).into()) - } - }; - match std::char::from_u32(codepoint) { - Some(c) => write!(out, "{}", c).unwrap(), - None => { - return Err(StringInterpolationError::ValueNotInUTFRange(i64::from( - codepoint, - )) - .into()) - } - } - } - }, - ArgFormat::Percent => { - write!(out, "%").unwrap(); - } - } - Ok(()) - } -} - -// %(name)s or %s -enum NamedOrPositional { - Named(String), - Positional, -} - -/// Parsed format string -pub(crate) struct ArgsFormat { - /// String before first parameter - init: String, - /// Number of positional arguments - positional_count: usize, - /// Number of named arguments - named_count: usize, - /// Arguments followed by uninterpreted strings - parameters: Vec<(NamedOrPositional, ArgFormat, String)>, -} - -impl ArgsFormat { - fn append_literal(&mut self, c: char) { - if let Some(p) = self.parameters.last_mut() { - p.2.push(c); - } else { - self.init.push(c) - } - } - - pub fn parse(format: &str) -> Result { - let mut result = ArgsFormat { - init: String::new(), - positional_count: 0, - named_count: 0, - parameters: Vec::new(), - }; - let mut chars = format.chars(); - while let Some(c) = chars.next() { - if c != '%' { - result.append_literal(c); - } else { - let next = chars - .next() - .ok_or(StringInterpolationError::UnexpectedEOFPercent)?; - let (named_or_positional, format_char) = if next == '(' { - let mut name = String::new(); - loop { - match chars.next() { - None => { - return Err( - StringInterpolationError::UnexpectedEOFClosingParen.into() - ) - } - Some(')') => { - break; - } - Some(c) => name.push(c), - } - } - ( - NamedOrPositional::Named(name), - chars - .next() - .ok_or(StringInterpolationError::UnexpectedEOFPercent)?, - ) - } else { - (NamedOrPositional::Positional, next) - }; - let format = match format_char { - 's' => ArgFormat::Str, - 'r' => ArgFormat::Repr, - 'd' | 'i' => ArgFormat::Dec, - 'o' => ArgFormat::Oct, - 'x' => ArgFormat::HexLower, - 'X' => ArgFormat::HexUpper, - 'c' => ArgFormat::Char, - '%' => match named_or_positional { - NamedOrPositional::Positional => { - result.append_literal('%'); - continue; - } - NamedOrPositional::Named(_) => { - // In both Python and Starlark Go implementations - // `%(n)%` consumes named argument, but - // `%%` does not consume positional argument. - // So `Percent` variant is added only when `ArgFormat` is `Named`. - ArgFormat::Percent - } - }, - c => return Err(StringInterpolationError::UnknownSpecifier(c).into()), - }; - match named_or_positional { - NamedOrPositional::Positional => { - result.positional_count += 1; - } - NamedOrPositional::Named(..) => { - result.named_count += 1; - } - } - result - .parameters - .push((named_or_positional, format, String::new())); - } - } - Ok(result) - } - - pub fn format(self, other: Value) -> Result { - let mut r = self.init; - let other_iter; - let mut arg_iter: Box> = if self.positional_count > 1 { - other_iter = Some(other.iter()?); - other_iter.as_ref().unwrap().iter() - } else if self.positional_count == 1 { - Box::new(iter::once(other.clone())) - } else if self.named_count != 0 { - Box::new(iter::empty()) - } else { - // If both positional count is zero and named count is zero - // we should check that iterable has zero elements. 
- other_iter = Some(other.iter()?); - other_iter.as_ref().unwrap().iter() - }; - for (named_or_positional, format, tail) in self.parameters { - let arg = match named_or_positional { - NamedOrPositional::Positional => match arg_iter.next() { - Some(a) => a, - None => return Err(StringInterpolationError::NotEnoughParameters.into()), - }, - NamedOrPositional::Named(name) => other.at(Value::new(name))?, - }; - format.format_arg(&mut r, arg)?; - r.push_str(&tail); - } - - if arg_iter.next().is_some() { - return Err(StringInterpolationError::TooManyParameters.into()); - } - - Ok(r) - } -} - -#[cfg(test)] -mod test { - use crate::values::Value; - use std::collections::HashMap; - use std::convert::TryFrom; - - #[test] - fn test_string_interpolation() { - // "Hello %s, your score is %d" % ("Bob", 75) == "Hello Bob, your score is 75" - assert_eq!( - Value::from("Hello %s, your score is %d") - .percent(Value::from(("Bob", 75))) - .unwrap(), - Value::from("Hello Bob, your score is 75") - ); - - // "%d %o %x %c" % (65, 65, 65, 65) == "65 101 41 A" - assert_eq!( - Value::from("%d %o %x %c") - .percent(Value::from((65, 65, 65, 65))) - .unwrap(), - Value::from("65 101 41 A") - ); - - // "%(greeting)s, %(audience)s" % {"greeting": "Hello", "audience": "world"} == - // "Hello, world" - let mut d = Value::try_from(HashMap::::new()).unwrap(); - d.set_at(Value::from("greeting"), Value::from("Hello")) - .unwrap(); - d.set_at(Value::from("audience"), Value::from("world")) - .unwrap(); - assert_eq!( - Value::from("%(greeting)s, %(audience)s") - .percent(d) - .unwrap(), - Value::from("Hello, world") - ); - - // Both Python and Starlark Go behave this way: - // "%s%(a)%" % {"a": 1} == "{\"a\": 1}%" - // "%s%(a)s" % {"a": 1} == "{\"a\": 1}1" - let mut d = Value::try_from(HashMap::::new()).unwrap(); - d.set_at(Value::from("a"), Value::from(1)).unwrap(); - assert_eq!( - Value::from("%s%(a)%").percent(d.clone()).unwrap(), - Value::from("{\"a\": 1}%") - ); - assert_eq!( - Value::from("%s%(a)s").percent(d.clone()).unwrap(), - Value::from("{\"a\": 1}1") - ); - } -} diff --git a/starlark/src/values/string/mod.rs b/starlark/src/values/string/mod.rs deleted file mode 100644 index 478bf624..00000000 --- a/starlark/src/values/string/mod.rs +++ /dev/null @@ -1,320 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Define the string type for Starlark. 
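Ahead of the `String` implementation that follows, a short usage sketch of the string operations it provides (indexing, slicing, and `%` interpolation). The calls mirror the doctests and tests in this module; the specific assertions are illustrative, not part of the deleted sources.

```rust
use starlark::values::Value;

fn main() {
    let s = Value::from("banana");

    // Length counts characters; indexing goes through `convert_index`.
    assert_eq!(6, s.length().unwrap());
    assert_eq!(Value::from("b"), s.at(Value::from(0)).unwrap());

    // Slicing: "banana"[1::2] == "aaa".
    assert_eq!(
        Value::from("aaa"),
        s.slice(Some(Value::from(1)), None, Some(Value::from(2)))
            .unwrap()
    );

    // `%` interpolation, handled by the interpolation module above:
    // "Hello %s, your score is %d" % ("Bob", 75) == "Hello Bob, your score is 75".
    assert_eq!(
        Value::from("Hello Bob, your score is 75"),
        Value::from("Hello %s, your score is %d")
            .percent(Value::from(("Bob", 75)))
            .unwrap()
    );
}
```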
-use crate::values::error::ValueError; -use crate::values::string::interpolation::ArgsFormat; -use crate::values::*; -use std; -use std::cmp::Ordering; -use std::collections::hash_map::DefaultHasher; -use std::hash::{Hash, Hasher}; - -pub mod interpolation; -pub mod rc; - -use crate::values::frozen::FrozenOnCreation; -use crate::values::slice_indices::convert_slice_indices; -use crate::values::string::rc::RcString; -use std::fmt; -use std::iter; - -impl TypedValue for String { - type Holder = Immutable; - - const INLINE: bool = true; - - fn values_for_descendant_check_and_freeze<'a>( - &'a self, - ) -> Box + 'a> { - Box::new(iter::empty()) - } - - fn to_str_impl(&self, buf: &mut String) -> fmt::Result { - buf.push_str(&self); - Ok(()) - } - fn to_repr_impl(&self, buf: &mut String) -> fmt::Result { - write!(buf, "\"")?; - for c in self.chars() { - write!(buf, "{}", c.escape_debug())?; - } - write!(buf, "\"")?; - Ok(()) - } - - const TYPE: &'static str = "string"; - - fn new_value(self) -> Value { - Value(ValueInner::String(RcString::from(self))) - } - - fn to_bool(&self) -> bool { - !self.is_empty() - } - - fn get_hash(&self) -> Result { - let mut s = DefaultHasher::new(); - self.hash(&mut s); - Ok(s.finish()) - } - - fn equals(&self, other: &String) -> Result { - Ok(*self == *other) - } - - fn compare(&self, other: &String) -> Result { - Ok(self.cmp(other)) - } - - fn at(&self, index: Value) -> ValueResult { - let i = index.convert_index(self.len() as i64)? as usize; - Ok(Value::new(self.chars().nth(i).unwrap().to_string())) - } - - fn length(&self) -> Result { - Ok(self.chars().count() as i64) - } - - fn contains(&self, other: &Value) -> Result { - if other.get_type() == "string" { - Ok(str::contains(self, &other.to_str())) - } else { - Err(ValueError::IncorrectParameterType) - } - } - - fn slice( - &self, - start: Option, - stop: Option, - stride: Option, - ) -> ValueResult { - let (start, stop, stride) = convert_slice_indices(self.len() as i64, start, stop, stride)?; - let (low, take, astride) = if stride < 0 { - (stop + 1, start - stop, -stride) - } else { - (start, stop - start, stride) - }; - if take <= 0 { - return Ok(Value::from("")); - }; - - let v: String = self - .chars() - .skip(low as usize) - .take(take as usize) - .collect(); - let v: String = if stride > 0 { - v.chars() - .enumerate() - .filter_map(|x| { - if 0 == (x.0 as i64 % astride) { - Some(x.1) - } else { - None - } - }) - .collect() - } else { - v.chars() - .rev() - .enumerate() - .filter_map(|x| { - if 0 == (x.0 as i64 % astride) { - Some(x.1) - } else { - None - } - }) - .collect() - }; - Ok(Value::new(v)) - } - - /// Concatenate `other` to the current value. - /// - /// `other` has to be a string. - /// - /// # Example - /// - /// ```rust - /// # use starlark::values::*; - /// # use starlark::values::string; - /// # assert!( - /// // "abc" + "def" = "abcdef" - /// Value::from("abc").add(Value::from("def")).unwrap() == Value::from("abcdef") - /// # ); - /// ``` - fn add(&self, other: &String) -> Result { - Ok(self.chars().chain(other.chars()).collect()) - } - - /// Repeat `other` times this string. - /// - /// `other` has to be an int. 
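    /// A zero or negative count yields the empty string, since the
    /// repetition loop below never runs.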
- /// - /// # Example - /// - /// ```rust - /// # use starlark::values::*; - /// # use starlark::values::string; - /// # assert!( - /// // "abc" * 3 == "abcabcabc" - /// Value::from("abc").mul(Value::from(3)).unwrap() == Value::from("abcabcabc") - /// # ); - /// ``` - fn mul(&self, other: Value) -> ValueResult { - match other.downcast_ref::() { - Some(l) => { - let mut result = String::new(); - for _i in 0..*l { - result += self - } - Ok(Value::new(result)) - } - None => Err(ValueError::IncorrectParameterType), - } - } - - /// Perform string interpolation - /// - /// Cf. [String interpolation on the Starlark spec]( - /// https://github.com/google/skylark/blob/a0e5de7e63b47e716cca7226662a4c95d47bf873/doc/spec.md#string-interpolation - /// ) - /// - /// # Example - /// - /// ```rust - /// # use starlark::values::*; - /// # use starlark::values::string; - /// # use std::collections::HashMap; - /// # use std::convert::TryFrom; - /// # assert!( - /// // "Hello %s, your score is %d" % ("Bob", 75) == "Hello Bob, your score is 75" - /// Value::from("Hello %s, your score is %d").percent(Value::from(("Bob", 75))).unwrap() - /// == Value::from("Hello Bob, your score is 75") - /// # ); - /// # assert!( - /// // "%d %o %x %c" % (65, 65, 65, 65) == "65 101 41 A" - /// Value::from("%d %o %x %c").percent(Value::from((65, 65, 65, 65))).unwrap() - /// == Value::from("65 101 41 A") - /// # ); - /// // "%(greeting)s, %(audience)s" % {"greeting": "Hello", "audience": "world"} == - /// // "Hello, world" - /// let mut d = Value::try_from(HashMap::::new()).unwrap(); - /// d.set_at(Value::from("greeting"), Value::from("Hello")); - /// d.set_at(Value::from("audience"), Value::from("world")); - /// # assert!( - /// Value::from("%(greeting)s, %(audience)s").percent(d).unwrap() == Value::from("Hello, world") - /// # ); - /// ``` - fn percent(&self, other: Value) -> ValueResult { - Ok(Value::new(ArgsFormat::parse(&self)?.format(other)?)) - } -} - -impl FrozenOnCreation for String {} - -impl From for Value { - fn from(s: String) -> Self { - Value::new(s) - } -} - -impl<'a> From<&'a str> for Value { - fn from(a: &'a str) -> Value { - Value::new(a.to_owned()) - } -} - -#[cfg(test)] -mod tests { - use super::super::Value; - - #[test] - fn test_to_repr() { - assert_eq!("\"\\t\\n\\'\\\"\"", Value::from("\t\n'\"").to_repr()); - assert_eq!("\"Hello, 世界\"", Value::from("Hello, 世界").to_repr()); - } - - #[test] - fn test_string_len() { - assert_eq!(1, Value::from("😿").length().unwrap()) - } - - #[test] - fn test_arithmetic_on_string() { - // "abc" + "def" = "abcdef" - assert_eq!( - Value::from("abc").add(Value::from("def")).unwrap(), - Value::from("abcdef") - ); - // "abc" * 3 == "abcabcabc" - assert_eq!( - Value::from("abc").mul(Value::from(3)).unwrap(), - Value::from("abcabcabc") - ); - } - - #[test] - fn test_slice_string() { - // Remove the first element: "abc"[1:] == "bc". - assert_eq!( - Value::from("abc") - .slice(Some(Value::from(1)), None, None) - .unwrap(), - Value::from("bc") - ); - // Remove the last element: "abc"[:-1] == "ab". - assert_eq!( - Value::from("abc") - .slice(None, Some(Value::from(-1)), None) - .unwrap(), - Value::from("ab") - ); - // Remove the first and the last element: "abc"[1:-1] == "b". - assert_eq!( - Value::from("abc") - .slice(Some(Value::from(1)), Some(Value::from(-1)), None) - .unwrap(), - Value::from("b") - ); - // Select one element out of 2, skipping the first: "banana"[1::2] == "aaa". 
- assert_eq!( - Value::from("banana") - .slice(Some(Value::from(1)), None, Some(Value::from(2))) - .unwrap(), - Value::from("aaa") - ); - // Select one element out of 2 in reverse order, starting at index 4: - // "banana"[4::-2] = "nnb" - assert_eq!( - Value::from("banana") - .slice(Some(Value::from(4)), None, Some(Value::from(-2))) - .unwrap(), - Value::from("nnb") - ); - } - - #[test] - fn test_string_is_in() { - // "a" in "abc" == True - assert!(Value::from("abc").contains(&Value::from("a")).unwrap()); - // "b" in "abc" == True - assert!(Value::from("abc").contains(&Value::from("b")).unwrap()); - // "z" in "abc" == False - assert!(!Value::from("abc").contains(&Value::from("z")).unwrap()); - } -} diff --git a/starlark/src/values/string/rc.rs b/starlark/src/values/string/rc.rs deleted file mode 100644 index cd95c4c3..00000000 --- a/starlark/src/values/string/rc.rs +++ /dev/null @@ -1,126 +0,0 @@ -// Copyright 2019 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Refcounted string, - -use crate::values::inspect::Inspectable; -use crate::values::Value; -use crate::values::ValueInner; -use std::borrow::Borrow; -use std::hash::{Hash, Hasher}; -use std::rc::Rc; -use std::{fmt, ops}; - -/// Refcounted string. -/// -/// Newtype to avoid rewriting a lot of code when implementation changes. -#[derive(Eq, PartialEq, PartialOrd, Ord, Clone)] -// Note `None` is empty string and `Some("")` is not permitted, -// otherwise derives won't work correctly -pub struct RcString(Option>); - -impl Borrow for RcString { - fn borrow(&self) -> &str { - self.as_str() - } -} - -/// Note `Hash` must be compatible with [`str`], otherwise -/// `HashMap` query by `str` won't work correctly. 
-impl Hash for RcString { - fn hash(&self, state: &mut H) { - self.as_str().hash(state) - } -} - -impl ops::Deref for RcString { - type Target = str; - - fn deref(&self) -> &str { - self.as_str() - } -} - -static EMPTY_STRING: String = String::new(); - -impl RcString { - /// Useful for downcasting - pub(crate) fn as_string(&self) -> &String { - match &self.0 { - Some(s) => s, - None => &EMPTY_STRING, - } - } - - pub fn as_str(&self) -> &str { - self.as_string().as_str() - } -} - -impl fmt::Display for RcString { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(self.as_str(), f) - } -} - -impl fmt::Debug for RcString { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Debug::fmt(&self.as_str(), f) - } -} - -impl> From for RcString { - fn from(s: I) -> Self { - let s = s.into(); - if s.is_empty() { - RcString(None) - } else { - RcString(Some(Rc::new(s))) - } - } -} - -impl From for Value { - fn from(s: RcString) -> Self { - Value(ValueInner::String(s)) - } -} - -impl Inspectable for RcString { - fn inspect(&self) -> Value { - Value::from(self.clone()) - } -} - -#[cfg(test)] -mod test { - use crate::values::string::rc::RcString; - use std::borrow::Borrow; - - #[test] - fn eq() { - assert_eq!(RcString::from("ab"), RcString::from("ab")) - } - - #[test] - fn from() { - assert_eq!("ab", format!("{}", RcString::from("ab"))); - assert_eq!("ab", format!("{}", RcString::from("ab".to_owned()))); - } - - #[test] - fn borrow() { - assert_eq!("ab", Borrow::::borrow(&RcString::from("ab"))); - } -} diff --git a/starlark/src/values/tuple.rs b/starlark/src/values/tuple.rs deleted file mode 100644 index 91feb429..00000000 --- a/starlark/src/values/tuple.rs +++ /dev/null @@ -1,531 +0,0 @@ -// Copyright 2018 The Starlark in Rust Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Define the tuple type for Starlark. -use crate::values::error::ValueError; -use crate::values::iter::TypedIterable; -use crate::values::slice_indices::convert_slice_indices; -use crate::values::*; -use std::cmp::Ordering; -use std::collections::hash_map::DefaultHasher; -use std::fmt; -use std::hash::Hasher; - -/// A starlark tuple -#[derive(Debug, Clone)] -pub struct Tuple { - content: Vec, -} - -pub(crate) fn slice_vector<'a, I: Iterator>( - start: i64, - stop: i64, - stride: i64, - content: I, -) -> Vec { - let (low, take, astride) = if stride < 0 { - (stop + 1, start - stop, -stride) - } else { - (start, stop - start, stride) - }; - if take <= 0 { - return Vec::new(); - } - let mut v: Vec = content - .skip(low as usize) - .take(take as usize) - .cloned() - .collect(); - if stride < 0 { - v.reverse(); - } - v.into_iter() - .enumerate() - .filter_map(|x| { - if 0 == (x.0 as i64 % astride) { - Some(x.1) - } else { - None - } - }) - .collect() -} - -impl Tuple { - pub fn new(values: Vec) -> Tuple { - Tuple { content: values } - } -} - -impl From<()> for Tuple { - fn from(_a: ()) -> Tuple { - Tuple { content: vec![] } - } -} - -// TODO: Can we do that with macro? i.e. 
generating the index number automatically? -impl> From<(T,)> for Tuple { - fn from(a: (T,)) -> Tuple { - Tuple { - content: vec![a.0.into()], - } - } -} - -impl, T2: Into> From<(T1, T2)> for Tuple { - fn from(a: (T1, T2)) -> Tuple { - Tuple { - content: vec![a.0.into(), a.1.into()], - } - } -} - -impl, T2: Into, T3: Into> From<(T1, T2, T3)> for Tuple { - fn from(a: (T1, T2, T3)) -> Tuple { - Tuple { - content: vec![a.0.into(), a.1.into(), a.2.into()], - } - } -} - -impl, T2: Into, T3: Into, T4: Into> From<(T1, T2, T3, T4)> - for Tuple -{ - fn from(a: (T1, T2, T3, T4)) -> Tuple { - Tuple { - content: vec![a.0.into(), a.1.into(), a.2.into(), a.3.into()], - } - } -} - -impl, T2: Into, T3: Into, T4: Into, T5: Into> - From<(T1, T2, T3, T4, T5)> for Tuple -{ - fn from(a: (T1, T2, T3, T4, T5)) -> Tuple { - Tuple { - content: vec![a.0.into(), a.1.into(), a.2.into(), a.3.into(), a.4.into()], - } - } -} - -impl< - T1: Into, - T2: Into, - T3: Into, - T4: Into, - T5: Into, - T6: Into, - > From<(T1, T2, T3, T4, T5, T6)> for Tuple -{ - fn from(a: (T1, T2, T3, T4, T5, T6)) -> Tuple { - Tuple { - content: vec![ - a.0.into(), - a.1.into(), - a.2.into(), - a.3.into(), - a.4.into(), - a.5.into(), - ], - } - } -} - -impl< - T1: Into, - T2: Into, - T3: Into, - T4: Into, - T5: Into, - T6: Into, - T7: Into, - > From<(T1, T2, T3, T4, T5, T6, T7)> for Tuple -{ - fn from(a: (T1, T2, T3, T4, T5, T6, T7)) -> Tuple { - Tuple { - content: vec![ - a.0.into(), - a.1.into(), - a.2.into(), - a.3.into(), - a.4.into(), - a.5.into(), - a.6.into(), - ], - } - } -} - -impl< - T1: Into, - T2: Into, - T3: Into, - T4: Into, - T5: Into, - T6: Into, - T7: Into, - T8: Into, - > From<(T1, T2, T3, T4, T5, T6, T7, T8)> for Tuple -{ - fn from(a: (T1, T2, T3, T4, T5, T6, T7, T8)) -> Tuple { - Tuple { - content: vec![ - a.0.into(), - a.1.into(), - a.2.into(), - a.3.into(), - a.4.into(), - a.5.into(), - a.6.into(), - a.7.into(), - ], - } - } -} - -impl< - T1: Into, - T2: Into, - T3: Into, - T4: Into, - T5: Into, - T6: Into, - T7: Into, - T8: Into, - T9: Into, - > From<(T1, T2, T3, T4, T5, T6, T7, T8, T9)> for Tuple -{ - fn from(a: (T1, T2, T3, T4, T5, T6, T7, T8, T9)) -> Tuple { - Tuple { - content: vec![ - a.0.into(), - a.1.into(), - a.2.into(), - a.3.into(), - a.4.into(), - a.5.into(), - a.6.into(), - a.7.into(), - a.8.into(), - ], - } - } -} - -impl< - T1: Into, - T2: Into, - T3: Into, - T4: Into, - T5: Into, - T6: Into, - T7: Into, - T8: Into, - T9: Into, - T10: Into, - > From<(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10)> for Tuple -{ - fn from(a: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10)) -> Tuple { - Tuple { - content: vec![ - a.0.into(), - a.1.into(), - a.2.into(), - a.3.into(), - a.4.into(), - a.5.into(), - a.6.into(), - a.7.into(), - a.8.into(), - a.9.into(), - ], - } - } -} - -impl TypedValue for Tuple { - type Holder = Immutable; - - fn values_for_descendant_check_and_freeze<'a>( - &'a self, - ) -> Box + 'a> { - // Tuple are weird, immutable but with potentially mutable - Box::new(self.content.iter().cloned()) - } - - fn to_repr_impl(&self, buf: &mut String) -> fmt::Result { - write!(buf, "(")?; - for (i, v) in self.content.iter().enumerate() { - if i != 0 { - write!(buf, ", ")?; - } - v.to_repr_impl(buf)?; - } - if self.content.len() == 1 { - write!(buf, ",")?; - } - write!(buf, ")")?; - Ok(()) - } - const TYPE: &'static str = "tuple"; - fn to_bool(&self) -> bool { - !self.content.is_empty() - } - fn get_hash(&self) -> Result { - let mut s = DefaultHasher::new(); - for v in self.content.iter() { - s.write_u64(v.get_hash()?) 
- } - Ok(s.finish()) - } - - fn equals(&self, other: &Tuple) -> Result { - if self.content.len() != other.content.len() { - return Ok(false); - } - - let mut self_iter = self.content.iter(); - let mut other_iter = other.content.iter(); - - loop { - match (self_iter.next(), other_iter.next()) { - (Some(a), Some(b)) => { - if !a.equals(b)? { - return Ok(false); - } - } - (None, None) => { - return Ok(true); - } - _ => unreachable!(), - } - } - } - - fn compare(&self, other: &Tuple) -> Result { - let mut iter1 = self.content.iter(); - let mut iter2 = other.content.iter(); - loop { - match (iter1.next(), iter2.next()) { - (None, None) => return Ok(Ordering::Equal), - (None, Some(..)) => return Ok(Ordering::Less), - (Some(..), None) => return Ok(Ordering::Greater), - (Some(v1), Some(v2)) => { - let r = v1.compare(&v2)?; - if r != Ordering::Equal { - return Ok(r); - } - } - } - } - } - - fn at(&self, index: Value) -> ValueResult { - let i = index.convert_index(self.length()?)? as usize; - Ok(self.content[i].clone()) - } - - fn length(&self) -> Result { - Ok(self.content.len() as i64) - } - - fn contains(&self, other: &Value) -> Result { - for x in self.content.iter() { - if x.equals(other)? { - return Ok(true); - } - } - Ok(false) - } - - fn slice( - &self, - start: Option, - stop: Option, - stride: Option, - ) -> ValueResult { - let (start, stop, stride) = convert_slice_indices(self.length()?, start, stop, stride)?; - Ok(Value::new(Tuple::new(slice_vector( - start, - stop, - stride, - self.content.iter(), - )))) - } - - fn iter(&self) -> Result<&dyn TypedIterable, ValueError> { - Ok(self) - } - - /// Concatenate `other` to the current value. - /// - /// `other` has to be a tuple. - /// - /// # Example - /// - /// ```rust - /// # use starlark::values::*; - /// # use starlark::values::tuple::Tuple; - /// # assert!( - /// // (1, 2, 3) + (2, 3) == (1, 2, 3, 2, 3) - /// Value::from((1,2,3)).add(Value::from((2,3))).unwrap() == Value::from((1, 2, 3, 2, 3)) - /// # ); - /// ``` - fn add(&self, other: &Tuple) -> Result { - let mut result = Tuple { - content: Vec::with_capacity(self.content.len() + other.content.len()), - }; - for x in &self.content { - result.content.push(x.clone()); - } - for x in &other.content { - result.content.push(x.clone()); - } - Ok(result) - } - - /// Repeat `other` times this tuple. - /// - /// `other` has to be an int or a boolean. - /// - /// # Example - /// - /// ```rust - /// # use starlark::values::*; - /// # use starlark::values::tuple::Tuple; - /// # assert!( - /// // (1, 2, 3) * 3 == (1, 2, 3, 1, 2, 3, 1, 2, 3) - /// Value::from((1,2,3)).mul(Value::from(3)).unwrap() - /// == Value::from((1, 2, 3, 1, 2, 3, 1, 2, 3)) - /// # ); - /// ``` - fn mul(&self, other: Value) -> ValueResult { - match other.downcast_ref::() { - Some(l) => { - let mut result = Tuple { - content: Vec::new(), - }; - for _i in 0..*l { - result.content.extend(self.content.iter().cloned()); - } - Ok(Value::new(result)) - } - None => Err(ValueError::IncorrectParameterType), - } - } -} - -impl TypedIterable for Tuple { - fn to_iter<'a>(&'a self) -> Box + 'a> { - Box::new(self.content.iter().cloned()) - } - - fn to_vec(&self) -> Vec { - self.content.clone() - } -} - -impl From<()> for Value { - fn from(_a: ()) -> Value { - Value::new(Tuple::from(())) - } -} - -macro_rules! 
from_tuple { - ($x: ty) => { - impl From<$x> for Value { - fn from(a: $x) -> Value { - Value::new(a) - } - } - }; - ($x: ty, $y: tt) => { - impl + Clone> From<$x> for Value { - fn from(a: $x) -> Value { - Value::new($y::from(a)) - } - } - }; - ($x: ty, $y: tt, noT) => { - impl From<$x> for Value { - fn from(a: $x) -> Value { - Value::new(a as $y) - } - } - }; - ($y: tt, $($x: tt),+) => { - impl<$($x: Into + Clone),+> From<($($x),+)> for Value { - fn from(a: ($($x),+)) -> Value { - Value::new($y::from(a)) - } - } - - }; -} - -from_tuple!((T,), Tuple); -from_tuple!(Tuple, T1, T2); -from_tuple!(Tuple, T1, T2, T3); -from_tuple!(Tuple, T1, T2, T3, T4); -from_tuple!(Tuple, T1, T2, T3, T4, T5); -from_tuple!(Tuple, T1, T2, T3, T4, T5, T6); -from_tuple!(Tuple, T1, T2, T3, T4, T5, T6, T7); -from_tuple!(Tuple, T1, T2, T3, T4, T5, T6, T7, T8); -from_tuple!(Tuple, T1, T2, T3, T4, T5, T6, T7, T8, T9); -from_tuple!(Tuple, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10); - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_to_str() { - assert_eq!("(1, 2, 3)", Value::from((1, 2, 3)).to_str()); - assert_eq!("(1, (2, 3))", Value::from((1, (2, 3))).to_str()); - assert_eq!("(1,)", Value::from((1,)).to_str()); - assert_eq!("()", Value::from(()).to_str()); - } - - #[test] - fn test_arithmetic_on_tuple() { - // (1, 2, 3) + (2, 3) == (1, 2, 3, 2, 3) - assert_eq!( - Value::from((1, 2, 3)).add(Value::from((2, 3))).unwrap(), - Value::from((1, 2, 3, 2, 3)) - ); - // (1, 2, 3) * 3 == (1, 2, 3, 1, 2, 3, 1, 2, 3) - assert_eq!( - Value::from((1, 2, 3)).mul(Value::from(3)).unwrap(), - Value::from((1, 2, 3, 1, 2, 3, 1, 2, 3)) - ); - } - - #[test] - fn test_is_descendant() { - let v1 = Value::from((1, 2, 3)); - let v2 = Value::from((1, 2, v1.clone())); - let v3 = Value::from((1, 2, v2.clone())); - assert!(v3.is_descendant_value(&v2)); - assert!(v3.is_descendant_value(&v1)); - assert!(v3.is_descendant_value(&v3)); - - assert!(v2.is_descendant_value(&v1)); - assert!(v2.is_descendant_value(&v2)); - assert!(!v2.is_descendant_value(&v3)); - - assert!(v1.is_descendant_value(&v1)); - assert!(!v1.is_descendant_value(&v2)); - assert!(!v1.is_descendant_value(&v3)); - } -}
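// A sketch of what a single `from_tuple!` invocation above expands to; the
// `Into<Value>` bound is an inference from the tuple-to-`Tuple` `From` impls
// earlier in this file, so treat the exact bounds as an approximation:
//
//     // from_tuple!(Tuple, T1, T2) roughly becomes:
//     impl<T1: Into<Value> + Clone, T2: Into<Value> + Clone> From<(T1, T2)> for Value {
//         fn from(a: (T1, T2)) -> Value {
//             Value::new(Tuple::from(a))
//         }
//     }
//
// which is what lets the tests write `Value::from((1, 2, 3))` and similar
// tuple literals directly.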