From c8e001d470c28d4d1296081310591928be8139d3 Mon Sep 17 00:00:00 2001
From: Eric Arellano
Date: Fri, 26 Jun 2020 20:03:26 -0700
Subject: [PATCH 1/4] Remove v1 portions of `backend/native`

# Rust tests will be skipped. Delete if not intended.
[ci skip-rust-tests]
---
 examples/3rdparty/BUILD | 7 -
 examples/3rdparty/python/BUILD | 40 ---
 examples/3rdparty/python/requirements.txt | 1 -
 examples/src/python/example/BUILD | 9 -
 .../python/example/tensorflow_custom_op/BUILD | 57 ----
 .../example/tensorflow_custom_op/README.md | 17 -
 .../example/tensorflow_custom_op/__init__.py | 0
 .../example/tensorflow_custom_op/setup.py | 11 -
 .../tensorflow_custom_op/show_tf_version.py | 6 -
 .../tensorflow_custom_op/wrap_lib/__init__.py | 0
 .../wrap_lib/wrap_zero_out_op.py | 15 -
 .../zero_out_custom_op.py | 13 -
 .../tensorflow_custom_op/zero_out_op.cpp | 45 ---
 pants.toml | 14 -
 src/python/pants/backend/native/BUILD | 9 -
 src/python/pants/backend/native/register.py | 57 ----
 src/python/pants/backend/native/rules/BUILD | 14 -
 .../pants/backend/native/rules/__init__.py | 0
 .../subsystems/native_build_settings.py | 35 --
 .../backend/native/subsystems/packaging/BUILD | 10 -
 .../native/subsystems/packaging/__init__.py | 0
 .../native/subsystems/packaging/conan.py | 17 -
 .../pants/backend/native/target_types.py | 229 -------------
 src/python/pants/backend/native/targets/BUILD | 18 -
 .../pants/backend/native/targets/__init__.py | 0
 .../native/targets/external_native_library.py | 136 --------
 .../backend/native/targets/native_artifact.py | 56 ---
 .../backend/native/targets/native_library.py | 112 ------
 .../native/targets/packaged_native_library.py | 68 ----
 src/python/pants/backend/native/tasks/BUILD | 25 --
 .../pants/backend/native/tasks/__init__.py | 0
 .../pants/backend/native/tasks/c_compile.py | 31 --
 .../pants/backend/native/tasks/conan_fetch.py | 211 ------------
 .../pants/backend/native/tasks/conan_prep.py | 18 -
 .../pants/backend/native/tasks/cpp_compile.py | 31 --
 .../native/tasks/link_shared_libraries.py | 226 -------------
 .../backend/native/tasks/native_compile.py | 301 -----------------
 .../pants/backend/native/tasks/native_task.py | 160 ---------
 src/python/pants/backend/python/register.py | 4 -
 .../pants/backend/python/subsystems/BUILD | 1 -
 .../python/subsystems/python_native_code.py | 105 +-----
 src/python/pants/backend/python/tasks/BUILD | 2 -
 .../tasks/build_local_python_distributions.py | 318 ------------------
 src/python/pants/option/global_options.py | 7 +-
 testprojects/src/python/BUILD | 10 -
 .../python/python_distribution/ctypes/BUILD | 48 ---
 .../python_distribution/ctypes/__init__.py | 0
 .../ctypes/ctypes_python_pkg/__init__.py | 0
 .../ctypes_python_pkg/ctypes_wrapper.py | 26 --
 .../python/python_distribution/ctypes/main.py | 8 -
 .../python_distribution/ctypes/setup.py | 13 -
 .../python_distribution/ctypes/some_math.c | 3 -
 .../python_distribution/ctypes/some_math.h | 6 -
 .../ctypes/some_more_math.cpp | 5 -
 .../ctypes/some_more_math.hpp | 8 -
 .../ctypes/src-subdir/add_three.c | 3 -
 .../ctypes/src-subdir/add_three.h | 6 -
 .../python_distribution/ctypes_interop/BUILD | 23 --
 .../ctypes_interop/__init__.py | 0
 .../ctypes_python_pkg/__init__.py | 0
 .../ctypes_python_pkg/ctypes_wrapper.py | 35 --
 .../ctypes_interop/main.py | 8 -
 .../ctypes_interop/setup.py | 13 -
 .../ctypes_interop/some-math/BUILD | 1 -
 .../ctypes_interop/some-math/some_math.c | 3 -
 .../ctypes_interop/some-math/some_math.h | 6 -
 .../ctypes_interop/some-more-math/BUILD | 6 -
 .../some-more-math/some_more_math.cpp | 12 -
 .../some-more-math/some_more_math.hpp | 14 -
 .../ctypes_interop/wrapped-math/BUILD | 10 -
 .../wrapped-math/wrapped_math.c | 5 -
 .../wrapped-math/wrapped_math.h | 6 -
 .../ctypes_with_extra_compiler_flags/BUILD | 30 --
 .../__init__.py | 0
 .../ctypes_python_pkg/__init__.py | 0
 .../ctypes_python_pkg/ctypes_wrapper.py | 22 --
 .../ctypes_with_extra_compiler_flags/main.py | 8 -
 .../ctypes_with_extra_compiler_flags/setup.py | 12 -
 .../some_more_math.cpp | 8 -
 .../some_more_math.hpp | 18 -
 .../ctypes_with_third_party/BUILD | 46 ---
 .../ctypes_with_third_party/__init__.py | 0
 .../ctypes_python_pkg/__init__.py | 0
 .../ctypes_python_pkg/ctypes_wrapper.py | 23 --
 .../ctypes_with_third_party/main.py | 8 -
 .../ctypes_with_third_party/setup.py | 13 -
 .../some_more_math.hpp | 8 -
 .../some_more_math_with_third_party.cpp | 65 ----
 .../hello_with_install_requires/BUILD | 19 --
 .../hello_package/__init__.py | 0
 .../hello_package/hello.py | 10 -
 .../hello_with_install_requires/main.py | 9 -
 .../hello_with_install_requires/setup.py | 12 -
 .../python_distribution/setup_requires/BUILD | 20 --
 .../setup_requires/__init__.py | 0
 .../setup_requires/setup.py | 24 --
 .../setup_requires/test_setup_requires.py | 11 -
 testprojects/tests/python/BUILD | 8 -
 .../example_test/python_distribution/BUILD | 8 -
 .../python_distribution/test_hello.py | 11 -
 100 files changed, 2 insertions(+), 3105 deletions(-)
 delete mode 100644 examples/3rdparty/BUILD
 delete mode 100644 examples/3rdparty/python/BUILD
 delete mode 100644 examples/3rdparty/python/requirements.txt
 delete mode 100644 examples/src/python/example/tensorflow_custom_op/BUILD
 delete mode 100644 examples/src/python/example/tensorflow_custom_op/README.md
 delete mode 100644 examples/src/python/example/tensorflow_custom_op/__init__.py
 delete mode 100644 examples/src/python/example/tensorflow_custom_op/setup.py
 delete mode 100644 examples/src/python/example/tensorflow_custom_op/show_tf_version.py
 delete mode 100644 examples/src/python/example/tensorflow_custom_op/wrap_lib/__init__.py
 delete mode 100644 examples/src/python/example/tensorflow_custom_op/wrap_lib/wrap_zero_out_op.py
 delete mode 100644 examples/src/python/example/tensorflow_custom_op/zero_out_custom_op.py
 delete mode 100644 examples/src/python/example/tensorflow_custom_op/zero_out_op.cpp
 delete mode 100644 src/python/pants/backend/native/rules/BUILD
 delete mode 100644 src/python/pants/backend/native/rules/__init__.py
 delete mode 100644 src/python/pants/backend/native/subsystems/native_build_settings.py
 delete mode 100644 src/python/pants/backend/native/subsystems/packaging/BUILD
 delete mode 100644 src/python/pants/backend/native/subsystems/packaging/__init__.py
 delete mode 100644 src/python/pants/backend/native/subsystems/packaging/conan.py
 delete mode 100644 src/python/pants/backend/native/target_types.py
 delete mode 100644 src/python/pants/backend/native/targets/BUILD
 delete mode 100644 src/python/pants/backend/native/targets/__init__.py
 delete mode 100644 src/python/pants/backend/native/targets/external_native_library.py
 delete mode 100644 src/python/pants/backend/native/targets/native_artifact.py
 delete mode 100644 src/python/pants/backend/native/targets/native_library.py
 delete mode 100644 src/python/pants/backend/native/targets/packaged_native_library.py
 delete mode 100644 src/python/pants/backend/native/tasks/BUILD
 delete mode 100644 src/python/pants/backend/native/tasks/__init__.py
 delete mode 100644 src/python/pants/backend/native/tasks/c_compile.py
 delete mode 100644
src/python/pants/backend/native/tasks/conan_fetch.py delete mode 100644 src/python/pants/backend/native/tasks/conan_prep.py delete mode 100644 src/python/pants/backend/native/tasks/cpp_compile.py delete mode 100644 src/python/pants/backend/native/tasks/link_shared_libraries.py delete mode 100644 src/python/pants/backend/native/tasks/native_compile.py delete mode 100644 src/python/pants/backend/native/tasks/native_task.py delete mode 100644 src/python/pants/backend/python/tasks/build_local_python_distributions.py delete mode 100644 testprojects/src/python/python_distribution/ctypes/BUILD delete mode 100644 testprojects/src/python/python_distribution/ctypes/__init__.py delete mode 100644 testprojects/src/python/python_distribution/ctypes/ctypes_python_pkg/__init__.py delete mode 100644 testprojects/src/python/python_distribution/ctypes/ctypes_python_pkg/ctypes_wrapper.py delete mode 100644 testprojects/src/python/python_distribution/ctypes/main.py delete mode 100644 testprojects/src/python/python_distribution/ctypes/setup.py delete mode 100644 testprojects/src/python/python_distribution/ctypes/some_math.c delete mode 100644 testprojects/src/python/python_distribution/ctypes/some_math.h delete mode 100644 testprojects/src/python/python_distribution/ctypes/some_more_math.cpp delete mode 100644 testprojects/src/python/python_distribution/ctypes/some_more_math.hpp delete mode 100644 testprojects/src/python/python_distribution/ctypes/src-subdir/add_three.c delete mode 100644 testprojects/src/python/python_distribution/ctypes/src-subdir/add_three.h delete mode 100644 testprojects/src/python/python_distribution/ctypes_interop/BUILD delete mode 100644 testprojects/src/python/python_distribution/ctypes_interop/__init__.py delete mode 100644 testprojects/src/python/python_distribution/ctypes_interop/ctypes_python_pkg/__init__.py delete mode 100644 testprojects/src/python/python_distribution/ctypes_interop/ctypes_python_pkg/ctypes_wrapper.py delete mode 100644 testprojects/src/python/python_distribution/ctypes_interop/main.py delete mode 100644 testprojects/src/python/python_distribution/ctypes_interop/setup.py delete mode 100644 testprojects/src/python/python_distribution/ctypes_interop/some-math/BUILD delete mode 100644 testprojects/src/python/python_distribution/ctypes_interop/some-math/some_math.c delete mode 100644 testprojects/src/python/python_distribution/ctypes_interop/some-math/some_math.h delete mode 100644 testprojects/src/python/python_distribution/ctypes_interop/some-more-math/BUILD delete mode 100644 testprojects/src/python/python_distribution/ctypes_interop/some-more-math/some_more_math.cpp delete mode 100644 testprojects/src/python/python_distribution/ctypes_interop/some-more-math/some_more_math.hpp delete mode 100644 testprojects/src/python/python_distribution/ctypes_interop/wrapped-math/BUILD delete mode 100644 testprojects/src/python/python_distribution/ctypes_interop/wrapped-math/wrapped_math.c delete mode 100644 testprojects/src/python/python_distribution/ctypes_interop/wrapped-math/wrapped_math.h delete mode 100644 testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/BUILD delete mode 100644 testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/__init__.py delete mode 100644 testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/ctypes_python_pkg/__init__.py delete mode 100644 testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/ctypes_python_pkg/ctypes_wrapper.py delete mode 100644 
testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/main.py delete mode 100644 testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/setup.py delete mode 100644 testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/some_more_math.cpp delete mode 100644 testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/some_more_math.hpp delete mode 100644 testprojects/src/python/python_distribution/ctypes_with_third_party/BUILD delete mode 100644 testprojects/src/python/python_distribution/ctypes_with_third_party/__init__.py delete mode 100644 testprojects/src/python/python_distribution/ctypes_with_third_party/ctypes_python_pkg/__init__.py delete mode 100644 testprojects/src/python/python_distribution/ctypes_with_third_party/ctypes_python_pkg/ctypes_wrapper.py delete mode 100644 testprojects/src/python/python_distribution/ctypes_with_third_party/main.py delete mode 100644 testprojects/src/python/python_distribution/ctypes_with_third_party/setup.py delete mode 100644 testprojects/src/python/python_distribution/ctypes_with_third_party/some_more_math.hpp delete mode 100644 testprojects/src/python/python_distribution/ctypes_with_third_party/some_more_math_with_third_party.cpp delete mode 100644 testprojects/src/python/python_distribution/hello_with_install_requires/BUILD delete mode 100644 testprojects/src/python/python_distribution/hello_with_install_requires/hello_package/__init__.py delete mode 100644 testprojects/src/python/python_distribution/hello_with_install_requires/hello_package/hello.py delete mode 100644 testprojects/src/python/python_distribution/hello_with_install_requires/main.py delete mode 100644 testprojects/src/python/python_distribution/hello_with_install_requires/setup.py delete mode 100644 testprojects/src/python/python_distribution/setup_requires/BUILD delete mode 100644 testprojects/src/python/python_distribution/setup_requires/__init__.py delete mode 100644 testprojects/src/python/python_distribution/setup_requires/setup.py delete mode 100644 testprojects/src/python/python_distribution/setup_requires/test_setup_requires.py delete mode 100644 testprojects/tests/python/example_test/python_distribution/BUILD delete mode 100644 testprojects/tests/python/example_test/python_distribution/test_hello.py diff --git a/examples/3rdparty/BUILD b/examples/3rdparty/BUILD deleted file mode 100644 index 5617a894a70..00000000000 --- a/examples/3rdparty/BUILD +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -files( - name='python_directory', - sources=['python/**/*'], -) diff --git a/examples/3rdparty/python/BUILD b/examples/3rdparty/python/BUILD deleted file mode 100644 index 960b22def3d..00000000000 --- a/examples/3rdparty/python/BUILD +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -# see/edit requirements.txt in this directory to change deps. -python_requirements() - -remote_sources( - name='tensorflow-framework', - dest=packaged_native_library, - sources_target=':tensorflow-native-libs', - args=dict( - include_relpath='include', - lib_relpath='', - native_lib_names = { - 'darwin': ['tensorflow_framework'], - # On Linux, the shared lib is literally named `libtensorflow_framework.so.1` in tensorflow - # 1.14. 
Adding a linker command line argument `-l:lib_tensorflow_framework.so.1` will tell the - # linker to search for and use a library literally named that filename, without any further - # trickery. - 'linux': [':libtensorflow_framework.so.1'], - }, - ), -) - -unpacked_whls( - name='tensorflow-native-libs', - libraries=[':tensorflow'], - module_name='tensorflow', - include_patterns=[ - 'include/**/*', - './*.so*', - './*.dylib*', - ], - within_data_subdir=True, -) - -files( - name='examples_python_3rdparty', - sources=['**/*'], -) diff --git a/examples/3rdparty/python/requirements.txt b/examples/3rdparty/python/requirements.txt deleted file mode 100644 index 5037b61d34f..00000000000 --- a/examples/3rdparty/python/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -tensorflow==1.14.0 diff --git a/examples/src/python/example/BUILD b/examples/src/python/example/BUILD index 580fa6dd947..b00e967c1c5 100644 --- a/examples/src/python/example/BUILD +++ b/examples/src/python/example/BUILD @@ -6,7 +6,6 @@ target( dependencies = [ ':hello_directory', ':pants_publish_plugin_directory', - ':tensorflow_custom_op_directory', ], ) @@ -19,11 +18,3 @@ files( name='pants_publish_plugin_directory', sources=['pants_publish_plugin/**/*'], ) - -files( - name='tensorflow_custom_op_directory', - sources=['tensorflow_custom_op/**/*'], - dependencies = [ - 'examples/3rdparty:python_directory', - ], -) diff --git a/examples/src/python/example/tensorflow_custom_op/BUILD b/examples/src/python/example/tensorflow_custom_op/BUILD deleted file mode 100644 index 6554acc3c75..00000000000 --- a/examples/src/python/example/tensorflow_custom_op/BUILD +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -# TODO(#7059): add a page() for the README once it is easier to extend the docsite! - -# See https://www.tensorflow.org/guide/extend/op! - -python_library( - sources=['zero_out_custom_op.py'], - dependencies=[ - 'examples/3rdparty/python:tensorflow', - ':tensorflow-zero-out-op-wrapper', - ], -) - -# TODO: allow merging of this and a python_library() somehow! If we could make dependencies (such as -# tensorflow) into install_requires() with the SetupPy task somehow, this would be easy! -python_dist( - name='tensorflow-zero-out-op-wrapper', - sources=[ - 'setup.py', - '__init__.py', - # TODO: we shouldn't have to introduce this second-level package, but I can't figure out how to - # use wrap_zero_out_op.py at the top level! 
- 'wrap_lib/wrap_zero_out_op.py', - 'wrap_lib/__init__.py', - ], - dependencies=[ - ':tensorflow-zero-out-op', - ], -) - -ctypes_compatible_cpp_library( - name='tensorflow-zero-out-op', - dependencies=[ - 'examples/3rdparty/python:tensorflow-framework', - ], - ctypes_native_library=native_artifact(lib_name='tensorflow-zero-out-operator'), -) - - -python_binary( - name='show-tf-version', - sources=['show_tf_version.py'], - dependencies=[ - 'examples/3rdparty/python:tensorflow', - ], - compatibility=['CPython>=3.6,<4'], -) - -files( - name='show-tf-version-files', - sources=['**/*'], - dependencies=[ - 'examples/3rdparty/python:examples_python_3rdparty', - ], -) diff --git a/examples/src/python/example/tensorflow_custom_op/README.md b/examples/src/python/example/tensorflow_custom_op/README.md deleted file mode 100644 index 9aa3d06cb65..00000000000 --- a/examples/src/python/example/tensorflow_custom_op/README.md +++ /dev/null @@ -1,17 +0,0 @@ -tensorflow_custom_op -==================== - -This directory implements the `ZeroOut` custom TensorFlow operator described in [Adding a New Op](https://www.tensorflow.org/guide/extend/op) in the TensorFlow docs. This can be built and tested with (see the [Pants Python README](https://www.pantsbuild.org/python_readme.html)): - -``` bash -> ./pants test examples/tests/python/example_test/tensorflow_custom_op:: -- -vs -``` - - -Note that due to a current limitation (see [#6848](https://github.com/pantsbuild/pants/issues/6848)), this can only be run with the LLVM toolchain on OSX, which can be done with: - -``` bash -./pants --native-build-step-toolchain-variant=llvm test examples/tests/python/example_test/tensorflow_custom_op:: -- -vs -``` - -or by setting the toolchain variant [option in `pants.toml`](https://www.pantsbuild.org/options.html). diff --git a/examples/src/python/example/tensorflow_custom_op/__init__.py b/examples/src/python/example/tensorflow_custom_op/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/examples/src/python/example/tensorflow_custom_op/setup.py b/examples/src/python/example/tensorflow_custom_op/setup.py deleted file mode 100644 index aecaa6625b4..00000000000 --- a/examples/src/python/example/tensorflow_custom_op/setup.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from setuptools import find_packages, setup - -setup( - name="tensorflow_custom_op", - version="0.0.1", - packages=find_packages(), - data_files=[("", ["libtensorflow-zero-out-operator.so"])], -) diff --git a/examples/src/python/example/tensorflow_custom_op/show_tf_version.py b/examples/src/python/example/tensorflow_custom_op/show_tf_version.py deleted file mode 100644 index c6dc2686ff2..00000000000 --- a/examples/src/python/example/tensorflow_custom_op/show_tf_version.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -import tensorflow as tf - -print(f"tf version: {tf.__version__}") diff --git a/examples/src/python/example/tensorflow_custom_op/wrap_lib/__init__.py b/examples/src/python/example/tensorflow_custom_op/wrap_lib/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/examples/src/python/example/tensorflow_custom_op/wrap_lib/wrap_zero_out_op.py b/examples/src/python/example/tensorflow_custom_op/wrap_lib/wrap_zero_out_op.py deleted file mode 100644 index 74d470a8cf9..00000000000 --- a/examples/src/python/example/tensorflow_custom_op/wrap_lib/wrap_zero_out_op.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -import os - - -def _get_generated_shared_lib(lib_name): - # These are the same filenames as in setup.py. - filename = "lib{}.so".format(lib_name) - # The data files are in the root directory. - rel_path = os.path.join(os.path.dirname(__file__), "..", filename) - return os.path.normpath(rel_path) - - -zero_out_op_lib_path = _get_generated_shared_lib("tensorflow-zero-out-operator") diff --git a/examples/src/python/example/tensorflow_custom_op/zero_out_custom_op.py b/examples/src/python/example/tensorflow_custom_op/zero_out_custom_op.py deleted file mode 100644 index a920077eebe..00000000000 --- a/examples/src/python/example/tensorflow_custom_op/zero_out_custom_op.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -# TODO: It would be great if we could maintain the example.tensorflow_custom_op package prefix for -# this python_dist()! - -import tensorflow as tf -from wrap_lib.wrap_zero_out_op import zero_out_op_lib_path - - -# We make this a function in order to lazily load the op library. -def zero_out_module(): - return tf.load_op_library(zero_out_op_lib_path) diff --git a/examples/src/python/example/tensorflow_custom_op/zero_out_op.cpp b/examples/src/python/example/tensorflow_custom_op/zero_out_op.cpp deleted file mode 100644 index 61366d9fea6..00000000000 --- a/examples/src/python/example/tensorflow_custom_op/zero_out_op.cpp +++ /dev/null @@ -1,45 +0,0 @@ -// This code is from the guide in https://www.tensorflow.org/guide/extend/op. - -#include "tensorflow/core/framework/op.h" -#include "tensorflow/core/framework/op_kernel.h" -#include "tensorflow/core/framework/shape_inference.h" - -using namespace tensorflow; - -/* Describe the kernel. */ -class ZeroOutOp : public OpKernel { -public: - explicit ZeroOutOp(OpKernelConstruction *context) : OpKernel(context) {} - - void Compute(OpKernelContext *context) override { - // Grab the input tensor - const Tensor &input_tensor = context->input(0); - auto input = input_tensor.flat(); - - // Create an output tensor - Tensor *output_tensor = NULL; - OP_REQUIRES_OK(context, context->allocate_output(0, input_tensor.shape(), - &output_tensor)); - auto output_flat = output_tensor->flat(); - - // Set all but the first element of the output tensor to 0. - const int N = input.size(); - for (int i = 1; i < N; i++) { - output_flat(i) = 0; - } - - // Preserve the first input value if possible. 
- if (N > 0) - output_flat(0) = input(0); - } -}; - -REGISTER_OP("ZeroOut") - .Input("to_zero: int32") - .Output("zeroed: int32") - .SetShapeFn([](::tensorflow::shape_inference::InferenceContext *c) { - c->set_output(0, c->input(0)); - return Status::OK(); - }); - -REGISTER_KERNEL_BUILDER(Name("ZeroOut").Device(DEVICE_CPU), ZeroOutOp); diff --git a/pants.toml b/pants.toml index 71358bad611..ea999b13fd4 100644 --- a/pants.toml +++ b/pants.toml @@ -163,19 +163,5 @@ timeout_default = 60 [test.pytest] timeout_default = 60 -[native-build-step.cpp-compile-settings] -# TensorFlow 1.13 on python 3.7 specifically uses a newer C++ ABI than any other TensorFlow release, -# so in this repo's CI we override this option to avoid using the previous ABI for our python 3.7 -# testing by setting PANTS_NATIVE_BUILD_STEP_CPP_COMPILE_SETTINGS_DEFAULT_COMPILER_OPTION_SETS="[]" -# in the environment. -default_compiler_option_sets = ['glibcxx_use_old_abi'] -# TensorFlow custom operators cannot be built against the C++11 ABI yet: see -# https://www.tensorflow.org/guide/extend/op. -compiler_option_sets_enabled_args = "{'glibcxx_use_old_abi': ['-D_GLIBCXX_USE_CXX11_ABI=0']}" -compiler_option_sets_disabled_args = "{'glibcxx_use_old_abi': ['-D_GLIBCXX_USE_CXX11_ABI=1']}" - -[libc] -enable_libc_search = true - [sourcefile-validation] config = "@build-support/regexes/config.yaml" diff --git a/src/python/pants/backend/native/BUILD b/src/python/pants/backend/native/BUILD index a5a111286e1..8b83fa58dea 100644 --- a/src/python/pants/backend/native/BUILD +++ b/src/python/pants/backend/native/BUILD @@ -1,4 +1,3 @@ -# coding=utf-8 # Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). @@ -7,14 +6,6 @@ python_library( 'src/python/pants/backend/native/config', 'src/python/pants/backend/native/subsystems', 'src/python/pants/backend/native/subsystems/binaries', - 'src/python/pants/backend/native/targets', - 'src/python/pants/backend/native/tasks', - 'src/python/pants/build_graph', - 'src/python/pants/engine:addresses', - 'src/python/pants/engine:target', - 'src/python/pants/goal:task_registrar', - 'src/python/pants/util:collections', - 'src/python/pants/util:frozendict', ], tags = {"partially_type_checked"}, ) diff --git a/src/python/pants/backend/native/register.py b/src/python/pants/backend/native/register.py index 47158db418f..52f5a604d84 100644 --- a/src/python/pants/backend/native/register.py +++ b/src/python/pants/backend/native/register.py @@ -6,61 +6,8 @@ from pants.backend.native.subsystems.binaries.binutils import create_binutils_rules from pants.backend.native.subsystems.binaries.gcc import create_gcc_rules from pants.backend.native.subsystems.binaries.llvm import create_llvm_rules -from pants.backend.native.subsystems.native_build_settings import NativeBuildSettings from pants.backend.native.subsystems.native_toolchain import create_native_toolchain_rules from pants.backend.native.subsystems.xcode_cli_tools import create_xcode_cli_tools_rules -from pants.backend.native.target_types import ( - CLibrary, - ConanNativeLibrary, - CppLibrary, - PackagedNativeLibrary, -) -from pants.backend.native.targets.external_native_library import ConanRequirement -from pants.backend.native.targets.external_native_library import ( - ExternalNativeLibrary as ExternalNativeLibraryV1, -) -from pants.backend.native.targets.native_artifact import NativeArtifact -from pants.backend.native.targets.native_library import CLibrary as CLibraryV1 -from 
pants.backend.native.targets.native_library import CppLibrary as CppLibraryV1 -from pants.backend.native.targets.packaged_native_library import ( - PackagedNativeLibrary as PackagedNativeLibraryV1, -) -from pants.backend.native.tasks.c_compile import CCompile -from pants.backend.native.tasks.conan_fetch import ConanFetch -from pants.backend.native.tasks.conan_prep import ConanPrep -from pants.backend.native.tasks.cpp_compile import CppCompile -from pants.backend.native.tasks.link_shared_libraries import LinkSharedLibraries -from pants.build_graph.build_file_aliases import BuildFileAliases -from pants.goal.task_registrar import TaskRegistrar as task - - -def build_file_aliases(): - return BuildFileAliases( - targets={ - CLibraryV1.alias(): CLibraryV1, - CppLibraryV1.alias(): CppLibraryV1, - ExternalNativeLibraryV1.alias(): ExternalNativeLibraryV1, - PackagedNativeLibraryV1.alias(): PackagedNativeLibraryV1, - }, - objects={ - ConanRequirement.alias(): ConanRequirement, - NativeArtifact.alias(): NativeArtifact, - }, - ) - - -def global_subsystems(): - return {NativeBuildSettings} - - -def register_goals(): - # TODO(#5962): register these under the 'compile' goal when we eliminate the product transitive - # dependency from export -> compile. - task(name="conan-prep", action=ConanPrep).install("native-compile") - task(name="conan-fetch", action=ConanFetch).install("native-compile") - task(name="c-for-ctypes", action=CCompile).install("native-compile") - task(name="cpp-for-ctypes", action=CppCompile).install("native-compile") - task(name="shared-libraries", action=LinkSharedLibraries).install("link") def rules(): @@ -71,7 +18,3 @@ def rules(): *create_gcc_rules(), *create_llvm_rules(), ) - - -def target_types(): - return [CLibrary, CppLibrary, ConanNativeLibrary, PackagedNativeLibrary] diff --git a/src/python/pants/backend/native/rules/BUILD b/src/python/pants/backend/native/rules/BUILD deleted file mode 100644 index 92ecb7b882a..00000000000 --- a/src/python/pants/backend/native/rules/BUILD +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2020 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -python_library( - dependencies=[ - 'src/python/pants/backend/native/subsystems', - 'src/python/pants/backend/native/targets', - 'src/python/pants/engine:addresses', - 'src/python/pants/engine:target', - 'src/python/pants/util:collections', - 'src/python/pants/util:frozendict', - ], - tags = {"type_checked"}, -) diff --git a/src/python/pants/backend/native/rules/__init__.py b/src/python/pants/backend/native/rules/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/src/python/pants/backend/native/subsystems/native_build_settings.py b/src/python/pants/backend/native/subsystems/native_build_settings.py deleted file mode 100644 index 6611cc4719d..00000000000 --- a/src/python/pants/backend/native/subsystems/native_build_settings.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -from pants.build_graph.mirrored_target_option_mixin import MirroredTargetOptionMixin -from pants.subsystem.subsystem import Subsystem - - -class NativeBuildSettings(Subsystem, MirroredTargetOptionMixin): - """Settings which affect both the compile and link phases.""" - - options_scope = "native-build-settings" - - mirrored_target_option_actions = { - "strict_deps": lambda tgt: tgt.strict_deps, - } - - @classmethod - def register_options(cls, register): - super().register_options(register) - - # TODO: rename this so it's clear it is not the same option as JVM strict deps! - register( - "--strict-deps", - type=bool, - default=True, - fingerprint=True, - advanced=True, - help="Whether to include only dependencies directly declared in the BUILD file " - "for C and C++ targets by default. If this is False, all transitive dependencies " - "are used when compiling and linking native code. C and C++ targets may override " - "this behavior with the strict_deps keyword argument as well.", - ) - - def get_strict_deps_value_for_target(self, target): - return self.get_scalar_mirrored_target_option("strict_deps", target) diff --git a/src/python/pants/backend/native/subsystems/packaging/BUILD b/src/python/pants/backend/native/subsystems/packaging/BUILD deleted file mode 100644 index 54a10f3a503..00000000000 --- a/src/python/pants/backend/native/subsystems/packaging/BUILD +++ /dev/null @@ -1,10 +0,0 @@ -# coding=utf-8 -# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -python_library( - dependencies=[ - 'src/python/pants/backend/python/subsystems', - ], - tags = {"partially_type_checked"}, -) diff --git a/src/python/pants/backend/native/subsystems/packaging/__init__.py b/src/python/pants/backend/native/subsystems/packaging/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/src/python/pants/backend/native/subsystems/packaging/conan.py b/src/python/pants/backend/native/subsystems/packaging/conan.py deleted file mode 100644 index 074486f010e..00000000000 --- a/src/python/pants/backend/native/subsystems/packaging/conan.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -import logging - -from pants.backend.python.subsystems.python_tool_base import PythonToolBase - -logger = logging.getLogger(__name__) - - -class Conan(PythonToolBase): - options_scope = "conan" - default_version = "conan==1.19.2" - # NB: Only versions of pylint below `2.0.0` support use in python 2. - default_extra_requirements = ["pylint==1.9.3"] - default_entry_point = "conans.conan" - default_interpreter_constraints = ["CPython>=2.7"] diff --git a/src/python/pants/backend/native/target_types.py b/src/python/pants/backend/native/target_types.py deleted file mode 100644 index 6906c9cfc75..00000000000 --- a/src/python/pants/backend/native/target_types.py +++ /dev/null @@ -1,229 +0,0 @@ -# Copyright 2020 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -from typing import Dict, Iterable, Optional, Tuple, Union, cast - -from pants.backend.native.subsystems.native_build_step import ToolchainVariant -from pants.backend.native.targets.external_native_library import ConanRequirement -from pants.backend.native.targets.native_artifact import NativeArtifact -from pants.engine.addresses import Address -from pants.engine.target import ( - COMMON_TARGET_FIELDS, - BoolField, - Dependencies, - InvalidFieldTypeException, - PrimitiveField, - ScalarField, - SequenceField, - Sources, - StringField, - StringSequenceField, - Target, -) -from pants.util.collections import ensure_str_list -from pants.util.frozendict import FrozenDict - -# ----------------------------------------------------------------------------------------------- -# `ctypes_compatible_c_library` and `ctypes_compatible_cpp_library` targets -# ----------------------------------------------------------------------------------------------- - - -class CSources(Sources): - default = ("*.h", "*.c") - - -class CppSources(Sources): - # TODO: Add support for all the different C++ file extensions, including `.cc` and `.cxx`. See - # https://stackoverflow.com/a/1546107. - default = ("*.h", "*.hpp", "*.cpp") - - -class CtypesNativeLibrary(ScalarField): - alias = "ctypes_native_library" - expected_type = NativeArtifact - expected_type_description = "a `native_artifact` object" - value: Optional[NativeArtifact] - - @classmethod - def compute_value( - cls, raw_value: Optional[NativeArtifact], *, address: Address - ) -> Optional[NativeArtifact]: - return super().compute_value(raw_value, address=address) - - -class NativeFatalWarnings(BoolField): - alias = "fatal_warnings" - default = False - - -# NB: This is very similar to the JvmStrictDeps field in `backend/jvm`. Consider using the same -# field for both purposes. -class NativeStrictDeps(BoolField): - """Whether to include only dependencies directly declared in the BUILD file. - - If this is False, all transitive dependencies are used when compiling and linking native code. - """ - - alias = "strict_deps" - default = False - - -class ToolchainVariantField(StringField): - """Whether to use gcc (gnu) or clang (llvm) to compile. - - Note that currently, despite the choice of toolchain, all linking is done with binutils ld on - Linux, and the XCode CLI Tools on MacOS. 
- """ - - alias = "toolchain_variant" - valid_choices = ToolchainVariant - - -class NativeCompilerOptionSets(StringSequenceField): - alias = "compiler_option_sets" - - -NATIVE_LIBRARY_COMMON_FIELDS = ( - *COMMON_TARGET_FIELDS, - Dependencies, - CtypesNativeLibrary, - NativeStrictDeps, - NativeFatalWarnings, - ToolchainVariantField, - NativeCompilerOptionSets, -) - - -class CLibrary(Target): - """A C library that is compatible with Python's ctypes.""" - - alias = "ctypes_compatible_c_library" - core_fields = (*NATIVE_LIBRARY_COMMON_FIELDS, CSources) - v1_only = True - - -class CppLibrary(Target): - """A C++ library that is compatible with Python's ctypes.""" - - alias = "ctypes_compatible_cpp_library" - core_fields = (*NATIVE_LIBRARY_COMMON_FIELDS, CppSources) - v1_only = True - - -# ----------------------------------------------------------------------------------------------- -# `external_native_library` target -# ----------------------------------------------------------------------------------------------- - - -class ConanPackages(SequenceField): - """The `ConanRequirement`s to resolve into a `packaged_native_library()` target.""" - - alias = "packages" - expected_element_type = ConanRequirement - expected_type_description = "an iterable of `conan_requirement` objects (e.g. a list)" - value: Tuple[ConanRequirement, ...] - required = True - - @classmethod - def compute_value( - cls, raw_value: Optional[Iterable[ConanRequirement]], *, address: Address - ) -> Tuple[ConanRequirement, ...]: - return cast(Tuple[ConanRequirement, ...], super().compute_value(raw_value, address=address)) - - -class ConanNativeLibrary(Target): - """A set of Conan package strings to be passed to the Conan package manager.""" - - alias = "external_native_library" - core_fields = (*COMMON_TARGET_FIELDS, ConanPackages) - v1_only = True - - -# ----------------------------------------------------------------------------------------------- -# `packaged_native_library` target -# ----------------------------------------------------------------------------------------------- - - -class NativeIncludeRelpath(StringField): - """The path where C/C++ headers are located, relative to this target's directory. - - Libraries depending on this target will be able to #include files relative to this directory. - """ - - alias = "include_relpath" - - -class NativeLibRelpath(StringField): - """The path where native libraries are located, relative to this target's directory.""" - - alias = "lib_relpath" - - -class NativeLibNames(PrimitiveField): - """Libraries to add to the linker command line. - - These libraries become `-l` arguments, so they must exist and be named - `lib.so` (or `lib.dylib` depending on the platform) or the linker will exit with - an error. - - This field may also be a dict mapping the OS name ('darwin' or 'linux') to a list of - such strings. 
- """ - - alias = "native_lib_names" - value: Optional[Union[Tuple[str, ...], FrozenDict[str, Tuple[str, ...]]]] - default = None - - @classmethod - def compute_value( - cls, - raw_value: Optional[Union[Iterable[str], Dict[str, Iterable[str]]]], - *, - address: Address, - ) -> Optional[Union[Tuple[str, ...], FrozenDict[str, Tuple[str, ...]]]]: - value_or_default = super().compute_value(raw_value, address=address) - invalid_field_type_exception = InvalidFieldTypeException( - address, - cls.alias, - value_or_default, - expected_type=( - "either an iterable of strings or a dictionary of platforms to iterable of " - "strings" - ), - ) - if isinstance(value_or_default, dict): - try: - return FrozenDict( - { - platform: tuple(sorted(ensure_str_list(lib_names))) - for platform, lib_names in value_or_default.items() - } - ) - except ValueError: - raise invalid_field_type_exception - try: - ensure_str_list(value_or_default) - except ValueError: - raise invalid_field_type_exception - return tuple(sorted(value_or_default)) - - -class PackagedNativeLibrary(Target): - """A container for headers and libraries from external sources. - - This target type is intended to be generated by a codegen task to wrap various sources of C/C++ - packages in a homogeneous container. It can also be used to wrap native libraries which are - checked into the repository -- the `sources` argument does not allow files outside of the - buildroot. - """ - - alias = "packaged_native_library" - core_fields = ( - *COMMON_TARGET_FIELDS, - Dependencies, - Sources, - NativeIncludeRelpath, - NativeLibRelpath, - NativeLibNames, - ) - v1_only = True diff --git a/src/python/pants/backend/native/targets/BUILD b/src/python/pants/backend/native/targets/BUILD deleted file mode 100644 index 78e485f6592..00000000000 --- a/src/python/pants/backend/native/targets/BUILD +++ /dev/null @@ -1,18 +0,0 @@ -# coding=utf-8 -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -python_library( - dependencies=[ - '3rdparty/python:dataclasses', - 'src/python/pants/base:build_environment', - 'src/python/pants/base:exceptions', - 'src/python/pants/base:payload', - 'src/python/pants/base:payload_field', - 'src/python/pants/build_graph', - 'src/python/pants/engine:platform', - 'src/python/pants/util:enums', - 'src/python/pants/util:memo', - ], - tags = {"partially_type_checked"}, -) diff --git a/src/python/pants/backend/native/targets/__init__.py b/src/python/pants/backend/native/targets/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/src/python/pants/backend/native/targets/external_native_library.py b/src/python/pants/backend/native/targets/external_native_library.py deleted file mode 100644 index 12fb622ab1a..00000000000 --- a/src/python/pants/backend/native/targets/external_native_library.py +++ /dev/null @@ -1,136 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -import re -from dataclasses import dataclass -from typing import Optional, Sequence, Tuple - -from pants.base.hash_utils import stable_json_sha1 -from pants.base.payload import Payload -from pants.base.payload_field import PayloadField -from pants.base.validation import assert_list -from pants.build_graph.target import Target -from pants.util.memo import memoized_property -from pants.util.meta import frozen_after_init - - -# TODO: generalize this to a DatatypeSetField subclass in payload_field.py! -class ConanRequirementSetField(tuple, PayloadField): - def _compute_fingerprint(self): - return stable_json_sha1(tuple(hash(req) for req in self)) - - -@frozen_after_init -@dataclass(unsafe_hash=True) -class ConanRequirement: - """A specification for a conan package to be resolved against a remote repository. - - Example `pkg_spec`: 'lzo/2.10@twitter/stable' - - The include and lib dirs default to 'include/' and 'lib/', but as this is a convention, they can - be overridden for the specific package to be resolved. See - https://docs.conan.io/en/latest/using_packages/conanfile_txt.html#imports for more info. - """ - - pkg_spec: str - include_relpath: str - lib_relpath: str - lib_names: Tuple[str, ...] - - def __init__( - self, - pkg_spec: str, - include_relpath: Optional[str] = None, - lib_relpath: Optional[str] = None, - lib_names: Optional[Sequence[str]] = None, - ) -> None: - """ - :param pkg_spec: A string specifying a conan package at a specific version, as per - https://docs.conan.io/en/latest/using_packages/conanfile_txt.html#requires - :param include_relpath: The relative path from the package root directory to where C/C++ - headers are located. - :param lib_relpath: The relative path from the package root directory to where native - libraries are located. - :param lib_names: Strings containing the libraries to add to the linker command - line. Collected into the `native_lib_names` field of a - `packaged_native_library()` target. - """ - self.pkg_spec = pkg_spec - self.include_relpath = include_relpath or "include" - self.lib_relpath = lib_relpath or "lib" - self.lib_names = tuple(lib_names or ()) - - @classmethod - def alias(cls): - return "conan_requirement" - - def parse_conan_stdout_for_pkg_sha(self, stdout): - # TODO(#6168): Add a JSON output mode in upstream Conan instead of parsing this. - pkg_spec_pattern = re.compile(r"{}:([^\s]+)".format(re.escape(self.pkg_spec))) - return pkg_spec_pattern.search(stdout).group(1) - - @memoized_property - def directory_path(self): - """A helper method for converting Conan to package specifications to the data directory path - that Conan creates for each package. - - Example package specification: - "my_library/1.0.0@pants/stable" - Example of the directory path that Conan downloads package data for this package to: - "my_library/1.0.0/pants/stable" - - For more info on Conan package specifications, see: - https://docs.conan.io/en/latest/introduction.html - """ - return self.pkg_spec.replace("@", "/") - - -class ExternalNativeLibrary(Target): - """A set of Conan package strings to be passed to the Conan package manager.""" - - @classmethod - def alias(cls): - return "external_native_library" - - class _DeprecatedStringPackage(Exception): - pass - - def __init__(self, payload=None, packages=None, **kwargs): - """ - :param list packages: the `ConanRequirement`s to resolve into a `packaged_native_library()` target. 
- """ - payload = payload or Payload() - - assert_list( - packages, - key_arg="packages", - expected_type=ConanRequirement, - raise_type=self._DeprecatedStringPackage, - ) - - payload.add_fields({"packages": ConanRequirementSetField(packages)}) - super().__init__(payload=payload, **kwargs) - - @property - def packages(self): - return self.payload.packages - - # NB: These are always going to be include/ and lib/ as we populate the constituent requirements - # there in `ConanFetch`, and we need to add these to the copied attributes for - # generated targets in ._copy_target_attributes. These need to have the same names as in - # `packaged_native_library()` so that the methods in the `SimpleCodegenTask` superclass can copy - # the attributes over. - @property - def include_relpath(self): - return "include" - - @property - def lib_relpath(self): - return "lib" - - @property - def native_lib_names(self): - lib_names = [] - for req in self.payload.packages: - lib_names.extend(req.lib_names) - return lib_names diff --git a/src/python/pants/backend/native/targets/native_artifact.py b/src/python/pants/backend/native/targets/native_artifact.py deleted file mode 100644 index 99f9f2cf1b0..00000000000 --- a/src/python/pants/backend/native/targets/native_artifact.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from dataclasses import FrozenInstanceError, dataclass -from hashlib import sha1 -from typing import Any - -from pants.base.payload_field import PayloadField -from pants.engine.platform import Platform -from pants.util.enums import match - - -# NB: We manually implement __hash__(), rather than using unsafe_hash=True, because PayloadField -# will mutate _fingerprint_memo and we must ensure that mutation does not affect the hash. For -# extra safety, we override __setattr__() to ensure that the hash can never be accidentally changed. -@dataclass -class NativeArtifact(PayloadField): - """A BUILD file object declaring a target can be exported to other languages with a native - ABI.""" - - lib_name: str - - def __init__(self, lib_name: str) -> None: - self.lib_name = lib_name - self._is_frozen = True - - def __hash__(self) -> int: - return hash(self.lib_name) - - def __setattr__(self, key: str, value: Any) -> None: - if hasattr(self, "_is_frozen") and key == "lib_name": - raise FrozenInstanceError( - f"Attempting to modify the attribute {key} with value {value} on {self}." - ) - super().__setattr__(key, value) - - # TODO: This should probably be made into an @classproperty (see PR #5901). - @classmethod - def alias(cls): - return "native_artifact" - - def as_shared_lib(self, platform): - # TODO: check that the name conforms to some format in the constructor (e.g. no dots?). - return match( - platform, - { - Platform.darwin: f"lib{self.lib_name}.dylib", - Platform.linux: f"lib{self.lib_name}.so", - }, - ) - - def _compute_fingerprint(self): - # TODO: This fingerprint computation boilerplate is error-prone and could probably be - # streamlined, for simple payload fields. - hasher = sha1(self.lib_name.encode()) - return hasher.hexdigest() diff --git a/src/python/pants/backend/native/targets/native_library.py b/src/python/pants/backend/native/targets/native_library.py deleted file mode 100644 index ad39c1039fe..00000000000 --- a/src/python/pants/backend/native/targets/native_library.py +++ /dev/null @@ -1,112 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). 
-# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from abc import ABCMeta - -from pants.backend.native.subsystems.native_build_step import ToolchainVariant -from pants.backend.native.targets.native_artifact import NativeArtifact -from pants.base.exceptions import TargetDefinitionException -from pants.base.payload import Payload -from pants.base.payload_field import PrimitiveField, PrimitivesSetField -from pants.build_graph.target import Target - - -class NativeLibrary(Target, metaclass=ABCMeta): - """A class wrapping targets containing sources for C-family languages and related code.""" - - # TODO: replace this awkward classmethod with a mixin! - @classmethod - def produces_ctypes_native_library(cls, target): - return isinstance(target, cls) and bool(target.ctypes_native_library) - - def __init__( - self, - address, - payload=None, - sources=None, - ctypes_native_library=None, - strict_deps=None, - fatal_warnings=None, - compiler_option_sets=None, - toolchain_variant=None, - **kwargs - ): - - if not payload: - payload = Payload() - sources_field = self.create_sources_field(sources, address.spec_path, key_arg="sources") - payload.add_fields( - { - "sources": sources_field, - "ctypes_native_library": ctypes_native_library, - "strict_deps": PrimitiveField(strict_deps), - "fatal_warnings": PrimitiveField(fatal_warnings), - "compiler_option_sets": PrimitivesSetField(compiler_option_sets), - "toolchain_variant": PrimitiveField(toolchain_variant), - } - ) - - if ctypes_native_library and not isinstance(ctypes_native_library, NativeArtifact): - raise TargetDefinitionException( - "Target must provide a valid pants '{}' object. Received an object with type '{}' " - "and value: {}.".format( - NativeArtifact.alias(), - type(ctypes_native_library).__name__, - ctypes_native_library, - ) - ) - - super().__init__(address=address, payload=payload, **kwargs) - - @property - def toolchain_variant(self): - if not self.payload.toolchain_variant: - return None - - return ToolchainVariant(self.payload.toolchain_variant) - - @property - def strict_deps(self): - return self.payload.strict_deps - - @property - def fatal_warnings(self): - return self.payload.fatal_warnings - - @property - def compiler_option_sets(self): - """For every element in this list, enable the corresponding flags on compilation of targets. - - :return: See constructor. - :rtype: list - """ - return self.payload.compiler_option_sets - - @property - def ctypes_native_library(self): - return self.payload.ctypes_native_library - - -class CLibrary(NativeLibrary): - - default_sources_globs = [ - "*.h", - "*.c", - ] - - @classmethod - def alias(cls): - return "ctypes_compatible_c_library" - - -class CppLibrary(NativeLibrary): - - default_sources_globs = [ - "*.h", - "*.hpp", - "*.cpp", - ] - - @classmethod - def alias(cls): - return "ctypes_compatible_cpp_library" diff --git a/src/python/pants/backend/native/targets/packaged_native_library.py b/src/python/pants/backend/native/targets/packaged_native_library.py deleted file mode 100644 index 489b09211c3..00000000000 --- a/src/python/pants/backend/native/targets/packaged_native_library.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -from pants.base.payload import Payload -from pants.base.payload_field import PrimitiveField -from pants.build_graph.target import Target - - -class PackagedNativeLibrary(Target): - """A container for headers and libraries from external sources. - - This target type is intended to be generated by a codegen task to wrap various sources of C/C++ - packages in a homogenous container. It can also be used to wrap native libraries which are - checked into the repository -- the `sources` argument does not allow files outside of the - buildroot. - """ - - @classmethod - def alias(cls): - return "packaged_native_library" - - def __init__( - self, - address, - payload=None, - sources=None, - include_relpath=None, - lib_relpath=None, - native_lib_names=None, - **kwargs - ): - """ - :param sources: Files owned by this target. - :param str include_relpath: The path where C/C++ headers are located, relative to this target's - directory. Libraries depending on this target will be able to - #include files relative to this directory. - :param str lib_relpath: The path where native libraries are located, relative to this target's - directory. - :param list native_lib_names: Strings containing the libraries to add to the linker command - line. These libraries become `-l` arguments, so they must - exist and be named `lib.so` (or `lib.dylib` depending - on the platform) or the linker will exit with an error. This field - may also be a dict mapping the OS name ('darwin' or 'linux') to a - list of such strings. - """ - if not payload: - payload = Payload() - payload.add_fields( - { - "sources": self.create_sources_field(sources, address.spec_path, key_arg="sources"), - "include_relpath": PrimitiveField(include_relpath), - "lib_relpath": PrimitiveField(lib_relpath), - "native_lib_names": PrimitiveField(native_lib_names), - } - ) - super().__init__(address=address, payload=payload, **kwargs) - - @property - def include_relpath(self): - return self.payload.include_relpath - - @property - def lib_relpath(self): - return self.payload.lib_relpath - - @property - def native_lib_names(self): - return self.payload.native_lib_names diff --git a/src/python/pants/backend/native/tasks/BUILD b/src/python/pants/backend/native/tasks/BUILD deleted file mode 100644 index 4c05357e708..00000000000 --- a/src/python/pants/backend/native/tasks/BUILD +++ /dev/null @@ -1,25 +0,0 @@ -# coding=utf-8 -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -python_library( - dependencies=[ - '3rdparty/python:dataclasses', - 'src/python/pants/backend/native/config', - 'src/python/pants/backend/native/subsystems', - 'src/python/pants/backend/native/subsystems/packaging', - 'src/python/pants/backend/native/targets', - 'src/python/pants/base:build_environment', - 'src/python/pants/base:exceptions', - 'src/python/pants/base:workunit', - 'src/python/pants/build_graph', - 'src/python/pants/task', - 'src/python/pants/util:collections', - 'src/python/pants/util:contextutil', - 'src/python/pants/util:enums', - 'src/python/pants/util:memo', - 'src/python/pants/util:objects', - 'src/python/pants/util:strutil', - ], - tags = {"partially_type_checked"}, -) diff --git a/src/python/pants/backend/native/tasks/__init__.py b/src/python/pants/backend/native/tasks/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/src/python/pants/backend/native/tasks/c_compile.py b/src/python/pants/backend/native/tasks/c_compile.py deleted file mode 100644 index 89ee32eb5bf..00000000000 --- a/src/python/pants/backend/native/tasks/c_compile.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from pants.backend.native.subsystems.native_build_step import CCompileSettings -from pants.backend.native.targets.native_library import CLibrary -from pants.backend.native.tasks.native_compile import NativeCompile -from pants.util.objects import SubclassesOf - - -class CCompile(NativeCompile): - - options_scope = "c-compile" - - # Compile only C library targets. - source_target_constraint = SubclassesOf(CLibrary) - - workunit_label = "c-compile" - - @classmethod - def implementation_version(cls): - return super().implementation_version() + [("CCompile", 0)] - - @classmethod - def subsystem_dependencies(cls): - return super().subsystem_dependencies() + (CCompileSettings.scoped(cls),) - - def get_compile_settings(self): - return CCompileSettings.scoped_instance(self) - - def get_compiler(self, native_library_target): - return self.get_c_toolchain_variant(native_library_target).c_compiler diff --git a/src/python/pants/backend/native/tasks/conan_fetch.py b/src/python/pants/backend/native/tasks/conan_fetch.py deleted file mode 100644 index e9d80339d76..00000000000 --- a/src/python/pants/backend/native/tasks/conan_fetch.py +++ /dev/null @@ -1,211 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -import functools -import os -import re - -from pants.backend.native.targets.external_native_library import ExternalNativeLibrary -from pants.backend.native.targets.packaged_native_library import PackagedNativeLibrary -from pants.backend.native.tasks.conan_prep import ConanPrep -from pants.base.build_environment import get_pants_cachedir -from pants.base.exceptions import TaskError -from pants.base.workunit import WorkUnitLabel -from pants.engine.platform import Platform -from pants.task.simple_codegen_task import SimpleCodegenTask -from pants.util.contextutil import temporary_dir -from pants.util.dirutil import mergetree, safe_file_dump, safe_mkdir -from pants.util.enums import match -from pants.util.memo import memoized_property - - -class ConanFetch(SimpleCodegenTask): - - gentarget_type = ExternalNativeLibrary - - sources_globs = ( - "include/**/*", - "lib/*", - ) - - @property - def validate_sources_present(self): - return False - - def synthetic_target_type(self, target): - return PackagedNativeLibrary - - default_remotes = { - "conan-center": "https://conan.bintray.com", - } - - @classmethod - def register_options(cls, register): - super().register_options(register) - register( - "--conan-remotes", - type=dict, - default=cls.default_remotes, - advanced=True, - fingerprint=True, - help="The conan remotes to download conan packages from.", - ) - - @classmethod - def implementation_version(cls): - return super().implementation_version() + [("ConanFetch", 1)] - - @classmethod - def prepare(cls, options, round_manager): - super().prepare(options, round_manager) - round_manager.require_data(ConanPrep.tool_instance_cls) - - class ConanConfigError(TaskError): - pass - - class ConanFetchError(TaskError): - pass - - @property - def _remotes_txt_content(self): - """Generate a file containing overrides for Conan remotes which get applied to - registry.json.""" - return "{}\n".format( - "\n".join( - "{name} {url} {is_ssl}".format( - name=name, url=url, is_ssl=re.match(r"^https://", url) is not None - ) - for name, url in self.get_options().conan_remotes.items() - ) - ) - - def _conan_user_home(self, conan, in_workdir=False): - """Create the CONAN_USER_HOME for this task fingerprint and initialize the Conan remotes. - - See https://docs.conan.io/en/latest/reference/commands/consumer/config.html#conan-config-install - for docs on configuring remotes. - """ - # This argument is exposed so tests don't leak out of the workdir. - if in_workdir: - base_cache_dir = self.workdir - else: - base_cache_dir = get_pants_cachedir() - user_home_base = os.path.join(base_cache_dir, "conan-support", "conan-user-home") - # Locate the subdirectory of the pants shared cachedir specific to this task's option values. - user_home = os.path.join(user_home_base, self.fingerprint) - conan_install_base = os.path.join(user_home, ".conan") - # Conan doesn't copy remotes.txt into the .conan subdir after the "config install" command, it - # simply edits registry.json. However, it is valid to have this file there, and Conan won't - # touch it, so we use its presence to detect whether we have appropriately initialized the - # Conan installation. 
- remotes_txt_sentinel = os.path.join(conan_install_base, "remotes.txt") - if not os.path.isfile(remotes_txt_sentinel): - safe_mkdir(conan_install_base) - # Conan doesn't consume the remotes.txt file just by being in the conan directory -- we need - # to create another directory containing any selection of files detailed in - # https://docs.conan.io/en/latest/reference/commands/consumer/config.html#conan-config-install - # and "install" from there to our desired conan directory. - with temporary_dir() as remotes_install_dir: - # Create an artificial conan configuration dir containing just remotes.txt. - remotes_txt_for_install = os.path.join(remotes_install_dir, "remotes.txt") - safe_file_dump(remotes_txt_for_install, self._remotes_txt_content) - # Configure the desired user home from this artificial config dir. - argv = ["config", "install", remotes_install_dir] - workunit_factory = functools.partial( - self.context.new_workunit, - name="initial-conan-config", - labels=[WorkUnitLabel.TOOL], - ) - env = { - "CONAN_USER_HOME": user_home, - } - cmdline, exit_code = conan.run(workunit_factory, argv, env=env) - if exit_code != 0: - raise self.ConanConfigError( - "Error configuring conan with argv {} and environment {}: exited non-zero ({}).".format( - cmdline, env, exit_code - ), - exit_code=exit_code, - ) - # Generate the sentinel file so that we know the remotes have been successfully configured for - # this particular task fingerprint in successive pants runs. - safe_file_dump(remotes_txt_sentinel, self._remotes_txt_content) - - return user_home - - @memoized_property - def _conan_os_name(self): - return match(Platform.current, {Platform.darwin: "Macos", Platform.linux: "Linux"}) - - @property - def _copy_target_attributes(self): - basic_attributes = [a for a in super()._copy_target_attributes if a != "provides"] - return basic_attributes + [ - "include_relpath", - "lib_relpath", - "native_lib_names", - ] - - def execute_codegen(self, target, target_workdir): - """Invoke the conan pex to fetch conan packages specified by a `ExternalNativeLibrary` - target. - - :param ExternalNativeLibrary target: a target containing conan package specifications. - :param str target_workdir: where to copy the installed package contents to. - """ - conan = self.context.products.get_data(ConanPrep.tool_instance_cls) - - # TODO: we should really be able to download all of these in one go, and we should make an - # upstream PR to allow that against Conan if not. - for conan_requirement in target.packages: - # See https://docs.conan.io/en/latest/reference/commands/consumer/install.html for - # documentation on the 'install' command. - argv = [ - "install", - conan_requirement.pkg_spec, - "--settings", - "os={}".format(self._conan_os_name), - ] - for remote in self.get_options().conan_remotes: - argv.extend(["--remote", remote]) - - workunit_factory = functools.partial( - self.context.new_workunit, - name="install-conan-{}".format(conan_requirement.pkg_spec), - labels=[WorkUnitLabel.TOOL], - ) - # CONAN_USER_HOME is somewhat documented at - # https://docs.conan.io/en/latest/mastering/sharing_settings_and_config.html. 
- user_home = self._conan_user_home(conan) - env = { - "CONAN_USER_HOME": user_home, - } - - with conan.run_with(workunit_factory, argv, env=env) as (cmdline, exit_code, workunit): - if exit_code != 0: - raise self.ConanFetchError( - "Error performing conan install with argv {} and environment {}: exited non-zero ({}).".format( - cmdline, env, exit_code - ), - exit_code=exit_code, - ) - - # Read the stdout from the read-write buffer, from the beginning of the output, and convert - # to unicode. - conan_install_stdout = workunit.output("stdout").read_from(0).decode() - pkg_sha = conan_requirement.parse_conan_stdout_for_pkg_sha(conan_install_stdout) - - installed_data_dir = os.path.join( - user_home, ".conan", "data", conan_requirement.directory_path, "package", pkg_sha - ) - - # Copy over the contents of the installed package into the target output directory. These - # paths are currently hardcoded -- see `ExternalNativeLibrary`. - mergetree( - os.path.join(installed_data_dir, conan_requirement.include_relpath), - os.path.join(target_workdir, "include"), - ) - mergetree( - os.path.join(installed_data_dir, conan_requirement.lib_relpath), - os.path.join(target_workdir, "lib"), - ) diff --git a/src/python/pants/backend/native/tasks/conan_prep.py b/src/python/pants/backend/native/tasks/conan_prep.py deleted file mode 100644 index 747f998d83c..00000000000 --- a/src/python/pants/backend/native/tasks/conan_prep.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from pants.backend.native.subsystems.packaging.conan import Conan -from pants.backend.native.targets.external_native_library import ExternalNativeLibrary -from pants.backend.python.tasks.python_tool_prep_base import PythonToolInstance, PythonToolPrepBase - - -class ConanInstance(PythonToolInstance): - pass - - -class ConanPrep(PythonToolPrepBase): - tool_subsystem_cls = Conan - tool_instance_cls = ConanInstance - - def will_be_invoked(self): - return any(self.get_targets(lambda t: isinstance(t, ExternalNativeLibrary))) diff --git a/src/python/pants/backend/native/tasks/cpp_compile.py b/src/python/pants/backend/native/tasks/cpp_compile.py deleted file mode 100644 index d77bc585fe4..00000000000 --- a/src/python/pants/backend/native/tasks/cpp_compile.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from pants.backend.native.subsystems.native_build_step import CppCompileSettings -from pants.backend.native.targets.native_library import CppLibrary -from pants.backend.native.tasks.native_compile import NativeCompile -from pants.util.objects import SubclassesOf - - -class CppCompile(NativeCompile): - - options_scope = "cpp-compile" - - # Compile only C++ library targets. 
- source_target_constraint = SubclassesOf(CppLibrary) - - workunit_label = "cpp-compile" - - @classmethod - def implementation_version(cls): - return super().implementation_version() + [("CppCompile", 0)] - - @classmethod - def subsystem_dependencies(cls): - return super().subsystem_dependencies() + (CppCompileSettings.scoped(cls),) - - def get_compile_settings(self): - return CppCompileSettings.scoped_instance(self) - - def get_compiler(self, native_library_target): - return self.get_cpp_toolchain_variant(native_library_target).cpp_compiler diff --git a/src/python/pants/backend/native/tasks/link_shared_libraries.py b/src/python/pants/backend/native/tasks/link_shared_libraries.py deleted file mode 100644 index aab4dedd3af..00000000000 --- a/src/python/pants/backend/native/tasks/link_shared_libraries.py +++ /dev/null @@ -1,226 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -import os -import subprocess -from dataclasses import dataclass -from typing import Any, Tuple - -from pants.backend.native.config.environment import Linker -from pants.backend.native.targets.native_artifact import NativeArtifact -from pants.backend.native.targets.native_library import NativeLibrary -from pants.backend.native.tasks.native_compile import ObjectFiles -from pants.backend.native.tasks.native_task import NativeTask -from pants.base.build_environment import get_buildroot -from pants.base.exceptions import TaskError -from pants.base.workunit import WorkUnit, WorkUnitLabel -from pants.engine.platform import Platform -from pants.util.enums import match -from pants.util.memo import memoized_property - - -@dataclass(frozen=True) -class SharedLibrary: - name: Any - path: Any - - -@dataclass(frozen=True) -class LinkSharedLibraryRequest: - linker: Linker - object_files: Tuple - native_artifact: NativeArtifact - output_dir: Any - external_lib_dirs: Tuple - external_lib_names: Tuple - - -class LinkSharedLibraries(NativeTask): - - options_scope = "link-shared-libraries" - - @classmethod - def product_types(cls): - return [SharedLibrary] - - @classmethod - def prepare(cls, options, round_manager): - super().prepare(options, round_manager) - round_manager.require(ObjectFiles) - - @property - def cache_target_dirs(self): - return True - - @classmethod - def implementation_version(cls): - return super().implementation_version() + [("LinkSharedLibraries", 1)] - - class LinkSharedLibrariesError(TaskError): - pass - - def linker(self, native_library_target): - # NB: we are using the C++ toolchain here for linking every type of input, including compiled C - # source files. - return self.get_cpp_toolchain_variant(native_library_target).cpp_linker - - @memoized_property - def platform(self) -> Platform: - # TODO: convert this to a v2 engine dependency injection. 
- return Platform.current - - def execute(self): - targets_providing_artifacts = self.context.targets( - NativeLibrary.produces_ctypes_native_library - ) - compiled_objects_product = self.context.products.get(ObjectFiles) - shared_libs_product = self.context.products.get(SharedLibrary) - - all_shared_libs_by_name = {} - - with self.invalidated( - targets_providing_artifacts, invalidate_dependents=True - ) as invalidation_check: - for vt in invalidation_check.all_vts: - if vt.valid: - shared_library = self._retrieve_shared_lib_from_cache(vt) - else: - # TODO: We need to partition links based on proper dependency edges and not - # perform a link to every packaged_native_library() for all targets in the closure. - # https://github.com/pantsbuild/pants/issues/6178 - link_request = self._make_link_request(vt, compiled_objects_product) - self.context.log.debug("link_request: {}".format(link_request)) - shared_library = self._execute_link_request(link_request) - - same_name_shared_lib = all_shared_libs_by_name.get(shared_library.name, None) - if same_name_shared_lib: - # TODO: test this branch! - raise self.LinkSharedLibrariesError( - "The name '{name}' was used for two shared libraries: {prev} and {cur}.".format( - name=shared_library.name, prev=same_name_shared_lib, cur=shared_library - ) - ) - else: - all_shared_libs_by_name[shared_library.name] = shared_library - - self._add_product_at_target_base(shared_libs_product, vt.target, shared_library) - - def _retrieve_shared_lib_from_cache(self, vt): - native_artifact = vt.target.ctypes_native_library - path_to_cached_lib = os.path.join( - vt.results_dir, native_artifact.as_shared_lib(self.platform) - ) - if not os.path.isfile(path_to_cached_lib): - raise self.LinkSharedLibrariesError( - "The shared library at {} does not exist!".format(path_to_cached_lib) - ) - return SharedLibrary(name=native_artifact.lib_name, path=path_to_cached_lib) - - def _make_link_request(self, vt, compiled_objects_product): - self.context.log.debug("link target: {}".format(vt.target)) - - deps = self.native_deps(vt.target) - - all_compiled_object_files = [] - for dep_tgt in deps: - if compiled_objects_product.get(dep_tgt): - self.context.log.debug("dep_tgt: {}".format(dep_tgt)) - object_files = self._retrieve_single_product_at_target_base( - compiled_objects_product, dep_tgt - ) - self.context.log.debug("object_files: {}".format(object_files)) - object_file_paths = object_files.file_paths() - self.context.log.debug("object_file_paths: {}".format(object_file_paths)) - all_compiled_object_files.extend(object_file_paths) - - external_lib_dirs = [] - external_lib_names = [] - for ext_dep in self.packaged_native_deps(vt.target): - external_lib_dirs.append( - os.path.join(get_buildroot(), ext_dep._sources_field.rel_path, ext_dep.lib_relpath) - ) - - native_lib_names = ext_dep.native_lib_names - if isinstance(native_lib_names, dict): - # `native_lib_names` is a dictionary mapping the string representation of the Platform - # ('darwin' vs. 'linux') to a list of strings. We use the Enum's `.value` to get the - # underlying string value to lookup the relevant key in the dictionary. 
- native_lib_names = native_lib_names[self.platform.value] - external_lib_names.extend(native_lib_names) - - link_request = LinkSharedLibraryRequest( - linker=self.linker(vt.target), - object_files=tuple(all_compiled_object_files), - native_artifact=vt.target.ctypes_native_library, - output_dir=vt.results_dir, - external_lib_dirs=tuple(external_lib_dirs), - external_lib_names=tuple(external_lib_names), - ) - - self.context.log.debug(repr(link_request)) - - return link_request - - def _execute_link_request(self, link_request): - object_files = link_request.object_files - - if len(object_files) == 0: - raise self.LinkSharedLibrariesError( - "No object files were provided in request {}!".format(link_request) - ) - - linker = link_request.linker - native_artifact = link_request.native_artifact - output_dir = link_request.output_dir - resulting_shared_lib_path = os.path.join( - output_dir, native_artifact.as_shared_lib(self.platform) - ) - - self.context.log.debug("resulting_shared_lib_path: {}".format(resulting_shared_lib_path)) - # We are executing in the results_dir, so get absolute paths for everything. - cmd = ( - [linker.exe_filename] - + match(self.platform, {Platform.darwin: ["-Wl,-dylib"], Platform.linux: ["-shared"]}) - + list(linker.extra_args) - + ["-o", os.path.abspath(resulting_shared_lib_path)] - + ["-L{}".format(lib_dir) for lib_dir in link_request.external_lib_dirs] - + ["-l{}".format(lib_name) for lib_name in link_request.external_lib_names] - + [os.path.abspath(obj) for obj in object_files] - ) - - self.context.log.info("selected linker exe name: '{}'".format(linker.exe_filename)) - self.context.log.debug("linker argv: {}".format(cmd)) - - env = linker.invocation_environment_dict - self.context.log.debug("linker invocation environment: {}".format(env)) - - with self.context.new_workunit( - name="link-shared-libraries", labels=[WorkUnitLabel.LINKER] - ) as workunit: - try: - process = subprocess.Popen( - cmd, - cwd=output_dir, - stdout=workunit.output("stdout"), - stderr=workunit.output("stderr"), - env=env, - ) - except OSError as e: - workunit.set_outcome(WorkUnit.FAILURE) - raise self.LinkSharedLibrariesError( - "Error invoking the native linker with command {cmd} and environment {env} " - "for request {req}: {err}.".format(cmd=cmd, env=env, req=link_request, err=e), - e, - ) - - rc = process.wait() - if rc != 0: - workunit.set_outcome(WorkUnit.FAILURE) - raise self.LinkSharedLibrariesError( - "Error linking native objects with command {cmd} and environment {env} " - "for request {req}. Exit code was: {rc}.".format( - cmd=cmd, env=env, req=link_request, rc=rc - ) - ) - - return SharedLibrary(name=native_artifact.lib_name, path=resulting_shared_lib_path) diff --git a/src/python/pants/backend/native/tasks/native_compile.py b/src/python/pants/backend/native/tasks/native_compile.py deleted file mode 100644 index 3b6ac804957..00000000000 --- a/src/python/pants/backend/native/tasks/native_compile.py +++ /dev/null @@ -1,301 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -import os -import subprocess -from abc import ABCMeta, abstractmethod -from collections import defaultdict -from dataclasses import dataclass -from typing import Any, Optional - -from pants.backend.native.tasks.native_task import NativeTask -from pants.base.build_environment import get_buildroot -from pants.base.exceptions import TaskError -from pants.base.workunit import WorkUnit, WorkUnitLabel -from pants.util.memo import memoized_method, memoized_property -from pants.util.meta import classproperty -from pants.util.objects import TypeConstraint - - -@dataclass(frozen=True) -class NativeCompileRequest: - compiler: Any - include_dirs: Any - sources: Any - compiler_options: Any - output_dir: Any - header_file_extensions: Any - - -# TODO(#5950): perform all process execution in the v2 engine! -@dataclass(frozen=True) -class ObjectFiles: - root_dir: Any - filenames: Any - - def file_paths(self): - return [os.path.join(self.root_dir, fname) for fname in self.filenames] - - -class NativeCompile(NativeTask, metaclass=ABCMeta): - # `NativeCompile` will use the `source_target_constraint` to determine what targets have "sources" - # to compile, and the `dependent_target_constraint` to determine which dependent targets to - # operate on for `strict_deps` calculation. - # NB: `source_target_constraint` must be overridden. - source_target_constraint: Optional[TypeConstraint] = None - - @classproperty - @abstractmethod - def workunit_label(cls): - """A string describing the work being done during compilation. - - `NativeCompile` will use `workunit_label` as the name of the workunit when executing the - compiler process. - - :rtype: str - """ - - @classmethod - def product_types(cls): - return [ObjectFiles] - - @property - def cache_target_dirs(self): - return True - - @classmethod - def implementation_version(cls): - return super().implementation_version() + [("NativeCompile", 1)] - - class NativeCompileError(TaskError): - """Raised for errors in this class's logic. - - Subclasses are advised to create their own exception class. - """ - - def execute(self): - object_files_product = self.context.products.get(ObjectFiles) - source_targets = self.context.targets(self.source_target_constraint.satisfied_by) - - with self.invalidated(source_targets, invalidate_dependents=True) as invalidation_check: - for vt in invalidation_check.all_vts: - if not vt.valid: - compile_request = self._make_compile_request(vt) - self.context.log.debug("compile_request: {}".format(compile_request)) - self._compile(compile_request) - - object_files = self.collect_cached_objects(vt) - self._add_product_at_target_base(object_files_product, vt.target, object_files) - - # This may be calculated many times for a target, so we memoize it. - @memoized_method - def _include_dirs_for_target(self, target): - return os.path.join(get_buildroot(), target.address.spec_path) - - @dataclass(frozen=True) - class NativeSourcesByType: - rel_root: Any - headers: Any - sources: Any - - def get_sources_headers_for_target(self, target): - """Return a list of file arguments to provide to the compiler. - - NB: result list will contain both header and source files! - - :raises: :class:`NativeCompile.NativeCompileError` if there is an error processing the sources. - """ - # Get source paths relative to the target base so the exception message with the target and - # paths makes sense. 
- target_relative_sources = target.sources_relative_to_target_base() - rel_root = target_relative_sources.rel_root - - # Unique file names are required because we just dump object files into a single directory, and - # the compiler will silently just produce a single object file if provided non-unique filenames. - # TODO: add some shading to file names so we can remove this check. - # NB: It shouldn't matter if header files have the same name, but this will raise an error in - # that case as well. We won't need to do any shading of header file names. - seen_filenames = defaultdict(list) - for src in target_relative_sources: - seen_filenames[os.path.basename(src)].append(src) - duplicate_filename_err_msgs = [] - for fname, source_paths in seen_filenames.items(): - if len(source_paths) > 1: - duplicate_filename_err_msgs.append( - "filename: {}, paths: {}".format(fname, source_paths) - ) - if duplicate_filename_err_msgs: - raise self.NativeCompileError( - "Error in target '{}': source files must have a unique filename within a '{}' target. " - "Conflicting filenames:\n{}".format( - target.address.spec, target.alias(), "\n".join(duplicate_filename_err_msgs) - ) - ) - - return [os.path.join(get_buildroot(), rel_root, src) for src in target_relative_sources] - - @abstractmethod - def get_compile_settings(self): - """Return an instance of NativeBuildStep. - - NB: Subclasses will be queried for the compile settings once and the result cached. - """ - - @memoized_property - def _compile_settings(self): - return self.get_compile_settings() - - @abstractmethod - def get_compiler(self, native_library_target): - """An instance of `_CompilerMixin` which can be invoked to compile files. - - NB: Subclasses will be queried for the compiler instance once and the result cached. 
- - :return: :class:`pants.backend.native.config.environment._CompilerMixin` - """ - - def _compiler(self, native_library_target): - return self.get_compiler(native_library_target) - - def _make_compile_request(self, versioned_target): - target = versioned_target.target - - include_dirs = [] - for dep in self.native_deps(target): - source_lib_base_dir = os.path.join(get_buildroot(), dep._sources_field.rel_path) - include_dirs.append(source_lib_base_dir) - for ext_dep in self.packaged_native_deps(target): - external_lib_include_dir = os.path.join( - get_buildroot(), ext_dep._sources_field.rel_path, ext_dep.include_relpath - ) - self.context.log.debug( - "ext_dep: {}, external_lib_include_dir: {}".format( - ext_dep, external_lib_include_dir - ) - ) - include_dirs.append(external_lib_include_dir) - - sources_and_headers = self.get_sources_headers_for_target(target) - compiler_option_sets = self._compile_settings.native_build_step.get_compiler_option_sets_for_target( - target - ) - self.context.log.debug( - "target: {}, compiler_option_sets: {}".format(target, compiler_option_sets) - ) - - compile_request = NativeCompileRequest( - compiler=self._compiler(target), - include_dirs=include_dirs, - sources=sources_and_headers, - compiler_options=( - self._compile_settings.native_build_step.get_merged_args_for_compiler_option_sets( - compiler_option_sets - ) - ), - output_dir=versioned_target.results_dir, - header_file_extensions=self._compile_settings.header_file_extensions, - ) - - self.context.log.debug(repr(compile_request)) - - return compile_request - - def _iter_sources_minus_headers(self, compile_request): - for s in compile_request.sources: - if not s.endswith(tuple(compile_request.header_file_extensions)): - yield s - - class _HeaderOnlyLibrary(Exception): - pass - - def _make_compile_argv(self, compile_request): - """Return a list of arguments to use to compile sources. - - Subclasses can override and append. - """ - - sources_minus_headers = list(self._iter_sources_minus_headers(compile_request)) - if len(sources_minus_headers) == 0: - raise self._HeaderOnlyLibrary() - - compiler = compile_request.compiler - compiler_options = compile_request.compiler_options - # We are going to execute in the target output, so get absolute paths for everything. - buildroot = get_buildroot() - # TODO: add -v to every compiler and linker invocation! - argv = ( - [compiler.exe_filename] - + list(compiler.extra_args) - + - # TODO: If we need to produce static libs, don't add -fPIC! (could use Variants -- see #5788). - ["-c", "-fPIC"] - + list(compiler_options) - + [ - "-I{}".format(os.path.join(buildroot, inc_dir)) - for inc_dir in compile_request.include_dirs - ] - + [os.path.join(buildroot, src) for src in sources_minus_headers] - ) - - self.context.log.info("selected compiler exe name: '{}'".format(compiler.exe_filename)) - self.context.log.debug("compile argv: {}".format(argv)) - - return argv - - def _compile(self, compile_request): - """Perform the process of compilation, writing object files to the request's 'output_dir'. - - NB: This method must arrange the output files so that `collect_cached_objects()` can collect all - of the results (or vice versa)! 
- """ - - try: - argv = self._make_compile_argv(compile_request) - except self._HeaderOnlyLibrary: - self.context.log.debug("{} is a header-only library".format(compile_request)) - return - - compiler = compile_request.compiler - output_dir = compile_request.output_dir - env = compiler.invocation_environment_dict - - with self.context.new_workunit( - name=self.workunit_label, labels=[WorkUnitLabel.COMPILER] - ) as workunit: - try: - process = subprocess.Popen( - argv, - cwd=output_dir, - stdout=workunit.output("stdout"), - stderr=workunit.output("stderr"), - env=env, - ) - except OSError as e: - workunit.set_outcome(WorkUnit.FAILURE) - raise self.NativeCompileError( - "Error invoking '{exe}' with command {cmd} and environment {env} for request {req}: {err}".format( - exe=compiler.exe_filename, cmd=argv, env=env, req=compile_request, err=e - ) - ) - - rc = process.wait() - if rc != 0: - workunit.set_outcome(WorkUnit.FAILURE) - raise self.NativeCompileError( - "Error in '{section_name}' with command {cmd} and environment {env} for request {req}. " - "Exit code was: {rc}.".format( - section_name=self.workunit_label, - cmd=argv, - env=env, - req=compile_request, - rc=rc, - ) - ) - - def collect_cached_objects(self, versioned_target): - """Scan `versioned_target`'s results directory and return the output files from that - directory. - - :return: :class:`ObjectFiles` - """ - return ObjectFiles(versioned_target.results_dir, os.listdir(versioned_target.results_dir)) diff --git a/src/python/pants/backend/native/tasks/native_task.py b/src/python/pants/backend/native/tasks/native_task.py deleted file mode 100644 index af72333ff57..00000000000 --- a/src/python/pants/backend/native/tasks/native_task.py +++ /dev/null @@ -1,160 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from abc import abstractmethod - -from pants.backend.native.config.environment import CppToolchain, CToolchain -from pants.backend.native.subsystems.native_build_settings import NativeBuildSettings -from pants.backend.native.subsystems.native_build_step import NativeBuildStep -from pants.backend.native.subsystems.native_toolchain import ( - NativeToolchain, - ToolchainVariantRequest, -) -from pants.backend.native.targets.native_library import NativeLibrary -from pants.backend.native.targets.packaged_native_library import PackagedNativeLibrary -from pants.build_graph.dependency_context import DependencyContext -from pants.task.task import Task -from pants.util.collections import assert_single_element -from pants.util.memo import memoized_method, memoized_property -from pants.util.meta import classproperty -from pants.util.objects import Exactly, SubclassesOf - - -class NativeTask(Task): - @classproperty - @abstractmethod - def source_target_constraint(cls): - """Return a type constraint which is used to filter "source" targets for this task. - - This is used to make it clearer which tasks act on which targets, since the compile and link - tasks work on different target sets (just C and just C++ in the compile tasks, and both in the - link task). - - :return: :class:`pants.util.objects.TypeConstraint` - """ - - @classproperty - def dependent_target_constraint(cls): - """Return a type constraint which is used to filter dependencies for a target. - - This is used to make native_deps() calculation automatic and declarative. 
- - :return: :class:`pants.util.objects.TypeConstraint` - """ - return SubclassesOf(NativeLibrary) - - @classproperty - def packaged_dependent_constraint(cls): - """Return a type constraint which is used to filter 3rdparty dependencies for a target. - - This is used to make packaged_native_deps() automatic and declarative. - - :return: :class:`pants.util.objects.TypeConstraint` - """ - return Exactly(PackagedNativeLibrary) - - @classmethod - def subsystem_dependencies(cls): - return super().subsystem_dependencies() + ( - # We use a globally-scoped dependency on NativeBuildSettings because the toolchain and - # dependency calculation need to be the same for both compile and link tasks (and subscoping - # would break that). - NativeBuildSettings, - NativeToolchain.scoped(cls), - ) - - @classmethod - def prepare(cls, options, round_manager): - super().prepare(options, round_manager) - # Allow the deferred_sources_mapping to take place first - round_manager.optional_data("deferred_sources") - - @classmethod - def implementation_version(cls): - return super().implementation_version() + [("NativeTask", 0)] - - @memoized_property - def _native_build_settings(self): - return NativeBuildSettings.global_instance() - - # TODO(#7183): remove this global subsystem dependency! - @memoized_property - def _native_build_step(self): - return NativeBuildStep.global_instance() - - @memoized_property - def _native_toolchain(self): - return NativeToolchain.scoped_instance(self) - - def _toolchain_variant_request(self, variant): - return ToolchainVariantRequest(toolchain=self._native_toolchain, variant=variant) - - def get_c_toolchain_variant(self, native_library_target): - return self._get_toolchain_variant(CToolchain, native_library_target) - - def get_cpp_toolchain_variant(self, native_library_target): - return self._get_toolchain_variant(CppToolchain, native_library_target) - - def _get_toolchain_variant(self, toolchain_type, native_library_target): - selected_variant = self._native_build_step.get_toolchain_variant_for_target( - native_library_target - ) - return self._request_single( - toolchain_type, self._toolchain_variant_request(selected_variant) - ) - - @memoized_method - def native_deps(self, target): - return self.strict_deps_for_target( - target, predicate=self.dependent_target_constraint.satisfied_by - ) - - @memoized_method - def packaged_native_deps(self, target): - return self.strict_deps_for_target( - target, predicate=self.packaged_dependent_constraint.satisfied_by - ) - - def strict_deps_for_target(self, target, predicate=None): - """Get the dependencies of `target` filtered by `predicate`, accounting for 'strict_deps'. - - If 'strict_deps' is on, instead of using the transitive closure of dependencies, targets will - only be able to see their immediate dependencies declared in the BUILD file. The 'strict_deps' - setting is obtained from the result of `get_compile_settings()`. - - NB: This includes the current target in the result. - """ - if self._native_build_settings.get_strict_deps_value_for_target(target): - strict_deps = target.strict_dependencies(DependencyContext()) - if predicate: - filtered_deps = list(filter(predicate, strict_deps)) - else: - filtered_deps = strict_deps - deps = [target] + filtered_deps - else: - deps = self.context.build_graph.transitive_subgraph_of_addresses( - [target.address], predicate=predicate - ) - - # Filter out the beginning target depending on whether it matches the predicate. - # TODO: There should be a cleaner way to do this. 
- deps = filter(predicate, deps) - - return deps - - def _add_product_at_target_base(self, product_mapping, target, value): - product_mapping.add(target, target.target_base).append(value) - - def _retrieve_single_product_at_target_base(self, product_mapping, target): - self.context.log.debug("product_mapping: {}".format(product_mapping)) - self.context.log.debug("target: {}".format(target)) - product = product_mapping.get(target) - single_base_dir = assert_single_element(product.keys()) - single_product = assert_single_element(product[single_base_dir]) - return single_product - - # TODO(#5869): delete this when we can request Subsystems from options in tasks! - def _request_single(self, product, subject): - # NB: This is not supposed to be exposed to Tasks yet -- see #4769 to track the status of - # exposing v2 products in v1 tasks. - return self.context._scheduler.product_request(product, [subject])[0] diff --git a/src/python/pants/backend/python/register.py b/src/python/pants/backend/python/register.py index 629386b59f4..3ea3f95e68b 100644 --- a/src/python/pants/backend/python/register.py +++ b/src/python/pants/backend/python/register.py @@ -48,9 +48,6 @@ ) from pants.backend.python.targets.python_tests import PythonTests as PythonTestsV1 from pants.backend.python.targets.unpacked_whls import UnpackedWheels as UnpackedWheelsV1 -from pants.backend.python.tasks.build_local_python_distributions import ( - BuildLocalPythonDistributions, -) from pants.backend.python.tasks.gather_sources import GatherSources from pants.backend.python.tasks.local_python_distribution_artifact import ( LocalPythonDistributionArtifact, @@ -105,7 +102,6 @@ def build_file_aliases(): def register_goals(): task(name="interpreter", action=SelectInterpreter).install("pyprep") - task(name="build-local-dists", action=BuildLocalPythonDistributions).install("pyprep") task(name="requirements", action=ResolveRequirements).install("pyprep") task(name="sources", action=GatherSources).install("pyprep") task(name="py", action=PythonRun).install("run") diff --git a/src/python/pants/backend/python/subsystems/BUILD b/src/python/pants/backend/python/subsystems/BUILD index 59d186ee271..119117a7ba7 100644 --- a/src/python/pants/backend/python/subsystems/BUILD +++ b/src/python/pants/backend/python/subsystems/BUILD @@ -6,7 +6,6 @@ python_library( '3rdparty/python:dataclasses', '3rdparty/python:pex', 'src/python/pants/backend/native/subsystems', - 'src/python/pants/backend/native/targets', 'src/python/pants/backend/python/targets', 'src/python/pants/base:build_environment', 'src/python/pants/base:deprecated', diff --git a/src/python/pants/backend/python/subsystems/python_native_code.py b/src/python/pants/backend/python/subsystems/python_native_code.py index 67c7875e0e5..55601f64803 100644 --- a/src/python/pants/backend/python/subsystems/python_native_code.py +++ b/src/python/pants/backend/python/subsystems/python_native_code.py @@ -1,49 +1,23 @@ # Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). 
-import logging import os from dataclasses import dataclass -from textwrap import dedent from typing import Dict, Tuple -from pants.backend.native.subsystems.native_toolchain import NativeToolchain -from pants.backend.native.targets.native_library import NativeLibrary -from pants.backend.python.targets.python_distribution import PythonDistribution -from pants.base.exceptions import IncompatiblePlatformsError from pants.engine.rules import SubsystemRule, rule -from pants.python import pex_build_util -from pants.python.python_setup import PythonSetup from pants.subsystem.subsystem import Subsystem -from pants.util.memo import memoized_property -from pants.util.objects import SubclassesOf from pants.util.strutil import safe_shlex_join, safe_shlex_split -logger = logging.getLogger(__name__) - class PythonNativeCode(Subsystem): """A subsystem which exposes components of the native backend to the python backend.""" options_scope = "python-native-code" - default_native_source_extensions = [".c", ".cpp", ".cc"] - - class PythonNativeCodeError(Exception): - pass - @classmethod def register_options(cls, register): super().register_options(register) - - register( - "--native-source-extensions", - type=list, - default=cls.default_native_source_extensions, - fingerprint=True, - advanced=True, - help="The extensions recognized for native source files in `python_dist()` sources.", - ) # TODO(#7735): move the --cpp-flags and --ld-flags to a general subprocess support subsystem. register( "--cpp-flags", @@ -62,80 +36,6 @@ def register_options(cls, register): help="Override the `LDFLAGS` environment variable for any forked subprocesses.", ) - @classmethod - def subsystem_dependencies(cls): - return super().subsystem_dependencies() + (NativeToolchain.scoped(cls), PythonSetup,) - - @memoized_property - def _native_source_extensions(self): - return self.get_options().native_source_extensions - - @memoized_property - def native_toolchain(self): - return NativeToolchain.scoped_instance(self) - - @memoized_property - def _python_setup(self): - return PythonSetup.global_instance() - - def pydist_has_native_sources(self, target): - return target.has_sources(extension=tuple(self._native_source_extensions)) - - @memoized_property - def _native_target_matchers(self): - return { - SubclassesOf(PythonDistribution): self.pydist_has_native_sources, - SubclassesOf(NativeLibrary): NativeLibrary.produces_ctypes_native_library, - } - - def _any_targets_have_native_sources(self, targets): - # TODO(#5949): convert this to checking if the closure of python requirements has any - # platform-specific packages (maybe find the platforms there too?). - for tgt in targets: - for type_constraint, target_predicate in self._native_target_matchers.items(): - if type_constraint.satisfied_by(tgt) and target_predicate(tgt): - return True - return False - - def check_build_for_current_platform_only(self, targets): - """Performs a check of whether the current target closure has native sources and if so, - ensures that Pants is only targeting the current platform. - - :param targets: a list of :class:`Target` objects. - :return: a boolean value indicating whether the current target closure has native sources. - :raises: :class:`pants.base.exceptions.IncompatiblePlatformsError` - """ - # TODO(#5949): convert this to checking if the closure of python requirements has any - # platform-specific packages (maybe find the platforms there too?). 
- if not self._any_targets_have_native_sources(targets): - return False - - platforms_with_sources = pex_build_util.targets_by_platform(targets, self._python_setup) - platform_names = list(platforms_with_sources.keys()) - - if not platform_names or platform_names == ["current"]: - return True - - bad_targets = set() - for platform, targets in platforms_with_sources.items(): - if platform == "current": - continue - bad_targets.update(targets) - - raise IncompatiblePlatformsError( - dedent( - """\ - Pants doesn't currently support cross-compiling native code. - The following targets set platforms arguments other than ['current'], which is unsupported for this reason. - Please either remove the platforms argument from these targets, or set them to exactly ['current']. - Bad targets: - {} - """.format( - "\n".join(sorted(target.address.reference() for target in bad_targets)) - ) - ) - ) - @dataclass(frozen=True) class PexBuildEnvironment: @@ -161,7 +61,4 @@ def create_pex_native_build_environment( def rules(): - return [ - SubsystemRule(PythonNativeCode), - create_pex_native_build_environment, - ] + return [SubsystemRule(PythonNativeCode), create_pex_native_build_environment] diff --git a/src/python/pants/backend/python/tasks/BUILD b/src/python/pants/backend/python/tasks/BUILD index 5adcad9fcb4..388b16e9f75 100644 --- a/src/python/pants/backend/python/tasks/BUILD +++ b/src/python/pants/backend/python/tasks/BUILD @@ -8,8 +8,6 @@ python_library( '3rdparty/python/twitter/commons:twitter.common.dirutil', 'src/python/pants/backend/native/config', 'src/python/pants/backend/native/subsystems', - 'src/python/pants/backend/native/targets', - 'src/python/pants/backend/native/tasks', 'src/python/pants/backend/python:interpreter_cache', 'src/python/pants/backend/python/lint/isort', 'src/python/pants/backend/python/rules', # For util.py. diff --git a/src/python/pants/backend/python/tasks/build_local_python_distributions.py b/src/python/pants/backend/python/tasks/build_local_python_distributions.py deleted file mode 100644 index 5b0bd1ce0c0..00000000000 --- a/src/python/pants/backend/python/tasks/build_local_python_distributions.py +++ /dev/null @@ -1,318 +0,0 @@ -# Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -import glob -import os -import re -import shutil -import sysconfig -from pathlib import Path - -from pex.interpreter import PythonInterpreter - -from pants.backend.native.targets.native_library import NativeLibrary -from pants.backend.native.tasks.link_shared_libraries import SharedLibrary -from pants.backend.python.subsystems.python_native_code import PythonNativeCode -from pants.backend.python.targets.python_requirement_library import PythonRequirementLibrary -from pants.base.build_environment import get_buildroot -from pants.base.exceptions import TargetDefinitionException, TaskError -from pants.base.workunit import WorkUnitLabel -from pants.build_graph.address import Address -from pants.python.pex_build_util import is_local_python_dist -from pants.python.python_requirement import PythonRequirement -from pants.python.setup_py_runner import SetupPyRunner -from pants.task.task import Task -from pants.util.collections import assert_single_element -from pants.util.dirutil import safe_mkdir_for, split_basename_and_dirname -from pants.util.memo import memoized_property -from pants.util.strutil import safe_shlex_join - - -# TODO: make this a SimpleCodegenTask!!! 
-class BuildLocalPythonDistributions(Task): - """Create python distributions (.whl) from python_dist targets.""" - - options_scope = "python-create-distributions" - - # NB: these are all the immediate subdirectories of the target's results directory. - # This contains any modules from a setup_requires(). - _SETUP_REQUIRES_SITE_SUBDIR = "setup_requires_site" - # This will contain the sources used to build the python_dist(). - _DIST_SOURCE_SUBDIR = "python_dist_subdir" - - setup_requires_pex_filename = "setup-requires.pex" - - # This defines the output directory when building the dist, so we know where the output wheel is - # located. It is a subdirectory of `_DIST_SOURCE_SUBDIR`. - _DIST_OUTPUT_DIR = "dist" - - @classmethod - def product_types(cls): - # Note that we don't actually place the products in the product map. We stitch - # them into the build graph instead. This is just to force the round engine - # to run this task when dists need to be built. - return [PythonRequirementLibrary, "local_wheels"] - - @classmethod - def prepare(cls, options, round_manager): - round_manager.require_data(PythonInterpreter) - round_manager.optional_product(SharedLibrary) - - @classmethod - def implementation_version(cls): - return super().implementation_version() + [("BuildLocalPythonDistributions", 3)] - - @classmethod - def subsystem_dependencies(cls): - return super().subsystem_dependencies() + ( - SetupPyRunner.Factory.scoped(cls), - PythonNativeCode.scoped(cls), - ) - - class BuildLocalPythonDistributionsError(TaskError): - pass - - @memoized_property - def _python_native_code_settings(self): - return PythonNativeCode.scoped_instance(self) - - def _build_setup_py_runner(self, extra_reqs=None, interpreter=None, pex_file_path=None): - return SetupPyRunner.Factory.create( - scope=self, extra_reqs=extra_reqs, interpreter=interpreter, pex_file_path=pex_file_path - ) - - # TODO: This should probably be made into an @classproperty (see PR #5901). 
- @property - def cache_target_dirs(self): - return True - - def _get_setup_requires_to_resolve(self, dist_target): - if not dist_target.setup_requires: - return None - - reqs_to_resolve = set() - - for setup_req_lib_addr in dist_target.setup_requires: - for maybe_req_lib in self.context.build_graph.resolve(setup_req_lib_addr): - if isinstance(maybe_req_lib, PythonRequirementLibrary): - for req in maybe_req_lib.requirements: - reqs_to_resolve.add(req) - - if not reqs_to_resolve: - return None - - return reqs_to_resolve - - @classmethod - def _get_output_dir(cls, results_dir): - return os.path.join(results_dir, cls._DIST_SOURCE_SUBDIR) - - @classmethod - def _get_dist_dir(cls, results_dir): - return os.path.join(cls._get_output_dir(results_dir), cls._DIST_OUTPUT_DIR) - - def execute(self): - dist_targets = self.context.targets(is_local_python_dist) - - if dist_targets: - interpreter = self.context.products.get_data(PythonInterpreter) - shared_libs_product = self.context.products.get(SharedLibrary) - - with self.invalidated(dist_targets, invalidate_dependents=True) as invalidation_check: - for vt in invalidation_check.invalid_vts: - self._prepare_and_create_dist(interpreter, shared_libs_product, vt) - - local_wheel_products = self.context.products.get("local_wheels") - for vt in invalidation_check.all_vts: - dist = self._get_whl_from_dir(vt.results_dir) - req_lib_addr = Address.parse(f"{vt.target.address.spec}__req_lib") - self._inject_synthetic_dist_requirements(dist, req_lib_addr) - # Make any target that depends on the dist depend on the synthetic req_lib, - # for downstream consumption. - for dependent in self.context.build_graph.dependents_of(vt.target.address): - self.context.build_graph.inject_dependency(dependent, req_lib_addr) - dist_dir, dist_base = split_basename_and_dirname(dist) - local_wheel_products.add(vt.target, dist_dir).append(dist_base) - - def _get_native_artifact_deps(self, target): - native_artifact_targets = [] - if target.dependencies: - for dep_tgt in target.dependencies: - if not NativeLibrary.produces_ctypes_native_library(dep_tgt): - raise TargetDefinitionException( - target, - "Target '{}' is invalid: the only dependencies allowed in python_dist() targets " - "are C or C++ targets with a ctypes_native_library= kwarg.".format( - dep_tgt.address.spec - ), - ) - native_artifact_targets.append(dep_tgt) - return native_artifact_targets - - def _copy_sources(self, dist_tgt, dist_target_dir): - # Copy sources and setup.py over to vt results directory for packaging. - # NB: The directory structure of the destination directory needs to match 1:1 - # with the directory structure that setup.py expects. 
- all_sources = list(dist_tgt.sources_relative_to_target_base()) - for src_relative_to_target_base in all_sources: - src_rel_to_results_dir = os.path.join(dist_target_dir, src_relative_to_target_base) - safe_mkdir_for(src_rel_to_results_dir) - abs_src_path = os.path.join( - get_buildroot(), dist_tgt.address.spec_path, src_relative_to_target_base - ) - shutil.copyfile(abs_src_path, src_rel_to_results_dir) - - def _add_artifacts(self, dist_target_dir, shared_libs_product, native_artifact_targets): - all_shared_libs = [] - for tgt in native_artifact_targets: - product_mapping = shared_libs_product.get(tgt) - base_dir = assert_single_element(product_mapping.keys()) - shared_lib = assert_single_element(product_mapping[base_dir]) - all_shared_libs.append(shared_lib) - - for shared_lib in all_shared_libs: - basename = os.path.basename(shared_lib.path) - # NB: We convert everything to .so here so that the setup.py can just - # declare .so to build for either platform. - resolved_outname = re.sub(r"\..*\Z", ".so", basename) - dest_path = os.path.join(dist_target_dir, resolved_outname) - safe_mkdir_for(dest_path) - shutil.copyfile(shared_lib.path, dest_path) - - return all_shared_libs - - def _prepare_and_create_dist(self, interpreter, shared_libs_product, versioned_target): - dist_target = versioned_target.target - - native_artifact_deps = self._get_native_artifact_deps(dist_target) - - results_dir = versioned_target.results_dir - - dist_output_dir = self._get_output_dir(results_dir) - - all_native_artifacts = self._add_artifacts( - dist_output_dir, shared_libs_product, native_artifact_deps - ) - - # TODO: remove the triplication all of this validation across _get_native_artifact_deps(), - # check_build_for_current_platform_only(), and len(all_native_artifacts) > 0! - is_platform_specific = ( - # We are including a platform-specific shared lib in this dist, so mark it as such. - len(all_native_artifacts) > 0 - or self._python_native_code_settings.check_build_for_current_platform_only( - # NB: This doesn't reach into transitive dependencies, but that doesn't matter currently. - [dist_target] - + dist_target.dependencies - ) - ) - - versioned_target_fingerprint = versioned_target.cache_key.hash - - setup_requires_dir = os.path.join(results_dir, self._SETUP_REQUIRES_SITE_SUBDIR) - setup_reqs_to_resolve = self._get_setup_requires_to_resolve(dist_target) - if setup_reqs_to_resolve: - self.context.log.debug( - "python_dist target(s) with setup_requires detected. " - "Installing setup requirements: {}\n\n".format( - [req.key for req in setup_reqs_to_resolve] - ) - ) - - pex_file_path = os.path.join( - setup_requires_dir, f"setup-py-runner-{versioned_target_fingerprint}.pex" - ) - setup_py_runner = self._build_setup_py_runner( - interpreter=interpreter, extra_reqs=setup_reqs_to_resolve, pex_file_path=pex_file_path - ) - self.context.log.debug(f"Using pex file as setup.py interpreter: {setup_py_runner}") - - self._create_dist( - dist_target, - dist_output_dir, - setup_py_runner, - versioned_target_fingerprint, - is_platform_specific, - ) - - # NB: "snapshot" refers to a "snapshot release", not a Snapshot. - def _generate_snapshot_bdist_wheel_argv(self, snapshot_fingerprint, is_platform_specific): - """Create a command line to generate a wheel via `setup.py`. - - Note that distutils will convert `snapshot_fingerprint` into a string suitable for a version - tag. Currently for versioned target fingerprints, this seems to convert all punctuation into - '.' and downcase all ASCII chars. 
See https://www.python.org/dev/peps/pep-0440/ for further - information on allowed version names. - - NB: adds a '+' before the fingerprint to the build tag! - """ - egg_info_snapshot_tag_args = ["egg_info", f"--tag-build=+{snapshot_fingerprint}"] - bdist_whl_args = ["bdist_wheel"] - if is_platform_specific: - platform = sysconfig.get_platform().replace(".", "_").replace("-", "_") - platform_args = ["--plat-name", platform] - else: - platform_args = [] - - dist_dir_args = ["--dist-dir", self._DIST_OUTPUT_DIR] - - return egg_info_snapshot_tag_args + bdist_whl_args + platform_args + dist_dir_args - - def _create_dist( - self, dist_tgt, dist_target_dir, setup_py_runner, snapshot_fingerprint, is_platform_specific - ): - """Create a .whl file for the specified python_distribution target.""" - self._copy_sources(dist_tgt, dist_target_dir) - - setup_py_snapshot_version_argv = self._generate_snapshot_bdist_wheel_argv( - snapshot_fingerprint, is_platform_specific - ) - - cmd = safe_shlex_join(setup_py_runner.cmdline(setup_py_snapshot_version_argv)) - with self.context.new_workunit( - "setup.py", cmd=cmd, labels=[WorkUnitLabel.TOOL] - ) as workunit: - try: - setup_py_runner.run_setup_command( - source_dir=Path(dist_target_dir), - setup_command=setup_py_snapshot_version_argv, - stdout=workunit.output("stdout"), - stderr=workunit.output("stderr"), - ) - except SetupPyRunner.CommandFailure as e: - raise self.BuildLocalPythonDistributionsError( - f"Installation of python distribution from target {dist_tgt} into directory " - f"{dist_target_dir} failed using the host system's compiler and linker: {e}" - ) - - # TODO: convert this into a SimpleCodegenTask, which does the exact same thing as this method! - def _inject_synthetic_dist_requirements(self, dist, req_lib_addr): - """Inject a synthetic requirements library that references a local wheel. - - :param dist: Path of the locally built wheel to reference. - :param req_lib_addr: :class: `Address` to give to the synthetic target. - :return: a :class: `PythonRequirementLibrary` referencing the locally-built wheel. - """ - whl_dir, base = split_basename_and_dirname(dist) - whl_metadata = base.split("-") - req_name = "==".join([whl_metadata[0], whl_metadata[1]]) - req = PythonRequirement(req_name, repository=whl_dir) - self.context.build_graph.inject_synthetic_target( - req_lib_addr, PythonRequirementLibrary, requirements=[req] - ) - - @classmethod - def _get_whl_from_dir(cls, install_dir): - """Return the absolute path of the whl in a setup.py install directory.""" - dist_dir = cls._get_dist_dir(install_dir) - dists = glob.glob(os.path.join(dist_dir, "*.whl")) - if len(dists) == 0: - raise cls.BuildLocalPythonDistributionsError( - "No distributions were produced by python_create_distribution task.\n" - "dist_dir: {}, install_dir: {}".format(dist_dir, install_dir) - ) - if len(dists) > 1: - # TODO: is this ever going to happen? 
- raise cls.BuildLocalPythonDistributionsError( - "Ambiguous local python distributions found: {}".format(dists) - ) - return dists[0] diff --git a/src/python/pants/option/global_options.py b/src/python/pants/option/global_options.py index 9cdb326878d..87678f1576e 100644 --- a/src/python/pants/option/global_options.py +++ b/src/python/pants/option/global_options.py @@ -274,12 +274,7 @@ def register_bootstrap_options(cls, register): "--backend-packages", advanced=True, type=list, - default=[ - "pants.backend.python", - "pants.backend.jvm", - "pants.backend.native", - "pants.cache", - ], + default=["pants.backend.python", "pants.backend.jvm", "pants.cache"], help=( "Register v1 tasks from these backends. The backend packages must be present on " "the PYTHONPATH, typically because they are in the Pants core dist, in a " diff --git a/testprojects/src/python/BUILD b/testprojects/src/python/BUILD index f6b2ed7b813..bba5e746847 100644 --- a/testprojects/src/python/BUILD +++ b/testprojects/src/python/BUILD @@ -13,7 +13,6 @@ target( ':no_build_file_directory', ':plugins_directory', ':print_env_directory', - ':python_distribution_directory', ':python_targets_directory', ':sources_directory', ':unicode_directory', @@ -65,15 +64,6 @@ files( sources = ['print_env/**/*'], ) -files( - name = 'python_distribution_directory', - sources = ['python_distribution/**/*'], - dependencies = [ - 'testprojects/3rdparty:python_directory', - ], -) - - files( name = 'python_targets_directory', sources = ['python_targets/**/*'], diff --git a/testprojects/src/python/python_distribution/ctypes/BUILD b/testprojects/src/python/python_distribution/ctypes/BUILD deleted file mode 100644 index a19b724dd36..00000000000 --- a/testprojects/src/python/python_distribution/ctypes/BUILD +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -ctypes_compatible_c_library( - name='c_library', - sources=['some_math.h', 'some_math.c', 'src-subdir/add_three.h', 'src-subdir/add_three.c'], - ctypes_native_library=native_artifact(lib_name='asdf-c_ctypes'), -) - -ctypes_compatible_cpp_library( - name='cpp_library', - sources=['some_more_math.hpp', 'some_more_math.cpp'], - ctypes_native_library=native_artifact(lib_name='asdf-cpp_ctypes'), -) - -python_dist( - sources=[ - 'setup.py', - 'ctypes_python_pkg/__init__.py', - 'ctypes_python_pkg/ctypes_wrapper.py', - ], - dependencies=[ - ':c_library', - ':cpp_library', - ], -) - -python_binary( - name='bin', - sources=['main.py'], - dependencies=[ - ':ctypes', - ], - platforms=['current'], -) - -python_binary( - name='with_platforms', - sources=['main.py'], - dependencies=[ - ':ctypes', - 'testprojects/3rdparty/python:numpy', - ], - platforms=[ - "linux-x86_64", - "macosx-10.13-x86_64", - ], -) diff --git a/testprojects/src/python/python_distribution/ctypes/__init__.py b/testprojects/src/python/python_distribution/ctypes/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/python/python_distribution/ctypes/ctypes_python_pkg/__init__.py b/testprojects/src/python/python_distribution/ctypes/ctypes_python_pkg/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/python/python_distribution/ctypes/ctypes_python_pkg/ctypes_wrapper.py b/testprojects/src/python/python_distribution/ctypes/ctypes_python_pkg/ctypes_wrapper.py deleted file mode 100644 index dd50bb9a1f0..00000000000 --- a/testprojects/src/python/python_distribution/ctypes/ctypes_python_pkg/ctypes_wrapper.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -import ctypes -import os - - -def get_generated_shared_lib(lib_name): - # These are the same filenames as in setup.py. - filename = 'lib{}.so'.format(lib_name) - # The data files are in the root directory, but we are in ctypes_python_pkg/. - rel_path = os.path.join(os.path.dirname(__file__), '..', filename) - return os.path.normpath(rel_path) - - -asdf_c_lib_path = get_generated_shared_lib('asdf-c_ctypes') -asdf_cpp_lib_path = get_generated_shared_lib('asdf-cpp_ctypes') - -asdf_c_lib = ctypes.CDLL(asdf_c_lib_path) -asdf_cpp_lib = ctypes.CDLL(asdf_cpp_lib_path) - - -def f(x): - added = asdf_c_lib.add_three(x) - multiplied = asdf_cpp_lib.multiply_by_three(added) - return multiplied diff --git a/testprojects/src/python/python_distribution/ctypes/main.py b/testprojects/src/python/python_distribution/ctypes/main.py deleted file mode 100644 index deab79b8017..00000000000 --- a/testprojects/src/python/python_distribution/ctypes/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from ctypes_python_pkg.ctypes_wrapper import f - -if __name__ == "__main__": - x = 3 - print("x={}, f(x)={}".format(x, f(x))) diff --git a/testprojects/src/python/python_distribution/ctypes/setup.py b/testprojects/src/python/python_distribution/ctypes/setup.py deleted file mode 100644 index f90e302327c..00000000000 --- a/testprojects/src/python/python_distribution/ctypes/setup.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -from setuptools import find_packages, setup - - -setup( - name='ctypes_test', - version='0.0.1', - packages=find_packages(), - # Declare two files at the top-level directory (denoted by ''). - data_files=[('', ['libasdf-c_ctypes.so', 'libasdf-cpp_ctypes.so'])], -) diff --git a/testprojects/src/python/python_distribution/ctypes/some_math.c b/testprojects/src/python/python_distribution/ctypes/some_math.c deleted file mode 100644 index a5a28435c49..00000000000 --- a/testprojects/src/python/python_distribution/ctypes/some_math.c +++ /dev/null @@ -1,3 +0,0 @@ -#include "some_math.h" - -int add_two(int x) { return x + 2; } diff --git a/testprojects/src/python/python_distribution/ctypes/some_math.h b/testprojects/src/python/python_distribution/ctypes/some_math.h deleted file mode 100644 index af394ad0504..00000000000 --- a/testprojects/src/python/python_distribution/ctypes/some_math.h +++ /dev/null @@ -1,6 +0,0 @@ -#ifndef __SOME_MATH_H__ -#define __SOME_MATH_H__ - -int add_two(int); - -#endif diff --git a/testprojects/src/python/python_distribution/ctypes/some_more_math.cpp b/testprojects/src/python/python_distribution/ctypes/some_more_math.cpp deleted file mode 100644 index ef1d5835eee..00000000000 --- a/testprojects/src/python/python_distribution/ctypes/some_more_math.cpp +++ /dev/null @@ -1,5 +0,0 @@ -#include "some_more_math.hpp" - -int mangled_function(int x) { return x ^ 3; } - -extern "C" int multiply_by_three(int x) { return mangled_function(x * 3); } diff --git a/testprojects/src/python/python_distribution/ctypes/some_more_math.hpp b/testprojects/src/python/python_distribution/ctypes/some_more_math.hpp deleted file mode 100644 index 31953f8b9ba..00000000000 --- a/testprojects/src/python/python_distribution/ctypes/some_more_math.hpp +++ /dev/null @@ -1,8 +0,0 @@ -#ifndef __SOME_MORE_MATH_HPP__ -#define __SOME_MORE_MATH_HPP__ - -int mangled_function(int); - -extern "C" int multiply_by_three(int); - -#endif diff --git a/testprojects/src/python/python_distribution/ctypes/src-subdir/add_three.c b/testprojects/src/python/python_distribution/ctypes/src-subdir/add_three.c deleted file mode 100644 index 21d7f8824a8..00000000000 --- a/testprojects/src/python/python_distribution/ctypes/src-subdir/add_three.c +++ /dev/null @@ -1,3 +0,0 @@ -#include "add_three.h" - -int add_three(int x) { return x + 3; } diff --git a/testprojects/src/python/python_distribution/ctypes/src-subdir/add_three.h b/testprojects/src/python/python_distribution/ctypes/src-subdir/add_three.h deleted file mode 100644 index 963a5460242..00000000000 --- a/testprojects/src/python/python_distribution/ctypes/src-subdir/add_three.h +++ /dev/null @@ -1,6 +0,0 @@ -#ifndef __ADD_THREE_H__ -#define __ADD_THREE_H__ - -int add_three(int x); - -#endif diff --git a/testprojects/src/python/python_distribution/ctypes_interop/BUILD b/testprojects/src/python/python_distribution/ctypes_interop/BUILD deleted file mode 100644 index 4b3e9dedd45..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_interop/BUILD +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -python_dist( - sources=[ - 'setup.py', - 'ctypes_python_pkg/__init__.py', - 'ctypes_python_pkg/ctypes_wrapper.py', - ], - dependencies=[ - 'testprojects/src/python/python_distribution/ctypes_interop/some-more-math', - 'testprojects/src/python/python_distribution/ctypes_interop/wrapped-math', - ], -) - -python_binary( - name='bin', - sources=['main.py'], - dependencies=[ - ':ctypes_interop', - ], - platforms=['current'], -) diff --git a/testprojects/src/python/python_distribution/ctypes_interop/__init__.py b/testprojects/src/python/python_distribution/ctypes_interop/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/python/python_distribution/ctypes_interop/ctypes_python_pkg/__init__.py b/testprojects/src/python/python_distribution/ctypes_interop/ctypes_python_pkg/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/python/python_distribution/ctypes_interop/ctypes_python_pkg/ctypes_wrapper.py b/testprojects/src/python/python_distribution/ctypes_interop/ctypes_python_pkg/ctypes_wrapper.py deleted file mode 100644 index afa5fefdea1..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_interop/ctypes_python_pkg/ctypes_wrapper.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -import ctypes -import logging -import os - - -logger = logging.getLogger(__name__) - - -def get_generated_shared_lib(lib_name): - # These are the same filenames as in setup.py. - filename = 'lib{}.so'.format(lib_name) - # The data files are in the root directory, but we are in ctypes_python_pkg/. - rel_path = os.path.join(os.path.dirname(__file__), '..', filename) - return os.path.normpath(rel_path) - - -cpp_math_lib_path = get_generated_shared_lib('some-more-math') -c_wrapped_math_lib_path = get_generated_shared_lib('wrapped-math') - -cpp_math_lib = ctypes.CDLL(cpp_math_lib_path) -c_wrapped_math_lib = ctypes.CDLL(c_wrapped_math_lib_path) - - -def f(x): - some_cpp_math_result = cpp_math_lib.add_two(x) + cpp_math_lib.multiply_by_three(x) - logger.debug('some_cpp_math_result: {}'.format(some_cpp_math_result)) - some_c_wrapped_math_result = ( - c_wrapped_math_lib.add_two(x) + - c_wrapped_math_lib.multiply_by_three(x) + - c_wrapped_math_lib.wrapped_function(x)) - logger.debug('some_c_wrapped_math_result: {}'.format(some_c_wrapped_math_result)) - return some_cpp_math_result * some_c_wrapped_math_result diff --git a/testprojects/src/python/python_distribution/ctypes_interop/main.py b/testprojects/src/python/python_distribution/ctypes_interop/main.py deleted file mode 100644 index deab79b8017..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_interop/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from ctypes_python_pkg.ctypes_wrapper import f - -if __name__ == "__main__": - x = 3 - print("x={}, f(x)={}".format(x, f(x))) diff --git a/testprojects/src/python/python_distribution/ctypes_interop/setup.py b/testprojects/src/python/python_distribution/ctypes_interop/setup.py deleted file mode 100644 index b4b5488d952..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_interop/setup.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -from setuptools import find_packages, setup - - -setup( - name='ctypes_interop_test', - version='0.0.1', - packages=find_packages(), - # Declare two files at the top-level directory (denoted by ''). - data_files=[('', ['libsome-more-math.so', 'libwrapped-math.so'])], -) diff --git a/testprojects/src/python/python_distribution/ctypes_interop/some-math/BUILD b/testprojects/src/python/python_distribution/ctypes_interop/some-math/BUILD deleted file mode 100644 index bdaaae68059..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_interop/some-math/BUILD +++ /dev/null @@ -1 +0,0 @@ -ctypes_compatible_c_library() diff --git a/testprojects/src/python/python_distribution/ctypes_interop/some-math/some_math.c b/testprojects/src/python/python_distribution/ctypes_interop/some-math/some_math.c deleted file mode 100644 index a5a28435c49..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_interop/some-math/some_math.c +++ /dev/null @@ -1,3 +0,0 @@ -#include "some_math.h" - -int add_two(int x) { return x + 2; } diff --git a/testprojects/src/python/python_distribution/ctypes_interop/some-math/some_math.h b/testprojects/src/python/python_distribution/ctypes_interop/some-math/some_math.h deleted file mode 100644 index af394ad0504..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_interop/some-math/some_math.h +++ /dev/null @@ -1,6 +0,0 @@ -#ifndef __SOME_MATH_H__ -#define __SOME_MATH_H__ - -int add_two(int); - -#endif diff --git a/testprojects/src/python/python_distribution/ctypes_interop/some-more-math/BUILD b/testprojects/src/python/python_distribution/ctypes_interop/some-more-math/BUILD deleted file mode 100644 index 97d0f7c2687..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_interop/some-more-math/BUILD +++ /dev/null @@ -1,6 +0,0 @@ -ctypes_compatible_cpp_library( - dependencies=[ - 'testprojects/src/python/python_distribution/ctypes_interop/some-math', - ], - ctypes_native_library=native_artifact(lib_name='some-more-math'), -) diff --git a/testprojects/src/python/python_distribution/ctypes_interop/some-more-math/some_more_math.cpp b/testprojects/src/python/python_distribution/ctypes_interop/some-more-math/some_more_math.cpp deleted file mode 100644 index ef15aff91df..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_interop/some-more-math/some_more_math.cpp +++ /dev/null @@ -1,12 +0,0 @@ -#ifdef __cplusplus -extern "C" { -#endif -#include "some_math.h" -#ifdef __cplusplus -} -#endif -#include "some_more_math.hpp" - -int mangled_function(int x) { return add_two(x) ^ 3; } - -extern "C" int multiply_by_three(int x) { return mangled_function(x * 3); } diff --git a/testprojects/src/python/python_distribution/ctypes_interop/some-more-math/some_more_math.hpp b/testprojects/src/python/python_distribution/ctypes_interop/some-more-math/some_more_math.hpp deleted file mode 100644 index bca3b0dd47a..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_interop/some-more-math/some_more_math.hpp +++ /dev/null @@ -1,14 +0,0 @@ -#ifndef __SOME_MORE_MATH_HPP__ -#define __SOME_MORE_MATH_HPP__ - -int mangled_function(int); - -#ifdef __cplusplus -extern "C" { -#endif - int multiply_by_three(int); -#ifdef __cplusplus -} -#endif - -#endif diff --git a/testprojects/src/python/python_distribution/ctypes_interop/wrapped-math/BUILD b/testprojects/src/python/python_distribution/ctypes_interop/wrapped-math/BUILD deleted file mode 100644 index a1174229b04..00000000000 --- 
a/testprojects/src/python/python_distribution/ctypes_interop/wrapped-math/BUILD +++ /dev/null @@ -1,10 +0,0 @@ -ctypes_compatible_c_library( - dependencies=[ - 'testprojects/src/python/python_distribution/ctypes_interop/some-more-math', - ], - ctypes_native_library=native_artifact(lib_name='wrapped-math'), - # Turning on strict_deps brings the transitive closure of all the native dependencies into the - # compile and link tasks. In this case, this target also implicitly depends on the "some-math" C - # library, depended on by the "some-more-math" C++ library which we explicitly depend on above. - strict_deps=False, -) diff --git a/testprojects/src/python/python_distribution/ctypes_interop/wrapped-math/wrapped_math.c b/testprojects/src/python/python_distribution/ctypes_interop/wrapped-math/wrapped_math.c deleted file mode 100644 index 16076563e41..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_interop/wrapped-math/wrapped_math.c +++ /dev/null @@ -1,5 +0,0 @@ -#include "some_math.h" -#include "some_more_math.hpp" -#include "wrapped_math.h" - -int wrapped_function(int x) { return add_two(multiply_by_three(x)); }; diff --git a/testprojects/src/python/python_distribution/ctypes_interop/wrapped-math/wrapped_math.h b/testprojects/src/python/python_distribution/ctypes_interop/wrapped-math/wrapped_math.h deleted file mode 100644 index f6854b82400..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_interop/wrapped-math/wrapped_math.h +++ /dev/null @@ -1,6 +0,0 @@ -#ifndef __WRAPPED_MATH_H__ -#define __WRAPPED_MATH_H__ - -int wrapped_function(int); - -#endif diff --git a/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/BUILD b/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/BUILD deleted file mode 100644 index 55a5a13081e..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/BUILD +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -ctypes_compatible_cpp_library( - name='cpp_library', - sources=['some_more_math.hpp', 'some_more_math.cpp'], - ctypes_native_library=native_artifact(lib_name='asdf-cpp_ctypes-with-extra-compiler-flags'), - compiler_option_sets={'asdf'}, -) - -python_dist( - name="ctypes", - sources=[ - 'setup.py', - 'ctypes_python_pkg/__init__.py', - 'ctypes_python_pkg/ctypes_wrapper.py', - ], - dependencies=[ - ':cpp_library', - ], -) - -python_binary( - name='bin', - sources=['main.py'], - dependencies=[ - ':ctypes', - ], - platforms=['current'], -) diff --git a/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/__init__.py b/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/ctypes_python_pkg/__init__.py b/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/ctypes_python_pkg/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/ctypes_python_pkg/ctypes_wrapper.py b/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/ctypes_python_pkg/ctypes_wrapper.py deleted file mode 100644 index d44acec2b35..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/ctypes_python_pkg/ctypes_wrapper.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -import ctypes -import os - - -def get_generated_shared_lib(lib_name): - # These are the same filenames as in setup.py. - filename = 'lib{}.so'.format(lib_name) - # The data files are in the root directory, but we are in ctypes_python_pkg/. - rel_path = os.path.join(os.path.dirname(__file__), '..', filename) - return os.path.normpath(rel_path) - - -asdf_cpp_lib_path = get_generated_shared_lib('asdf-cpp_ctypes-with-extra-compiler-flags') -asdf_cpp_lib = ctypes.CDLL(asdf_cpp_lib_path) - - -def f(x): - multiplied = asdf_cpp_lib.multiply_by_something(42) - return multiplied diff --git a/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/main.py b/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/main.py deleted file mode 100644 index deab79b8017..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from ctypes_python_pkg.ctypes_wrapper import f - -if __name__ == "__main__": - x = 3 - print("x={}, f(x)={}".format(x, f(x))) diff --git a/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/setup.py b/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/setup.py deleted file mode 100644 index 5cdbe826446..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/setup.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -from setuptools import find_packages, setup - - -setup( - name='ctypes_test', - version='0.0.1', - packages=find_packages(), - data_files=[('', ['libasdf-cpp_ctypes-with-extra-compiler-flags.so'])], -) diff --git a/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/some_more_math.cpp b/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/some_more_math.cpp deleted file mode 100644 index 575fda07e8f..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/some_more_math.cpp +++ /dev/null @@ -1,8 +0,0 @@ -#include "some_more_math.hpp" -#include - -int mangled_function(int x) { - return x * SOMETHING; -} - -extern "C" int multiply_by_something(int x) { return mangled_function(x * 3); } diff --git a/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/some_more_math.hpp b/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/some_more_math.hpp deleted file mode 100644 index 20ac0ee43a8..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags/some_more_math.hpp +++ /dev/null @@ -1,18 +0,0 @@ -#ifndef __SOME_MORE_MATH_HPP__ -#define __SOME_MORE_MATH_HPP__ - -#ifdef _ASDF - #if _ASDF == 0 - #define SOMETHING 800000 - #else - #define SOMETHING 100000 - #endif -#else - #define SOMETHING -1 -#endif - -int mangled_function(int); - -extern "C" int multiply_by_something(int); - -#endif diff --git a/testprojects/src/python/python_distribution/ctypes_with_third_party/BUILD b/testprojects/src/python/python_distribution/ctypes_with_third_party/BUILD deleted file mode 100644 index 2808a5134d7..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_with_third_party/BUILD +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -## Third party integration testing. 
- -ctypes_compatible_cpp_library( - name='cpp_library_with_third_party', - sources=['some_more_math.hpp', 'some_more_math_with_third_party.cpp'], - ctypes_native_library=native_artifact(lib_name='asdf-cpp_ctypes-with-third-party'), - dependencies=[':rang', ':cereal'], - fatal_warnings=False -) - -python_dist( - name='python_dist_with_third_party_cpp', - sources=[ - 'setup.py', - 'ctypes_python_pkg/__init__.py', - 'ctypes_python_pkg/ctypes_wrapper.py', - ], - dependencies=[ - ':cpp_library_with_third_party', - ], -) - -python_binary( - name='bin_with_third_party', - sources=['main.py'], - dependencies=[ - ':python_dist_with_third_party_cpp', - ], -) - -external_native_library( - name='rang', - packages=[ - conan_requirement('rang/3.1.0@rang/stable'), - ], -) - -external_native_library( - name='cereal', - packages=[ - conan_requirement('cereal/1.2.2@conan/stable'), - ], -) diff --git a/testprojects/src/python/python_distribution/ctypes_with_third_party/__init__.py b/testprojects/src/python/python_distribution/ctypes_with_third_party/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/python/python_distribution/ctypes_with_third_party/ctypes_python_pkg/__init__.py b/testprojects/src/python/python_distribution/ctypes_with_third_party/ctypes_python_pkg/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/python/python_distribution/ctypes_with_third_party/ctypes_python_pkg/ctypes_wrapper.py b/testprojects/src/python/python_distribution/ctypes_with_third_party/ctypes_python_pkg/ctypes_wrapper.py deleted file mode 100644 index 26e6221b587..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_with_third_party/ctypes_python_pkg/ctypes_wrapper.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -import ctypes -import os - - -def get_generated_shared_lib(lib_name): - # These are the same filenames as in setup.py. - filename = 'lib{}.so'.format(lib_name) - # The data files are in the root directory, but we are in ctypes_python_pkg/. - rel_path = os.path.join(os.path.dirname(__file__), '..', filename) - return os.path.normpath(rel_path) - - -asdf_cpp_lib_path = get_generated_shared_lib('asdf-cpp_ctypes-with-third-party') -asdf_cpp_lib = ctypes.CDLL(asdf_cpp_lib_path) - - -def f(x): - added = x + 3 - multiplied = asdf_cpp_lib.multiply_by_three(added) - return multiplied diff --git a/testprojects/src/python/python_distribution/ctypes_with_third_party/main.py b/testprojects/src/python/python_distribution/ctypes_with_third_party/main.py deleted file mode 100644 index deab79b8017..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_with_third_party/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from ctypes_python_pkg.ctypes_wrapper import f - -if __name__ == "__main__": - x = 3 - print("x={}, f(x)={}".format(x, f(x))) diff --git a/testprojects/src/python/python_distribution/ctypes_with_third_party/setup.py b/testprojects/src/python/python_distribution/ctypes_with_third_party/setup.py deleted file mode 100644 index ff8edd588e1..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_with_third_party/setup.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). 
-# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from setuptools import find_packages, setup - - -setup( - name='ctypes_third_party_test', - version='0.0.1', - packages=find_packages(), - # Declare one shared lib at the top-level directory (denoted by ''). - data_files=[('', ['libasdf-cpp_ctypes-with-third-party.so'])], -) diff --git a/testprojects/src/python/python_distribution/ctypes_with_third_party/some_more_math.hpp b/testprojects/src/python/python_distribution/ctypes_with_third_party/some_more_math.hpp deleted file mode 100644 index 31953f8b9ba..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_with_third_party/some_more_math.hpp +++ /dev/null @@ -1,8 +0,0 @@ -#ifndef __SOME_MORE_MATH_HPP__ -#define __SOME_MORE_MATH_HPP__ - -int mangled_function(int); - -extern "C" int multiply_by_three(int); - -#endif diff --git a/testprojects/src/python/python_distribution/ctypes_with_third_party/some_more_math_with_third_party.cpp b/testprojects/src/python/python_distribution/ctypes_with_third_party/some_more_math_with_third_party.cpp deleted file mode 100644 index de08b7dd742..00000000000 --- a/testprojects/src/python/python_distribution/ctypes_with_third_party/some_more_math_with_third_party.cpp +++ /dev/null @@ -1,65 +0,0 @@ -#include "some_more_math.hpp" - -/* A simple C++11 header-only library for integration testing */ -#include "rang.hpp" - -/** -A C++11 library for integration testing that contains a library archive (.dylib/.so) -in addition to headers. - -This snippet is taken from the README at https://github.com/USCiLab/cereal. - */ -#include -#include -#include -#include - -struct MyRecord -{ - uint8_t x = 1; - uint8_t y = 2; - float z; - - template - void serialize( Archive & ar ) - { - ar( x, y, z ); - } -}; - -struct SomeData -{ - int32_t id; - int data = 3; - - template - void save( Archive & ar ) const - { - ar( data ); - } - - template - void load( Archive & ar ) - { - static int32_t idGen = 0; - id = idGen++; - ar( data ); - } -}; - - -int mangled_function(int x) { - - // cereal testing - MyRecord myRecord; - SomeData myData; - - // rang testing - std::cout << "Testing 3rdparty C++..." - << rang::style::bold << "Test worked!" - << rang::style::reset << std::endl; - - return x ^ 3; -} - -extern "C" int multiply_by_three(int x) { return mangled_function(x * 3); } diff --git a/testprojects/src/python/python_distribution/hello_with_install_requires/BUILD b/testprojects/src/python/python_distribution/hello_with_install_requires/BUILD deleted file mode 100644 index 459ef4c84ed..00000000000 --- a/testprojects/src/python/python_distribution/hello_with_install_requires/BUILD +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -python_dist( - sources=[ - 'hello_package/hello.py', - 'hello_package/__init__.py', - 'setup.py' - ] -) - -python_binary( - name='main_with_no_conflict', - sources=['main.py'], - dependencies=[ - ':hello_with_install_requires', - ], - platforms=['current'] -) diff --git a/testprojects/src/python/python_distribution/hello_with_install_requires/hello_package/__init__.py b/testprojects/src/python/python_distribution/hello_with_install_requires/hello_package/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/python/python_distribution/hello_with_install_requires/hello_package/hello.py b/testprojects/src/python/python_distribution/hello_with_install_requires/hello_package/hello.py deleted file mode 100644 index 75698dfa3e0..00000000000 --- a/testprojects/src/python/python_distribution/hello_with_install_requires/hello_package/hello.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - - -def hello_string(): - return 'hello!' - - -def hello(): - print(hello_string()) diff --git a/testprojects/src/python/python_distribution/hello_with_install_requires/main.py b/testprojects/src/python/python_distribution/hello_with_install_requires/main.py deleted file mode 100644 index 18a4a61e335..00000000000 --- a/testprojects/src/python/python_distribution/hello_with_install_requires/main.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -import pycountry -from hello_package import hello - -if __name__ == "__main__": - hello.hello() - print(pycountry.countries.get(alpha_2="US").name) diff --git a/testprojects/src/python/python_distribution/hello_with_install_requires/setup.py b/testprojects/src/python/python_distribution/hello_with_install_requires/setup.py deleted file mode 100644 index 25a69191d20..00000000000 --- a/testprojects/src/python/python_distribution/hello_with_install_requires/setup.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from setuptools import find_packages, setup - - -setup( - name='hello_with_install_requires', - version='1.0.0', - packages=find_packages(), - install_requires=['pycountry==17.1.2'] -) diff --git a/testprojects/src/python/python_distribution/setup_requires/BUILD b/testprojects/src/python/python_distribution/setup_requires/BUILD deleted file mode 100644 index 96e26698b08..00000000000 --- a/testprojects/src/python/python_distribution/setup_requires/BUILD +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -python_dist( - name='checksummed_version_dist', - setup_requires=[ - # NB: This particular requirement is selected because it is unlikely to already be available to - # the python interpreter. - 'testprojects/3rdparty/python:checksumdir', - ], -) - -# NB: The default `sources` for this target is 'test_*.py', so its default sources don't intersect -# with the `python_dist()`'s default sources. 
-python_tests( - name='tests', - dependencies=[ - ':checksummed_version_dist', - ], -) diff --git a/testprojects/src/python/python_distribution/setup_requires/__init__.py b/testprojects/src/python/python_distribution/setup_requires/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/python/python_distribution/setup_requires/setup.py b/testprojects/src/python/python_distribution/setup_requires/setup.py deleted file mode 100644 index 8ee1e8becb7..00000000000 --- a/testprojects/src/python/python_distribution/setup_requires/setup.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from pathlib import Path -from setuptools import find_packages, setup - -# NB: In this example project, we import a module added via `setup_requires` in our BUILD file, and -# make some modifications to the `setup.py` project. These get picked up and tested in -# `test_setup_requires.py`. -from checksumdir import dirhash - -this_dir_hash = dirhash('.', 'sha256') - -checksum_module_dir = Path('checksum') -checksum_module_dir.mkdir() -checksum_module_dir.joinpath('__init__.py').write_text(f"""\ -checksum = '{this_dir_hash}' -""") - -setup( - name='checksummed_version_dist', - version='0.0.1', - packages=find_packages(), -) diff --git a/testprojects/src/python/python_distribution/setup_requires/test_setup_requires.py b/testprojects/src/python/python_distribution/setup_requires/test_setup_requires.py deleted file mode 100644 index 891b7ea4d55..00000000000 --- a/testprojects/src/python/python_distribution/setup_requires/test_setup_requires.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from checksum import checksum - - -# TODO: validate the checksum's value itself? -def test_dist_version(): - assert checksum is not None - assert isinstance(checksum, str) - assert len(checksum) == 64 diff --git a/testprojects/tests/python/BUILD b/testprojects/tests/python/BUILD index f880aa37c44..11a52423502 100644 --- a/testprojects/tests/python/BUILD +++ b/testprojects/tests/python/BUILD @@ -9,14 +9,6 @@ target( ], ) -files( - name = 'example_test_directory', - sources = ['example_test/**/*'], - dependencies = [ - 'testprojects/src/python:python_distribution_directory', - ], -) - files( name = 'owners_integration_target', sources = ['pants/BUILD'], diff --git a/testprojects/tests/python/example_test/python_distribution/BUILD b/testprojects/tests/python/example_test/python_distribution/BUILD deleted file mode 100644 index d118ca44311..00000000000 --- a/testprojects/tests/python/example_test/python_distribution/BUILD +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -python_tests( - dependencies=[ - 'testprojects/src/python/python_distribution/hello_with_install_requires', - ], -) diff --git a/testprojects/tests/python/example_test/python_distribution/test_hello.py b/testprojects/tests/python/example_test/python_distribution/test_hello.py deleted file mode 100644 index 27e5fea4cb0..00000000000 --- a/testprojects/tests/python/example_test/python_distribution/test_hello.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -import unittest - -from hello_package import hello - - -class HelloTest(unittest.TestCase): - def test_hello_import(self): - self.assertEqual("hello!", hello.hello_string()) From 357d0a581358ec5bf2c4326b14f78221223cc354 Mon Sep 17 00:00:00 2001 From: Eric Arellano Date: Fri, 26 Jun 2020 20:17:31 -0700 Subject: [PATCH 2/4] Fix config file # Rust tests will be skipped. Delete if not intended. [ci skip-rust-tests] --- build-support/bin/release.sh | 1 - pants.toml | 1 - pants.travis-ci.toml | 4 ---- 3 files changed, 6 deletions(-) diff --git a/build-support/bin/release.sh b/build-support/bin/release.sh index fc3a9dbe756..539dd57bc92 100755 --- a/build-support/bin/release.sh +++ b/build-support/bin/release.sh @@ -150,7 +150,6 @@ function execute_packaged_pants_with_internal_backends() { --pythonpath="['pants-plugins/src/python']" \ --backend-packages="[\ 'pants.backend.jvm',\ - 'pants.backend.native',\ 'pants.backend.python',\ 'pants.cache',\ 'internal_backend.repositories',\ diff --git a/pants.toml b/pants.toml index ea999b13fd4..bae4aad7b53 100644 --- a/pants.toml +++ b/pants.toml @@ -29,7 +29,6 @@ backend_packages.add = [ ] backend_packages2.add = [ "pants.backend.awslambda.python", - "pants.backend.native", "pants.backend.python", "pants.backend.python.lint.black", "pants.backend.python.lint.docformatter", diff --git a/pants.travis-ci.toml b/pants.travis-ci.toml index c7e7d8e505c..a378274dcff 100644 --- a/pants.travis-ci.toml +++ b/pants.travis-ci.toml @@ -28,7 +28,3 @@ timeout_maximum = 590 [pytest] # NB: We don't set a max test timeout for v2, because tests run concurrently. - -[libc] -# Currently, we only need to search for a libc installation to test the native toolchain. -enable_libc_search = true From c8a11eec07969a3c592be1931eb9dd50318b49d2 Mon Sep 17 00:00:00 2001 From: Eric Arellano Date: Fri, 26 Jun 2020 20:43:03 -0700 Subject: [PATCH 3/4] Remove tensorflow test folder # Rust tests will be skipped. Delete if not intended. [ci skip-rust-tests] --- examples/tests/python/example_test/BUILD | 10 ---------- .../python/example_test/tensorflow_custom_op/BUILD | 9 --------- .../tensorflow_custom_op/test_zero_out_op.py | 14 -------------- 3 files changed, 33 deletions(-) delete mode 100644 examples/tests/python/example_test/tensorflow_custom_op/BUILD delete mode 100644 examples/tests/python/example_test/tensorflow_custom_op/test_zero_out_op.py diff --git a/examples/tests/python/example_test/BUILD b/examples/tests/python/example_test/BUILD index da5c6bd4ec6..57520ea29de 100644 --- a/examples/tests/python/example_test/BUILD +++ b/examples/tests/python/example_test/BUILD @@ -5,7 +5,6 @@ target( name = 'all_directories', dependencies = [ ':hello_directory', - ':tensorflow_custom_op_directory', ], ) @@ -13,12 +12,3 @@ files( name = 'hello_directory', sources = ['hello/**/*'], ) - -files( - name = 'tensorflow_custom_op_directory', - sources = ['tensorflow_custom_op/**/*'], - dependencies = [ - 'examples/3rdparty:python_directory', - 'examples/src/python/example:tensorflow_custom_op_directory', - ], -) diff --git a/examples/tests/python/example_test/tensorflow_custom_op/BUILD b/examples/tests/python/example_test/tensorflow_custom_op/BUILD deleted file mode 100644 index 37599c4bedb..00000000000 --- a/examples/tests/python/example_test/tensorflow_custom_op/BUILD +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -python_tests( - dependencies=[ - 'examples/3rdparty/python:tensorflow', - 'examples/src/python/example/tensorflow_custom_op', - ], -) diff --git a/examples/tests/python/example_test/tensorflow_custom_op/test_zero_out_op.py b/examples/tests/python/example_test/tensorflow_custom_op/test_zero_out_op.py deleted file mode 100644 index 9da8a19984e..00000000000 --- a/examples/tests/python/example_test/tensorflow_custom_op/test_zero_out_op.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -import tensorflow as tf - -from example.tensorflow_custom_op.zero_out_custom_op import zero_out_module - - -# This code is from the guide in https://www.tensorflow.org/guide/extend/op. -class ZeroOutTest(tf.test.TestCase): - def test_zero_out(self): - with self.cached_session(): - result = zero_out_module().zero_out([5, 4, 3, 2, 1]) - self.assertAllEqual(result.eval(), [5, 0, 0, 0, 0]) From 6cd0bdc08f75b338efc2c05ebe388c50d8c362e9 Mon Sep 17 00:00:00 2001 From: Eric Arellano Date: Mon, 29 Jun 2020 11:17:00 -0700 Subject: [PATCH 4/4] Remove v1 ipex because it depends on v1 backend/native --- src/python/pants/backend/python/register.py | 2 - src/python/pants/backend/python/rules/BUILD | 1 - .../backend/python/subsystems/ipex/BUILD | 7 - .../python/subsystems/ipex/ipex_launcher.py | 106 --------- .../python/tasks/python_binary_create.py | 224 ------------------ src/python/pants/python/BUILD | 1 - src/python/pants/python/pex_build_util.py | 129 +--------- .../python/pex_build_util_test_integration.py | 50 ---- 8 files changed, 3 insertions(+), 517 deletions(-) delete mode 100644 src/python/pants/backend/python/subsystems/ipex/BUILD delete mode 100644 src/python/pants/backend/python/subsystems/ipex/ipex_launcher.py delete mode 100644 src/python/pants/backend/python/tasks/python_binary_create.py delete mode 100644 src/python/pants/python/pex_build_util_test_integration.py diff --git a/src/python/pants/backend/python/register.py b/src/python/pants/backend/python/register.py index 3ea3f95e68b..e53a5d92b25 100644 --- a/src/python/pants/backend/python/register.py +++ b/src/python/pants/backend/python/register.py @@ -54,7 +54,6 @@ ) from pants.backend.python.tasks.pytest_prep import PytestPrep from pants.backend.python.tasks.pytest_run import PytestRun -from pants.backend.python.tasks.python_binary_create import PythonBinaryCreate from pants.backend.python.tasks.python_bundle import PythonBundle from pants.backend.python.tasks.python_repl import PythonRepl from pants.backend.python.tasks.python_run import PythonRun @@ -109,7 +108,6 @@ def register_goals(): task(name="pytest", action=PytestRun).install("test") task(name="py", action=PythonRepl).install("repl") task(name="setup-py", action=SetupPy).install() - task(name="py", action=PythonBinaryCreate).install("binary") task(name="py-wheels", action=LocalPythonDistributionArtifact).install("binary") task(name="py", action=PythonBundle).install("bundle") task(name="unpack-wheels", action=UnpackWheels).install() diff --git a/src/python/pants/backend/python/rules/BUILD b/src/python/pants/backend/python/rules/BUILD index c5e52c70dd3..65d3ba2d277 100644 --- a/src/python/pants/backend/python/rules/BUILD +++ b/src/python/pants/backend/python/rules/BUILD @@ -8,7 +8,6 @@ python_library( 'src/python/pants/backend/python:target_types', 'src/python/pants/backend/python/rules/coverage_plugin:plugin', 'src/python/pants/backend/python/subsystems', - 
'src/python/pants/backend/python/subsystems/ipex', 'src/python/pants/build_graph', 'src/python/pants/core', 'src/python/pants/core/goals', diff --git a/src/python/pants/backend/python/subsystems/ipex/BUILD b/src/python/pants/backend/python/subsystems/ipex/BUILD deleted file mode 100644 index cebac0e31a1..00000000000 --- a/src/python/pants/backend/python/subsystems/ipex/BUILD +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -# NB: This target is written into an .ipex file as the main script, and should not have any -# dependencies on another python code! .ipex files should always contain pex and setuptools -# requirements in order to run the main script! -python_library() diff --git a/src/python/pants/backend/python/subsystems/ipex/ipex_launcher.py b/src/python/pants/backend/python/subsystems/ipex/ipex_launcher.py deleted file mode 100644 index 1e24aa164e4..00000000000 --- a/src/python/pants/backend/python/subsystems/ipex/ipex_launcher.py +++ /dev/null @@ -1,106 +0,0 @@ -# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -"""Entrypoint script for a "dehydrated" .ipex file generated with --generate-ipex. - -This script will "hydrate" a normal .pex file in the same directory, then execute it. -""" - -import json -import os -import sys -import tempfile - -from pex import resolver -from pex.common import open_zip -from pex.interpreter import PythonInterpreter -from pex.pex_builder import PEXBuilder -from pex.pex_info import PexInfo - -APP_CODE_PREFIX = "user_files/" - - -def _strip_app_code_prefix(path): - if not path.startswith(APP_CODE_PREFIX): - raise ValueError( - "Path {path} in IPEX-INFO did not begin with '{APP_CODE_PREFIX}'.".format( - path=path, APP_CODE_PREFIX=APP_CODE_PREFIX - ) - ) - return path[len(APP_CODE_PREFIX) :] - - -def _log(message): - sys.stderr.write(message + "\n") - - -def modify_pex_info(pex_info, **kwargs): - new_info = json.loads(pex_info.dump()) - new_info.update(kwargs) - return PexInfo.from_json(json.dumps(new_info)) - - -def _hydrate_pex_file(self, hydrated_pex_file): - # We extract source files into a temporary directory before creating the pex. - td = tempfile.mkdtemp() - - with open_zip(self) as zf: - # Populate the pex with the pinned requirements and distribution names & hashes. - bootstrap_info = PexInfo.from_json(zf.read("BOOTSTRAP-PEX-INFO")) - bootstrap_builder = PEXBuilder(pex_info=bootstrap_info, interpreter=PythonInterpreter.get()) - - # Populate the pex with the needed code. - try: - ipex_info = json.loads(zf.read("IPEX-INFO").decode("utf-8")) - for path in ipex_info["code"]: - unzipped_source = zf.extract(path, td) - bootstrap_builder.add_source( - unzipped_source, env_filename=_strip_app_code_prefix(path) - ) - except Exception as e: - raise ValueError( - "Error: {e}. The IPEX-INFO for this .ipex file was:\n{info}".format( - e=e, info=json.dumps(ipex_info, indent=4) - ) - ) - - # Perform a fully pinned intransitive resolve to hydrate the install cache. 
- resolver_settings = ipex_info["resolver_settings"] - - resolved_distributions = resolver.resolve( - requirements=bootstrap_info.requirements, - cache=bootstrap_info.pex_root, - platform="current", - transitive=False, - interpreter=bootstrap_builder.interpreter, - **resolver_settings - ) - # TODO: this shouldn't be necessary, as we should be able to use the same 'distributions' from - # BOOTSTRAP-PEX-INFO. When the .ipex is executed, the normal pex bootstrap fails to see these - # requirements or recognize that they should be pulled from the cache for some reason. - for resolved_dist in resolved_distributions: - bootstrap_builder.add_distribution(resolved_dist.distribution) - - bootstrap_builder.build(hydrated_pex_file, bytecode_compile=False) - - -def main(self): - filename_base, ext = os.path.splitext(self) - - # If the ipex (this pex) is already named '.pex', ensure the output filename doesn't collide by - # inserting an intermediate '.ipex'! - if ext == ".pex": - hydrated_pex_file = "{filename_base}.ipex.pex".format(filename_base=filename_base) - else: - hydrated_pex_file = "{filename_base}.pex".format(filename_base=filename_base) - - if not os.path.exists(hydrated_pex_file): - _log("Hydrating {} to {}...".format(self, hydrated_pex_file)) - _hydrate_pex_file(self, hydrated_pex_file) - - os.execv(sys.executable, [sys.executable, hydrated_pex_file] + sys.argv[1:]) - - -if __name__ == "__main__": - self = sys.argv[0] - main(self) diff --git a/src/python/pants/backend/python/tasks/python_binary_create.py b/src/python/pants/backend/python/tasks/python_binary_create.py deleted file mode 100644 index 2c5b9d78d9d..00000000000 --- a/src/python/pants/backend/python/tasks/python_binary_create.py +++ /dev/null @@ -1,224 +0,0 @@ -# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -import os -from typing import cast - -from pex.interpreter import PythonInterpreter -from pex.pex_builder import PEXBuilder -from pex.pex_info import PexInfo - -from pants.backend.python.subsystems.python_native_code import PythonNativeCode -from pants.backend.python.targets.python_binary import PythonBinary -from pants.backend.python.targets.python_requirement_library import PythonRequirementLibrary -from pants.base.build_environment import get_buildroot -from pants.base.exceptions import TaskError -from pants.build_graph.target_scopes import Scopes -from pants.python.pex_build_util import ( - PexBuilderWrapper, - has_python_requirements, - has_python_sources, - has_resources, - is_python_target, -) -from pants.task.task import Task -from pants.util.contextutil import temporary_dir -from pants.util.dirutil import safe_mkdir_for -from pants.util.fileutil import atomic_copy -from pants.util.memo import memoized_property - - -class PythonBinaryCreate(Task): - """Create an executable .pex file.""" - - @classmethod - def register_options(cls, register): - super().register_options(register) - register( - "--include-run-information", - type=bool, - default=False, - help="Include run information in the PEX's PEX-INFO for information like the timestamp the PEX was " - "created and the command line used to create it. 
This information may be helpful to you, but means " - "that the generated PEX will not be reproducible; that is, future runs of `./pants binary` will not " - "create the same byte-for-byte identical .pex files.", - ) - register( - "--generate-ipex", - type=bool, - default=False, - fingerprint=True, - help='Whether to generate a .ipex file, which will "hydrate" its dependencies when ' - "it is first executed, rather than at build time (the normal pex behavior). " - "This option can reduce the size of a shipped pex file by over 100x for common" - "deps such as tensorflow, but it does require access to the network when " - "first executed.", - ) - register( - "--output-file-extension", - type=str, - default=None, - fingerprint=True, - help="What extension to output the file with. This can be used to differentiate " - "ipex files from others.", - ) - - @classmethod - def subsystem_dependencies(cls): - return super().subsystem_dependencies() + ( - PexBuilderWrapper.Factory, - PythonNativeCode.scoped(cls), - ) - - @memoized_property - def _python_native_code_settings(self): - return PythonNativeCode.scoped_instance(self) - - @classmethod - def product_types(cls): - return ["pex_archives", "deployable_archives"] - - @classmethod - def implementation_version(cls): - return super().implementation_version() + [("PythonBinaryCreate", 2)] - - @property - def cache_target_dirs(self): - return True - - @classmethod - def prepare(cls, options, round_manager): - # See comment below for why we don't use the GatherSources.PYTHON_SOURCES product. - round_manager.require_data(PythonInterpreter) - round_manager.optional_data("python") # For codegen. - round_manager.optional_product(PythonRequirementLibrary) # For local dists. - - @staticmethod - def is_binary(target): - return isinstance(target, PythonBinary) - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self._distdir = self.get_options().pants_distdir - - @property - def _generate_ipex(self) -> bool: - return cast(bool, self.get_options().generate_ipex) - - def _get_output_pex_filename(self, target_name): - file_ext = self.get_options().output_file_extension - if file_ext is None: - file_ext = ".ipex" if self._generate_ipex else ".pex" - - return f"{target_name}{file_ext}" - - def execute(self): - binaries = self.context.targets(self.is_binary) - - # Check for duplicate binary names, since we write the pexes to /.pex. - names = {} - for binary in binaries: - name = binary.name - if name in names: - raise TaskError( - "Cannot build two binaries with the same name in a single invocation. " - f"{binary} and {names[name]} both have the name {name}." 
- ) - names[name] = binary - - with self.invalidated(binaries, invalidate_dependents=True) as invalidation_check: - python_deployable_archive = self.context.products.get("deployable_archives") - python_pex_product = self.context.products.get("pex_archives") - for vt in invalidation_check.all_vts: - pex_path = os.path.join( - vt.results_dir, self._get_output_pex_filename(vt.target.name) - ) - if not vt.valid: - self.context.log.debug(f"cache for {vt.target} is invalid, rebuilding") - self._create_binary(vt.target, vt.results_dir) - else: - self.context.log.debug(f"using cache for {vt.target}") - - basename = os.path.basename(pex_path) - python_pex_product.add(vt.target, os.path.dirname(pex_path)).append(basename) - python_deployable_archive.add(vt.target, os.path.dirname(pex_path)).append(basename) - self.context.log.debug( - "created {}".format(os.path.relpath(pex_path, get_buildroot())) - ) - - # Create a copy for pex. - pex_copy = os.path.join(self._distdir, os.path.basename(pex_path)) - safe_mkdir_for(pex_copy) - atomic_copy(pex_path, pex_copy) - self.context.log.info( - "created pex {}".format(os.path.relpath(pex_copy, get_buildroot())) - ) - - def _create_binary(self, binary_tgt, results_dir): - """Create a .pex file for the specified binary target.""" - # Note that we rebuild a chroot from scratch, instead of using the REQUIREMENTS_PEX - # and PYTHON_SOURCES products, because those products are already-built pexes, and there's - # no easy way to merge them into a single pex file (for example, they each have a __main__.py, - # metadata, and so on, which the merging code would have to handle specially). - interpreter = self.context.products.get_data(PythonInterpreter) - with temporary_dir() as tmpdir: - # Create the pex_info for the binary. - build_properties = PexInfo.make_build_properties() - if self.get_options().include_run_information: - run_info_dict = self.context.run_tracker.run_info.get_as_dict() - build_properties.update(run_info_dict) - pex_info = binary_tgt.pexinfo.copy() - pex_info.build_properties = build_properties - - pex_builder = PexBuilderWrapper.Factory.create( - builder=PEXBuilder( - path=tmpdir, interpreter=interpreter, pex_info=pex_info, copy=True - ), - log=self.context.log, - generate_ipex=self._generate_ipex, - ) - - if binary_tgt.shebang: - self.context.log.info( - "Found Python binary target {} with customized shebang, using it: {}".format( - binary_tgt.name, binary_tgt.shebang - ) - ) - pex_builder.set_shebang(binary_tgt.shebang) - else: - self.context.log.debug(f"No customized shebang found for {binary_tgt.name}") - - # Find which targets provide sources and which specify requirements. - source_tgts = [] - req_tgts = [] - constraint_tgts = [] - for tgt in binary_tgt.closure(exclude_scopes=Scopes.COMPILE): - if has_python_sources(tgt) or has_resources(tgt): - source_tgts.append(tgt) - elif has_python_requirements(tgt): - req_tgts.append(tgt) - if is_python_target(tgt): - constraint_tgts.append(tgt) - - # Add interpreter compatibility constraints to pex info. Note that we only add the constraints for the final - # binary target itself, not its dependencies. The upstream interpreter selection tasks will already validate that - # there are no compatibility conflicts among the dependencies and target. If the binary target does not have - # `compatibility` in its BUILD entry, the global --python-setup-interpreter-constraints will be used. - pex_builder.add_interpreter_constraints_from([binary_tgt]) - - # Dump everything into the builder's chroot. 
- for tgt in source_tgts: - pex_builder.add_sources_from(tgt) - - # We need to ensure that we are resolving for only the current platform if we are - # including local python dist targets that have native extensions. - self._python_native_code_settings.check_build_for_current_platform_only( - self.context.targets() - ) - pex_builder.add_requirement_libs_from(req_tgts, platforms=binary_tgt.platforms) - - # Build the .pex file. - pex_filename = self._get_output_pex_filename(binary_tgt.name) - pex_path = os.path.join(results_dir, pex_filename) - pex_builder.build(pex_path) - return pex_path diff --git a/src/python/pants/python/BUILD b/src/python/pants/python/BUILD index 67a0b9e4e8d..59ca9c15a6a 100644 --- a/src/python/pants/python/BUILD +++ b/src/python/pants/python/BUILD @@ -8,7 +8,6 @@ python_library( '3rdparty/python:setuptools', 'src/python/pants/base:build_environment', 'src/python/pants/base:exceptions', - 'src/python/pants/backend/python/subsystems/ipex', 'src/python/pants/build_graph', 'src/python/pants/option', 'src/python/pants/subsystem', diff --git a/src/python/pants/python/pex_build_util.py b/src/python/pants/python/pex_build_util.py index d98406267f9..9a38785749b 100644 --- a/src/python/pants/python/pex_build_util.py +++ b/src/python/pants/python/pex_build_util.py @@ -1,23 +1,20 @@ # Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). -import json import logging import os from collections import defaultdict from pathlib import Path -from typing import Callable, Dict, List, Optional, Sequence, Set, Tuple +from typing import Callable, Dict, List, Optional, Sequence, Set -from pex.interpreter import PythonIdentity, PythonInterpreter +from pex.interpreter import PythonInterpreter from pex.pex_builder import PEXBuilder -from pex.pex_info import PexInfo from pex.platforms import Platform from pex.resolver import resolve from pex.util import DistributionHelper from pex.version import __version__ as pex_version -from pkg_resources import Distribution, get_provider +from pkg_resources import Distribution -from pants.backend.python.subsystems.ipex import ipex_launcher from pants.backend.python.targets.python_binary import PythonBinary from pants.backend.python.targets.python_distribution import PythonDistribution from pants.backend.python.targets.python_library import PythonLibrary @@ -27,7 +24,6 @@ from pants.base.exceptions import TaskError from pants.build_graph.files import Files from pants.build_graph.target import Target -from pants.option.custom_types import UnsetBool from pants.python.python_repos import PythonRepos from pants.python.python_requirement import PythonRequirement from pants.python.python_setup import PythonSetup @@ -35,7 +31,6 @@ from pants.util.collections import assert_single_element from pants.util.contextutil import temporary_file from pants.util.ordered_set import FrozenOrderedSet, OrderedSet -from pants.util.strutil import module_dirname def is_python_target(tgt: Target) -> bool: @@ -417,121 +412,6 @@ def _prepare_inits(self) -> Set[str]: def set_emit_warnings(self, emit_warnings): self._builder.info.emit_warnings = emit_warnings - def _set_major_minor_interpreter_constraint_for_ipex( - self, info: PexInfo, identity: PythonIdentity, - ) -> PexInfo: - interpreter_name = identity.requirement.name - major, minor, _patch = identity.version - major_minor_only_constraint = f"{interpreter_name}=={major}.{minor}.*" - return ipex_launcher.modify_pex_info( - info, 
interpreter_constraints=[str(major_minor_only_constraint)] - ) - - def _shuffle_underlying_pex_builder(self) -> Tuple[PexInfo, Path]: - """Replace the original builder with a new one, and just pull files from the old chroot.""" - # Ensure that (the interpreter selected to resolve requirements when the ipex is first run) is - # (the exact same interpreter we used to resolve those requirements here). This is the only (?) - # way to ensure that the ipex bootstrap uses the *exact* same interpreter version. - self._builder.info = self._set_major_minor_interpreter_constraint_for_ipex( - self._builder.info, self._builder.interpreter.identity - ) - - # Remove all the original top-level requirements in favor of the transitive == requirements. - self._builder.info = ipex_launcher.modify_pex_info(self._builder.info, requirements=[]) - transitive_reqs = [dist.as_requirement() for dist in self._distributions.values()] - self.add_direct_requirements(transitive_reqs) - - orig_info = self._builder.info.copy() - - orig_chroot = self._builder.chroot() - - # Mutate the PexBuilder object which is manipulated by this subsystem. - self._builder = PEXBuilder(interpreter=self._builder.interpreter) - self._builder.info = self._set_major_minor_interpreter_constraint_for_ipex( - self._builder.info, self._builder.interpreter.identity - ) - - self._distributions = {} - - return (orig_info, Path(orig_chroot.path())) - - def _shuffle_original_build_info_into_ipex(self): - """Create a "dehydrated" ipex file without any of its requirements, and specify that in two. - - *-INFO files. - - See ipex_launcher.py for details of how these files are used. - """ - orig_pex_info, orig_chroot = self._shuffle_underlying_pex_builder() - - # Gather information needed to create IPEX-INFO. - all_code = [str(src) for src in self._all_added_sources_resources] - prefixed_code_paths = [os.path.join(ipex_launcher.APP_CODE_PREFIX, src) for src in all_code] - for src, prefixed in zip(all_code, prefixed_code_paths): - # NB: Need to add under 'source' label for `self._prepare_inits()` to pick it up! - self._builder.chroot().copy( - os.path.join(str(orig_chroot), src), prefixed, label="source" - ) - - python_repos = self._python_repos_subsystem - python_setup = self._python_setup_subsystem - - # NB: self._all_find_links is updated on every call to self._resolve_multi(), and therefore - # includes all of the links from python_repos.repos, as well as any links added within any - # individual requirements from that resolve. - - resolver_settings = dict( - indexes=list(python_repos.indexes), - find_links=list(self._all_find_links), - allow_prereleases=UnsetBool.coerce_bool( - python_setup.resolver_allow_prereleases, default=True - ), - manylinux=python_setup.manylinux, - ) - - # IPEX-INFO: A json mapping interpreted in ipex_launcher.py: - # { - # "code": [], - # "resolver_settings": {}, - # } - ipex_info = dict(code=prefixed_code_paths, resolver_settings=resolver_settings,) - with temporary_file(permissions=0o644) as ipex_info_file: - ipex_info_file.write(json.dumps(ipex_info).encode()) - ipex_info_file.flush() - self._builder.add_resource(filename=ipex_info_file.name, env_filename="IPEX-INFO") - - # BOOTSTRAP-PEX-INFO: The original PEX-INFO, which should be the PEX-INFO in the hydrated .pex - # file that is generated when the .ipex is first executed. 
-        with temporary_file(permissions=0o644) as bootstrap_pex_info_file:
-            bootstrap_pex_info_file.write(orig_pex_info.dump().encode())
-            bootstrap_pex_info_file.flush()
-            self._builder.add_resource(
-                filename=bootstrap_pex_info_file.name, env_filename="BOOTSTRAP-PEX-INFO"
-            )
-
-        # ipex.py: The special bootstrap script to hydrate the .ipex with the fully resolved
-        # requirements when it is first executed.
-        # Extract the file contents of our custom app launcher script from the pants package.
-        parent_module = module_dirname(module_dirname(ipex_launcher.__name__))
-        ipex_launcher_provider = get_provider(parent_module)
-        ipex_launcher_script = ipex_launcher_provider.get_resource_string(
-            parent_module, "ipex/ipex_launcher.py"
-        )
-        with temporary_file(permissions=0o644) as ipex_launcher_file:
-            ipex_launcher_file.write(ipex_launcher_script)
-            ipex_launcher_file.flush()
-            # Our .ipex file will use our custom app launcher!
-            self._builder.set_executable(ipex_launcher_file.name, env_filename="ipex.py")
-
-        # The PEX-INFO we generate shouldn't have any requirements (except pex itself), or they will
-        # fail to bootstrap because they were unable to find those distributions. Instead, the .pex file
-        # produced when the .ipex is first executed will read and resolve all those requirements from
-        # the BOOTSTRAP-PEX-INFO.
-        self.add_resolved_requirements(
-            [self._pex_requirement, self._setuptools_requirement],
-            override_ipex_build_do_actually_add_distribution=True,
-        )
-
     def freeze(self) -> None:
         if self._frozen:
             return
@@ -541,9 +421,6 @@ def freeze(self) -> None:
         if not dist:
             self.add_resolved_requirements([self._setuptools_requirement])
 
-        if self._generate_ipex:
-            self._shuffle_original_build_info_into_ipex()
-
         self._builder.freeze(bytecode_compile=False)
         self._frozen = True
 
diff --git a/src/python/pants/python/pex_build_util_test_integration.py b/src/python/pants/python/pex_build_util_test_integration.py
deleted file mode 100644
index 451b601f305..00000000000
--- a/src/python/pants/python/pex_build_util_test_integration.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
-# Licensed under the Apache License, Version 2.0 (see LICENSE).
-
-import json
-import os
-import subprocess
-
-from pex.interpreter import PythonInterpreter
-
-from pants.testutil.pants_run_integration_test import PantsRunIntegrationTest
-from pants.util.collections import assert_single_element
-from pants.util.contextutil import open_zip, temporary_dir
-
-
-class PexBuildUtilIntegrationTest(PantsRunIntegrationTest):
-
-    binary_target_address = "testprojects/src/python/python_targets:test"
-
-    def test_ipex_gets_imprecise_constraint(self) -> None:
-        cur_interpreter_id = PythonInterpreter.get().identity
-        interpreter_name = cur_interpreter_id.requirement.name
-        major, minor, patch = cur_interpreter_id.version
-
-        # Pin the selected interpreter to the one used by pants to execute this test.
-        cur_interpreter_constraint = f"{interpreter_name}=={major}.{minor}.{patch}"
-
-        # Validate that the .ipex file specifically matches the major and minor versions, but allows
-        # any patch version.
-        imprecise_constraint = f"{interpreter_name}=={major}.{minor}.*"
-
-        with temporary_dir() as tmp_dir:
-            self.do_command(
-                "--binary-py-generate-ipex",
-                "binary",
-                self.binary_target_address,
-                config={
-                    "GLOBAL": {"pants_distdir": tmp_dir},
-                    "python-setup": {"interpreter_constraints": [cur_interpreter_constraint]},
-                },
-            )
-
-            pex_path = os.path.join(tmp_dir, "test.ipex")
-            assert os.path.isfile(pex_path)
-            pex_execution_result = subprocess.run([pex_path], stdout=subprocess.PIPE, check=True)
-            assert pex_execution_result.stdout.decode() == "test!\n"
-
-            with open_zip(pex_path) as zf:
-                info = json.loads(zf.read("PEX-INFO"))
-                constraint = assert_single_element(info["interpreter_constraints"])
-                assert constraint == imprecise_constraint
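For reference, the deleted `_set_major_minor_interpreter_constraint_for_ipex` helper and the deleted integration test above both revolve around one idea: rewrite an exact interpreter identity into a major.minor-only requirement so the .ipex can bootstrap on any patch release. The following is an illustrative sketch only, not code from this patch; `narrow_constraint` and the example version are invented names/values.

    # Sketch (not from this patch): narrow "CPython 3.7.4" to the imprecise
    # "CPython==3.7.*" constraint the deleted test expected in the .ipex's PEX-INFO.
    from typing import Tuple

    def narrow_constraint(interpreter_name: str, version: Tuple[int, int, int]) -> str:
        """Pin only major.minor so the ipex bootstrap may pick any patch release."""
        major, minor, _patch = version
        return f"{interpreter_name}=={major}.{minor}.*"

    assert narrow_constraint("CPython", (3, 7, 4)) == "CPython==3.7.*"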
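Likewise for the deleted `_shuffle_original_build_info_into_ipex`: per its own comments, it wrote an IPEX-INFO json mapping (keys "code" and "resolver_settings") plus a BOOTSTRAP-PEX-INFO copy of the original PEX-INFO into the "dehydrated" .ipex. The sketch below shows only the rough shape of that IPEX-INFO mapping; every concrete value (the path, index URL, manylinux tag) is a made-up example, not data from this repository.

    # Sketch (not from this patch): approximate shape of the IPEX-INFO resource
    # described in the deleted comments. All values below are invented examples.
    import json

    ipex_info = {
        # Application sources copied into the dehydrated .ipex.
        "code": ["example_app/main.py"],
        # Settings the launcher would use to re-resolve requirements on first run.
        "resolver_settings": {
            "indexes": ["https://pypi.org/simple/"],
            "find_links": [],
            "allow_prereleases": False,
            "manylinux": "manylinux2014",
        },
    }
    print(json.dumps(ipex_info, indent=2))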