python312Packages.tensorflow: cleanup dependencies #391992

Merged: 3 commits, Mar 22, 2025
Changes from all commits
115 changes: 47 additions & 68 deletions pkgs/development/python-modules/tensorflow/bin.nix
@@ -1,40 +1,44 @@
{
stdenv,
lib,
fetchurl,
stdenv,
buildPythonPackage,
isPy3k,
astor,
fetchurl,

# buildInputs
llvmPackages,

# build-system
distutils,

# dependencies
ml-dtypes,
absl-py,
astunparse,
flatbuffers,
gast,
google-pasta,
wrapt,
grpcio,
h5py,
libclang,
numpy,
six,
termcolor,
opt-einsum,
packaging,
protobuf,
absl-py,
grpcio,
mock,
scipy,
distutils,
wheel,
jax,
ml-dtypes,
opt-einsum,
tensorflow-estimator-bin,
requests,
six,
tensorboard,
termcolor,
typing-extensions,
wrapt,
isPy3k,
mock,

config,
cudaSupport ? config.cudaSupport,
cudaPackages,
zlib,
python,
addDriverRunpath,
astunparse,
flatbuffers,
h5py,
llvmPackages,
typing-extensions,
}:

# We keep this binary build for three reasons:
@@ -50,7 +54,6 @@ let
inherit (cudaPackages) cudatoolkit cudnn;

isCudaJetson = cudaSupport && cudaPackages.cudaFlags.isJetsonBuild;
isCudaX64 = cudaSupport && stdenv.hostPlatform.isx86_64;
in
buildPythonPackage rec {
pname = "tensorflow" + lib.optionalString cudaSupport "-gpu";
@@ -68,38 +71,36 @@ buildPythonPackage rec {

buildInputs = [ llvmPackages.openmp ];

build-system = [
distutils
];

nativeBuildInputs =
lib.optionals cudaSupport [ addDriverRunpath ]
++ lib.optionals isCudaJetson [ cudaPackages.autoAddCudaCompatRunpath ];

dependencies = [
absl-py
astunparse
flatbuffers
typing-extensions
distutils
gast
google-pasta
grpcio
h5py
libclang
ml-dtypes
numpy
opt-einsum
packaging
protobuf
numpy
scipy
(if isCudaX64 then jax else ml-dtypes)
termcolor
grpcio
requests
six
astor
absl-py
gast
opt-einsum
google-pasta
wrapt
tensorflow-estimator-bin
tensorboard
h5py
termcolor
typing-extensions
wrapt
] ++ lib.optional (!isPy3k) mock;

build-system =
[
distutils
wheel
]
++ lib.optionals cudaSupport [ addDriverRunpath ]
++ lib.optionals isCudaJetson [ cudaPackages.autoAddCudaCompatRunpath ];

preConfigure = ''
unset SOURCE_DATE_EPOCH

@@ -113,28 +114,6 @@ buildPythonPackage rec {
mv "$f" "$(sed -E 's/(nv[0-9]+)\.0*([0-9]+)/\1.\2/' <<< "$f")"
done

wheel unpack --dest unpacked ./*.whl
rm ./*.whl
(
cd unpacked/tensorflow*
# Adjust dependency requirements:
# - Relax flatbuffers, gast, protobuf, tensorboard, and tensorflow-estimator version requirements that don't match what we have packaged
# - The purpose of python3Packages.libclang is not clear at the moment and we don't have it packaged yet
# - keras and tensorflow-io-gcs-filesystem will be considered optional for now.
# - numpy was pinned to fix some internal tests: https://github.com/tensorflow/tensorflow/issues/60216
sed -i *.dist-info/METADATA \
-e "/Requires-Dist: flatbuffers/d" \
-e "/Requires-Dist: gast/d" \
-e "/Requires-Dist: keras/d" \
-e "/Requires-Dist: libclang/d" \
-e "/Requires-Dist: protobuf/d" \
-e "/Requires-Dist: tensorboard/d" \
-e "/Requires-Dist: tensorflow-estimator/d" \
-e "/Requires-Dist: tensorflow-io-gcs-filesystem/d" \
-e "s/Requires-Dist: numpy (.*)/Requires-Dist: numpy/"
)
wheel pack ./unpacked/tensorflow*

popd
'';
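
For reference, a minimal shell sketch of what the sed rewrite in preConfigure does to the wheel metadata. The METADATA excerpt below is hypothetical (illustrative version pins, not taken from the actual wheel), and the substitutions are a subset of the ones in the diff above:

  # Hypothetical METADATA excerpt (illustrative only)
  printf '%s\n' \
    'Requires-Dist: flatbuffers (>=23.5.26)' \
    'Requires-Dist: keras (<2.16,>=2.15.0)' \
    'Requires-Dist: numpy (<2.0.0,>=1.23.5)' \
    'Requires-Dist: requests (<3,>=2.21.0)' > METADATA

  # Drop requirements handled outside the wheel and strip the numpy pin,
  # mirroring the expressions used in preConfigure.
  sed -i \
    -e "/Requires-Dist: flatbuffers/d" \
    -e "/Requires-Dist: keras/d" \
    -e "s/Requires-Dist: numpy (.*)/Requires-Dist: numpy/" \
    METADATA

  cat METADATA
  # Requires-Dist: numpy
  # Requires-Dist: requests (<3,>=2.21.0)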