
See bug #2375. Copy data files to the build directory, so the tests run properly.
commit 94b412ab8f6b7932079b60b87522c6ffcd554bda 1 parent 1628783
mdehoon authored
Showing with 25 additions and 2 deletions.
  1. +25 −2 setup.py
setup.py
@@ -161,6 +161,28 @@ def run(self):
         self.packages.extend(NUMPY_PACKAGES)
         build_py.run(self)
+        # In addition to installing the data files, we also need to make
+        # sure that they are copied to the build directory. Otherwise,
+        # the unit tests will fail because they cannot find the data files
+        # in the build directory.
+        # This is taken care of automatically in Python 2.4 or higher by
+        # using package_data.
+
+        import glob
+        data_files = self.distribution.data_files
+        for entry in data_files:
+            if type(entry) is not type(""):
+                raise ValueError, "data_files must be strings"
+            # Unix- to platform-convention conversion
+            entry = os.sep.join(entry.split("/"))
+            filenames = glob.glob(entry)
+            for filename in filenames:
+                dst = os.path.join(self.build_lib, filename)
+                dstdir = os.path.split(dst)[0]
+                self.mkpath(dstdir)
+                self.copy_file(filename, dst)
+
+
 class CplusplusExtension(Extension):
     """Hack-y wrapper around Extension to support C++ and Python2.2.
@@ -500,7 +522,7 @@ def is_Numpy_installed():
 # way of handling this, and we need to subclass install_data. This
 # code is adapted from the mx.TextTools distribution.
-# We can use install_data instead once we require Python 2.4 or higher.
+# We can use package_data instead once we require Python 2.4 or higher.
 class install_data_biopython(install_data):
     def finalize_options(self):
@@ -550,5 +572,6 @@ def run (self):
     packages=PACKAGES,
     ext_modules=EXTENSIONS,
     data_files=DATA_FILES,
-    package_data = {'Bio.Entrez': ['DTDs/*.dtd']}
+    # package_data = {'Bio.Entrez': ['DTDs/*.dtd']}
+    ## Use this once we require Python version >= 2.4.
     )
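
For reference, once Python >= 2.4 is the minimum supported version, the commented-out package_data argument above can replace both the copy loop added to build_py and the install_data_biopython subclass, because distutils then copies package data into the build and install trees itself. The following is a minimal sketch, not part of this commit: only the 'Bio.Entrez' DTD pattern comes from the diff, and the other arguments are illustrative stand-ins for what the real setup() call passes.

from distutils.core import setup

setup(
    name="biopython",                # illustrative; the real call passes more metadata
    packages=["Bio", "Bio.Entrez"],  # stand-ins for the PACKAGES list in setup.py
    # With Python >= 2.4, distutils places these files next to the package
    # in the build directory as well as in the install tree.
    package_data={"Bio.Entrez": ["DTDs/*.dtd"]},
)

With package_data, the DTD files end up under the Bio.Entrez package inside the build directory, which is what the copy loop in this commit does by hand for older Python versions.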