Commit 19c2748

Merge 6e891d4 into 4874d97
2 parents 4874d97 + 6e891d4
katherinekolman committed May 20, 2020
Showing 10 changed files with 219 additions and 35 deletions.
79 changes: 60 additions & 19 deletions .travis.yml
@@ -1,13 +1,12 @@
language: python
env:
global:
- PYTHON_VERSION=$TRAVIS_PYTHON_VERSION
- NUMPY_VERSION=1.17
- NUMPY_VERSION=stable
- MAIN_CMD='python setup.py'
- CONDA_DEPENDENCIES='hdf5 rasterio matplotlib numba pyproj coveralls pytest pytest-mock
pytest-cov pytest-qt vispy netcdf4 h5py imageio imageio-ffmpeg ffmpeg
- CONDA_DEPENDENCIES='hdf5 rasterio numba pyproj coveralls pytest pytest-mock
pytest-cov coverage pytest-qt vispy netcdf4 h5py imageio imageio-ffmpeg ffmpeg
pillow pyshp pyqtgraph shapely sqlalchemy pyqt appdirs pyyaml satpy eccodes scikit-image
donfig'
donfig conda-pack'
- PIP_DEPENDENCIES='pytest-xvfb'
- SETUP_XVFB=True
- EVENT_TYPE='push pull_request'
@@ -21,30 +20,72 @@ matrix:
- env: PYTHON_VERSION=3.7
os: osx
language: generic
- env: PYTHON_VERSION=3.7
os: windows
language: shell
- os: windows
env: PYTHON_VERSION=3.7
language: c
allow_failures:
- os: windows
env: PYTHON_VERSION=3.7
language: c
install:
- git clone --depth 1 git://github.com/astropy/ci-helpers.git
- source ci-helpers/travis/setup_conda.sh
- if [ "${TRAVIS_OS_NAME}" = "windows" ]; then git clone --depth 1 git://github.com/vtkiorg/gl-ci-helpers.git;
- if [ "${TRAVIS_OS_NAME}" == "windows" ]; then
git clone --depth 1 -b feature-travis-windows git://github.com/djhoese/gl-ci-helpers.git;
powershell -Command "Set-ExecutionPolicy RemoteSigned -scope CurrentUser";
powershell gl-ci-helpers/appveyor/install_opengl.ps1;
fi;
- if [ "${TRAVIS_OS_NAME}" = "windows" ]; then powershell -Command "Set-ExecutionPolicy
RemoteSigned -scope CurrentUser"; fi;
- if [ "${TRAVIS_OS_NAME}" = "windows" ]; then powershell gl-ci-helpers/appveyor/install_opengl.ps1;
- if [ "${TRAVIS_OS_NAME}" == "linux" ] || [ "${TRAVIS_OS_NAME}" == "osx" ]; then
conda install -y pygrib matplotlib==3.1.3;
fi;
- if [ "${TRAVIS_OS_NAME}" = "windows" ]; then pip install PyQt5==5.9; fi;
- if [ "${TRAVIS_OS_NAME}" = "linux" ] || [ "${TRAVIS_OS_NAME}" = "osx" ]; then conda
install pygrib; fi;
- pip install -e . --no-deps
before_script:
- export DISPLAY=:99.0
- if [ "${TRAVIS_OS_NAME}" = "osx" ]; then ( sudo Xvfb :99 -ac -screen 0 1400x900x24
- if [ "${TRAVIS_OS_NAME}" == "osx" ]; then ( sudo Xvfb :99 -ac -screen 0 1400x900x24
+render +iglx; echo ok )& fi;
script:
- pip install -e .
- pytest -s --cov-report term --cov=uwsift uwsift/tests
# Tests need DirectX to run which is not available on Travis right now
- if [ "${TRAVIS_OS_NAME}" != "windows" ]; then
pytest -s --cov-report term --cov=uwsift uwsift/tests;
fi
# Reinstall SIFT *into* the environment so conda-pack can bundle it
- pip install --no-deps .
# Unstable version
- if [[ $TRAVIS_TAG == "" ]]; then
version=$(python -c "from uwsift import __version__; print(__version__)");
if [[ "${TRAVIS_OS_NAME}" == "windows" ]]; then
ext="zip";
platform="windows";
else
ext="tar.gz";
if [[ "${TRAVIS_OS_NAME}" == "osx" ]]; then
platform="darwin";
else
platform="linux";
fi;
fi;
oflag="-o SIFT_${version}dev_${platform}_$(date +%Y%m%d_%H%M%S).${ext}";
else
oflag="";
fi
- python build_conda_pack.py -j -1 $oflag
- ls -l
after_success:
- if [[ $PYTHON_VERSION == 3.7 ]]; then coveralls; fi;
- if [[ $PYTHON_VERSION == "3.7" ]]; then coveralls; fi;
- if [[ $TRAVIS_TAG == "" ]]; then
odir="experimental/";
else
odir="";
fi
- echo "${SFTP_UPLOAD_KEY}" | base64 --decode >/tmp/sftp_rsa
# if this isn't a pull request, upload the new version
# if we made a real release, delete all experimental releases
- if [[ $TRAVIS_PULL_REQUEST_BRANCH == "" ]]; then
curl -k --ftp-create-dirs -T SIFT_*.*.*_*.* --key /tmp/sftp_rsa sftp://sift@ftp.ssec.wisc.edu/${odir};
if [[ $TRAVIS_TAG != "" ]]; then
curl -k -l --key /tmp/sftp_rsa sftp://sift@ftp.ssec.wisc.edu/experimental | grep SIFT_*.*.*_*.* | xargs -I{} -- curl -k -v --key /tmp/sftp_rsa sftp://sift@ftp.ssec.wisc.edu/experimental -Q "DELE {}";
fi;
fi
deploy:
- provider: pypi
user: __token__
76 changes: 76 additions & 0 deletions build_conda_pack.py
@@ -0,0 +1,76 @@
#!/usr/bin/env python3
"""Create a conda-pack'd SIFT installation tarball.
Note: This script will place extra files in the currently activated python
environment in order to include these files in the produced tarball.
SIFT must be installed in the current environment with::
pip install --no-deps .
Instead of installing it in development mode (`-e`).
Example::
python build_conda_pack.py -c
"""

import os
import sys
import shutil
import subprocess


def get_version():
try:
from uwsift import __version__
return __version__
except ImportError:
raise RuntimeError("Could not determine SIFT version. Is SIFT installed?")


def main():
import argparse
parser = argparse.ArgumentParser(
description="Build SIFT installation tarball (remaining arguments "
"are passed to conda-pack)")
parser.add_argument('--arcroot',
help="Directory name inside the tarball (default: SIFT_X.Y.Z)")
parser.add_argument('-o', '--output',
help="Pathname for bundled file. Default is "
"'SIFT_X.Y.Z_<platform>.<ext>' where platform is "
"'linux', 'darwin', or 'win32' and ext is "
"'.tar.gz' for linux and OSX, '.zip' for Windows.")
args, unknown_args = parser.parse_known_args()

version = get_version()
if args.arcroot is None:
args.arcroot = f"SIFT_{version}"
if args.output is None:
ext = '.zip' if 'win' in sys.platform else '.tar.gz'
args.output = f"SIFT_{version}_{sys.platform}{ext}"

# Copy appropriate wrapper scripts
dst = sys.prefix
script_dir = os.path.realpath(os.path.dirname(__file__))
if 'nux' in sys.platform:
script = os.path.join(script_dir, 'bundle_scripts', 'SIFT.sh')
shutil.copyfile(script, os.path.join(dst, 'SIFT.sh'))
elif 'darwin' in sys.platform:
script = os.path.join(script_dir, 'bundle_scripts', 'SIFT.sh')
shutil.copyfile(script, os.path.join(dst, 'SIFT.command'))
elif 'win' in sys.platform:
script = os.path.join(script_dir, 'bundle_scripts', 'SIFT.bat')
shutil.copyfile(script, os.path.join(dst, 'SIFT.bat'))
else:
raise RuntimeError(f"Unknown platform: {sys.platform}")

subprocess.check_call(['conda-pack', '--arcroot', args.arcroot,
'--output', args.output] + unknown_args)
os.chmod(args.output, 0o755)

# TODO: Do additional risky cleanup to reduce output file size


if __name__ == "__main__":
sys.exit(main())
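For context, the Travis configuration above drives this script after reinstalling SIFT into the active environment. A minimal local sketch of the same flow, assuming an activated conda environment with SIFT's dependencies and conda-pack installed (the output filename is illustrative, not a fixed release name):

```bash
# Install SIFT into the environment (not editable) so conda-pack can bundle it
pip install --no-deps .
# Ask the installed package for its version string
version=$(python -c "from uwsift import __version__; print(__version__)")
# -j -1 is passed through to conda-pack (use all cores); the -o name is an example
python build_conda_pack.py -j -1 -o "SIFT_${version}_linux.tar.gz"
```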
13 changes: 13 additions & 0 deletions bundle_scripts/README.md
@@ -0,0 +1,13 @@
# Bundle Scripts

The scripts in this directory are specially constructed to run from a
conda-pack'd bundled installation of SIFT. The types of scripts included are
currently:

1. `SIFT.X` where `X` corresponds to a scripting extension specific to each
platform. This is `.sh` for Linux (CentOS 7+), `.command` for OSX, and
`.bat` for Windows. These scripts are placed in the root directory of
the released bundle.

Note: to reuse code as much as possible, some scripts may be copied to
the appropriate name rather than existing as separate files.
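As a usage sketch (the bundle and directory names below are hypothetical examples, not actual release names), an end user on Linux would typically unpack a released bundle and launch SIFT like this:

```bash
# Unpack a conda-pack'd SIFT bundle and launch it on Linux.
# The first run triggers the one-time conda-unpack initialization.
tar -xzf SIFT_1.0.0_linux.tar.gz
cd SIFT_1.0.0
./SIFT.sh
```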
19 changes: 19 additions & 0 deletions bundle_scripts/SIFT.bat
@@ -0,0 +1,19 @@
@echo off
REM Initialize SIFT installation if necessary and run SIFT

set base_dir=%~p0

REM Activate the conda environment
call %base_dir%Scripts\activate

REM Create a signal file that we have run conda-unpack
set installed=%base_dir%.installed
if not exist "%installed%" (
echo Running one-time initialization of SIFT installation...
conda-unpack
echo %base_dir% > %installed%
)

echo Running SIFT...

python -m uwsift %*
30 changes: 30 additions & 0 deletions bundle_scripts/SIFT.sh
@@ -0,0 +1,30 @@
#!/usr/bin/env bash
# Usage: SIFT.sh <command line arguments>
# Description: Initialize the SIFT installation if necessary and run SIFT
set -e

# get current base directory for this script
SOURCE="${BASH_SOURCE[0]}"
while [[ -h "$SOURCE" ]] ; do SOURCE="$(readlink "$SOURCE")"; done
BASE="$( cd -P "$( dirname "$SOURCE" )" && pwd )"

# Remove user environment variables that may conflict with installation
unset LD_PRELOAD
unset LD_LIBRARY_PATH
unset DYLD_LIBRARY_PATH
unset PYTHONPATH
export PYTHONNOUSERSITE=1

# Activate the conda-pack'd environment
source $BASE/activate

# Check if we already ran conda-unpack
install_signal="${BASE}/.installed"
if [[ ! -f "${install_signal}" ]]; then
echo "Running one-time initialization of SIFT installation..."
conda-unpack
echo "${BASE}" > "${install_signal}"
echo "Running SIFT..."
fi

python -m uwsift "$@"
6 changes: 5 additions & 1 deletion setup.py
@@ -201,7 +201,11 @@ def run(self):
python_requires='>=3.6',
extras_require=extras_require,
packages=find_packages(),
# entry_points={},
entry_points={
"console_scripts": [
"SIFT = uwsift.__main__:main",
],
},
cmdclass={
'bump': BumpCommand,
}
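The new `console_scripts` entry point above installs a `SIFT` command that calls `uwsift.__main__:main`, which should be roughly equivalent to the `python -m uwsift` invocation used by the bundle scripts. A small sketch, assuming a regular `pip install .`:

```bash
# After installing the package, either invocation starts SIFT:
SIFT
python -m uwsift
```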
16 changes: 10 additions & 6 deletions sift.spec
@@ -2,7 +2,7 @@

import sys
from PyInstaller.compat import is_win, is_darwin, is_linux
from PyInstaller.utils.hooks import collect_submodules
from PyInstaller.utils.hooks import collect_submodules, collect_data_files
import vispy.glsl
import vispy.io
import satpy
@@ -23,14 +23,16 @@ data_files = [
for shape_dir in ["ne_50m_admin_0_countries", "ne_110m_admin_0_countries", "ne_50m_admin_1_states_provinces_lakes",
"fonts", "colormaps", "grib_definitions"]:
data_files.append((os.path.join("uwsift", "data", shape_dir), os.path.join("sift_data", shape_dir)))
data_files.extend(collect_data_files('pyspectral'))

hidden_imports = [
"vispy.ext._bundled.six",
"vispy.app.backends._pyqt4",
"vispy.app.backends._pyqt5",
"sqlalchemy.ext.baked",
"satpy",
"skimage",
"skimage.measure",
"pyproj.datadir",
] + collect_submodules("rasterio") + collect_submodules('satpy')
if is_win:
hidden_imports += collect_submodules("encodings")
@@ -51,13 +53,15 @@ def _include_if_exists(binaries, lib_dir, lib_pattern):
# Add missing shared libraries
binaries = []
if is_linux:
lib_dir = sys.executable.replace(os.path.join("bin", "python"), "lib")
bin_idx = sys.executable.rfind("/bin")
lib_dir = os.path.join(sys.executable[:bin_idx], "lib")
binaries += [(os.path.join(lib_dir, 'libfontconfig*.so'), '.')]
if not is_win:
# Add extra pygrib .def files
share_dir = sys.executable.replace(os.path.join("bin", "python"), "share")
lib_dir = sys.executable.replace(os.path.join("bin", "python"), "lib")
bin_dir = sys.executable.replace(os.path.join("bin", "python"), "bin")
bin_idx = sys.executable.rfind("/bin")
share_dir = os.path.join(sys.executable[:bin_idx], "share")
lib_dir = os.path.join(sys.executable[:bin_idx], "lib")
bin_dir = os.path.join(sys.executable[:bin_idx], "bin")
data_files.append((os.path.join(share_dir, 'eccodes'), os.path.join('share', 'eccodes')))
# Add ffmpeg
binaries += [(os.path.join(bin_dir, 'ffmpeg'), '.')]
1 change: 1 addition & 0 deletions uwsift/view/open_file_wizard.py
@@ -165,6 +165,7 @@ def collect_selected_ids(self):
if id_items['name'].checkState():
id_dict = {key: id_item.data(QtCore.Qt.UserRole)
for key, id_item in id_items.items() if id_item is not None}
id_dict['modifiers'] = None
selected_ids.append(DatasetID(**id_dict))
return selected_ids

2 changes: 2 additions & 0 deletions uwsift/workspace/guidebook.py
@@ -185,6 +185,8 @@ def collect_info(self, info):
z = {}

band_short_name = info.get(Info.DATASET_NAME, '???')
# FIXME: Don't use pure DATASET_NAME since resolution should not be part of the SHORT_NAME
# And/or don't use SHORT_NAME for grouping
if Info.SHORT_NAME not in info:
z[Info.SHORT_NAME] = band_short_name
else:
12 changes: 3 additions & 9 deletions uwsift/workspace/importer.py
@@ -30,21 +30,15 @@
from uwsift.workspace.guidebook import ABI_AHI_Guidebook, Guidebook
from .metadatabase import Resource, Product, Content

from satpy import Scene, available_readers, __version__ as satpy_version
from satpy.dataset import DatasetID

_SATPY_READERS = None # cache: see `available_satpy_readers()` below
SATPY_READER_CACHE_FILE = os.path.join(USER_CACHE_DIR,
'available_satpy_readers.yaml')


LOG = logging.getLogger(__name__)

try:
from satpy import Scene, available_readers, __version__ as satpy_version
from satpy.dataset import DatasetID
except ImportError:
LOG.warning("SatPy is not installed and will not be used for importing.")
Scene = None
DatasetID = None

try:
from skimage.measure import find_contours
except ImportError:
