Merge pull request #243 from ConnectedSystems/scipy-import-comb
Updated import of scipy comb function
jdherman committed May 13, 2019
2 parents dfb1f30 + e184c80 commit 3083615
Showing 19 changed files with 239 additions and 197 deletions.
35 changes: 22 additions & 13 deletions .travis.yml
@@ -1,16 +1,14 @@
sudo: false
language: python
virtualenv:
system_site_packages: true
env:
matrix:
- DISTRIB="conda" PYTHON_VERSION="3.5" COVERAGE="true"
- DISTRIB="conda" PYTHON_VERSION="3.6" COVERAGE="false"
- DISTRIB="conda" PYTHON_VERSION="3.7" COVERAGE="false"

- DISTRIB="conda" PYTHON_VERSION="3.8" COVERAGE="false"
matrix:
allow_failures:
- env: DISTRIB="conda" PYTHON_VERSION="3.7" COVERAGE="false"
- env: DISTRIB="conda" PYTHON_VERSION="3.8" COVERAGE="false"
addons:
apt:
packages:
@@ -26,12 +24,23 @@ after_success:
- if [[ "$COVERAGE" == "true" ]]; then coveralls || echo "failed"; fi
notifications:
email: false
deploy:
provider: pypi
user: jdherman
password:
secure: KiUygZVHWMR2a+fk092xeHh8J2c61VGrgq4v1l3bq41MxcZO745H7CLJJ3rfqo+G5/pqy4/tm+q3p8fHhbUmwMcuSC3vjRN6WAYjuNPs0FMVXiIkMQtevv2LdVF1zVKFBdYTuNrfugtZ/GXh/ReCydjEBWTrTaNYeNM4ZRIaj0Q=
on:
tags: true
distributions: sdist bdist_wheel
skip_upload_docs: true

jobs:
include:
- stage: "deploy"
name: "Deploy to PyPI"
if: tag =~ ^v
env: DISTRIB="conda" PYTHON_VERSION="3.6" COVERAGE="false"
install:
- source tests/travis_install.sh
- pip install -r requirements.txt
script: echo "Deploying to PyPI"
deploy:
provider: pypi
user: jdherman
password:
secure: KiUygZVHWMR2a+fk092xeHh8J2c61VGrgq4v1l3bq41MxcZO745H7CLJJ3rfqo+G5/pqy4/tm+q3p8fHhbUmwMcuSC3vjRN6WAYjuNPs0FMVXiIkMQtevv2LdVF1zVKFBdYTuNrfugtZ/GXh/ReCydjEBWTrTaNYeNM4ZRIaj0Q=
on:
tags: true
all_branches: true
skip_cleanup: true
6 changes: 3 additions & 3 deletions examples/rbd_fast/rbd_fast.bat
@@ -44,21 +44,21 @@ REM Sensitivity indices will print to command line. Use ">" to write to file.

salib analyze rbd_fast ^
-p ../../src/SALib/test_functions/params/Ishigami.txt ^
-Y ../data/model_output.txt ^
-X ../data/model_input.txt ^
-Y ../data/model_output.txt ^
--seed=100

REM python -m SALib.analyze.rbd_fast ^
REM -p ../../src/SALib/test_functions/params/Ishigami.txt ^
REM -Y ../data/model_output.txt ^
REM -X ../data/model_input.txt ^
REM -Y ../data/model_output.txt ^
REM --seed=100

REM Options:
REM -p, --paramfile: Your parameter range file (3 columns: parameter name, lower bound, upper bound)
REM
REM -Y, --model-output-file: File of model output values to analyze
REM -X, --model-input-file: File of model input values to analyze
REM -Y, --model-output-file: File of model output values to analyze
REM
REM --delimiter (optional): Model output file delimiter.
REM
2 changes: 1 addition & 1 deletion examples/rbd_fast/rbd_fast.py
@@ -18,7 +18,7 @@

# Perform the sensitivity analysis using the model output
# Specify which column of the output file to analyze (zero-indexed)
Si = rbd_fast.analyze(problem, Y, param_values, print_to_console=False)
Si = rbd_fast.analyze(problem, param_values, Y, print_to_console=False)
# Returns a dictionary with keys 'S1' and 'ST'
# e.g. Si['S1'] contains the first-order index for each parameter, in the
# same order as the parameter file
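A minimal, self-contained sketch (not part of this diff) of the updated argument order, in which the model inputs now precede the model outputs; the problem definition below is an assumption based on the standard three-parameter Ishigami test case used in this repository:

# Hedged sketch of the new rbd_fast.analyze(problem, X, Y) call order.
from SALib.analyze import rbd_fast
from SALib.sample import latin
from SALib.test_functions import Ishigami

problem = {
    'num_vars': 3,
    'names': ['x1', 'x2', 'x3'],
    'bounds': [[-3.14159265358979, 3.14159265358979]] * 3
}

param_values = latin.sample(problem, 1000)   # model inputs X
Y = Ishigami.evaluate(param_values)          # model outputs Y
Si = rbd_fast.analyze(problem, param_values, Y, print_to_console=False)
print(Si['S1'])  # first-order index per parameter, in parameter-file order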
4 changes: 2 additions & 2 deletions examples/rbd_fast/rbd_fast.sh
@@ -44,14 +44,14 @@ python -c "from SALib.test_functions import Ishigami; import numpy as np; np.sav

salib analyze rbd_fast \
-p ../../src/SALib/test_functions/params/Ishigami.txt \
-Y ../data/model_output.txt \
-X ../data/model_input.txt \
-Y ../data/model_output.txt \
--seed=100

# python -m SALib.analyze.rbd_fast \
# -p ../../src/SALib/test_functions/params/Ishigami.txt \
# -Y ../data/model_output.txt \
# -X ../data/model_input.txt \
# -Y ../data/model_output.txt \
# --seed=100

# Options:
5 changes: 5 additions & 0 deletions examples/sobol/sobol.bat
@@ -82,3 +82,8 @@ REM
REM --parallel (optional): Flag to enable parallel execution with multiprocessing
REM
REM --processors (optional, int): Number of processors to be used with the parallel option

REM First-order indices expected with Saltelli sampling:
REM x1: 0.3139
REM x2: 0.4424
REM x3: 0.0
7 changes: 6 additions & 1 deletion examples/sobol/sobol.py
@@ -21,10 +21,15 @@
# Specify which column of the output file to analyze (zero-indexed)
Si = sobol.analyze(problem, Y, calc_second_order=True, conf_level=0.95, print_to_console=True)
# Returns a dictionary with keys 'S1', 'S1_conf', 'ST', and 'ST_conf'
# e.g. Si['S1'] contains the first-order index for each parameter,
# e.g. Si['S1'] contains the first-order index for each parameter,
# in the same order as the parameter file
# The optional second-order indices are now returned in keys 'S2', 'S2_conf'
# These are both upper triangular DxD matrices with nan's in the duplicate
# entries.
# Optional keyword arguments parallel=True and n_processors=(int) for parallel execution
# using multiprocessing

# First-order indices expected with Saltelli sampling:
# x1: 0.3139
# x2: 0.4424
# x3: 0.0
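A minimal sketch (not part of this diff) that should reproduce the expected first-order indices noted above; the problem definition is an assumption based on the standard Ishigami bounds of roughly plus/minus pi:

# Hedged sketch of the Saltelli sampling / Sobol analysis workflow.
from SALib.analyze import sobol
from SALib.sample import saltelli
from SALib.test_functions import Ishigami

problem = {
    'num_vars': 3,
    'names': ['x1', 'x2', 'x3'],
    'bounds': [[-3.14159265358979, 3.14159265358979]] * 3
}

# Saltelli sampling produces N * (2D + 2) rows when second-order indices are requested.
param_values = saltelli.sample(problem, 1000, calc_second_order=True)
Y = Ishigami.evaluate(param_values)
Si = sobol.analyze(problem, Y, calc_second_order=True, conf_level=0.95,
                   print_to_console=False)
print(Si['S1'])  # expected to be close to [0.3139, 0.4424, 0.0]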
5 changes: 5 additions & 0 deletions examples/sobol/sobol.sh
@@ -82,3 +82,8 @@ salib analyze sobol \
# --parallel (optional): Flag to enable parallel execution with multiprocessing
#
# --processors (optional, int): Number of processors to be used with the parallel option

# First-order indices expected with Saltelli sampling:
# x1: 0.3139
# x2: 0.4424
# x3: 0.0
2 changes: 1 addition & 1 deletion setup.cfg
@@ -17,7 +17,7 @@ classifier =
Programming Language :: Python :: 3.5
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
License :: OSI Approved :: MIT License,
License :: OSI Approved :: MIT License
Intended Audience :: End Users/Desktop
Intended Audience :: Developers
Intended Audience :: Science/Research
19 changes: 9 additions & 10 deletions src/SALib/analyze/rbd_fast.py
@@ -11,7 +11,7 @@
from ..util import read_param_file, ResultDict


def analyze(problem, Y, X, M=10, print_to_console=False, seed=None):
def analyze(problem, X, Y, M=10, print_to_console=False, seed=None):
"""Performs the Random Balanced Design - Fourier Amplitude Sensitivity Test
(RBD-FAST) on model outputs.
@@ -23,10 +23,10 @@ def analyze(problem, Y, X, M=10, print_to_console=False, seed=None):
----------
problem : dict
The problem definition
Y : numpy.array
A NumPy array containing the model outputs
X : numpy.array
A NumPy array containing the model inputs
Y : numpy.array
A NumPy array containing the model outputs
M : int
The interference parameter, i.e., the number of harmonics to sum in
the Fourier series decomposition (default 10)
@@ -58,7 +58,7 @@ def analyze(problem, Y, X, M=10, print_to_console=False, seed=None):
--------
>>> X = latin.sample(problem, 1000)
>>> Y = Ishigami.evaluate(X)
>>> Si = rbd_fast.analyze(problem, Y, X, print_to_console=False)
>>> Si = rbd_fast.analyze(problem, X, Y, print_to_console=False)
"""
if seed:
np.random.seed(seed)
@@ -73,7 +73,7 @@ def analyze(problem, Y, X, M=10, print_to_console=False, seed=None):
Si['names'] = problem['names']

for i in range(D):
S1 = compute_first_order(permute_outputs(Y, X[:, i]), M)
S1 = compute_first_order(permute_outputs(X[:, i], Y), M)
S1 = unskew_S1(S1, M, N)
Si['S1'][i] = S1
if print_to_console:
@@ -82,7 +82,7 @@ def analyze(problem, Y, X, M=10, print_to_console=False, seed=None):
return Si


def permute_outputs(Y, X):
def permute_outputs(X, Y):
"""
Permute the output according to one of the inputs as in [_2]
@@ -126,13 +126,12 @@ def cli_parse(parser):

def cli_action(args):
problem = read_param_file(args.paramfile)
X = np.loadtxt(args.model_input_file,
delimiter=args.delimiter)
Y = np.loadtxt(args.model_output_file,
delimiter=args.delimiter,
usecols=(args.column,))
X = np.loadtxt(args.model_input_file,
delimiter=args.delimiter)

analyze(problem, Y, X, print_to_console=True, seed=args.seed)
analyze(problem, X, Y, print_to_console=True, seed=args.seed)


if __name__ == "__main__":
6 changes: 5 additions & 1 deletion src/SALib/sample/morris/gurobi.py
@@ -16,7 +16,11 @@
from datetime import datetime as dt

import numpy as np
from scipy.misc import comb as nchoosek

try:
from scipy.misc import comb as nchoosek
except ImportError:
from scipy.special import comb as nchoosek

from . strategy import Strategy

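For context, a standalone sketch (not part of the diff) of the import fallback shown above: newer SciPy releases removed scipy.misc.comb, so the import falls back to scipy.special.comb.

# Hedged sketch of the fallback introduced by this commit.
try:
    # Older SciPy versions expose comb in scipy.misc ...
    from scipy.misc import comb as nchoosek
except ImportError:
    # ... newer versions only provide it in scipy.special.
    from scipy.special import comb as nchoosek

# Either import returns the binomial coefficient "n choose k".
print(nchoosek(4, 2, exact=True))  # 6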
3 changes: 2 additions & 1 deletion src/SALib/test_functions/Ishigami.py
@@ -6,7 +6,8 @@


# Non-monotonic Ishigami Function (3 parameters)
# First-order indices:
# Using Saltelli sampling with a sample size of ~1000
# the expected first-order indices would be:
# x1: 0.3139
# x2: 0.4424
# x3: 0.0
4 changes: 0 additions & 4 deletions test-requirements.txt
@@ -1,8 +1,4 @@
nose
pre-commit
pytest
# Add requirements only needed for your unittests and during development here.
# They will be installed automatically when running `python setup.py test`.
# ATTENTION: Don't remove pytest-cov and pytest as they are needed.
pytest-cov
recommonmark
