Skip to content

Commit

Permalink
Auto-merged develop -> beta (No conflicts, CI passed)
Browse files Browse the repository at this point in the history
  • Loading branch information
TravisCI committed May 31, 2018
2 parents 34f44b1 + ca80837 commit 472a06d
Show file tree
Hide file tree
Showing 7 changed files with 142 additions and 89 deletions.
7 changes: 4 additions & 3 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -24,15 +24,16 @@ before_install:
- sudo apt-get -qq update
- sudo apt-get -qq install gfortran libblas-dev liblapack-dev
- sudo apt-get -qq install openmpi-bin openmpi-common openssh-client openssh-server libopenmpi1.3 libopenmpi-dbg libopenmpi-dev >/dev/null
- sudo bash CI/install.sh # Only install texlive and extensions if we need them
- travis_wait 30 "sudo bash CI/install.sh >/dev/null"
- "export DISPLAY=:99.0"
- "sh -e /etc/init.d/xvfb start" #some tests require a display
- sleep 3 # give xvfb some time to start

# I'm attempting to remove this..?
#- pip install --global-option=build_ext --global-option="-I/usr/include/suitesparse" cvxpy cvxopt
install:
- travis_retry travis_wait pip install -r requirements.txt
- pip install --global-option=build_ext --global-option="-I/usr/include/suitesparse" cvxpy cvxopt >/dev/null
- pip install ipython mpi4py zmq coverage msgpack-python cython psutil >/dev/null
- pip install ipython mpi4py zmq coverage msgpack-python cython dill psutil >/dev/null
- pip install -e . >/dev/null

cache:
Expand Down
73 changes: 32 additions & 41 deletions CI/install.sh
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
#!/bin/bash
# This script needs to be run as admin
sudo apt-get update

##An example of how to search for a file in apt packages
## (useful for debugging TravisCI build errors)
Expand All @@ -19,46 +20,36 @@
apt-get install libsuitesparse-dev
cp /usr/lib/liblapack.so /usr/lib/libsuitesparseconfig.so

sudo add-apt-repository ppa:ubuntu-toolchain-r/test
sudo apt-get update

#Latex is no longer needed!
#echo "Checking if pdflatex install is needed"
#
#if [ "$ReportA" == "True" ]; then
# apt-get -qq install texlive-full
#fi
#
#if [ "$Drivers" == "True" ]; then
# apt-get -qq install texlive-latex-base
#fi
#
#if [ "$ReportA" == "True" ] || [ "$Drivers" == "True" ]; then
# echo "Installing pdflatex requirements"
# pushd /usr/share/texmf-texlive/
# wget http://mirrors.ctan.org/install/macros/latex/contrib/etoolbox.tds.zip
# wget http://mirrors.ctan.org/install/macros/latex/contrib/adjustbox.tds.zip
# wget http://mirrors.ctan.org/install/macros/latex/contrib/collectbox.tds.zip
# wget http://mirrors.ctan.org/install/macros/latex/contrib/pdfcomment.tds.zip
# wget http://mirrors.ctan.org/install/macros/latex/contrib/datetime2.tds.zip
# wget http://mirrors.ctan.org/install/macros/generic/tracklang.tds.zip
# wget http://mirrors.ctan.org/install/macros/latex/contrib/bezos.tds.zip
# wget http://mirrors.ctan.org/install/macros/latex/contrib/hyperref.tds.zip
# wget http://mirrors.ctan.org/install/macros/latex/contrib/oberdiek.tds.zip
# wget http://mirrors.ctan.org/install/macros/generic/ifxetex.tds.zip
# wget http://mirrors.ctan.org/install/macros/latex/contrib/standalone.tds.zip
# unzip -o etoolbox.tds.zip
# unzip -o adjustbox.tds.zip
# unzip -o collectbox.tds.zip
# unzip -o pdfcomment.tds.zip
# unzip -o datetime2.tds.zip
# unzip -o tracklang.tds.zip
# unzip -o bezos.tds.zip
# unzip -o hyperref.tds.zip
# unzip -o oberdiek.tds.zip
# unzip -o ifxetex.tds.zip
# unzip -o standalone.tds.zip
# texhash
# popd
#else
# echo "pdflatex is not required for these tests (ReportA is not set to \"True\")"
#fi
# Point gcc/g++ at gcc-4.8 via the alternatives system.
sudo update-alternatives --remove-all gcc
sudo update-alternatives --remove-all g++
# -y: apt-get must never prompt in non-interactive CI, or the job hangs/aborts
sudo apt-get install -y gcc-4.8
sudo apt-get install -y g++-4.8
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-4.8 20
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-4.8 20
# --auto selects the highest-priority alternative non-interactively;
# --config (as originally written) reads from stdin and blocks in CI
sudo update-alternatives --auto gcc
sudo update-alternatives --auto g++
sudo apt-get update
sudo apt-get upgrade -y
sudo apt-get dist-upgrade -y

export CXX=g++

sudo apt remove -y cmake

# Build and install the following version of CMake from source
version=3.11
build=1
mkdir -p ~/temp   # -p: do not fail if the directory already exists (e.g. cached builder)
cd ~/temp
wget https://cmake.org/files/v$version/cmake-$version.$build.tar.gz
tar -xzf cmake-$version.$build.tar.gz   # no -v: extraction listing just floods the CI log
cd cmake-$version.$build/
./bootstrap
make -j4
sudo make install
# Original did 'cd ..; rm -r temp' from inside ~/temp, which removed nothing
# (there is no ~/temp/temp); clean up the actual build directory instead.
cd ~
rm -rf ~/temp
cmake --version
1 change: 0 additions & 1 deletion optional-requirements.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
### Optional Requirements ###
cvxpy
mpi4py
deap
psutil
Expand Down
82 changes: 52 additions & 30 deletions packages/pygsti/construction/datasetconstruction.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ def generate_fake_data(gatesetOrDataset, gatestring_list, nSamples,
manually incrementing seeds between those calls.
aliasDict : dict, optional
A dictionary mapping single gate labels into tuples of one or more
A dictionary mapping single gate labels into tuples of one or more
other gate labels which translate the given gate strings before values
are computed using `gatesetOrDataset`. The resulting Dataset, however,
contains the *un-translated* gate strings as keys.
Expand All @@ -78,7 +78,7 @@ def generate_fake_data(gatesetOrDataset, gatestring_list, nSamples,
comm : mpi4py.MPI.Comm, optional
When not ``None``, an MPI communicator for distributing the computation
across multiple processors and ensuring that the *same* dataset is
across multiple processors and ensuring that the *same* dataset is
generated on each processor.
Expand All @@ -89,7 +89,7 @@ def generate_fake_data(gatesetOrDataset, gatestring_list, nSamples,
"""
TOL = 1e-10

if isinstance(gatesetOrDataset, _ds.DataSet):
dsGen = gatesetOrDataset
gsGen = None
Expand All @@ -111,42 +111,64 @@ def generate_fake_data(gatesetOrDataset, gatestring_list, nSamples,
for s in gatestring_list ]
all_probs = gsGen.bulk_probs(trans_gatestring_list)
#all_dprobs = gsGen.bulk_dprobs(gatestring_list) #DEBUG - not needed here!!!

for k,s in enumerate(gatestring_list):

#print("DB GEN %d of %d (len %d)" % (k,len(gatestring_list),len(s)))
trans_s = _gstrc.translate_gatestring(s, aliasDict)
if gsGen:
ps = all_probs[trans_s]

if sampleError in ("binomial","multinomial"):
#Adjust to probabilities if needed (and warn if not close to in-bounds)
for ol in ps:
for ol in ps:
if ps[ol] < 0:
if ps[ol] < -TOL: _warnings.warn("Clipping probs < 0 to 0")
ps[ol] = 0.0
elif ps[ol] > 1:
elif ps[ol] > 1:
if ps[ol] > (1+TOL): _warnings.warn("Clipping probs > 1 to 1")
ps[ol] = 1.0
else:
ps = _collections.OrderedDict([ (ol,frac) for ol,frac
in dsGen[trans_s].fractions.items()])

if gsGen and sampleError in ("binomial","multinomial"):
#Check that sum ~= 1 (and nudge if needed) since binomial and
# multinomial random calls assume this.
psum = sum(ps.values())
if psum > 1:
if psum > 1+TOL: _warnings.warn("Adjusting sum(probs) > 1 to 1")
extra_p = (psum-1.0) * (1.000000001) # to sum < 1+eps (numerical prec insurance)
for lbl in ps:
if extra_p > 0:
x = min(ps[lbl],extra_p)
ps[lbl] -= x; extra_p -= x
else: break
#TODO: add adjustment if psum < 1?
assert(1.-TOL <= sum(ps.values()) <= 1.+TOL)

adjusted = False
if psum > 1+TOL:
adjusted = True
_warnings.warn("Adjusting sum(probs) > 1 to 1")
if psum < 1-TOL:
adjusted = True
_warnings.warn("Adjusting sum(probs) < 1 to 1")
# The following while loop is needed if the data generating gateset
# is bad enough that one loop over the probabilities is not enough
OVERTOL = 1.0 + TOL
UNDERTOL = 1.0 - TOL
normalized = lambda : UNDERTOL <= sum(ps.values()) <= OVERTOL
while not normalized():
if psum > 1:
extra_p = (psum-1.0) * OVERTOL
for lbl in ps:
if extra_p > 0:
x = min(ps[lbl],extra_p)
ps[lbl] -= x; extra_p -= x
else: break
elif psum < 1:
needed_p = abs((psum-1.0) * UNDERTOL)
for lbl in ps:
if needed_p > 0:
x = min(ps[lbl], needed_p)
ps[lbl] += x # ADD here rather than subtract
needed_p -= x
else: break
psum = sum(ps.values())
assert normalized(), 'psum={}'.format(psum)
if adjusted:
_warnings.warn('Adjustment finished')

if nSamples is None and dsGen is not None:
N = dsGen[trans_s].total #use the number of samples from the generating dataset
#Note: total() accounts for other intermediate-measurement branches automatically
Expand All @@ -155,27 +177,27 @@ def generate_fake_data(gatesetOrDataset, gatestring_list, nSamples,
N = nSamples[k] #try to treat nSamples as a list
except:
N = nSamples #if not indexable, nSamples should be a single number

#Weight the number of samples according to a WeightedGateString
if isinstance(s, _gs.WeightedGateString):
nWeightedSamples = int(round(s.weight * N))
else:
nWeightedSamples = N

counts = {} #don't use an ordered dict here - add_count_dict will sort keys
labels = sorted(list(ps.keys())) # "outcome labels" - sort for consistent generation
if sampleError == "binomial":
assert(len(labels) == 2)
ol0,ol1 = labels[0], labels[1]
counts[ol0] = rndm.binomial(nWeightedSamples, ps[ol0])
counts[ol1] = nWeightedSamples - counts[ol0]

elif sampleError == "multinomial":
countsArray = rndm.multinomial(nWeightedSamples,
[ps[ol] for ol in labels], size=1) # well-ordered list of probs
sorted([ps[ol] for ol in labels]), size=1) # well-ordered list of probs
for i,ol in enumerate(labels):
counts[ol] = countsArray[0,i]

else:
for outcomeLabel,p in ps.items():
pc = _np.clip(p,0,1)
Expand All @@ -184,13 +206,13 @@ def generate_fake_data(gatesetOrDataset, gatestring_list, nSamples,
elif sampleError == "round":
counts[outcomeLabel] = int(round(nWeightedSamples*pc))
else: raise ValueError("Invalid sample error parameter: '%s' Valid options are 'none', 'round', 'binomial', or 'multinomial'" % sampleError)

dataset.add_count_dict(s, counts)
dataset.done_adding_data()

if comm is not None: # broadcast to non-root procs
dataset = comm.bcast(dataset if (comm.Get_rank() == 0) else None,root=0)

return dataset


Expand All @@ -199,20 +221,20 @@ def merge_outcomes(dataset,label_merge_dict):
Creates a DataSet which merges certain outcomes in input DataSet;
used, for example, to aggregate a 2-qubit 4-outcome DataSet into a 1-qubit 2-outcome
DataSet.
Parameters
----------
dataset : DataSet object
The input DataSet whose results will be compiled according to the rules
The input DataSet whose results will be compiled according to the rules
set forth in label_merge_dict
label_merge_dict : dictionary
The dictionary whose keys define the new DataSet outcomes, and whose items
The dictionary whose keys define the new DataSet outcomes, and whose items
are lists of input DataSet outcomes that are to be summed together. For example,
if a two-qubit DataSet has outcome labels "00", "01", "10", and "11", and
we want to ''trace out'' the second qubit, we could use label_merge_dict =
{'0':['00','01'],'1':['10','11']}.
Returns
-------
merged_dataset : DataSet object
Expand Down
21 changes: 21 additions & 0 deletions packages/pygsti/tools/profile.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
import cProfile
from mpi4py import MPI

def profile(filename=None, comm=MPI.COMM_WORLD):
    """Decorator factory: profile the wrapped function with cProfile.

    Parameters
    ----------
    filename : str, optional
        If given, stats are dumped to this file (readable with pstats);
        otherwise they are printed to stdout.
    comm : mpi4py.MPI.Comm, optional
        MPI communicator.  Currently unused in the body; presumably a hook
        for writing one stats file per rank (see the commented-out suffix
        below) — TODO confirm intended use.
    """
    from functools import wraps  # local import: keeps module-level deps unchanged

    def prof_decorator(f):
        @wraps(f)  # preserve f's __name__/__doc__ on the wrapper
        def wrap_f(*args, **kwargs):
            pr = cProfile.Profile()
            pr.enable()
            try:
                result = f(*args, **kwargs)
            finally:
                # Always stop profiling, even if f raises; the original left
                # the profiler enabled on exception.
                pr.disable()

            if filename is None:
                pr.print_stats()
            else:
                filename_r = filename #+ ".{}".format(comm.rank)
                pr.dump_stats(filename_r)

            return result
        return wrap_f
    return prof_decorator

3 changes: 3 additions & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -5,3 +5,6 @@ plotly
ply
nose
msgpack # For IO specifically.. make optional?
# Since this is needed to complete unit tests, I'm making it fully required - LSaldyt
cvxopt
cvxpy~=0.2.28
44 changes: 30 additions & 14 deletions test/checkDocs.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,31 +2,47 @@
import os.path, importlib, pkgutil, sys
import pygsti

from pprint import pprint

from inspect import *
import inspect

missing = []  # callables found with at least one undocumented argument

def check_args_in_docstring(item):
    """Append *item* to ``missing`` (once per offending argument) if any of
    its named arguments is absent from its docstring, or if it has no
    docstring at all.
    """
    # getfullargspec: .args are positional parameters, .kwonlyargs are
    # keyword-only parameters.  The original code unpacked getargspec()'s
    # third element -- the *name* (a string) of the **kwargs parameter --
    # and iterated it as (k, v) pairs, which raised ValueError for any
    # function declaring **kwargs; getargspec itself is gone in Python 3.11.
    spec = getfullargspec(item)
    for arg in list(spec.args) + list(spec.kwonlyargs):
        if item.__doc__ is None or arg not in item.__doc__:
            missing.append(item)

def check_function(f):
    """Verify that function *f* documents all of its arguments."""
    # NOTE(review): this progress print fires once per function and is
    # noisy on large packages.
    print('Checking function')
    check_args_in_docstring(f)

def check_class(c):
    """Recursively run the docstring checks over the members of class *c*."""
    check(c)

def check_method(m):
    """Verify that method *m* documents all of its arguments."""
    check_args_in_docstring(m)

def check(module, _seen=None):
    """Recursively walk *module*, dispatching the appropriate docstring
    check for every pygsti function, method, class, and submodule found.

    Parameters
    ----------
    module : module
        The package or module to scan.
    _seen : set, optional
        ``id()``s of modules already visited.  Guards against infinite
        recursion: Python module graphs are cyclic (pygsti submodules
        re-export one another), and the original unguarded recursion could
        hit RecursionError.
    """
    if _seen is None:
        _seen = set()
    if id(module) in _seen:
        return
    _seen.add(id(module))
    for name, member in getmembers(module):
        # Only inspect objects that belong to pygsti itself.
        # (Original left a debug `print(member)` here that flooded stdout.)
        if 'pygsti' not in str(member):
            continue
        # The categories are mutually exclusive, so dispatch with elif.
        if isfunction(member):
            check_function(member)
        elif ismethod(member):
            check_method(member)
        elif isclass(member):
            check_class(member)
        elif ismodule(member):
            check(member, _seen)

def main(args):
    """Entry point: scan the whole pygsti package, then report every
    callable whose docstring omits one of its arguments."""
    check(pygsti)
    pprint(missing)
    return 0

if __name__ == '__main__':
Expand Down

0 comments on commit 472a06d

Please sign in to comment.