Continuing #2057, s/assert/raise AssertionError/g #3023

Merged
1 commit merged on Apr 23, 2013
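Why s/assert/raise AssertionError/: bare assert statements are stripped when Python runs with -O (or PYTHONOPTIMIZE), so validation written as asserts silently disappears in optimized mode. Rewriting each one as an explicit raise keeps the check active under any interpreter flags while still raising AssertionError, so existing callers that catch it are unaffected. A minimal sketch of the pattern applied throughout this diff (the function and message below are illustrative, not taken from pandas):

# Before: the whole statement is compiled away under "python -O",
# so the argument check vanishes in optimized mode.
def get_value(args, axis_len):
    assert len(args) == axis_len, "require an arg for each axis"

# After: the check always runs and raises the same exception type
# as the original assert.
def get_value_checked(args, axis_len):
    if not len(args) == axis_len:
        raise AssertionError("require an arg for each axis")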
6 changes: 4 additions & 2 deletions pandas/core/config.py
@@ -313,8 +313,10 @@ def __doc__(self):

class option_context(object):
def __init__(self, *args):
assert len(args) % 2 == 0 and len(args) >= 2, \
"Need to invoke as option_context(pat,val,[(pat,val),..))."
if not ( len(args) % 2 == 0 and len(args) >= 2):
errmsg = "Need to invoke as option_context(pat,val,[(pat,val),..))."
raise AssertionError(errmsg)

ops = zip(args[::2], args[1::2])
undo = []
for pat, val in ops:
3 changes: 2 additions & 1 deletion pandas/core/frame.py
@@ -709,7 +709,8 @@ def __unicode__(self):
self.info(buf=buf, verbose=verbose)

value = buf.getvalue()
assert type(value) == unicode
if not type(value) == unicode:
raise AssertionError()

return value

12 changes: 8 additions & 4 deletions pandas/core/panel.py
@@ -618,7 +618,8 @@ def get_value(self, *args):
value : scalar value
"""
# require an arg for each axis
assert(len(args) == self._AXIS_LEN)
if not ((len(args) == self._AXIS_LEN)):
raise AssertionError()

# hm, two layers to the onion
frame = self._get_item_cache(args[0])
@@ -642,7 +643,8 @@ def set_value(self, *args):
otherwise a new object
"""
# require an arg for each axis and the value
assert(len(args) == self._AXIS_LEN + 1)
if not ((len(args) == self._AXIS_LEN + 1)):
raise AssertionError()

try:
frame = self._get_item_cache(args[0])
@@ -685,7 +687,8 @@ def __setitem__(self, key, value):
**self._construct_axes_dict_for_slice(self._AXIS_ORDERS[1:]))
mat = value.values
elif isinstance(value, np.ndarray):
assert(value.shape == shape[1:])
if not ((value.shape == shape[1:])):
raise AssertionError()
mat = np.asarray(value)
elif np.isscalar(value):
dtype, value = _infer_dtype_from_scalar(value)
@@ -1481,7 +1484,8 @@ def _prep_ndarray(self, values, copy=True):
else:
if copy:
values = values.copy()
assert(values.ndim == self._AXIS_LEN)
if not ((values.ndim == self._AXIS_LEN)):
raise AssertionError()
return values

@staticmethod
10 changes: 7 additions & 3 deletions pandas/core/series.py
@@ -1126,7 +1126,8 @@ def __unicode__(self):
else:
result = u'Series([], dtype: %s)' % self.dtype

assert type(result) == unicode
if not ( type(result) == unicode):
raise AssertionError()
return result

def __repr__(self):
@@ -1194,7 +1195,9 @@ def to_string(self, buf=None, na_rep='NaN', float_format=None,
the_repr = self._get_repr(float_format=float_format, na_rep=na_rep,
length=length, dtype=dtype, name=name)

assert type(the_repr) == unicode
# catch contract violations
if not type(the_repr) == unicode:
raise AssertionError("expected unicode string")

if buf is None:
return the_repr
@@ -1212,7 +1215,8 @@ def _get_repr(self, name=False, print_header=False, length=True, dtype=True,
length=length, dtype=dtype, na_rep=na_rep,
float_format=float_format)
result = formatter.to_string()
assert type(result) == unicode
if not ( type(result) == unicode):
raise AssertionError()
return result

def __iter__(self):
6 changes: 4 additions & 2 deletions pandas/io/date_converters.py
@@ -46,10 +46,12 @@ def _maybe_cast(arr):


def _check_columns(cols):
assert(len(cols) > 0)
if not ((len(cols) > 0)):
raise AssertionError()

N = len(cols[0])
for c in cols[1:]:
assert(len(c) == N)
if not ((len(c) == N)):
raise AssertionError()

return N
19 changes: 12 additions & 7 deletions pandas/io/parsers.py
@@ -579,7 +579,8 @@ def _clean_options(self, options, engine):

# type conversion-related
if converters is not None:
assert(isinstance(converters, dict))
if not (isinstance(converters, dict)):
raise AssertionError()
else:
converters = {}

@@ -1474,7 +1475,8 @@ def _rows_to_cols(self, content):
if self._implicit_index:
col_len += len(self.index_col)

assert(self.skip_footer >= 0)
if not ((self.skip_footer >= 0)):
raise AssertionError()

if col_len != zip_len and self.index_col is not False:
row_num = -1
@@ -1768,12 +1770,15 @@ def __init__(self, f, colspecs, filler, thousands=None):
self.filler = filler # Empty characters between fields.
self.thousands = thousands

assert isinstance(colspecs, (tuple, list))
if not ( isinstance(colspecs, (tuple, list))):
raise AssertionError()

for colspec in colspecs:
assert isinstance(colspec, (tuple, list))
assert len(colspec) == 2
assert isinstance(colspec[0], int)
assert isinstance(colspec[1], int)
if not ( isinstance(colspec, (tuple, list)) and
len(colspec) == 2 and
isinstance(colspec[0], int) and
isinstance(colspec[1], int) ):
raise AssertionError()

def next(self):
line = next(self.f)
9 changes: 6 additions & 3 deletions pandas/sparse/array.py
@@ -25,7 +25,8 @@ def _sparse_op_wrap(op, name):
"""
def wrapper(self, other):
if isinstance(other, np.ndarray):
assert(len(self) == len(other))
if not ((len(self) == len(other))):
raise AssertionError()
if not isinstance(other, SparseArray):
other = SparseArray(other, fill_value=self.fill_value)
return _sparse_array_op(self, other, op, name)
@@ -129,7 +130,8 @@ def __new__(cls, data, sparse_index=None, kind='integer', fill_value=None,
fill_value=fill_value)
else:
values = data
assert(len(values) == sparse_index.npoints)
if not ((len(values) == sparse_index.npoints)):
raise AssertionError()

# Create array, do *not* copy data by default
if copy:
@@ -275,7 +277,8 @@ def take(self, indices, axis=0):
-------
taken : ndarray
"""
assert(axis == 0)
if not ((axis == 0)):
raise AssertionError()
indices = np.asarray(indices, dtype=int)

n = len(self)
4 changes: 3 additions & 1 deletion pandas/sparse/frame.py
@@ -709,7 +709,9 @@ def _join_compat(self, other, on=None, how='left', lsuffix='', rsuffix='',

def _join_index(self, other, how, lsuffix, rsuffix):
if isinstance(other, Series):
assert(other.name is not None)
if not (other.name is not None):
raise AssertionError()

other = SparseDataFrame({other.name: other},
default_fill_value=self.default_fill_value)

3 changes: 2 additions & 1 deletion pandas/sparse/panel.py
@@ -71,7 +71,8 @@ def __init__(self, frames, items=None, major_axis=None, minor_axis=None,
default_kind=default_kind)
frames = new_frames

assert(isinstance(frames, dict))
if not (isinstance(frames, dict)):
raise AssertionError()

self.default_fill_value = fill_value = default_fill_value
self.default_kind = kind = default_kind
9 changes: 6 additions & 3 deletions pandas/sparse/series.py
@@ -110,7 +110,8 @@ def __new__(cls, data, index=None, sparse_index=None, kind='block',
if isinstance(data, SparseSeries) and index is None:
index = data.index
elif index is not None:
assert(len(index) == len(data))
if not (len(index) == len(data)):
raise AssertionError()

sparse_index = data.sp_index
values = np.asarray(data)
@@ -128,7 +129,8 @@ def __new__(cls, data, index=None, sparse_index=None, kind='block',
fill_value=fill_value)
else:
values = data
assert(len(values) == sparse_index.npoints)
if not (len(values) == sparse_index.npoints):
raise AssertionError()
else:
if index is None:
raise Exception('must pass index!')
@@ -446,7 +448,8 @@ def sparse_reindex(self, new_index):
-------
reindexed : SparseSeries
"""
assert(isinstance(new_index, splib.SparseIndex))
if not (isinstance(new_index, splib.SparseIndex)):
raise AssertionError()

new_values = self.sp_index.to_int_index().reindex(self.sp_values,
self.fill_value,
9 changes: 6 additions & 3 deletions pandas/stats/ols.py
@@ -619,7 +619,8 @@ def _set_window(self, window_type, window, min_periods):
self._window_type = scom._get_window_type(window_type)

if self._is_rolling:
assert(window is not None)
if not ((window is not None)):
raise AssertionError()
if min_periods is None:
min_periods = window
else:
@@ -1196,7 +1197,8 @@ def _nobs_raw(self):
return result.astype(int)

def _beta_matrix(self, lag=0):
assert(lag >= 0)
if not ((lag >= 0)):
raise AssertionError()

betas = self._beta_raw

@@ -1257,7 +1259,8 @@ def _filter_data(lhs, rhs, weights=None):
Cleaned lhs and rhs
"""
if not isinstance(lhs, Series):
assert(len(lhs) == len(rhs))
if not ((len(lhs) == len(rhs))):
raise AssertionError()
lhs = Series(lhs, index=rhs.index)

rhs = _combine_rhs(rhs)
12 changes: 8 additions & 4 deletions pandas/stats/plm.py
@@ -101,8 +101,10 @@ def _prepare_data(self):
y_regressor = y

if weights is not None:
assert(y_regressor.index.equals(weights.index))
assert(x_regressor.index.equals(weights.index))
if not ((y_regressor.index.equals(weights.index))):
raise AssertionError()
if not ((x_regressor.index.equals(weights.index))):
raise AssertionError()

rt_weights = np.sqrt(weights)
y_regressor = y_regressor * rt_weights
@@ -169,7 +171,8 @@ def _convert_x(self, x):
# .iteritems
iteritems = getattr(x, 'iteritems', x.items)
for key, df in iteritems():
assert(isinstance(df, DataFrame))
if not ((isinstance(df, DataFrame))):
raise AssertionError()

if _is_numeric(df):
x_converted[key] = df
@@ -637,7 +640,8 @@ def _y_predict_raw(self):
return (betas * x).sum(1)

def _beta_matrix(self, lag=0):
assert(lag >= 0)
if not ((lag >= 0)):
raise AssertionError()

index = self._y_trans.index
major_labels = index.labels[0]
29 changes: 19 additions & 10 deletions pandas/tools/merge.py
@@ -404,14 +404,17 @@ def _validate_specification(self):
elif self.left_on is not None:
n = len(self.left_on)
if self.right_index:
assert(len(self.left_on) == self.right.index.nlevels)
if not ((len(self.left_on) == self.right.index.nlevels)):
raise AssertionError()
self.right_on = [None] * n
elif self.right_on is not None:
n = len(self.right_on)
if self.left_index:
assert(len(self.right_on) == self.left.index.nlevels)
if not ((len(self.right_on) == self.left.index.nlevels)):
raise AssertionError()
self.left_on = [None] * n
assert(len(self.right_on) == len(self.left_on))
if not ((len(self.right_on) == len(self.left_on))):
raise AssertionError()


def _get_join_indexers(left_keys, right_keys, sort=False, how='inner'):
@@ -424,7 +427,8 @@ def _get_join_indexers(left_keys, right_keys, sort=False, how='inner'):
-------

"""
assert(len(left_keys) == len(right_keys))
if not ((len(left_keys) == len(right_keys))):
raise AssertionError()

left_labels = []
right_labels = []
@@ -537,8 +541,9 @@ def _left_join_on_index(left_ax, right_ax, join_keys, sort=False):
left_indexer = None

if len(join_keys) > 1:
assert(isinstance(right_ax, MultiIndex) and
len(join_keys) == right_ax.nlevels)
if not ((isinstance(right_ax, MultiIndex) and
len(join_keys) == right_ax.nlevels) ):
raise AssertionError()

left_tmp, right_indexer = \
_get_multiindex_indexer(join_keys, right_ax,
@@ -637,7 +642,8 @@ def __init__(self, data_list, join_index, indexers, axis=1, copy=True):
if axis <= 0: # pragma: no cover
raise MergeError('Only axis >= 1 supported for this operation')

assert(len(data_list) == len(indexers))
if not ((len(data_list) == len(indexers))):
raise AssertionError()

self.units = []
for data, indexer in zip(data_list, indexers):
@@ -925,7 +931,8 @@ def __init__(self, objs, axis=0, join='outer', join_axes=None,
axis = 1 if axis == 0 else 0

self._is_series = isinstance(sample, Series)
assert(0 <= axis <= sample.ndim)
if not ((0 <= axis <= sample.ndim)):
raise AssertionError()

# note: this is the BlockManager axis (since DataFrame is transposed)
self.axis = axis
@@ -1084,7 +1091,8 @@ def _concat_single_item(self, objs, item):
to_concat.append(item_values)

# this method only gets called with axis >= 1
assert(self.axis >= 1)
if not ((self.axis >= 1)):
raise AssertionError()
return com._concat_compat(to_concat, axis=self.axis - 1)

def _get_result_dim(self):
@@ -1103,7 +1111,8 @@ def _get_new_axes(self):
continue
new_axes[i] = self._get_comb_axis(i)
else:
assert(len(self.join_axes) == ndim - 1)
if not ((len(self.join_axes) == ndim - 1)):
raise AssertionError()

# ufff...
indices = range(ndim)
3 changes: 2 additions & 1 deletion pandas/tools/pivot.py
@@ -300,7 +300,8 @@ def _get_names(arrs, names, prefix='row'):
else:
names.append('%s_%d' % (prefix, i))
else:
assert(len(names) == len(arrs))
if not ((len(names) == len(arrs))):
raise AssertionError()
if not isinstance(names, list):
names = list(names)
