Commit d468bd4: Fix arpack error (#21)
* Use more general exception handling

* Fix mypy

Co-authored-by: Michal Klein <>
michalk8 committed Mar 12, 2021
1 parent c4d9d74 commit d468bd4
Showing 6 changed files with 18 additions and 17 deletions.
2 changes: 1 addition & 1 deletion .mypy.ini
@@ -10,7 +10,7 @@ disallow_any_generics = True

 strict_optional = True
 strict_equality = True
-warn_return_any = True
+warn_return_any = False
 warn_unreachable = True
 check_untyped_defs = True
 ; because of docrep
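Context for the flip: with `warn_return_any = True`, mypy reports any typed function that returns a value the stubs can only describe as `Any`. A minimal, hypothetical illustration of the pattern that trips it (nothing here is pygpcca code):

```python
# With warn_return_any = True, mypy flags the typed wrapper below:
#     Returning Any from function declared to return "float"
# because indexing into an Any-typed value is itself Any.
from typing import Any


def untyped_library_call() -> Any:
    return {"eigenvalues": [0.99, 0.97]}


def leading_eigenvalue() -> float:
    return untyped_library_call()["eigenvalues"][0]
```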
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -10,7 +10,7 @@ repos:
     rev: v0.790
     hooks:
       - id: mypy
-        additional_dependencies: [numpy>=1.15.0, scipy>=1.5.0]
+        additional_dependencies: [numpy>=1.20.0, scipy>=1.6.0]
   - repo: https://github.com/psf/black
     rev: 20.8b1
     hooks:
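The stub dependencies move to the numpy 1.20 / scipy 1.6 era; numpy 1.20 is the first release to bundle its own type annotations, which is also what the new `# type: ignore[...]` comments below respond to. A quick sanity check, assuming such an environment:

```python
# Sanity check for the stub assumption: numpy.typing only exists from
# numpy 1.20 onward, the release that bundles type annotations.
import numpy
import numpy.typing  # raises ModuleNotFoundError on numpy < 1.20

print(numpy.__version__)
```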
8 changes: 4 additions & 4 deletions pygpcca/_gpcca.py
@@ -1040,9 +1040,9 @@ def optimize(
                     f"Clustering {n} data points into {max(m_list)} clusters is always perfectly crisp. "
                     f"Thus m={max(m_list)} won't be included in the search for the optimal cluster number."
                 )
-                opt_idx = np.argmax(crispness_list[:-1])
+                opt_idx = int(np.argmax(crispness_list[:-1]))
             else:
-                opt_idx = np.argmax(crispness_list)
+                opt_idx = int(np.argmax(crispness_list))
         else:
             raise ValueError("Clustering wasn't successful. Try different cluster numbers.")
         self._m_opt = min(m_list) + opt_idx
@@ -1156,7 +1156,7 @@ def macrostate_assignment(self) -> OArray:
         Copyright (c) 2015, 2014 Computational Molecular Biology Group,
         Freie Universitaet Berlin (GER).
         """
-        return None if self.memberships is None else np.argmax(self.memberships, axis=1)
+        return None if self.memberships is None else np.argmax(self.memberships, axis=1)  # type: ignore[return-value]

     @property
     def macrostate_sets(self) -> Optional[List[np.ndarray]]:
@@ -1251,7 +1251,7 @@ def stationary_probability(self) -> OArray:
         """
         try:
             return stationary_distribution(self._P)
-        except ValueError as e:
+        except Exception as e:  # noqa: B902
             warnings.warn(f"Stationary distribution couldn't be calculated. Reason: {e}.")
             return None
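Two separate fixes meet in this file: the `int(np.argmax(...))` casts give mypy a plain `int` where numpy's stubs report a wider type, and the widened `except` is the arpack fix from the commit title, since the eigensolver can raise errors (e.g. ARPACK non-convergence, a `RuntimeError` subclass) that are not `ValueError`s. A standalone sketch of the same degrade-to-`None` pattern; the eigenvector computation is a stand-in, not pygpcca's `stationary_distribution`:

```python
# A minimal sketch of the widened fallback, not pygpcca's implementation:
# try to compute a stationary distribution, otherwise warn and return None.
import warnings
from typing import Optional

import numpy as np


def safe_stationary_distribution(P: np.ndarray) -> Optional[np.ndarray]:
    try:
        # Stand-in computation: left eigenvector of P for eigenvalue 1.
        # The real code calls pygpcca's stationary_distribution, which can
        # fail with ARPACK errors that ValueError alone would not catch.
        eigvals, eigvecs = np.linalg.eig(P.T)
        pi = np.real(eigvecs[:, int(np.argmax(np.real(eigvals)))])
        return pi / pi.sum()
    except Exception as e:  # noqa: B902
        warnings.warn(f"Stationary distribution couldn't be calculated. Reason: {e}.")
        return None
```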
9 changes: 5 additions & 4 deletions pygpcca/_sort_real_schur.py
@@ -103,7 +103,8 @@ def sort_real_schur(
         sk = s[k]
         if s[k + 1] - sk == 2:  # if the block is 2x2
             Q, R = normalize(Q, R, slice(sk, s[k + 1]), inplace=True)  # normalize it
-            p[k] = R[sk, sk] + np.lib.scimath.sqrt(R[sk + 1, sk] * R[sk, sk + 1])  # store the eigenvalues
+            # store the eigenvalues
+            p[k] = R[sk, sk] + np.lib.scimath.sqrt(R[sk + 1, sk] * R[sk, sk + 1])  # type: ignore[attr-defined]
         else:  # (the one with the positive imaginary part is sufficient)
             assert s[k + 1] - sk == 1  # debug
             p[k] = R[s[k], s[k]]  # if the block is 1x1, only store the eigenvalue
@@ -259,7 +260,7 @@ def rot(X: np.ndarray) -> np.ndarray:
     tau = (X[0, 1] + X[1, 0]) / (X[0, 0] - X[1, 1])
     off = (tau ** 2 + 1) ** 0.5
     v = [tau - off, tau + off]
-    w = np.argmin(np.abs(v))
+    w = int(np.argmin(np.abs(v)))
     c = 1.0 / (1.0 + v[w] ** 2) ** 0.5  # ... the cosine and sine as given in Section 2.3.1
     s = v[w] * c

@@ -380,10 +381,10 @@ def select(p: Union[List[str], np.ndarray], z: str) -> Tuple[float, int]:
         Block that is next in the ordering.
     """
     if z == "LM":
-        pos = np.argmax(np.abs(p))
+        pos = int(np.argmax(np.abs(p)))
         return np.abs(p[pos]), pos
     elif z == "LR":
-        pos = np.argmax(np.real(p))
+        pos = int(np.argmax(np.real(p)))
         return np.real(p[pos]), pos
     else:
         raise NotImplementedError(z)
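The new `attr-defined` ignore suggests the bundled stubs did not yet cover `np.lib.scimath`; the call itself stays, because a 2x2 block of a real Schur form can have `R[sk + 1, sk] * R[sk, sk + 1] < 0`, and `scimath.sqrt`, unlike `np.sqrt`, then returns the complex root:

```python
# Why scimath.sqrt and not np.sqrt for the 2x2 blocks: only the former
# switches to the complex domain for negative real input.
import numpy as np

print(np.lib.scimath.sqrt(-4.0))  # 2j
print(np.sqrt(-4.0))              # nan, with a RuntimeWarning
```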
10 changes: 5 additions & 5 deletions pygpcca/_sorted_schur.py
@@ -70,14 +70,14 @@ def _initialize_matrix(M: "petsc4py.PETSc.Mat", P: Union[np.ndarray, spmatrix])
     Returns
     -------
     Nothing, just initializes `M`. If `P` is an :class:`numpy.ndarray`,
-    `M` will also be dense. If `P` is a :class:`scipy.sparse.spmatrx`,
+    `M` will also be dense. If `P` is a :class:`scipy.sparse.spmatrix`,
     `M` will become a CSR matrix regardless of `P`'s sparse format.
     """
     if issparse(P):
         if not isspmatrix_csr(P):
             warnings.warn("Only CSR sparse matrices are supported, converting.")
             P = csr_matrix(P)
-        M.createAIJ(size=P.shape, csr=(P.indptr, P.indices, P.data))
+        M.createAIJ(size=P.shape, csr=(P.indptr, P.indices, P.data))  # type: ignore[union-attr]
     else:
         M.createDense(list(np.shape(P)), array=P)
@@ -283,10 +283,10 @@ def sorted_krylov_schur(
         eigenvalues_error.append(eigenval_error)

     # convert lists with eigenvalues and errors to arrays (while keeping excess eigenvalues and errors)
-    eigenvalues = np.asarray(eigenvalues)
-    eigenvalues_error = np.asarray(eigenvalues_error)
+    eigenvalues = np.asarray(eigenvalues)  # type: ignore[assignment]
+    eigenvalues_error = np.asarray(eigenvalues_error)  # type: ignore[assignment]

-    return R, Q, eigenvalues, eigenvalues_error
+    return R, Q, eigenvalues, eigenvalues_error  # type: ignore[return-value]


 @d.dedent
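For context on the `createAIJ` call: it consumes the three CSR buffers directly, which is why `_initialize_matrix` converts every other sparse format first. A standalone sketch of that hand-off, assuming petsc4py is installed (pygpcca treats it as an optional dependency for the Krylov-Schur backend):

```python
# Hedged sketch of passing a SciPy CSR matrix to PETSc; assumes a working
# petsc4py installation, which is optional here.
import petsc4py

petsc4py.init()
from petsc4py import PETSc

from scipy.sparse import csr_matrix, random as sparse_random

P = csr_matrix(sparse_random(5, 5, density=0.4, random_state=0))
# createAIJ takes the matrix shape plus the raw (indptr, indices, data)
# CSR triplet, mirroring the call in _initialize_matrix above.
M = PETSc.Mat().createAIJ(size=P.shape, csr=(P.indptr, P.indices, P.data))
M.assemble()
print(M.getSize())  # (5, 5)
```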
4 changes: 2 additions & 2 deletions pygpcca/utils/_checks.py
@@ -15,7 +15,7 @@ def assert_array(
     uniform: Optional[bool] = None,
     ndim: Optional[int] = None,
     size: Optional[int] = None,
-    dtype: Optional[Union[type, np.dtype]] = None,
+    dtype: Optional[Union[type, np.dtype]] = None,  # type: ignore[type-arg]
     kind: Optional[str] = None,
 ) -> None:
     """
@@ -99,7 +99,7 @@ def ensure_ndarray_or_sparse(
     uniform: Optional[bool] = None,
     ndim: Optional[int] = None,
     size: Optional[int] = None,
-    dtype: Optional[Union[type, np.dtype]] = None,
+    dtype: Optional[Union[type, np.dtype]] = None,  # type: ignore[type-arg]
     kind: Optional[str] = None,
 ) -> Union[np.ndarray, spmatrix]:
     """
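These ignores follow from the stub bump in `.pre-commit-config.yaml`: under numpy >= 1.20 stubs, `np.dtype` is a generic class, so `disallow_any_generics = True` (set in `.mypy.ini`) rejects the bare annotation unless it is parametrized or explicitly ignored. A small sketch of both ways out, with a hypothetical helper name:

```python
# Two ways to satisfy mypy's disallow_any_generics for np.dtype annotations
# under numpy >= 1.20 stubs; `describe` is a hypothetical helper, not pygpcca API.
from typing import Any, Optional, Type, Union

import numpy as np


def describe(dtype: Optional[Union[type, np.dtype]] = None) -> str:  # type: ignore[type-arg]
    return "unspecified" if dtype is None else np.dtype(dtype).name


def describe_parametrized(dtype: Optional[Union[Type[Any], "np.dtype[Any]"]] = None) -> str:
    return "unspecified" if dtype is None else np.dtype(dtype).name
```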
