Replace pure python loops with numpy where possible in channels.py. #5839
Changes from 1 commit
@@ -50,11 +50,9 @@ def kraus_to_choi(kraus_operators: Sequence[np.ndarray]) -> np.ndarray:
         Choi matrix of the channel specified by kraus_operators.
     """
     d = np.prod(kraus_operators[0].shape, dtype=np.int64)
-    c = np.zeros((d, d), dtype=np.complex128)
-    for k in kraus_operators:
-        v = np.reshape(k, d)
-        c += np.outer(v, v.conj())
-    return c
+    k = np.asarray(kraus_operators)
+    flat_ops = k.reshape((-1, d))
Reviewer comment: As a matter of principle, I think we should prefer errors to be raised as soon as they occur and not propagate far away from the cause, since if that happens the error is harder to diagnose and fix. For this reason, I'd rather we be explicit about the expected array shape and say

    choi_rank = len(kraus_operators)
    k = np.reshape(kraus_operators, (choi_rank, d))
    return np.einsum('bi,bj->ij', k, k.conj())

This makes it clear that ...
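To make the suggestion above self-contained, here is one way it might slot into the function. The imports, docstring, and the definition of `d` are taken from the surrounding diff; the rest is only an illustrative sketch, not the committed code:

```python
from typing import Sequence

import numpy as np


def kraus_to_choi(kraus_operators: Sequence[np.ndarray]) -> np.ndarray:
    """Returns the Choi matrix of the channel specified by kraus_operators."""
    d = np.prod(kraus_operators[0].shape, dtype=np.int64)
    choi_rank = len(kraus_operators)
    # Spelling out the expected shape means a malformed input fails right
    # here at the reshape, rather than surfacing later in the einsum.
    k = np.reshape(kraus_operators, (choi_rank, d))
    return np.einsum('bi,bj->ij', k, k.conj())
```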
+    return np.einsum('bi,bj->ij', flat_ops, flat_ops.conj())


 def choi_to_kraus(choi: np.ndarray, atol: float = 1e-10) -> Sequence[np.ndarray]:
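As a quick sanity check on the kraus_to_choi change above, the snippet below compares the removed loop with the new einsum form on the bit-flip channel. The sample data and variable names are arbitrary; this is only an illustration of why the two computations agree:

```python
import numpy as np

# Bit-flip channel with probability p: K0 = sqrt(1 - p) * I, K1 = sqrt(p) * X.
p = 0.3
kraus = [np.sqrt(1 - p) * np.eye(2), np.sqrt(p) * np.array([[0.0, 1.0], [1.0, 0.0]])]
d = np.prod(kraus[0].shape, dtype=np.int64)

# Removed loop: accumulate outer products of the flattened operators.
choi_loop = np.zeros((d, d), dtype=np.complex128)
for k in kraus:
    v = np.reshape(k, d)
    choi_loop += np.outer(v, v.conj())

# New vectorized form: flatten all operators at once and contract with einsum.
flat_ops = np.reshape(kraus, (-1, d))
choi_einsum = np.einsum('bi,bj->ij', flat_ops, flat_ops.conj())

assert np.allclose(choi_loop, choi_einsum)
```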
@@ -105,7 +103,8 @@ def choi_to_kraus(choi: np.ndarray, atol: float = 1e-10) -> Sequence[np.ndarray]:
     w = np.maximum(w, 0)
     u = np.sqrt(w) * v
-    return [k.reshape(d, d) for k in u.T if np.linalg.norm(k) > atol]
+    keep = np.linalg.norm(u.T, axis=-1) > atol
+    return [k.reshape(d, d) for k, keep_i in zip(u.T, keep) if keep_i]
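For context, here is a minimal hedged sketch of the eigendecomposition-based choi_to_kraus that this hunk touches. The eigh call and the definition of `d` are assumptions about the surrounding (unchanged) code, not part of the patch:

```python
import numpy as np


def choi_to_kraus_sketch(choi: np.ndarray, atol: float = 1e-10):
    """Illustrative only: recover Kraus operators from a Choi matrix."""
    d = int(np.sqrt(choi.shape[0]))  # assumed: the Choi matrix is d*d by d*d
    w, v = np.linalg.eigh(choi)      # assumed source of w (eigenvalues) and v (eigenvectors)
    w = np.maximum(w, 0)
    u = np.sqrt(w) * v
    # Vectorized filter from the PR: compute all column norms in one call,
    # then keep only the columns with non-negligible weight.
    keep = np.linalg.norm(u.T, axis=-1) > atol
    return [k.reshape(d, d) for k, keep_i in zip(u.T, keep) if keep_i]
```

Feeding this the Choi matrix produced by the kraus_to_choi snippet earlier should recover Kraus operators equivalent to the originals up to a unitary mixing.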


 def kraus_to_superoperator(kraus_operators: Sequence[np.ndarray]) -> np.ndarray:
@@ -140,10 +139,9 @@ def kraus_to_superoperator(kraus_operators: Sequence[np.ndarray]) -> np.ndarray:
         Superoperator matrix of the channel specified by kraus_operators.
     """
     d_out, d_in = kraus_operators[0].shape
-    m = np.zeros((d_out * d_out, d_in * d_in), dtype=np.complex128)
-    for k in kraus_operators:
-        m += np.kron(k, k.conj())
-    return m
+    ops_arr = np.asarray(kraus_operators)
+    m = np.einsum('bij,bkl->ikjl', ops_arr, ops_arr.conj())
+    return m.reshape((d_out * d_out, d_in * d_in))
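A quick hedged check (illustrative sample data only) that the einsum contraction above reproduces the sum of Kronecker products from the loop it replaces:

```python
import numpy as np

# Amplitude-damping-style Kraus operators, used purely as sample data.
gamma = 0.2
kraus = [
    np.array([[1.0, 0.0], [0.0, np.sqrt(1 - gamma)]]),
    np.array([[0.0, np.sqrt(gamma)], [0.0, 0.0]]),
]
d_out, d_in = kraus[0].shape

# Removed loop: accumulate np.kron(k, k.conj()) for each Kraus operator k.
m_loop = np.zeros((d_out * d_out, d_in * d_in), dtype=np.complex128)
for k in kraus:
    m_loop += np.kron(k, k.conj())

# New vectorized form from the PR.
ops_arr = np.asarray(kraus)
m_einsum = np.einsum('bij,bkl->ikjl', ops_arr, ops_arr.conj()).reshape(
    (d_out * d_out, d_in * d_in)
)

assert np.allclose(m_loop, m_einsum)
```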


 def superoperator_to_kraus(superoperator: np.ndarray, atol: float = 1e-10) -> Sequence[np.ndarray]:
Reviewer comment: I think you can achieve the effect of `asarray` followed by `reshape` with `reshape` alone.
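In other words, the two-step conversion could collapse into a single call, since np.reshape converts its input to an array internally. A small illustrative check (names and sample data are arbitrary, not from the PR):

```python
import numpy as np

kraus = [np.eye(2), np.array([[0.0, 1.0], [1.0, 0.0]])]
d = np.prod(kraus[0].shape, dtype=np.int64)

# Two-step version from the PR: convert to an array, then reshape.
flat_ops = np.asarray(kraus).reshape((-1, d))

# Single call suggested by the reviewer: np.reshape accepts the list directly.
flat_ops_direct = np.reshape(kraus, (-1, d))

assert np.array_equal(flat_ops, flat_ops_direct)
```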