style: str.format to f-string
nvictus committed Sep 11, 2023
1 parent c3cb389 commit 1c12c08
Showing 6 changed files with 17 additions and 21 deletions.
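The change is purely stylistic: each call to str.format is rewritten as an equivalent f-string. A minimal sketch of the pattern, using made-up variable names rather than anything taken from the changed files:

    chrom = "chr1"
    n_lines = 1000
    # before: positional .format() call
    msg = "Pairs file for {} has {} lines.".format(chrom, n_lines)
    # after: the f-string interpolates the same values inline
    msg_f = f"Pairs file for {chrom} has {n_lines} lines."
    assert msg == msg_f == "Pairs file for chr1 has 1000 lines."

Both forms produce identical strings; the f-string is simply more direct, with the values embedded at the point of use.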
4 changes: 1 addition & 3 deletions src/cooler/balance.py
@@ -173,9 +173,7 @@ def _balance_cisonly(

else:
warnings.warn(
- "Iteration limit reached without convergence on {}.".format(
-     chroms[cid]
- ),
+ f"Iteration limit reached without convergence on {chroms[cid]}.",
ConvergenceWarning,
)

2 changes: 1 addition & 1 deletion src/cooler/cli/_util.py
@@ -128,7 +128,7 @@ def parse_bins(arg):
)
except pd.parser.CParserError as e:
raise ValueError(
- f'Failed to parse bins file "{arg}": {str(e)}'
+ f'Failed to parse bins file "{arg}": {e!s}'
) from e

chromtable = (
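One detail in the hunk above: the old line was already an f-string, so the substitution is {str(e)} to {e!s}. The !s conversion flag applies str() to the value before formatting (!r and !a likewise map to repr() and ascii()), so the two spellings are equivalent. A small sketch with a hypothetical exception object:

    err = ValueError("bad bins file")
    # {err!s} is the same as {str(err)}: both render the exception message
    assert f"failed: {err!s}" == f"failed: {str(err)}" == "failed: bad bins file"
    # {err!r} applies repr() instead
    assert f"{err!r}" == "ValueError('bad bins file')"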
6 changes: 3 additions & 3 deletions src/cooler/create/_ingest.py
@@ -800,7 +800,7 @@ def __init__(
if f.exists2(c1, c2) and f.exists2(c2, c1):
raise RuntimeError(
"Pairs are not triangular: found blocks "
-     + "'{0}|{1}'' and '{1}|{0}'".format(c1, c2)
+     + f"'{c1}|{c2}'' and '{c2}|{c1}'"
)

# dumb heuristic to prevent excessively large chunks on one worker
@@ -816,8 +816,8 @@ def __init__(
self.n_chunks = max(self.n_chunks, n_chunks)
if self.n_chunks > old_n:
logger.info(
- "Pairs file has {} lines. "
- "Increasing max-split to {}.".format(n_lines, self.n_chunks)
+ f"Pairs file has {n_lines} lines. "
+ f"Increasing max-split to {self.n_chunks}."
)

# all requested contigs will be placed in the output matrix
4 changes: 2 additions & 2 deletions src/cooler/fileops.py
@@ -353,10 +353,10 @@ def tree_widget(group, expand, level):
import ipytree
except ImportError as error:
raise ImportError(
- "{}: Run `pip install ipytree` or `conda install ipytree`"
+ f"{error}: Run `pip install ipytree` or `conda install ipytree`"
"to get the required ipytree dependency for displaying the tree "
"widget. If using jupyterlab, you also need to run "
- "`jupyter labextension install ipytree`".format(error)
+ "`jupyter labextension install ipytree`"
) from None

result = ipytree.Tree()
14 changes: 7 additions & 7 deletions src/cooler/reduce.py
@@ -234,8 +234,8 @@ def merge_coolers(
for col in columns:
if col not in pixel_dtypes:
raise ValueError(
- "Pixel value column '{}' not found in "
- "input '{}'.".format(col, clr.filename)
+ f"Pixel value column '{col}' not found in "
+ f"input '{clr.filename}'."
)
else:
dtype_map[col].append(pixel_dtypes[col])
@@ -473,8 +473,8 @@ def get_multiplier_sequence(resolutions, bases=None):
for i, p in enumerate(pred):
if p == -1 and resn[i] not in bases:
raise ValueError(
- "Resolution {} cannot be derived from "
- "the base resolutions: {}.".format(resn[i], bases)
+ f"Resolution {resn[i]} cannot be derived from "
+ f"the base resolutions: {bases}."
)

return resn, pred, mult
@@ -683,8 +683,8 @@ def coarsen_cooler(
for col in columns:
if col not in input_dtypes:
raise ValueError(
- "Pixel value column '{}' not found in "
- "input '{}'.".format(col, clr.filename)
+ f"Pixel value column '{col}' not found in "
+ f"input '{clr.filename}'."
)
else:
dtypes.setdefault(col, input_dtypes[col])
@@ -867,7 +867,7 @@ def legacy_zoomify(input_uri, outfile, nproc, chunksize, lock=None):
logger.info(f"quad tile cover: {2 ** n_zooms}")
logger.info(
"Copying base matrix to level "
-     + "{0} and producing {0} new zoom levels ".format(n_zooms)
+     + f"{n_zooms} and producing {n_zooms} new zoom levels "
+ "counting down to 0..."
)

8 changes: 3 additions & 5 deletions src/cooler/util.py
@@ -256,9 +256,7 @@ def fetch_chromsizes(db, **kwargs):
"""
return read_chromsizes(
- "http://hgdownload.cse.ucsc.edu/goldenPath/{}/database/chromInfo.txt.gz".format(
-     db
- ),
+ f"http://hgdownload.cse.ucsc.edu/goldenPath/{db}/database/chromInfo.txt.gz",
**kwargs
)

@@ -634,7 +632,7 @@ def _nonempty_scalar(x):
return _scalar_from_dtype(dtype)
else:
raise TypeError(
- "Can't handle meta of type " "'{}'".format(type(x).__name__)
+ "Can't handle meta of type " f"'{type(x).__name__}'"
)

def _empty_series(name, dtype, index=None):
@@ -661,7 +659,7 @@ def _empty_series(name, dtype, index=None):
elif isinstance(x, (list, tuple)):
if not all(isinstance(i, tuple) and len(i) == 2 for i in x):
raise ValueError(
- "Expected iterable of tuples of (name, dtype), " "got {}".format(x)
+ "Expected iterable of tuples of (name, dtype), " f"got {x}"
)
return pd.DataFrame(
{c: _empty_series(c, d, index=index) for (c, d) in x},
