Enable flake8-implicit-str-concat ruff rules
nsoranzo committed Apr 29, 2024
1 parent 7463439 commit 534010a
Showing 34 changed files with 78 additions and 78 deletions.
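The changes below replace string literals that had been split by accidental implicit concatenation (adjacent literals) or by an explicit `+` between literals with single literals, and turn on ruff's flake8-implicit-str-concat (ISC) rules so new occurrences are flagged. Roughly speaking, ISC001 flags adjacent literals on one line (usually a missed comma or a missed space) and ISC003 flags a `+` between two literals that could simply sit next to each other. A minimal, illustrative sketch of the two patterns (not taken from the diff itself):

```python
# Illustrative only -- not code from this commit.

# ISC001: adjacent literals on one line, usually a missed comma or space.
columns = ["DESC", "SRAS" "PRAS"]  # silently becomes ["DESC", "SRASPRAS"]

# ISC003: an explicit `+` between two literals adds nothing.
greeting_explicit = "Hello, " + "world"  # flagged by ISC003
greeting_implicit = (
    "Hello, "
    "world"  # adjacent literals are joined by the parser into one constant
)
```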
2 changes: 1 addition & 1 deletion client/src/api/schema/schema.ts
@@ -19398,7 +19398,7 @@ export interface operations {
* @description Allows remote job running mechanisms to get a fresh OIDC token that can be used on remote side to authorize user. It is not meant to represent part of Galaxy's stable, user facing API
*/
parameters: {
/** @description A key used to authenticate this request as acting onbehalf or a job runner for the specified job */
/** @description A key used to authenticate this request as acting on behalf or a job runner for the specified job */
/** @description OIDC provider name */
query: {
job_key: string;
5 changes: 3 additions & 2 deletions lib/galaxy/authnz/managers.py
@@ -95,8 +95,9 @@ def _parse_oidc_config(self, config_file):
func = getattr(builtins, child.get("Type"))
except AttributeError:
log.error(
"The value of attribute `Type`, `{}`, is not a valid built-in type;" " skipping this node"
).format(child.get("Type"))
"The value of attribute `Type`, `%s`, is not a valid built-in type; skipping this node",
child.get("Type"),
)
continue
self.oidc_config[child.get("Property")] = func(child.get("Value"))
except ImportError:
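Besides joining the literals, this hunk fixes a latent bug: the old code chained `.format(...)` onto the return value of `log.error(...)`, which is `None`, so reaching this branch would have raised an `AttributeError`. The replacement passes the value as a lazy `%`-style logging argument, matching the flake8-logging-format (G) rules already enabled for this project. A minimal sketch of the pattern with the standard `logging` module (the function name and values are made up for illustration):

```python
import builtins
import logging

logging.basicConfig(level=logging.ERROR)
log = logging.getLogger(__name__)


def coerce_value(type_name: str, raw_value: str):
    try:
        func = getattr(builtins, type_name)
    except AttributeError:
        # Lazy %-style arguments: the message is only interpolated if the
        # record is actually emitted, and nothing is chained onto the call.
        log.error("`%s` is not a valid built-in type; skipping this node", type_name)
        return None
    return func(raw_value)


print(coerce_value("int", "42"))         # 42
print(coerce_value("nosuchtype", "42"))  # logs the error, returns None
```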
2 changes: 1 addition & 1 deletion lib/galaxy/celery/base_task.py
@@ -87,7 +87,7 @@ def calculate_task_start_time( # type: ignore
update_stmt = (
update(CeleryUserRateLimit)
.where(CeleryUserRateLimit.user_id == user_id)
.values(last_scheduled_time=text("greatest(last_scheduled_time + ':interval second', " ":now) "))
.values(last_scheduled_time=text("greatest(last_scheduled_time + ':interval second', :now)"))
.returning(CeleryUserRateLimit.last_scheduled_time)
)
result = sa_session.execute(update_stmt, {"interval": task_interval_secs, "now": now}).all()
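For context, the merged literal is the SQL fragment handed to SQLAlchemy's `text()`, and the `execute()` call supplies `interval` and `now` as bound parameters. A self-contained sketch of that mechanism, using an in-memory SQLite database rather than Galaxy's actual statement:

```python
from sqlalchemy import create_engine, text

engine = create_engine("sqlite://")  # in-memory database, purely illustrative

with engine.connect() as conn:
    # Named bound parameters (:low, :high) are passed separately from the SQL
    # text, so the statement can stay a single string literal.
    result = conn.execute(text("SELECT :low + :high AS total"), {"low": 2, "high": 3})
    print(result.scalar_one())  # 5
```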
18 changes: 9 additions & 9 deletions lib/galaxy/datatypes/molecules.py
@@ -877,15 +877,15 @@ def get_matcher(self) -> re.Pattern:
"""
pat = (
r"(ATOM|HETATM)\s+"
+ r"(\d+)\s+"
+ r"([A-Z0-9]+)\s+"
+ r"([A-Z0-9]+)\s+"
+ r"(([A-Z]?)\s+)?"
+ r"([-+]?\d*\.\d+|\d+)\s+"
+ r"([-+]?\d*\.\d+|\d+)\s+"
+ r"([-+]?\d*\.\d+|\d+)\s+"
+ r"([-+]?\d*\.\d+|\d+)\s+"
+ r"([-+]?\d*\.\d+|\d+)\s+"
r"(\d+)\s+"
r"([A-Z0-9]+)\s+"
r"([A-Z0-9]+)\s+"
r"(([A-Z]?)\s+)?"
r"([-+]?\d*\.\d+|\d+)\s+"
r"([-+]?\d*\.\d+|\d+)\s+"
r"([-+]?\d*\.\d+|\d+)\s+"
r"([-+]?\d*\.\d+|\d+)\s+"
r"([-+]?\d*\.\d+|\d+)\s+"
)
return re.compile(pat)
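The regex change is a pure ISC003 cleanup: adjacent raw-string fragments inside the parentheses are joined by the parser, so the `+` operators added nothing. Note that `+` is still required (and not flagged) when one operand is a variable rather than a literal. A small illustrative sketch under those assumptions:

```python
import re

# Adjacent raw-string literals form a single pattern at compile time.
COORD = (
    r"([-+]?\d*\.\d+|\d+)\s+"
    r"([-+]?\d*\.\d+|\d+)\s+"
    r"([-+]?\d*\.\d+|\d+)\s*"
)

# `+` between a literal and a *name* is fine; ISC003 only targets literal + literal.
matcher = re.compile(r"(ATOM|HETATM)\s+(\d+)\s+" + COORD)
print(bool(matcher.match("ATOM 12 1.0 -2.5 0.33")))  # True
```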

2 changes: 1 addition & 1 deletion lib/galaxy/datatypes/qiime2.py
@@ -275,7 +275,7 @@ def _get_versions(path, uuid):
framework_version = framework_version_line.split(":")[1].strip()
return version, framework_version
except Exception:
raise ValueError("Archive does not contain a correctly formatted" " VERSION file.")
raise ValueError("Archive does not contain a correctly formatted VERSION file.")


def _open_file_in_archive(zip_path, path, uuid):
3 changes: 2 additions & 1 deletion lib/galaxy/datatypes/tabular.py
@@ -1223,7 +1223,8 @@ def __init__(self, **kwd):
"DESC",
"SRAS",
"PRAS",
"PART_CHROM" "PART_CONTIG",
"PART_CHROM",
"PART_CONTIG",
"PART_OFFSET",
"PART_STRAND",
"FILT",
4 changes: 2 additions & 2 deletions lib/galaxy/jobs/__init__.py
@@ -1331,7 +1331,7 @@ def clear_working_directory(self):
job = self.get_job()
if not os.path.exists(self.working_directory):
log.warning(
"(%s): Working directory clear requested but %s does " "not exist", self.job_id, self.working_directory
"(%s): Working directory clear requested but %s does not exist", self.job_id, self.working_directory
)
return

@@ -1535,7 +1535,7 @@ def change_state(self, state, info=False, flush=True, job=None):

if job.state in model.Job.terminal_states:
log.warning(
"(%s) Ignoring state change from '%s' to '%s' for job " "that is already terminal",
"(%s) Ignoring state change from '%s' to '%s' for job that is already terminal",
job.id,
job.state,
state,
12 changes: 7 additions & 5 deletions lib/galaxy/jobs/dynamic_tool_destination.py
@@ -1773,11 +1773,13 @@ def get_typo_correction(typo_str, word_set, max_dist):
"--check-config",
dest="check_config",
nargs="?",
help="Use this option to validate tool_destinations.yml."
+ " Optionally, provide the path to the tool_destinations.yml"
+ " that you would like to check, and/or the path to the related"
+ " job_conf.xml. Default: galaxy/config/tool_destinations.yml"
+ "and galaxy/config/job_conf.xml",
help=(
"Use this option to validate tool_destinations.yml."
" Optionally, provide the path to the tool_destinations.yml"
" that you would like to check, and/or the path to the related"
" job_conf.xml. Default: galaxy/config/tool_destinations.yml"
"and galaxy/config/job_conf.xml"
),
)

parser.add_argument("-j", "--job-config", dest="job_config")
2 changes: 1 addition & 1 deletion lib/galaxy/jobs/runners/aws.py
@@ -399,7 +399,7 @@ def stop_job(self, job_wrapper):
log.debug(msg.format(name=job_name))

def recover(self, job, job_wrapper):
msg = "(name!r/runner!r) is still in {state!s} state, adding to" " the runner monitor queue"
msg = "(name!r/runner!r) is still in {state!s} state, adding to the runner monitor queue"
job_id = job.get_job_runner_external_id()
job_name = self.JOB_NAME_PREFIX + job_wrapper.get_id_tag()
ajs = AsynchronousJobState(files_dir=job_wrapper.working_directory, job_wrapper=job_wrapper)
2 changes: 1 addition & 1 deletion lib/galaxy/jobs/runners/chronos.py
@@ -178,7 +178,7 @@ def stop_job(self, job_wrapper):
LOGGER.error(msg.format(name=job_name))

def recover(self, job, job_wrapper):
msg = "(name!r/runner!r) is still in {state!s} state, adding to" " the runner monitor queue"
msg = "(name!r/runner!r) is still in {state!s} state, adding to the runner monitor queue"
job_id = job.get_job_runner_external_id()
ajs = AsynchronousJobState(
files_dir=job_wrapper.working_directory,
2 changes: 1 addition & 1 deletion lib/galaxy/model/__init__.py
@@ -3252,7 +3252,7 @@ def add_dataset(self, dataset, parent_id=None, genome_build=None, set_hid=True,
elif not isinstance(dataset, (HistoryDatasetAssociation, HistoryDatasetCollectionAssociation)):
raise TypeError(
"You can only add Dataset and HistoryDatasetAssociation instances to a history"
+ f" ( you tried to add {str(dataset)} )."
f" ( you tried to add {str(dataset)} )."
)
is_dataset = is_hda(dataset)
if parent_id:
6 changes: 3 additions & 3 deletions lib/galaxy/objectstore/pithos.py
@@ -83,7 +83,7 @@ def parse_config_xml(config_xml):
log.error(msg)
raise Exception(msg)
except Exception:
log.exception("Malformed PithosObjectStore Configuration XML -- " "unable to continue")
log.exception("Malformed PithosObjectStore Configuration XML, unable to continue")
raise
return r

@@ -325,7 +325,7 @@ def _size(self, obj, **kwargs) -> int:
try:
return os.path.getsize(self._get_cache_path(path))
except OSError as ex:
log.warning(f"Could not get size of file {path} in local cache," f"will try Pithos. Error: {ex}")
log.warning("Could not get size of file %s in local cache, will try Pithos. Error: %s", path, ex)
try:
file = self.pithos.get_object_info(path)
except ClientError as ce:
@@ -408,7 +408,7 @@ def _update_from_file(self, obj, **kwargs):
if kwargs.get("create"):
self._create(obj, **kwargs)
if not self._exists(obj, **kwargs):
raise ObjectNotFound(f"objectstore.update_from_file, object does not exist: {obj}, " f"kwargs: {kwargs}")
raise ObjectNotFound(f"objectstore.update_from_file, object does not exist: {obj}, kwargs: {kwargs}")

path = self._construct_path(obj, **kwargs)
cache_path = self._get_cache_path(path)
2 changes: 1 addition & 1 deletion lib/galaxy/objectstore/rucio.py
@@ -594,7 +594,7 @@ def _register_file(self, rel_path, file_name):
file_name = self._get_cache_path(rel_path)
if not os.path.islink(file_name):
raise ObjectInvalid(
"rucio objectstore._register_file, rucio_register_only " "is set, but file in cache is not a link "
"rucio objectstore._register_file, rucio_register_only is set, but file in cache is not a link "
)
if os.path.islink(file_name):
file_name = os.readlink(file_name)
6 changes: 3 additions & 3 deletions lib/galaxy/schema/schema.py
@@ -1563,14 +1563,14 @@ class CreateHistoryPayload(Model):
default=None,
title="History ID",
description=(
"The encoded ID of the history to copy. " "Provide this value only if you want to copy an existing history."
"The encoded ID of the history to copy. Provide this value only if you want to copy an existing history."
),
)
all_datasets: Optional[bool] = Field(
default=True,
title="All Datasets",
description=(
"Whether to copy also deleted HDAs/HDCAs. Only applies when " "providing a `history_id` to copy from."
"Whether to copy also deleted HDAs/HDCAs. Only applies when providing a `history_id` to copy from."
),
)
archive_source: Optional[str] = Field(
@@ -3411,7 +3411,7 @@ class ShareWithPayload(Model):
...,
title="User Identifiers",
description=(
"A collection of encoded IDs (or email addresses) of users " "that this resource will be shared with."
"A collection of encoded IDs (or email addresses) of users that this resource will be shared with."
),
)
share_option: Optional[SharingOptions] = Field(
4 changes: 2 additions & 2 deletions lib/galaxy/tool_util/verify/test_data.py
@@ -11,11 +11,11 @@
)

UPDATE_TEMPLATE = Template(
"git --work-tree $dir --git-dir $dir/.git fetch && " "git --work-tree $dir --git-dir $dir/.git merge origin/master"
"git --work-tree $dir --git-dir $dir/.git fetch && git --work-tree $dir --git-dir $dir/.git merge origin/master"
)

UPDATE_FAILED_TEMPLATE = Template(
"Warning failed to update test repository $dir - " "update stdout was [$stdout] and stderr was [$stderr]."
"Warning failed to update test repository $dir - update stdout was [$stdout] and stderr was [$stderr]."
)
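`UPDATE_TEMPLATE` and `UPDATE_FAILED_TEMPLATE` appear to be `string.Template` objects (the `Template(...)` constructor and `$name` placeholders match that API), so the `$dir`, `$stdout` and `$stderr` slots are filled in later by substitution. A minimal usage sketch with made-up values:

```python
from string import Template

UPDATE_FAILED_TEMPLATE = Template(
    "Warning failed to update test repository $dir - update stdout was [$stdout] and stderr was [$stderr]."
)

print(
    UPDATE_FAILED_TEMPLATE.safe_substitute(
        dir="/tmp/test-data",
        stdout="",
        stderr="fatal: not a git repository",
    )
)
```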


2 changes: 1 addition & 1 deletion lib/galaxy/tools/data_fetch.py
@@ -305,7 +305,7 @@ def _resolve_item_with_primary(item):
if datatype.dataset_content_needs_grooming(path):
err_msg = (
"The uploaded files need grooming, so change your <b>Copy data into Galaxy?</b> selection to be "
+ "<b>Copy files into Galaxy</b> instead of <b>Link to files without copying into Galaxy</b> so grooming can be performed."
"<b>Copy files into Galaxy</b> instead of <b>Link to files without copying into Galaxy</b> so grooming can be performed."
)
raise UploadProblemException(err_msg)

2 changes: 1 addition & 1 deletion lib/galaxy/tools/search/__init__.py
@@ -233,7 +233,7 @@ def build_index(self, tool_cache, toolbox, index_help: bool = True) -> None:
# Add tool document to index (or overwrite if existing)
writer.update_document(**add_doc_kwds)

log.debug(f"Toolbox index of panel {self.panel_view_id}" f" finished {execution_timer}")
log.debug("Toolbox index of panel %s finished %s", self.panel_view_id, execution_timer)

def _get_tools_to_remove(self, tool_cache) -> list:
"""Return list of tool IDs to be removed from index."""
2 changes: 1 addition & 1 deletion lib/galaxy/util/custom_logging/fluent_log.py
@@ -12,7 +12,7 @@
FluentSender = None


FLUENT_IMPORT_MESSAGE = "The Python fluent package is required to use this " "feature, please install it"
FLUENT_IMPORT_MESSAGE = "The Python fluent package is required to use this feature, please install it"


class FluentTraceLogger:
4 changes: 2 additions & 2 deletions lib/galaxy/webapps/galaxy/api/cloudauthz.py
@@ -122,9 +122,9 @@ def create(self, trans, payload, **kwargs):
description = payload.get("description", "")

if not isinstance(config, dict):
log.debug(msg_template.format(f"invalid config type `{type(config)}`, expect `dict`"))
log.debug(msg_template.format(f"invalid config type `{type(config)}`, expected `dict`"))
raise RequestParameterInvalidException(
"Invalid type for the required `config` variable; expect `dict` " f"but received `{type(config)}`."
f"Invalid type for the required `config` variable; expected `dict` but received `{type(config)}`."
)
if authn_id:
try:
2 changes: 1 addition & 1 deletion lib/galaxy/webapps/galaxy/api/job_tokens.py
@@ -39,7 +39,7 @@ def get_token(
job_id: EncodedDatabaseIdField,
job_key: str = Query(
description=(
"A key used to authenticate this request as acting on" "behalf or a job runner for the specified job"
"A key used to authenticate this request as acting on behalf or a job runner for the specified job"
),
),
provider: str = Query(
4 changes: 2 additions & 2 deletions lib/galaxy/webapps/galaxy/api/remote_files.py
@@ -37,7 +37,7 @@
TargetQueryParam: str = Query(
default=RemoteFilesTarget.ftpdir,
title="Target source",
description=("The source to load datasets from." " Possible values: ftpdir, userdir, importdir"),
description=("The source to load datasets from. Possible values: ftpdir, userdir, importdir"),
)

FormatQueryParam: Optional[RemoteFilesFormat] = Query(
@@ -54,7 +54,7 @@
default=None,
title="Recursive",
description=(
"Whether to recursively lists all sub-directories." " This will be `True` by default depending on the `target`."
"Whether to recursively lists all sub-directories. This will be `True` by default depending on the `target`."
),
)

6 changes: 2 additions & 4 deletions lib/galaxy/webapps/galaxy/controllers/visualization.py
@@ -19,7 +19,6 @@
UsesItemRatings,
)
from galaxy.structured_app import StructuredApp
from galaxy.util import unicodify
from galaxy.util.sanitize_html import sanitize_html
from galaxy.visualization.genomes import GenomeRegion
from galaxy.webapps.base.controller import (
@@ -263,9 +262,8 @@ def _handle_plugin_error(self, trans, visualization_name, exception):
raise exception
return trans.show_error_message(
"There was an error rendering the visualization. "
+ "Contact your Galaxy administrator if the problem persists."
+ "<br/>Details: "
+ unicodify(exception),
"Contact your Galaxy administrator if the problem persists."
f"<br/>Details: {exception}",
use_panels=False,
)
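Adjacent literals can also mix plain strings and f-strings, as the rewritten error message does; the pieces are still evaluated as a single string expression, which is what lets the `+` chain and the `unicodify()` helper drop out of this call. A small sketch:

```python
def render_error(exception: Exception) -> str:
    # Two plain literals and an f-string sit next to each other; Python
    # evaluates them as one string value.
    return (
        "There was an error rendering the visualization. "
        "Contact your Galaxy administrator if the problem persists."
        f"<br/>Details: {exception}"
    )


print(render_error(ValueError("unknown visualization plugin")))
```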

2 changes: 1 addition & 1 deletion lib/galaxy/webapps/galaxy/services/library_folders.py
@@ -218,7 +218,7 @@ def set_permissions(
trans.app.security_agent.set_all_library_permissions(trans, folder, permissions)
else:
raise RequestParameterInvalidException(
'The mandatory parameter "action" has an invalid value.' 'Allowed values are: "set_permissions"'
'The mandatory parameter "action" has an invalid value. Allowed values are: "set_permissions"'
)
current_permissions = self.folder_manager.get_current_roles(trans, folder)
return LibraryFolderCurrentPermissions(**current_permissions)
5 changes: 3 additions & 2 deletions pyproject.toml
@@ -191,8 +191,9 @@ exclude = [

[tool.ruff.lint]
# Enable: pycodestyle errors (E), Pyflakes (F), flake8-bugbear (B),
# flake8-comprehensions (C4), flake8-logging-format (G) and pyupgrade (UP)
select = ["E", "F", "B", "C4", "G", "UP"]
# flake8-comprehensions (C4), flake8-logging-format (G),
# flake8-implicit-str-concat (ISC) and pyupgrade (UP)
select = ["E", "F", "B", "C4", "G", "ISC", "UP"]
# Exceptions:
# B008 Do not perform function calls in argument defaults (for FastAPI Depends and Body)
# B9 flake8-bugbear opinionated warnings
4 changes: 2 additions & 2 deletions scripts/api/upload_to_history.py
@@ -12,7 +12,7 @@
except ImportError:
print(
"Could not import the requests module. See http://docs.python-requests.org/en/latest/"
+ " or install with 'pip install requests'"
" or install with 'pip install requests'"
)
raise

@@ -46,7 +46,7 @@ def upload_file(base_url, api_key, history_id, filepath, **kwargs):
if len(sys.argv) < 5:
print(
"history_upload.py <api key> <galaxy base url> <history id> <filepath to upload>\n"
+ " (where galaxy base url is just the root url where your Galaxy is served; e.g. 'localhost:8080')"
" (where galaxy base url is just the root url where your Galaxy is served; e.g. 'localhost:8080')"
)
sys.exit(1)

16 changes: 7 additions & 9 deletions scripts/cleanup_datasets/admin_cleanup_datasets.py
@@ -93,7 +93,7 @@ def main():
help="config file (legacy, use --config instead)",
)
parser.add_argument("-d", "--days", dest="days", action="store", type=int, help="number of days (60)", default=60)
parser.add_argument("--tool_id", default=None, help="Text to match against tool_id" "Default: match all")
parser.add_argument("--tool_id", default=None, help="Text to match against tool_id. Default: match all")
parser.add_argument(
"--template",
default=None,
@@ -121,10 +121,10 @@ def main():
default=False,
)
parser.add_argument(
"--smtp", default=None, help="SMTP Server to use to send email. " "Default: [read from galaxy config file]"
"--smtp", default=None, help="SMTP Server to use to send email. Default: [read from galaxy config file]"
)
parser.add_argument(
"--fromaddr", default=None, help="From address to use to send email. " "Default: [read from galaxy config file]"
"--fromaddr", default=None, help="From address to use to send email. Default: [read from galaxy config file]"
)
populate_config_args(parser)

@@ -138,14 +138,12 @@ def main():
if args.smtp is not None:
app_properties["smtp_server"] = args.smtp
if app_properties.get("smtp_server") is None:
parser.error("SMTP Server must be specified as an option (--smtp) " "or in the config file (smtp_server)")
parser.error("SMTP Server must be specified as an option (--smtp) or in the config file (smtp_server)")

if args.fromaddr is not None:
app_properties["email_from"] = args.fromaddr
if app_properties.get("email_from") is None:
parser.error(
"From address must be specified as an option " "(--fromaddr) or in the config file " "(email_from)"
)
parser.error("From address must be specified as an option (--fromaddr) or in the config file (email_from)")

scriptdir = os.path.dirname(os.path.abspath(__file__))
template_file = args.template
@@ -262,15 +260,15 @@ def administrative_delete_datasets(
# Mark the HistoryDatasetAssociation as deleted
hda.deleted = True
app.sa_session.add(hda)
print("Marked HistoryDatasetAssociation id %d as " "deleted" % hda.id)
print("Marked HistoryDatasetAssociation id %d as deleted" % hda.id)
session = app.sa_session()
with transaction(session):
session.commit()

emailtemplate = Template(filename=template_file)
for email, dataset_list in user_notifications.items():
msgtext = emailtemplate.render(email=email, datasets=dataset_list, cutoff=cutoff_days)
subject = "Galaxy Server Cleanup " "- %d datasets DELETED" % len(dataset_list)
subject = "Galaxy Server Cleanup - %d datasets DELETED" % len(dataset_list)
fromaddr = config.email_from
print()
print(f"From: {fromaddr}")
