Fix UP031 errors - Part 3
Also:
- Small code refactorings.
- Add type annotations.
nsoranzo committed Nov 27, 2024
1 parent f12646d commit 01e80fc
Showing 25 changed files with 70 additions and 111 deletions.
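UP031 is Ruff's printf-string-formatting rule, inherited from pyupgrade: it flags old-style "%" interpolation and recommends modern formatting, usually f-strings. The sketch below is illustrative (the names are made up, not taken from Galaxy), but it shows the pattern applied throughout this diff, including the logging exception visible in lib/galaxy/web/proxy/__init__.py:

import logging

log = logging.getLogger(__name__)

name, count = "dataset", 3

# Flagged by UP031: printf-style % interpolation.
message = "Removed %d item(s) from '%s'." % (count, name)

# The fix applied throughout this commit: an f-string.
message = f"Removed {count} item(s) from '{name}'."

# Logging calls are the deliberate exception: passing lazy %-style arguments
# means the string is only formatted if the record is actually emitted.
log.info("Obtained unused port %d", 8080)

Violations can be listed with "ruff check --select UP031"; simple cases are often rewritable automatically with --fix, while multi-line cases like several below typically need manual attention.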
12 changes: 4 additions & 8 deletions lib/galaxy/util/__init__.py
@@ -525,9 +525,7 @@ def shrink_stream_by_size(
                 rval = value.read(size)
                 value.seek(start)
                 return rval
-            raise ValueError(
-                "With the provided join_by value (%s), the minimum size value is %i." % (join_by, min_size)
-            )
+            raise ValueError(f"With the provided join_by value ({join_by}), the minimum size value is {min_size}.")
         left_index = right_index = int((size - len_join_by) / 2)
         if left_index + right_index + len_join_by < size:
             if left_larger:
@@ -566,9 +564,7 @@ def shrink_string_by_size(
                 return value[:size]
             elif end_on_size_error:
                 return value[-size:]
-            raise ValueError(
-                "With the provided join_by value (%s), the minimum size value is %i." % (join_by, min_size)
-            )
+            raise ValueError(f"With the provided join_by value ({join_by}), the minimum size value is {min_size}.")
         left_index = right_index = int((size - len_join_by) / 2)
         if left_index + right_index + len_join_by < size:
             if left_larger:
@@ -1540,7 +1536,7 @@ def nice_size(size: Union[float, int, str, Decimal]) -> str:
         return "??? bytes"
     size, prefix = metric_prefix(size, 1024)
     if prefix == "":
-        return "%d bytes" % size
+        return f"{int(size)} bytes"
     else:
         return f"{size:.1f} {prefix}B"

@@ -1841,7 +1837,7 @@ def build_url(base_url, port=80, scheme="http", pathspec=None, params=None, doseq
         parsed_url.scheme = scheme
     assert parsed_url.scheme in ("http", "https", "ftp"), f"Invalid URL scheme: {parsed_url.scheme}"
     if port != 80:
-        url = "%s://%s:%d/%s" % (parsed_url.scheme, parsed_url.netloc.rstrip("/"), int(port), parsed_url.path)
+        url = "{}://{}:{}/{}".format(parsed_url.scheme, parsed_url.netloc.rstrip("/"), int(port), parsed_url.path)
     else:
         url = f"{parsed_url.scheme}://{parsed_url.netloc.rstrip('/')}/{parsed_url.path.lstrip('/')}"
     if len(pathspec) > 0:
22 changes: 7 additions & 15 deletions lib/galaxy/util/heartbeat.py
@@ -3,6 +3,7 @@
 import threading
 import time
 import traceback
+from typing import Dict


 def get_current_thread_object_dict():
@@ -42,7 +43,7 @@ def __init__(self, config, name="Heartbeat Thread", period=20, fname="heartbeat.log"):
         self.fname_nonsleeping = None
         self.file_nonsleeping = None
         self.pid = None
-        self.nonsleeping_heartbeats = {}
+        self.nonsleeping_heartbeats: Dict[int, int] = {}
         # Event to wait on when sleeping, allows us to interrupt for shutdown
         self.wait_event = threading.Event()
@@ -63,16 +64,16 @@ def open_logs(self):
         if self.file is None or self.file.closed:
             self.file = open(self.fname, "a")
             self.file_nonsleeping = open(self.fname_nonsleeping, "a")
-            self.file.write("Heartbeat for pid %d thread started at %s\n\n" % (self.pid, time.asctime()))
+            self.file.write(f"Heartbeat for pid {self.pid} thread started at {time.asctime()}\n\n")
             self.file_nonsleeping.write(
-                "Non-Sleeping-threads for pid %d thread started at %s\n\n" % (self.pid, time.asctime())
+                f"Non-Sleeping-threads for pid {self.pid} thread started at {time.asctime()}\n\n"
             )

     def close_logs(self):
         if self.file is not None and not self.file.closed:
-            self.file.write("Heartbeat for pid %d thread stopped at %s\n\n" % (self.pid, time.asctime()))
+            self.file.write(f"Heartbeat for pid {self.pid} thread stopped at {time.asctime()}\n\n")
             self.file_nonsleeping.write(
-                "Non-Sleeping-threads for pid %d thread stopped at %s\n\n" % (self.pid, time.asctime())
+                f"Non-Sleeping-threads for pid {self.pid} thread stopped at {time.asctime()}\n\n"
             )
             self.file.close()
             self.file_nonsleeping.close()
@@ -183,16 +184,7 @@ def print_nonsleeping(self, threads_object_dict):

             good_frame = self.get_interesting_stack_frame(tb)
             self.file_nonsleeping.write(
-                'Thread %s\t%s\tnon-sleeping for %d heartbeat(s)\n File %s:%d\n Function "%s"\n %s\n'
-                % (
-                    thread_id,
-                    object,
-                    self.nonsleeping_heartbeats[thread_id],
-                    good_frame[0],
-                    good_frame[1],
-                    good_frame[2],
-                    good_frame[3],
-                )
+                f'Thread {thread_id}\t{object}\tnon-sleeping for {self.nonsleeping_heartbeats[thread_id]} heartbeat(s)\n File {good_frame[0]}:{good_frame[1]}\n Function "{good_frame[2]}"\n {good_frame[3]}\n'
             )
             all_threads_are_sleeping = False
3 changes: 1 addition & 2 deletions lib/galaxy/util/permutations.py
@@ -60,8 +60,7 @@ def __extend_with_matched_combos(single_inputs, multi_inputs, nested):
                 continue
             if len(multi_input_values) != len(first_multi_value):
                 raise InputMatchedException(
-                    "Received %d inputs for '%s' and %d inputs for '%s', these should be of equal length"
-                    % (len(multi_input_values), multi_input_key, len(first_multi_value), first_multi_input_key)
+                    f"Received {len(multi_input_values)} inputs for '{multi_input_key}' and {len(first_multi_value)} inputs for '{first_multi_input_key}', these should be of equal length"
                 )

         for index, value in enumerate(multi_input_values):
2 changes: 1 addition & 1 deletion lib/galaxy/util/rules_dsl.py
@@ -187,7 +187,7 @@ def apply(self, rule, data, sources):

         new_rows = []
         for index, row in enumerate(data):
-            new_rows.append(row + ["%d" % (index + start)])
+            new_rows.append(row + [f"{index + start}"])

         return new_rows, sources
7 changes: 3 additions & 4 deletions lib/galaxy/visualization/data_providers/basic.py
@@ -77,10 +77,9 @@ def get_data(self, columns=None, start_val=0, max_vals=None, skip_comments=True,
             # columns is an array of ints for now (should handle column names later)
             columns = loads(columns)
             for column in columns:
-                assert (column < self.original_dataset.metadata.columns) and (
-                    column >= 0
-                ), "column index (%d) must be positive and less" % (column) + " than the number of columns: %d" % (
-                    self.original_dataset.metadata.columns
+                assert column < self.original_dataset.metadata.columns and column >= 0, (
+                    f"column index ({column}) must be positive and less"
+                    f" than the number of columns: {self.original_dataset.metadata.columns}"
                 )

         # set up the response, column lists
14 changes: 7 additions & 7 deletions lib/galaxy/visualization/data_providers/cigar.py
@@ -43,34 +43,34 @@ def get_ref_based_read_seq_and_cigar(read_seq, read_start, ref_seq, ref_seq_start
                     new_op = "X"
                     # Include mismatched bases in new read sequence.
                     new_read_seq += read_seq[read_pos : read_pos + count]
-                new_cigar += "%i%s" % (count, new_op)
+                new_cigar += f"{count}{new_op}"
                 total_count += count
                 read_pos += count
                 ref_seq_pos += count

             # If end of read falls outside of ref_seq data, leave as M.
             if total_count < op_len:
-                new_cigar += "%iM" % (op_len - total_count)
+                new_cigar += "%sM" % (op_len - total_count)
         elif op == 1:  # Insertion
-            new_cigar += "%i%s" % (op_len, cigar_ops[op])
+            new_cigar += f"{op_len}{cigar_ops[op]}"
             # Include insertion bases in new read sequence.
             new_read_seq += read_seq[read_pos : read_pos + op_len]
             read_pos += op_len
         elif op in [2, 3, 6]:  # Deletion, Skip, or Padding
             ref_seq_pos += op_len
-            new_cigar += "%i%s" % (op_len, cigar_ops[op])
+            new_cigar += f"{op_len}{cigar_ops[op]}"
         elif op == 4:  # Soft clipping
             read_pos += op_len
-            new_cigar += "%i%s" % (op_len, cigar_ops[op])
+            new_cigar += f"{op_len}{cigar_ops[op]}"
         elif op == 5:  # Hard clipping
-            new_cigar += "%i%s" % (op_len, cigar_ops[op])
+            new_cigar += f"{op_len}{cigar_ops[op]}"
         elif op in [7, 8]:  # Match or mismatch
             if op == 8:
                 # Include mismatched bases in new read sequence.
                 new_read_seq += read_seq[read_pos : read_pos + op_len]
             read_pos += op_len
             ref_seq_pos += op_len
-            new_cigar += "%i%s" % (op_len, cigar_ops[op])
+            new_cigar += f"{op_len}{cigar_ops[op]}"

     return (new_read_seq, new_cigar)
12 changes: 5 additions & 7 deletions lib/galaxy/visualization/data_providers/genome.py
@@ -251,7 +251,7 @@ def get_genome_data(self, chroms_info, **kwargs):
             # create a dummy dict if necessary.
             if not chrom_data:
                 chrom_data = {"data": None}
-            chrom_data["region"] = "%s:%i-%i" % (chrom, 0, chrom_len)
+            chrom_data["region"] = f"{chrom}:{0}-{chrom_len}"
             genome_data.append(chrom_data)

         return {"data": genome_data, "dataset_type": self.dataset_type}
@@ -970,7 +970,7 @@ def _nth_read_iterator(read_iterator, threshold):
                 pair = paired_pending[qname]
                 results.append(
                     [
-                        hash("%i_%s" % (pair["start"], qname)),
+                        hash("{}_{}".format(pair["start"], qname)),
                         pair["start"],
                         read.pos + read_len,
                         qname,
@@ -997,7 +997,7 @@
             else:
                 results.append(
                     [
-                        hash("%i_%s" % (read.pos, qname)),
+                        hash(f"{read.pos}_{qname}"),
                         read.pos,
                         read.pos + read_len,
                         qname,
@@ -1028,9 +1028,7 @@ def _nth_read_iterator(read_iterator, threshold):
             r1 = [read["start"], read["end"], read["cigar"], read["strand"], read["seq"]]
             r2 = [read["mate_start"], read["mate_start"]]

-            results.append(
-                [hash("%i_%s" % (read_start, qname)), read_start, read_end, qname, r1, r2, [read["mapq"], 125]]
-            )
+            results.append([hash(f"{read_start}_{qname}"), read_start, read_end, qname, r1, r2, [read["mapq"], 125]])

         # Clean up. TODO: is this needed? If so, we'll need a cleanup function after processing the data.
         # bamfile.close()
@@ -1052,7 +1050,7 @@ def convert_cigar(read, start_field, cigar_field, seq_field):
             cigar_ops = "MIDNSHP=X"
             read_cigar = ""
             for op_tuple in read[cigar_field]:
-                read_cigar += "%i%s" % (op_tuple[1], cigar_ops[op_tuple[0]])
+                read_cigar += f"{op_tuple[1]}{cigar_ops[op_tuple[0]]}"
             read[cigar_field] = read_cigar

         # Choose method for processing reads. Use reference-based compression
2 changes: 1 addition & 1 deletion lib/galaxy/web/framework/base.py
@@ -530,7 +530,7 @@ def wsgi_status(self):
         """
         if isinstance(self.status, int):
             exception = webob.exc.status_map.get(self.status)
-            return "%d %s" % (exception.code, exception.title)
+            return f"{exception.code} {exception.title}"
         else:
             return self.status
5 changes: 2 additions & 3 deletions lib/galaxy/web/framework/middleware/profile.py
@@ -88,8 +88,7 @@ def pstats_as_html(stats, *sel_list):
     rval = []
     # Number of function calls, primitive calls, total time
     rval.append(
-        "<div>%d function calls (%d primitive) in %0.3f CPU seconds</div>"
-        % (stats.total_calls, stats.prim_calls, stats.total_tt)
+        f"<div>{stats.total_calls} function calls ({stats.prim_calls} primitive) in {stats.total_tt:0.3f} CPU seconds</div>"
     )
     # Extract functions that match 'sel_list'
     funcs, order_message, select_message = get_func_list(stats, sel_list)
@@ -175,4 +174,4 @@ def func_std_string(func_name):
         else:
             return name
     else:
-        return "%s:%d(%s)" % func_name
+        return "{}:{}({})".format(*func_name)
6 changes: 1 addition & 5 deletions lib/galaxy/web/framework/middleware/translogger.py
@@ -80,17 +80,13 @@ def write_log(self, environ, method, req_uri, start, status, bytes):
             offset = time.altzone / 60 / 60 * -100
         else:
             offset = time.timezone / 60 / 60 * -100
-        if offset >= 0:
-            offset = "+%0.4d" % (offset)
-        elif offset < 0:
-            offset = "%0.4d" % (offset)
         d = {
             "REMOTE_ADDR": environ.get("REMOTE_ADDR") or "-",
             "REMOTE_USER": environ.get("REMOTE_USER") or "-",
             "REQUEST_METHOD": method,
             "REQUEST_URI": req_uri,
             "HTTP_VERSION": environ.get("SERVER_PROTOCOL"),
-            "time": time.strftime("%d/%b/%Y:%H:%M:%S ", start) + offset,
+            "time": time.strftime("%d/%b/%Y:%H:%M:%S ", start) + f"{int(offset):+05}",
             "status": status.split(None, 1)[0],
             "bytes": bytes,
             "HTTP_REFERER": environ.get("HTTP_REFERER", "-"),
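A detail worth verifying in the translogger hunk: the old code special-cased the sign of the time-zone offset before zero-padding, while the new code folds both into the +05 format spec (always emit a sign, zero-pad to width 5). A quick sanity check that the two agree, using illustrative offsets in +/-HHMM form:

# Old two-branch %-formatting vs. the new single format spec.
for offset in (200, -200, 0, 1000):
    if offset >= 0:
        old = "+%0.4d" % offset
    else:
        old = "%0.4d" % offset
    new = f"{int(offset):+05}"
    assert old == new, (old, new)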
10 changes: 3 additions & 7 deletions lib/galaxy/web/proxy/__init__.py
@@ -96,7 +96,7 @@ def setup_proxy(
             host = host[0 : host.index(":")]
         scheme = trans.request.scheme
         if not self.dynamic_proxy_external_proxy:
-            proxy_url = "%s://%s:%d" % (scheme, host, self.dynamic_proxy_bind_port)
+            proxy_url = f"{scheme}://{host}:{self.dynamic_proxy_bind_port}"
         else:
             proxy_url = f"{scheme}://{host}{proxy_prefix}"
         return {
@@ -160,11 +160,7 @@ def launch_proxy_command(self, config):
         args = [
             "gxproxy",  # Must be on path. TODO: wheel?
             "--listenAddr",
-            "%s:%d"
-            % (
-                config.dynamic_proxy_bind_ip,
-                config.dynamic_proxy_bind_port,
-            ),
+            f"{config.dynamic_proxy_bind_ip}:{config.dynamic_proxy_bind_port}",
             "--listenPath",
             "/".join(((config.cookie_path or url_for("/")), config.dynamic_proxy_prefix)),
             "--cookieName",
@@ -197,7 +193,7 @@ def __init__(self, host=None, port=None):
             host = DEFAULT_PROXY_TO_HOST
         if port is None:
             port = sockets.unused_port()
-            log.info("Obtained unused port %d" % port)
+            log.info("Obtained unused port %d", port)
         self.host = host
         self.port = port
2 changes: 1 addition & 1 deletion lib/galaxy/webapps/base/controller.py
@@ -1297,7 +1297,7 @@ def _scan_json_block(self, meta, prefix=""):
                 yield from self._scan_json_block(meta[a], f"{prefix}/{a}")
         elif isinstance(meta, list):
             for i, a in enumerate(meta):
-                yield from self._scan_json_block(a, prefix + "[%d]" % (i))
+                yield from self._scan_json_block(a, prefix + f"[{i}]")
         else:
             # BUG: Everything is cast to string, which can lead to false positives
             # for cross type comparisions, ie "True" == True
17 changes: 5 additions & 12 deletions lib/galaxy/webapps/galaxy/controllers/admin.py
@@ -539,8 +539,7 @@ def manage_users_and_groups_for_quota(self, trans, payload=None, **kwd):
             all_groups.append((group.name, trans.security.encode_id(group.id)))
         return {
             "title": f"Quota '{quota.name}'",
-            "message": "Quota '%s' is currently associated with %d user(s) and %d group(s)."
-            % (quota.name, len(in_users), len(in_groups)),
+            "message": f"Quota '{quota.name}' is currently associated with {len(in_users)} user(s) and {len(in_groups)} group(s).",
             "status": "info",
             "inputs": [
                 build_select_input("in_groups", "Groups", all_groups, in_groups),
@@ -802,8 +801,7 @@ def manage_users_and_groups_for_role(self, trans, payload=None, **kwd):
             all_groups.append((group.name, trans.security.encode_id(group.id)))
         return {
             "title": f"Role '{role.name}'",
-            "message": "Role '%s' is currently associated with %d user(s) and %d group(s)."
-            % (role.name, len(in_users), len(in_groups)),
+            "message": f"Role '{role.name}' is currently associated with {len(in_users)} user(s) and {len(in_groups)} group(s).",
             "status": "info",
             "inputs": [
                 build_select_input("in_groups", "Groups", all_groups, in_groups),
@@ -889,8 +887,7 @@ def manage_users_and_roles_for_group(self, trans, payload=None, **kwd):
             all_roles.append((role.name, trans.security.encode_id(role.id)))
         return {
             "title": f"Group '{group.name}'",
-            "message": "Group '%s' is currently associated with %d user(s) and %d role(s)."
-            % (group.name, len(in_users), len(in_roles)),
+            "message": f"Group '{group.name}' is currently associated with {len(in_users)} user(s) and {len(in_roles)} role(s).",
             "status": "info",
             "inputs": [
                 build_select_input("in_roles", "Roles", all_roles, in_roles),
@@ -993,11 +990,7 @@ def create_group(self, trans, payload=None, **kwd):
             num_in_roles = len(in_roles)
             with transaction(trans.sa_session):
                 trans.sa_session.commit()
-            message = "Group '%s' has been created with %d associated users and %d associated roles." % (
-                group.name,
-                len(in_users),
-                num_in_roles,
-            )
+            message = f"Group '{group.name}' has been created with {len(in_users)} associated users and {num_in_roles} associated roles."
             if auto_create_checked:
                 message += (
                     "One of the roles associated with this group is the newly created role with the same name."
@@ -1034,7 +1027,7 @@ def reset_user_password(self, trans, payload=None, **kwd):
                 trans.sa_session.add(user)
             with transaction(trans.sa_session):
                 trans.sa_session.commit()
-            return {"message": "Passwords reset for %d user(s)." % len(users)}
+            return {"message": f"Passwords reset for {len(users)} user(s)."}
         else:
             return self.message_exception(trans, "Please specify user ids.")
6 changes: 3 additions & 3 deletions lib/galaxy/webapps/galaxy/controllers/async.py
@@ -95,7 +95,7 @@ def index(self, trans, tool_id=None, data_secret=None, **kwd):
         STATUS = params.get("STATUS")

         if STATUS == "OK":
-            key = hmac_new(trans.app.config.tool_secret, "%d:%d" % (data.id, data.history_id))
+            key = hmac_new(trans.app.config.tool_secret, f"{data.id}:{data.history_id}")
             if key != data_secret:
                 return f"You do not have permission to alter data {data_id}."
             if not params.get("GALAXY_URL"):
@@ -203,10 +203,10 @@ def index(self, trans, tool_id=None, data_secret=None, **kwd):
                 trans.sa_session.commit()
             # Need to explicitly create the file
             data.dataset.object_store.create(data.dataset)
-            trans.log_event("Added dataset %d to history %d" % (data.id, trans.history.id), tool_id=tool_id)
+            trans.log_event(f"Added dataset {data.id} to history {trans.history.id}", tool_id=tool_id)

             try:
-                key = hmac_new(trans.app.config.tool_secret, "%d:%d" % (data.id, data.history_id))
+                key = hmac_new(trans.app.config.tool_secret, f"{data.id}:{data.history_id}")
                 galaxy_url = f"{trans.request.url_path}/async/{tool_id}/{data.id}/{key}"
                 params.update({"GALAXY_URL": galaxy_url})
                 params.update({"data_id": data.id})
(11 more changed files not shown)
