refactor: Remove f-string in logging treewide

This completes @schroeders' effort in f48b4f5: 'remove f-string in logging'.

Generally this is more consistent, more efficient, and less error-prone than formatting the string directly.
AiyionPrime authored and oroulet committed Feb 22, 2024
1 parent 85b56c7 commit da1f7b1
Showing 14 changed files with 48 additions and 48 deletions.
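
Why the change is worthwhile: an f-string renders its message eagerly, before the logging module has decided whether the record will be emitted at all, while passing the values as arguments lets logging defer %-interpolation until a handler actually formats the record. A minimal, standalone sketch of the difference (not part of this commit; the Expensive class and logger name are invented for illustration):

import logging

logging.basicConfig(level=logging.WARNING)
_logger = logging.getLogger("demo")

class Expensive:
    def __str__(self) -> str:
        print("__str__ was called")  # visible side effect to show when formatting happens
        return "expensive value"

results = Expensive()

# Eager: the f-string calls Expensive.__str__ immediately, even though
# this INFO record is filtered out at WARNING level and never emitted.
_logger.info(f"Result from subscription update: {results}")

# Lazy: the logger stores the format string and arguments, and only
# applies %s when the record is actually emitted, so __str__ never runs here.
_logger.info("Result from subscription update: %s", results)

A constant format string can also help log aggregation tools group otherwise identical messages, which supports the "more consistent" point above.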
8 changes: 4 additions & 4 deletions asyncua/client/client.py
@@ -744,7 +744,7 @@ async def create_subscription(
new_params = self.get_subscription_revised_params(params, results)
if new_params:
results = await subscription.update(new_params)
_logger.info(f"Result from subscription update: {results}")
_logger.info("Result from subscription update: %s", results)
return subscription

def get_subscription_revised_params( # type: ignore
@@ -759,7 +759,7 @@ def get_subscription_revised_params( # type: ignore
):
return # type: ignore
_logger.warning(
f"Revised values returned differ from subscription values: {results}"
"Revised values returned differ from subscription values: %s", results
)
revised_interval = results.RevisedPublishingInterval
# Adjust the MaxKeepAliveCount based on the RevisedPublishInterval when necessary
@@ -769,8 +769,8 @@ def get_subscription_revised_params( # type: ignore
and new_keepalive_count != params.RequestedMaxKeepAliveCount
):
_logger.info(
f"KeepAliveCount will be updated to {new_keepalive_count} "
f"for consistency with RevisedPublishInterval"
"KeepAliveCount will be updated to %s "
"for consistency with RevisedPublishInterval", new_keepalive_count
)
modified_params = ua.ModifySubscriptionParameters()
# copy the existing subscription parameters
16 changes: 8 additions & 8 deletions asyncua/client/ha/ha_client.py
@@ -245,7 +245,7 @@ async def subscribe_data_change(
vs = self.ideal_map[url].get(sub_name)
if not vs:
_logger.warning(
f"The subscription specified for the data_change: {sub_name} doesn't exist in ideal_map"
"The subscription specified for the data_change: %s doesn't exist in ideal_map", sub_name
)
return
vs.subscribe_data_change(nodes, attr, queuesize)
@@ -261,7 +261,7 @@ async def delete_subscriptions(self, sub_names: List[str]) -> None:
self.ideal_map[url].pop(sub_name)
else:
_logger.warning(
f"No subscription named {sub_name} in ideal_map"
"No subscription named %s in ideal_map", sub_name
)
self.sub_names.remove(sub_name)

@@ -447,7 +447,7 @@ async def run(self) -> None:
await asyncio.sleep(3)
self.is_running = True
_logger.info(
f"Starting keepalive loop for {server_info.url}, checking every {self.timer}sec"
"Starting keepalive loop for %s, checking every %dsec", server_info.url, self.timer
)
while self.is_running:
if client.uaclient.protocol is None:
@@ -481,7 +481,7 @@ async def run(self) -> None:
_logger.exception("Unknown exception during keepalive liveness check")
server_info.status = ConnectionStates.NO_DATA

_logger.info(f"ServiceLevel for {server_info.url}: {server_info.status}")
_logger.info("ServiceLevel for %s: %s", server_info.url, server_info.status)
if await event_wait(self.stop_event, self.timer):
self.is_running = False
break
@@ -511,7 +511,7 @@ async def run(self) -> None:
reconnect = getattr(self, reco_func)
self.is_running = True

_logger.info(f"Starting HaManager loop, checking every {self.timer}sec")
_logger.info("Starting HaManager loop, checking every %dsec", self.timer)
while self.is_running:

# failover happens here
@@ -535,7 +535,7 @@ async def update_state_warm(self) -> None:
if primary_client != active_client:
# disable monitoring and reporting when the service_level goes below 200
_logger.info(
f"Failing over active client from {active_client} to {primary_client}"
"Failing over active client from %s to %s", active_client, primary_client
)
secondaries = (
set(clients) - {primary_client} if primary_client else set(clients)
@@ -557,12 +557,12 @@ async def reco_resub(client: Client, force: bool):
or client.uaclient.protocol
and client.uaclient.protocol.state == UASocketProtocol.CLOSED
):
_logger.info(f"Virtually reconnecting and resubscribing {client}")
_logger.info("Virtually reconnecting and resubscribing %s", client)
await self.ha_client.reconnect(client=client)

def log_exception(client: Client, fut: asyncio.Task):
if fut.exception():
_logger.warning(f"Error when reconnecting {client}: {fut.exception()}")
_logger.warning("Error when reconnecting %s: %s", client, fut.exception())

tasks = []
for client in healthy:
38 changes: 19 additions & 19 deletions asyncua/client/ha/reconciliator.py
@@ -84,7 +84,7 @@ def init_hooks(self) -> None:
setattr(self, hook, lambda **kwargs: None)

async def run(self) -> None:
_logger.info(f"Starting Reconciliator loop, checking every {self.timer}sec")
_logger.info("Starting Reconciliator loop, checking every %dsec", self.timer)
self.is_running = True
while self.is_running:

@@ -95,7 +95,7 @@ async def run(self) -> None:
await self.reconciliate()
await self.debug_status()
stop = time.time() - start
_logger.info(f"[TIME] Reconciliation: {stop:.2f}sec")
_logger.info("[TIME] Reconciliation: %.2fsec", stop)

if await event_wait(self.stop_event, self.timer):
self.is_running = False
@@ -154,14 +154,14 @@ async def reconciliate(self) -> None:
targets.add(url)
if not targets:
_logger.info(
f"[PASS] No configuration difference for healthy targets: {valid_urls}"
"[PASS] No configuration difference for healthy targets: %s", valid_urls
)
return
_logger.info(
f"[WORK] Configuration difference found for healthy targets: {targets}"
"[WORK] Configuration difference found for healthy targets: %s", targets
)
except (AttributeError, TypeError, PicklingError) as e:
_logger.warning(f"[WORK] Reconciliator performance impacted: {e}")
_logger.warning("[WORK] Reconciliator performance impacted: %s", e)
targets = set(valid_urls)
# add missing and delete unsubscribed subs
await self.update_subscriptions(real_map, ideal_map, targets)
@@ -186,7 +186,7 @@ def _subs_to_del(
to_del: List[asyncio.Task] = []
sub_to_del = set(real_map[url]) - set(ideal_map[url])
if sub_to_del:
_logger.info(f"Removing {len(sub_to_del)} subscriptions")
_logger.info("Removing %d subscriptions", len(sub_to_del))
for sub_name in sub_to_del:
sub_handle = self.name_to_subscription[url][sub_name]
task = asyncio.create_task(sub_handle.delete())
@@ -202,7 +202,7 @@ def _subs_to_add(
to_add: List[asyncio.Task] = []
sub_to_add = set(ideal_map[url]) - set(real_map[url])
if sub_to_add:
_logger.info(f"Adding {len(sub_to_add)} subscriptions")
_logger.info("Adding %d subscriptions", len(sub_to_add))
client = self.ha_client.get_client_by_url(url)
for sub_name in sub_to_add:
vs = ideal_map[url][sub_name]
@@ -238,8 +238,8 @@ async def update_nodes(
# in case the previous create_subscription request failed
if not real_sub:
_logger.warning(
f"Can't create nodes for {url} since underlying "
f"subscription for {sub_name} doesn't exist"
"Can't create nodes for %s since underlying "
"subscription for %s doesn't exist", url, sub_name
)
continue
vs_real = real_map[url][sub_name]
@@ -263,7 +263,7 @@ def _nodes_to_add(
monitoring = vs_real.monitoring
node_to_add = set(vs_ideal.nodes) - set(vs_real.nodes)
if node_to_add:
_logger.info(f"Adding {len(node_to_add)} Nodes")
_logger.info("Adding %d Nodes", len(node_to_add))
# hack to group subscription by NodeAttributes
attr_to_nodes = defaultdict(list)
for node in node_to_add:
@@ -309,7 +309,7 @@ def _nodes_to_del(
node_to_del = set(vs_real.nodes) - set(vs_ideal.nodes)
real_sub: Subscription = self.name_to_subscription[url].get(sub_name)
if node_to_del:
_logger.info(f"Removing {len(node_to_del)} Nodes")
_logger.info("Removing %d Nodes", len(node_to_del))
for batch_nodes in batch(node_to_del, self.BATCH_MI_SIZE):
node_handles = [self.node_to_handle[url][node] for node in batch_nodes]
task = asyncio.create_task(real_sub.unsubscribe(node_handles))
@@ -341,7 +341,7 @@ async def update_subscription_modes(
# in case the previous create_subscription request failed
if not real_sub:
_logger.warning(
f"Can't change modes for {url} since underlying subscription for {sub_name} doesn't exist"
"Can't change modes for %s since underlying subscription for %s doesn't exist", url, sub_name
)
continue
vs_real = real_map[url][sub_name]
@@ -351,7 +351,7 @@
ideal_val = getattr(vs_ideal, attr)
real_val = getattr(vs_real, attr)
if ideal_val != real_val:
_logger.info(f"Changing {attr} for {sub_name} to {ideal_val}")
_logger.info("Changing %s for %s to %s", attr, sub_name, ideal_val)
set_func = getattr(real_sub, func)
task = asyncio.create_task(set_func(ideal_val))
task.add_done_callback(
@@ -375,15 +375,15 @@ def change_mode(
**kwargs,
) -> None:
if fut.exception():
_logger.warning(f"Can't {action.value} on {url}: {fut.exception()}")
_logger.warning("Can't %s on %s: %s", action.value, url, fut.exception())
return
sub_name = kwargs["sub_name"]
vs = self.real_map[url][sub_name]
setattr(vs, action.name.lower(), val)

def add_to_map(self, url: str, action: Method, fut: asyncio.Task, **kwargs) -> None:
if fut.exception():
_logger.warning(f"Can't {action.value} on {url}: {fut.exception()}")
_logger.warning("Can't %s on %s: %s", action.value, url, fut.exception())
self.hook_add_to_map_error(url=url, action=action, fut=fut, **kwargs)
return

@@ -401,11 +401,11 @@ def add_to_map(self, url: str, action: Method, fut: asyncio.Task, **kwargs) -> N
if isinstance(handle, ua.StatusCode):
# a StatusCode is returned, the request has failed.
vs.unsubscribe([node])
_logger.info(f"Node {node} subscription failed: {handle}")
_logger.info("Node %s subscription failed: %s", node, handle)
# The node is invalid, remove it from both maps
if handle.name == "BadNodeIdUnknown":
_logger.warning(
f"WARNING: Abandoning {node} because it returned {handle} from {url}"
"WARNING: Abandoning %s because it returned %s from %s", node, handle, url
)
real_vs = self.ha_client.ideal_map[url][sub_name]
real_vs.unsubscribe([node])
@@ -418,13 +418,13 @@ def del_from_map(
) -> None:
if fut.exception():
# log exception but continues to delete local resources
_logger.warning(f"Can't {action.value} on {url}: {fut.exception()}")
_logger.warning("Can't %s on %s: %s", action.value, url, fut.exception())
sub_name = kwargs["sub_name"]

if action == Method.DEL_SUB:
self.real_map[url].pop(sub_name)
self.name_to_subscription[url].pop(sub_name)
_logger.warning(f"In del_from_map del sub: {fut.result()}")
_logger.warning("In del_from_map del sub: %s", fut.result())

if action == Method.DEL_MI:
nodes = kwargs["nodes"]
4 changes: 2 additions & 2 deletions asyncua/common/connection.py
@@ -426,7 +426,7 @@ def receive_from_header_and_body(self, header, body):
return msg
if header.MessageType == ua.MessageType.Error:
msg = struct_from_binary(ua.ErrorMessage, body)
_logger.warning(f"Received an error: {msg}")
_logger.warning("Received an error: %s", msg)
return msg
raise ua.UaError(f"Unsupported message type {header.MessageType}")

@@ -444,7 +444,7 @@ def _receive(self, msg):
return None
if msg.MessageHeader.ChunkType == ua.ChunkType.Abort:
err = struct_from_binary(ua.ErrorMessage, ua.utils.Buffer(msg.Body))
_logger.warning(f"Message {msg} aborted: {err}")
_logger.warning("Message %s aborted: %s", msg, err)
# specs Part 6, 6.7.3 say that aborted message shall be ignored
# and SecureChannel should not be closed
self._incoming_parts = []
4 changes: 2 additions & 2 deletions asyncua/common/copy_node_util.py
@@ -91,6 +91,6 @@ async def _read_and_copy_attrs(node_type: asyncua.Node, struct: Any, addnode: ua
else:
setattr(struct, name, variant.Value)
else:
_logger.warning(f"Instantiate: while copying attributes from node type {str(node_type)},"
f" attribute {str(name)}, statuscode is {str(results[idx].StatusCode)}")
_logger.warning("Instantiate: while copying attributes from node type %s,"
" attribute %s, statuscode is %s", str(node_type), str(name), str(results[idx].StatusCode))
addnode.NodeAttributes = struct
4 changes: 2 additions & 2 deletions asyncua/common/statemachine.py
@@ -154,7 +154,7 @@ async def init(self, statemachine: Node):
elif dn.Text == "EffectiveDisplayName":
self._current_state_effective_display_name_node = await self._current_state_node.get_child(["EffectiveDisplayName"])
else:
_logger.warning(f"{await statemachine.read_browse_name()} CurrentState Unknown property: {dn.Text}")
_logger.warning("%s CurrentState Unknown property: %s", await statemachine.read_browse_name(), dn.Text)
if self._optionals:
self._last_transition_node = await statemachine.get_child(["LastTransition"])
last_transition_props = await self._last_transition_node.get_properties()
@@ -169,7 +169,7 @@ async def init(self, statemachine: Node):
elif dn.Text == "TransitionTime":
self._last_transition_transitiontime_node = await self._last_transition_node.get_child(["TransitionTime"])
else:
_logger.warning(f"{await statemachine.read_browse_name()} LastTransition Unknown property: {dn.Text}")
_logger.warning("%s LastTransition Unknown property: %s", await statemachine.read_browse_name(), dn.Text)
self._evgen = await self._server.get_event_generator(self.evtype, self._state_machine_node)

async def change_state(self, state: State, transition: Transition = None, event_msg: Union[str, ua.LocalizedText] = None, severity: int = 500):
2 changes: 1 addition & 1 deletion asyncua/common/structures104.py
@@ -215,7 +215,7 @@ class {struct_name}{base_class}:
uatype = struct_name
else:
if log_error:
_logger.error(f"Unknown datatype for field: {sfield} in structure:{struct_name}, please report")
_logger.error("Unknown datatype for field: %s in structure:%s, please report", sfield, struct_name)
raise RuntimeError(f"Unknown datatype for field: {sfield} in structure:{struct_name}, please report")

if sfield.ValueRank >= 0:
2 changes: 1 addition & 1 deletion asyncua/common/utils.py
@@ -128,7 +128,7 @@ def fields_with_resolved_types(
try:
field.type = resolved_fieldtypes[field.name]
except KeyError:
_logger.info(f"could not resolve fieldtype for field={field} of class_or_instance={class_or_instance}")
_logger.info("could not resolve fieldtype for field=%s of class_or_instance=%s", field, class_or_instance)
pass

return fields_
2 changes: 1 addition & 1 deletion asyncua/common/xmlexporter.py
@@ -522,7 +522,7 @@ async def _all_fields_to_etree(self, struct_el: Et.Element, val: Any) -> None:
dtype = ua.enums_datatypes[cls]
break
self.logger.debug(
f"could not find field type {field.type} in registered types"
"could not find field type %s in registered types", field.type
)
return
await self.member_to_etree(
6 changes: 3 additions & 3 deletions asyncua/server/address_space.py
@@ -307,9 +307,9 @@ def _add_node(self, item: ua.AddNodesItem, user: User, check: bool = True) -> ua
if ref.ReferenceTypeId == ua.NodeId(ua.ObjectIds.HasProperty):
if item.BrowseName.Name == ref.BrowseName.Name:
self.logger.warning(
f"AddNodesItem: Requested Browsename {item.BrowseName.Name}"
f" already exists in Parent Node. ParentID:{item.ParentNodeId} --- "
f"ItemId:{item.RequestedNewNodeId}"
"AddNodesItem: Requested Browsename %s"
" already exists in Parent Node. ParentID:%s --- "
"ItemId:%s", item.BrowseName.Name, item.ParentNodeId, item.RequestedNewNodeId
)
result.StatusCode = ua.StatusCode(ua.StatusCodes.BadBrowseNameDuplicated)
return result
2 changes: 1 addition & 1 deletion asyncua/server/binary_server_asyncio.py
@@ -72,7 +72,7 @@ def data_received(self, data):
return
if header.header_size + header.body_size <= header.header_size:
# malformed header prevent invalid access of your buffer
-_logger.error(f'Got malformed header {header}')
+_logger.error('Got malformed header %s', header)
self.transport.close()
return
else:
2 changes: 1 addition & 1 deletion asyncua/server/history_sql.py
@@ -181,7 +181,7 @@ async def save_event(self, event):
await self._db.execute(f'DELETE FROM "{table}" WHERE Time < ?', (date_limit.isoformat(' '),))
await self._db.commit()
except aiosqlite.Error as e:
self.logger.error(f"Historizing SQL Delete Old Data Error for events from {event.SourceNode}: {e}")
self.logger.error("Historizing SQL Delete Old Data Error for events from %s: %s", event.SourceNode, e)

async def read_event_history(self, source_id, start, end, nb_values, evfilter):
table = self._get_table_name(source_id)
4 changes: 2 additions & 2 deletions asyncua/server/user_managers.py
@@ -37,8 +37,8 @@ async def add_role(self, certificate_path: Path, user_role: UserRole, name: str,
user = User(role=user_role, name=name)

if name in self._trusted_certificates:
logging.warning(f"certificate with name {name} "
f"attempted to be added multiple times, only the last version will be kept.")
logging.warning("certificate with name %s "
"attempted to be added multiple times, only the last version will be kept.", name)
self._trusted_certificates[name] = {'certificate': uacrypto.der_from_x509(certificate), 'user': user}

def get_user(self, iserver, username=None, password=None, certificate=None):
