From 173e37e776d503ce3bc70178ab64ae2714d8fb91 Mon Sep 17 00:00:00 2001 From: Alexander Senier Date: Tue, 24 Aug 2021 10:01:34 +0200 Subject: [PATCH] Remove extend method in RecordFluxError Ref. #748 --- rflx/cli.py | 2 +- rflx/declaration.py | 28 +- rflx/error.py | 18 +- rflx/expression.py | 198 +++++---- rflx/identifier.py | 20 +- rflx/model/message.py | 780 +++++++++++++++++++++-------------- rflx/model/model.py | 117 +++--- rflx/model/session.py | 208 ++++++---- rflx/model/type_.py | 352 ++++++++++------ rflx/pyrflx/error.py | 2 +- rflx/pyrflx/typevalue.py | 29 +- rflx/specification/parser.py | 307 ++++++++------ rflx/statement.py | 26 +- rflx/typing_.py | 46 +-- tests/property/strategies.py | 13 +- 15 files changed, 1273 insertions(+), 873 deletions(-) diff --git a/rflx/cli.py b/rflx/cli.py index 9111ac325..6471f01fa 100644 --- a/rflx/cli.py +++ b/rflx/cli.py @@ -186,7 +186,7 @@ def parse(files: Sequence[Path], skip_verification: bool = False) -> Model: for f in files: if not f.is_file(): - error.append(f'file not found: "{f}"', Subsystem.CLI, Severity.ERROR) + error.extend([(f'file not found: "{f}"', Subsystem.CLI, Severity.ERROR, None)]) continue present_files.append(Path(f)) diff --git a/rflx/declaration.py b/rflx/declaration.py index 15107510c..b63f44a54 100644 --- a/rflx/declaration.py +++ b/rflx/declaration.py @@ -147,18 +147,22 @@ def check_type( if ID(r.field) == self.expression.selector and r.sdu.is_compatible(declaration_type): break else: - error.append( - f'invalid renaming to "{self.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - self.location, - ) - error.append( - f'refinement for message "{self.expression.prefix.type_.identifier}"' - " would make operation legal", - Subsystem.MODEL, - Severity.INFO, - self.location, + error.extend( + [ + ( + f'invalid renaming to "{self.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ), + ( + f'refinement for message "{self.expression.prefix.type_.identifier}"' + " would make operation legal", + Subsystem.MODEL, + Severity.INFO, + self.location, + ), + ], ) return error + self.expression.check_type(rty.OPAQUE) diff --git a/rflx/error.py b/rflx/error.py index 05b418c00..712a80495 100644 --- a/rflx/error.py +++ b/rflx/error.py @@ -138,20 +138,6 @@ def __iadd__(self: Self, other: object) -> Self: def errors(self) -> Deque["BaseError.Entry"]: return self.__errors - def append( - self, message: str, subsystem: Subsystem, severity: Severity, location: Location = None - ) -> None: - self.__errors.append(BaseError.Entry(message, subsystem, severity, location)) - if get_fail_after() > 0 and len(self.__errors) >= get_fail_after(): - raise self - - def appendleft( - self, message: str, subsystem: Subsystem, severity: Severity, location: Location = None - ) -> None: - self.__errors.appendleft(BaseError.Entry(message, subsystem, severity, location)) - if get_fail_after() > 0 and len(self.__errors) >= get_fail_after(): - raise self - def extend( self, entries: Union[List[Tuple[str, Subsystem, Severity, Optional[Location]]], "BaseError"], @@ -220,7 +206,7 @@ def _fail( severity: Severity = Severity.ERROR, location: Location = None, ) -> NoReturn: - error.append(message, subsystem, severity, location) + error.extend([(message, subsystem, severity, location)]) error.propagate() assert False @@ -232,5 +218,5 @@ def warn( location: Location = None, ) -> None: e = RecordFluxError() - e.append(message, subsystem, severity, location) + e.extend([(message, subsystem, severity, location)]) print(e) diff --git a/rflx/expression.py 
b/rflx/expression.py index f4ea8e6f0..b34632b4f 100644 --- a/rflx/expression.py +++ b/rflx/expression.py @@ -1188,11 +1188,15 @@ def _check_type_subexpr(self) -> RecordFluxError: error = self.prefix.prefix.check_type_instance(rty.Message) else: error = RecordFluxError() - error.append( - "invalid prefix for attribute Present", - Subsystem.MODEL, - Severity.ERROR, - self.location, + error.extend( + [ + ( + "invalid prefix for attribute Present", + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ] ) return error @@ -1218,11 +1222,15 @@ def __init__(self, prefix: Union[StrID, Expr], negative: bool = False) -> None: def _check_type_subexpr(self) -> RecordFluxError: error = self.prefix.check_type_instance(rty.Composite) if not isinstance(self.prefix, (Variable, Selected)): - error.append( - "prefix of attribute Head must be a name", - Subsystem.MODEL, - Severity.ERROR, - self.prefix.location, + error.extend( + [ + ( + "prefix of attribute Head must be a name", + Subsystem.MODEL, + Severity.ERROR, + self.prefix.location, + ) + ], ) return error @@ -1331,14 +1339,18 @@ def _check_type_subexpr(self) -> RecordFluxError: if self.selector in self.prefix.type_.fields: self.type_ = self.prefix.type_.field_types[self.selector] else: - error.append( - f'invalid field "{self.selector}" for {self.prefix.type_}', - Subsystem.MODEL, - Severity.ERROR, - self.location, - ) error.extend( - _similar_field_names(self.selector, self.prefix.type_.fields, self.location) + [ + ( + f'invalid field "{self.selector}" for {self.prefix.type_}', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ), + *_similar_field_names( + self.selector, self.prefix.type_.fields, self.location + ), + ] ) self.type_ = rty.Any() else: @@ -1405,19 +1417,27 @@ def _check_type_subexpr(self) -> RecordFluxError: if self.type_ != rty.Undefined(): if len(self.args) < len(self.argument_types): - error.append( - "missing function arguments", - Subsystem.MODEL, - Severity.ERROR, - self.location, + error.extend( + [ + ( + "missing function arguments", + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) if len(self.args) > len(self.argument_types): - error.append( - "too many function arguments", - Subsystem.MODEL, - Severity.ERROR, - self.location, + error.extend( + [ + ( + "too many function arguments", + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) return error @@ -2035,26 +2055,38 @@ def _check_type_subexpr(self) -> RecordFluxError: if self.argument_types: error += self.argument.prefix.check_type(tuple(self.argument_types)) else: - error.append( - f'invalid conversion to "{self.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - self.location, + error.extend( + [ + ( + f'invalid conversion to "{self.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) if isinstance(self.argument.prefix.type_, rty.Message): - error.append( - f'refinement for message "{self.argument.prefix.type_.identifier}"' - " would make operation legal", - Subsystem.MODEL, - Severity.INFO, - self.location, + error.extend( + [ + ( + f'refinement for message "{self.argument.prefix.type_.identifier}"' + " would make operation legal", + Subsystem.MODEL, + Severity.INFO, + self.location, + ) + ], ) else: - error.append( - "invalid argument for conversion, expected message field", - Subsystem.MODEL, - Severity.ERROR, - self.argument.location, + error.extend( + [ + ( + "invalid argument for conversion, expected message field", + Subsystem.MODEL, + Severity.ERROR, + self.argument.location, + ) + ], ) return error 
@@ -2224,13 +2256,17 @@ def _check_type_subexpr(self) -> RecordFluxError: for i, (field, expr) in enumerate(self.field_values.items()): if field not in self.type_.fields: - error.append( - f'invalid field "{field}" for {self.type_}', - Subsystem.MODEL, - Severity.ERROR, - field.location, + error.extend( + [ + ( + f'invalid field "{field}" for {self.type_}', + Subsystem.MODEL, + Severity.ERROR, + field.location, + ), + *_similar_field_names(field, self.type_.fields, field.location), + ] ) - error.extend(_similar_field_names(field, self.type_.fields, field.location)) continue field_type = self.type_.field_types[field] @@ -2249,27 +2285,37 @@ def _check_type_subexpr(self) -> RecordFluxError: } if not field_combinations: - error.append( - f'invalid position for field "{field}" of {self.type_}', - Subsystem.MODEL, - Severity.ERROR, - field.location, + error.extend( + [ + ( + f'invalid position for field "{field}" of {self.type_}', + Subsystem.MODEL, + Severity.ERROR, + field.location, + ) + ], ) break if field_combinations and all(len(c) > len(self.field_values) for c in field_combinations): - error.append( - f"missing fields for {self.type_}", - Subsystem.MODEL, - Severity.ERROR, - self.location, - ) - error.append( - "possible next fields: " - + ", ".join(unique(c[len(self.field_values)] for c in sorted(field_combinations))), - Subsystem.MODEL, - Severity.INFO, - self.location, + error.extend( + [ + ( + f"missing fields for {self.type_}", + Subsystem.MODEL, + Severity.ERROR, + self.location, + ), + ( + "possible next fields: " + + ", ".join( + unique(c[len(self.field_values)] for c in sorted(field_combinations)) + ), + Subsystem.MODEL, + Severity.INFO, + self.location, + ), + ], ) return error @@ -2435,20 +2481,20 @@ def _entity_name(expr: Expr) -> str: def _similar_field_names( field: ID, fields: Iterable[ID], location: Optional[Location] -) -> RecordFluxError: +) -> List[Tuple[str, Subsystem, Severity, Optional[Location]]]: field_similarity = sorted( ((f, difflib.SequenceMatcher(None, str(f), str(field)).ratio()) for f in sorted(fields)), key=lambda x: x[1], reverse=True, ) similar_fields = [f for f, s in field_similarity if s >= 0.5] - - error = RecordFluxError() if similar_fields: - error.append( - "similar field names: " + ", ".join(str(f) for f in similar_fields), - Subsystem.MODEL, - Severity.INFO, - location, - ) - return error + return [ + ( + "similar field names: " + ", ".join(str(f) for f in similar_fields), + Subsystem.MODEL, + Severity.INFO, + location, + ) + ] + return [] diff --git a/rflx/identifier.py b/rflx/identifier.py index 8b8e2ee28..56b0b7838 100644 --- a/rflx/identifier.py +++ b/rflx/identifier.py @@ -25,19 +25,23 @@ def __init__( error = RecordFluxError() if not self._parts: - error.append("empty identifier", Subsystem.ID, Severity.ERROR, location) + error.extend([("empty identifier", Subsystem.ID, Severity.ERROR, location)]) elif "" in self._parts: - error.append( - f'empty part in identifier "{self}"', Subsystem.ID, Severity.ERROR, location + error.extend( + [(f'empty part in identifier "{self}"', Subsystem.ID, Severity.ERROR, location)] ) else: for c in [" ", ".", ":"]: if any(c in part for part in self._parts): - error.append( - f'"{c}" in identifier parts of "{self}"', - Subsystem.ID, - Severity.ERROR, - location, + error.extend( + [ + ( + f'"{c}" in identifier parts of "{self}"', + Subsystem.ID, + Severity.ERROR, + location, + ) + ], ) error.propagate() diff --git a/rflx/model/message.py b/rflx/model/message.py index 2d8c5b099..8b7802dc3 100644 --- 
a/rflx/model/message.py +++ b/rflx/model/message.py @@ -449,50 +449,64 @@ def message_constraints(cls) -> List[expr.Expr]: ] def __validate(self) -> None: - # pylint: disable=too-many-branches, too-many-locals + # pylint: disable=too-many-branches type_fields = self.__types.keys() | {INITIAL, FINAL} structure_fields = {l.source for l in self.structure} | {l.target for l in self.structure} for f in structure_fields - type_fields: - self.error.append( - f'missing type for field "{f.name}" in "{self.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - f.identifier.location, + self.error.extend( + [ + ( + f'missing type for field "{f.name}" in "{self.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + f.identifier.location, + ) + ], ) for f in type_fields - structure_fields - {FINAL}: - self.error.append( - f'unused field "{f.name}" in "{self.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - f.identifier.location, + self.error.extend( + [ + ( + f'unused field "{f.name}" in "{self.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + f.identifier.location, + ) + ], ) initial_links = self.outgoing(INITIAL) if len(initial_links) != 1: - self.error.append( - f'ambiguous first field in "{self.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - self.location, - ) self.error.extend( [ - ("duplicate", Subsystem.MODEL, Severity.INFO, l.target.identifier.location) - for l in self.outgoing(INITIAL) - if l.target.identifier.location + ( + f'ambiguous first field in "{self.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ), + *[ + ("duplicate", Subsystem.MODEL, Severity.INFO, l.target.identifier.location) + for l in self.outgoing(INITIAL) + if l.target.identifier.location + ], ] ) if initial_links[0].first != expr.UNDEFINED: - self.error.append( - "illegal first aspect at initial link", - Subsystem.MODEL, - Severity.ERROR, - initial_links[0].first.location, + self.error.extend( + [ + ( + "illegal first aspect at initial link", + Subsystem.MODEL, + Severity.ERROR, + initial_links[0].first.location, + ) + ], ) name_conflicts = [ @@ -504,17 +518,22 @@ def __validate(self) -> None: if name_conflicts: conflicting_field, conflicting_literal = name_conflicts.pop(0) - self.error.append( - f'name conflict for field "{conflicting_field.name}" in "{self.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - conflicting_field.identifier.location, - ) - self.error.append( - "conflicting enumeration literal", - Subsystem.MODEL, - Severity.INFO, - conflicting_literal.location, + self.error.extend( + [ + ( + f'name conflict for field "{conflicting_field.name}" in' + f' "{self.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + conflicting_field.identifier.location, + ), + ( + "conflicting enumeration literal", + Subsystem.MODEL, + Severity.INFO, + conflicting_literal.location, + ), + ], ) self.error.propagate() @@ -525,11 +544,15 @@ def __validate(self) -> None: break else: self.__has_unreachable = True - self.error.append( - f'unreachable field "{f.name}" in "{self.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - f.identifier.location, + self.error.extend( + [ + ( + f'unreachable field "{f.name}" in "{self.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + f.identifier.location, + ) + ], ) duplicate_links = defaultdict(list) @@ -538,22 +561,24 @@ def __validate(self) -> None: for links in duplicate_links.values(): if len(links) > 1: - self.error.append( - f'duplicate link from "{links[0].source.identifier}"' - f' to "{links[0].target.identifier}"', - Subsystem.MODEL, - 
Severity.ERROR, - links[0].source.identifier.location, - ) self.error.extend( [ ( - "duplicate link", + f'duplicate link from "{links[0].source.identifier}"' + f' to "{links[0].target.identifier}"', Subsystem.MODEL, - Severity.INFO, - l.location, - ) - for l in links + Severity.ERROR, + links[0].source.identifier.location, + ), + *[ + ( + "duplicate link", + Subsystem.MODEL, + Severity.INFO, + l.location, + ) + for l in links + ], ] ) @@ -565,19 +590,25 @@ def __validate(self) -> None: assert isinstance(e, expr.Pow) variables = e.right.findall(lambda x: isinstance(x, expr.Variable)) if variables: - self.error.append( - f'unsupported expression in "{self.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - e.location, + self.error.extend( + [ + ( + f'unsupported expression in "{self.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + e.location, + ), + *[ + ( + f'variable "{v}" in exponent', + Subsystem.MODEL, + Severity.INFO, + v.location, + ) + for v in variables + ], + ] ) - for v in variables: - self.error.append( - f'variable "{v}" in exponent', - Subsystem.MODEL, - Severity.INFO, - v.location, - ) def __normalize(self) -> None: """Qualify enumeration literals in conditions to prevent ambiguities.""" @@ -611,11 +642,15 @@ def __compute_topological_sorting(self) -> Optional[Tuple[Field, ...]]: if set(self.incoming(e.target)) <= visited: fields.append(e.target) if not self.__has_unreachable and set(self.structure) - visited: - self.error.append( - f'structure of "{self.identifier}" contains cycle', - Subsystem.MODEL, - Severity.ERROR, - self.location, + self.error.extend( + [ + ( + f'structure of "{self.identifier}" contains cycle', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) # ISSUE: Componolit/RecordFlux#256 return None @@ -848,26 +883,32 @@ def remove_variable_prefix(expression: expr.Expr) -> expr.Expr: failures.append((path, proof.error)) error = RecordFluxError() - error.append( - f"unable to calculate size for message \"{self.identifier}'(" - + ", ".join(f"{f.identifier} => {v}" for f, v in field_values.items()) - + ')"', - Subsystem.MODEL, - Severity.ERROR, - self.location, + error.extend( + [ + ( + f"unable to calculate size for message \"{self.identifier}'(" + + ", ".join(f"{f.identifier} => {v}" for f, v in field_values.items()) + + ')"', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) for path, proof_error in failures: - error.append( - "on path " + " -> ".join([l.target.name for l in path]), - Subsystem.MODEL, - Severity.INFO, - self.location, - ) error.extend( [ - (f'unsatisfied "{m}"', Subsystem.MODEL, Severity.INFO, locn) - for m, locn in proof_error - ] + ( + "on path " + " -> ".join([l.target.name for l in path]), + Subsystem.MODEL, + Severity.INFO, + self.location, + ), + *[ + (f'unsatisfied "{m}"', Subsystem.MODEL, Severity.INFO, locn) + for m, locn in proof_error + ], + ], ) error.propagate() @@ -978,11 +1019,15 @@ def typed_variable(expression: expr.Expr) -> expr.Expr: self.error.extend(error) if error.check(): - self.error.append( - "on path " + " -> ".join(f.name for f in path), - Subsystem.MODEL, - Severity.INFO, - expression.location, + self.error.extend( + [ + ( + "on path " + " -> ".join(f.name for f in path), + Subsystem.MODEL, + Severity.INFO, + expression.location, + ) + ], ) def __verify_expressions(self) -> None: @@ -1002,46 +1047,66 @@ def __check_attributes(self, expression: expr.Expr, location: Location = None) - or a.prefix.identifier in self._type_literals ) ): - self.error.append( - f'invalid use of size 
attribute for "{a.prefix}"', - Subsystem.MODEL, - Severity.ERROR, - location, + self.error.extend( + [ + ( + f'invalid use of size attribute for "{a.prefix}"', + Subsystem.MODEL, + Severity.ERROR, + location, + ) + ], ) def __check_first_expression(self, link: Link, location: Location = None) -> None: if link.first != expr.UNDEFINED and not isinstance(link.first, expr.First): - self.error.append( - f'invalid First for field "{link.target.name}"', - Subsystem.MODEL, - Severity.ERROR, - location, + self.error.extend( + [ + ( + f'invalid First for field "{link.target.name}"', + Subsystem.MODEL, + Severity.ERROR, + location, + ) + ], ) def __check_size_expression(self, link: Link) -> None: if link.target == FINAL and link.size != expr.UNDEFINED: - self.error.append( - f'size attribute for final field in "{self.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - link.size.location, + self.error.extend( + [ + ( + f'size attribute for final field in "{self.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + link.size.location, + ) + ], ) if link.target != FINAL and link.target in self.types: t = self.types[link.target] unconstrained = isinstance(t, (mty.Opaque, mty.Sequence)) if not unconstrained and link.size != expr.UNDEFINED: - self.error.append( - f'fixed size field "{link.target.name}" with size aspect', - Subsystem.MODEL, - Severity.ERROR, - link.target.identifier.location, + self.error.extend( + [ + ( + f'fixed size field "{link.target.name}" with size aspect', + Subsystem.MODEL, + Severity.ERROR, + link.target.identifier.location, + ) + ], ) if unconstrained and link.size == expr.UNDEFINED: - self.error.append( - f'unconstrained field "{link.target.name}" without size aspect', - Subsystem.MODEL, - Severity.ERROR, - link.target.identifier.location, + self.error.extend( + [ + ( + f'unconstrained field "{link.target.name}" without size aspect', + Subsystem.MODEL, + Severity.ERROR, + link.target.identifier.location, + ) + ], ) def __verify_checksums(self) -> None: @@ -1062,11 +1127,15 @@ def valid_upper(expression: expr.Expr) -> bool: for name, expressions in self.checksums.items(): # pylint: disable=too-many-nested-blocks if Field(name) not in self.fields: - self.error.append( - f'checksum definition for unknown field "{name}"', - Subsystem.MODEL, - Severity.ERROR, - name.location, + self.error.extend( + [ + ( + f'checksum definition for unknown field "{name}"', + Subsystem.MODEL, + Severity.ERROR, + name.location, + ) + ], ) for e in expressions: @@ -1078,23 +1147,31 @@ def valid_upper(expression: expr.Expr) -> bool: and valid_upper(e.upper) ) ): - self.error.append( - f'unsupported expression "{e}" in definition of checksum "{name}"', - Subsystem.MODEL, - Severity.ERROR, - e.location, + self.error.extend( + [ + ( + f'unsupported expression "{e}" in definition of checksum "{name}"', + Subsystem.MODEL, + Severity.ERROR, + e.location, + ) + ], ) for v in e.findall(lambda x: isinstance(x, expr.Variable)): assert isinstance(v, expr.Variable) if Field(v.name) not in self.fields: - self.error.append( - f'unknown field "{v.name}" referenced' - f' in definition of checksum "{name}"', - Subsystem.MODEL, - Severity.ERROR, - v.location, + self.error.extend( + [ + ( + f'unknown field "{v.name}" referenced' + f' in definition of checksum "{name}"', + Subsystem.MODEL, + Severity.ERROR, + v.location, + ) + ], ) if isinstance(e, expr.ValueRange): @@ -1113,11 +1190,16 @@ def valid_upper(expression: expr.Expr) -> bool: ) for p in self.paths(upper_field): if not any(lower_field == l.source for l in p): - 
self.error.append( - f'invalid range "{e}" in definition of checksum "{name}"', - Subsystem.MODEL, - Severity.ERROR, - e.location, + self.error.extend( + [ + ( + f'invalid range "{e}" in definition of checksum' + f' "{name}"', + Subsystem.MODEL, + Severity.ERROR, + e.location, + ) + ], ) checked = { @@ -1128,18 +1210,26 @@ def valid_upper(expression: expr.Expr) -> bool: if isinstance(e, expr.ValidChecksum) and isinstance(e.prefix, expr.Variable) } for name in set(self.checksums) - checked: - self.error.append( - f'no validity check of checksum "{name}"', - Subsystem.MODEL, - Severity.ERROR, - name.location, + self.error.extend( + [ + ( + f'no validity check of checksum "{name}"', + Subsystem.MODEL, + Severity.ERROR, + name.location, + ) + ], ) for name in checked - set(self.checksums): - self.error.append( - f'validity check for undefined checksum "{name}"', - Subsystem.MODEL, - Severity.ERROR, - name.location, + self.error.extend( + [ + ( + f'validity check for undefined checksum "{name}"', + Subsystem.MODEL, + Severity.ERROR, + name.location, + ) + ], ) def __prove_conflicting_conditions(self) -> None: @@ -1152,25 +1242,29 @@ def __prove_conflicting_conditions(self) -> None: if proof.result == expr.ProofResult.SAT: c1_message = str(c1.condition).replace("\n", " ") c2_message = str(c2.condition).replace("\n", " ") - self.error.append( - f'conflicting conditions for field "{f.name}"', - Subsystem.MODEL, - Severity.ERROR, - f.identifier.location, - ) - self.error.append( - f"condition {i1} ({f.identifier} -> {c1.target.identifier}):" - f" {c1_message}", - Subsystem.MODEL, - Severity.INFO, - c1.condition.location, - ) - self.error.append( - f"condition {i2} ({f.identifier} -> {c2.target.identifier}):" - f" {c2_message}", - Subsystem.MODEL, - Severity.INFO, - c2.condition.location, + self.error.extend( + [ + ( + f'conflicting conditions for field "{f.name}"', + Subsystem.MODEL, + Severity.ERROR, + f.identifier.location, + ), + ( + f"condition {i1} ({f.identifier} ->" + f" {c1.target.identifier}): {c1_message}", + Subsystem.MODEL, + Severity.INFO, + c1.condition.location, + ), + ( + f"condition {i2} ({f.identifier} ->" + f" {c2.target.identifier}): {c2_message}", + Subsystem.MODEL, + Severity.INFO, + c2.condition.location, + ), + ], ) def __prove_reachability(self) -> None: @@ -1184,11 +1278,15 @@ def has_final(field: Field) -> bool: for f in (INITIAL, *self.fields): if not has_final(f): - self.error.append( - f'no path to FINAL for field "{f.name}" in "{self.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - f.identifier.location, + self.error.extend( + [ + ( + f'no path to FINAL for field "{f.name}" in "{self.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + f.identifier.location, + ) + ], ) for f in (*self.fields, FINAL): @@ -1210,25 +1308,31 @@ def has_final(field: Field) -> bool: paths.append((path, proof.error)) else: - self.error.append( - f'unreachable field "{f.name}" in "{self.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - f.identifier.location, - ) - for index, (path, errors) in enumerate(sorted(paths)): - self.error.append( - f"path {index} (" + " -> ".join([l.target.name for l in path]) + "):", + error = [] + error.append( + ( + f'unreachable field "{f.name}" in "{self.identifier}"', Subsystem.MODEL, - Severity.INFO, + Severity.ERROR, f.identifier.location, ) - self.error.extend( + ) + for index, (path, errors) in enumerate(sorted(paths)): + error.append( + ( + f"path {index} (" + " -> ".join([l.target.name for l in path]) + "):", + Subsystem.MODEL, + Severity.INFO, 
+ f.identifier.location, + ) + ) + error.extend( [ (f'unsatisfied "{m}"', Subsystem.MODEL, Severity.INFO, l) for m, l in errors ] ) + self.error.extend(error) def __prove_contradictions(self) -> None: for f in (INITIAL, *self.fields): @@ -1248,11 +1352,15 @@ def __prove_contradictions(self) -> None: if paths == len(contradictions): for path, cond, errors in sorted(contradictions): - self.error.append( - f'contradicting condition in "{self.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - cond.location, + self.error.extend( + [ + ( + f'contradicting condition in "{self.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + cond.location, + ) + ], ) self.error.extend( [ @@ -1318,21 +1426,23 @@ def __prove_coverage(self) -> None: # Coverage expression must be False, i.e. no bits left proof = expr.TRUE.check(facts) if proof.result == expr.ProofResult.SAT: - self.error.append( - "path does not cover whole message", - Subsystem.MODEL, - Severity.ERROR, - self.identifier.location, - ) self.error.extend( [ ( - f'on path: "{l.target.identifier}"', + "path does not cover whole message", Subsystem.MODEL, - Severity.INFO, - l.target.identifier.location, - ) - for l in path + Severity.ERROR, + self.identifier.location, + ), + *[ + ( + f'on path: "{l.target.identifier}"', + Subsystem.MODEL, + Severity.INFO, + l.target.identifier.location, + ) + for l in path + ], ] ) return @@ -1347,17 +1457,19 @@ def __prove_overlays(self) -> None: ) proof = overlaid.check(facts) if proof.result != expr.ProofResult.SAT: - self.error.append( - f'field "{f.name}" not congruent with' - f' overlaid field "{l.first.prefix}"', - Subsystem.MODEL, - Severity.ERROR, - self.identifier.location, - ) self.error.extend( [ - (f'unsatisfied "{m}"', Subsystem.MODEL, Severity.INFO, l) - for m, l in proof.error + ( + f'field "{f.name}" not congruent with' + f' overlaid field "{l.first.prefix}"', + Subsystem.MODEL, + Severity.ERROR, + self.identifier.location, + ), + *[ + (f'unsatisfied "{m}"', Subsystem.MODEL, Severity.INFO, l) + for m, l in proof.error + ], ] ) @@ -1391,27 +1503,33 @@ def __prove_field_positions(self) -> None: proof = negative.check(facts) if proof.result != expr.ProofResult.UNSAT: path_message = " -> ".join([l.target.name for l in path]) - self.error.append( - f'negative size for field "{f.name}" ({path_message})', - Subsystem.MODEL, - Severity.ERROR, - f.identifier.location, + self.error.extend( + [ + ( + f'negative size for field "{f.name}" ({path_message})', + Subsystem.MODEL, + Severity.ERROR, + f.identifier.location, + ) + ], ) return proof = start.check(facts) if proof.result != expr.ProofResult.SAT: path_message = " -> ".join([last.target.name for last in path]) - self.error.append( - f'negative start for field "{f.name}" ({path_message})', - Subsystem.MODEL, - Severity.ERROR, - self.identifier.location, - ) self.error.extend( [ - (f'unsatisfied "{m}"', Subsystem.MODEL, Severity.INFO, locn) - for m, locn in proof.error + ( + f'negative start for field "{f.name}" ({path_message})', + Subsystem.MODEL, + Severity.ERROR, + self.identifier.location, + ), + *[ + (f'unsatisfied "{m}"', Subsystem.MODEL, Severity.INFO, locn) + for m, locn in proof.error + ], ] ) return @@ -1437,12 +1555,16 @@ def __prove_field_positions(self) -> None: ) if proof.result != expr.ProofResult.UNSAT: path_message = " -> ".join([p.target.name for p in path]) - self.error.append( - f'opaque field "{f.name}" not aligned to {element_size} bit boundary' - f" ({path_message})", - Subsystem.MODEL, - Severity.ERROR, - f.identifier.location, + 
self.error.extend( + [ + ( + f'opaque field "{f.name}" not aligned to {element_size} bit' + f" boundary ({path_message})", + Subsystem.MODEL, + Severity.ERROR, + f.identifier.location, + ) + ], ) return @@ -1462,12 +1584,16 @@ def __prove_field_positions(self) -> None: ) if proof.result != expr.ProofResult.UNSAT: path_message = " -> ".join([p.target.name for p in path]) - self.error.append( - f'size of opaque field "{f.name}" not multiple of {element_size} bit' - f" ({path_message})", - Subsystem.MODEL, - Severity.ERROR, - f.identifier.location, + self.error.extend( + [ + ( + f'size of opaque field "{f.name}" not multiple of' + f" {element_size} bit ({path_message})", + Subsystem.MODEL, + Severity.ERROR, + f.identifier.location, + ) + ], ) return @@ -1499,17 +1625,21 @@ def __prove_message_size(self) -> None: facts ) if proof.result == expr.ProofResult.SAT: - self.error.append( - "message size must be multiple of 8 bit", - Subsystem.MODEL, - Severity.ERROR, - self.identifier.location, - ) - self.error.append( - "on path " + " -> ".join(l.target.name for l in path), - Subsystem.MODEL, - Severity.INFO, - self.identifier.location, + self.error.extend( + [ + ( + "message size must be multiple of 8 bit", + Subsystem.MODEL, + Severity.ERROR, + self.identifier.location, + ), + ( + "on path " + " -> ".join(l.target.name for l in path), + Subsystem.MODEL, + Severity.INFO, + self.identifier.location, + ), + ], ) return @@ -1683,19 +1813,25 @@ def prune_dangling_fields( if len(message_attribute_locations) > 0 and any( n.target != FINAL for n in message.outgoing(field) ): - self.error.append( - "message types with message attribute may only be used for last field", - Subsystem.MODEL, - Severity.ERROR, - field.identifier.location, + self.error.extend( + [ + ( + "message types with message attribute may only be used for last field", + Subsystem.MODEL, + Severity.ERROR, + field.identifier.location, + ), + *[ + ( + f'message attribute used in "{inner_message.identifier}"', + Subsystem.MODEL, + Severity.INFO, + loc, + ) + for loc in message_attribute_locations + ], + ] ) - for loc in message_attribute_locations: - self.error.append( - f'message attribute used in "{inner_message.identifier}"', - Subsystem.MODEL, - Severity.INFO, - loc, - ) name_conflicts = [ f for f in message.fields for g in inner_message.fields if f.name == g.name @@ -1703,23 +1839,28 @@ def prune_dangling_fields( if name_conflicts: conflicting = name_conflicts.pop(0) - self.error.append( - f'name conflict for "{conflicting.identifier}" in "{message.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - conflicting.identifier.location, - ) - self.error.append( - f'when merging message "{inner_message.identifier}"', - Subsystem.MODEL, - Severity.INFO, - inner_message.location, - ) - self.error.append( - f'into field "{field.name}"', - Subsystem.MODEL, - Severity.INFO, - field.identifier.location, + self.error.extend( + [ + ( + f'name conflict for "{conflicting.identifier}" in' + f' "{message.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + conflicting.identifier.location, + ), + ( + f'when merging message "{inner_message.identifier}"', + Subsystem.MODEL, + Severity.INFO, + inner_message.location, + ), + ( + f'into field "{field.name}"', + Subsystem.MODEL, + Severity.INFO, + field.identifier.location, + ), + ], ) structure = [] @@ -1808,17 +1949,21 @@ def __init__( self.base = base if isinstance(base, (UnprovenDerivedMessage, DerivedMessage)): - self.error.append( - f'illegal derivation "{self.identifier}"', - Subsystem.MODEL, - 
Severity.ERROR, - self.location, - ) - self.error.append( - f'illegal base message type "{base.identifier}"', - Subsystem.MODEL, - Severity.INFO, - base.location, + self.error.extend( + [ + ( + f'illegal derivation "{self.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ), + ( + f'illegal base message type "{base.identifier}"', + Subsystem.MODEL, + Severity.INFO, + base.location, + ), + ], ) self.error.propagate() @@ -1881,46 +2026,63 @@ def __init__( self.error = error or RecordFluxError() if len(package.parts) != 1: - self.error.append( - f'unexpected format of package name "{package}"', - Subsystem.MODEL, - Severity.ERROR, - package.location, + self.error.extend( + [ + ( + f'unexpected format of package name "{package}"', + Subsystem.MODEL, + Severity.ERROR, + package.location, + ) + ], ) for f, t in pdu.types.items(): if f == field: if not isinstance(t, mty.Opaque): - self.error.append( - f'invalid type of field "{field.name}" in refinement of "{pdu.identifier}"', + self.error.extend( + [ + ( + f'invalid type of field "{field.name}" in refinement of' + f' "{pdu.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + field.identifier.location, + ), + ( + "expected field of type Opaque", + Subsystem.MODEL, + Severity.INFO, + f.identifier.location, + ), + ], + ) + break + else: + self.error.extend( + [ + ( + f'invalid field "{field.name}" in refinement of "{pdu.identifier}"', Subsystem.MODEL, Severity.ERROR, field.identifier.location, ) - self.error.append( - "expected field of type Opaque", - Subsystem.MODEL, - Severity.INFO, - f.identifier.location, - ) - break - else: - self.error.append( - f'invalid field "{field.name}" in refinement of "{pdu.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - field.identifier.location, + ], ) for variable in condition.variables(): literals = mty.enum_literals(pdu.types.values(), self.package) if Field(str(variable.name)) not in pdu.fields and variable.identifier not in literals: - self.error.append( - f'unknown field or literal "{variable.identifier}" in refinement' - f' condition of "{pdu.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - variable.location, + self.error.extend( + [ + ( + f'unknown field or literal "{variable.identifier}" in refinement' + f' condition of "{pdu.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + variable.location, + ) + ], ) def __str__(self) -> str: diff --git a/rflx/model/model.py b/rflx/model/model.py index ebe05ce6c..e0f74a7d7 100644 --- a/rflx/model/model.py +++ b/rflx/model/model.py @@ -55,39 +55,48 @@ def __check_duplicates(self) -> RecordFluxError: for t in self.__types: if t.identifier in types: - error.append( - f'conflicting refinement of "{t.pdu.identifier}" with "{t.sdu.identifier}"' - if isinstance(t, message.Refinement) - else f'name conflict for type "{t.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - t.location, - ) - error.append( - "previous occurrence of refinement" - if isinstance(t, message.Refinement) - else f'previous occurrence of "{t.identifier}"', - Subsystem.MODEL, - Severity.INFO, - types[t.identifier].location, + error.extend( + [ + ( + f'conflicting refinement of "{t.pdu.identifier}" with' + f' "{t.sdu.identifier}"' + if isinstance(t, message.Refinement) + else f'name conflict for type "{t.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + t.location, + ), + ( + "previous occurrence of refinement" + if isinstance(t, message.Refinement) + else f'previous occurrence of "{t.identifier}"', + Subsystem.MODEL, + Severity.INFO, + types[t.identifier].location, 
+ ), + ], ) types[t.identifier] = t for s in self.__sessions: if s.identifier in types or s.identifier in sessions: - error.append( - f'name conflict for session "{s.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - s.location, - ) - error.append( - f'previous occurrence of "{s.identifier}"', - Subsystem.MODEL, - Severity.INFO, - types[s.identifier].location - if s.identifier in types - else sessions[s.identifier].location, + error.extend( + [ + ( + f'name conflict for session "{s.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + s.location, + ), + ( + f'previous occurrence of "{s.identifier}"', + Subsystem.MODEL, + Severity.INFO, + types[s.identifier].location + if s.identifier in types + else sessions[s.identifier].location, + ), + ], ) sessions[s.identifier] = s @@ -115,22 +124,24 @@ def __check_conflicts(self) -> RecordFluxError: if identical_literals: literals_message = ", ".join([f"{l}" for l in sorted(identical_literals)]) - error.append( - f"conflicting literals: {literals_message}", - Subsystem.MODEL, - Severity.ERROR, - e2.location, - ) error.extend( [ ( - f'previous occurrence of "{l}"', + f"conflicting literals: {literals_message}", Subsystem.MODEL, - Severity.INFO, - l.location, - ) - for l in sorted(identical_literals) - ] + Severity.ERROR, + e2.location, + ), + *[ + ( + f'previous occurrence of "{l}"', + Subsystem.MODEL, + Severity.INFO, + l.location, + ) + for l in sorted(identical_literals) + ], + ], ) literals = [ @@ -147,17 +158,21 @@ def __check_conflicts(self) -> RecordFluxError: and l.name == t.identifier.name ] for literal, conflicting_type in name_conflicts: - error.append( - f'literal "{literal.name}" conflicts with type declaration', - Subsystem.MODEL, - Severity.ERROR, - literal.location, - ) - error.append( - f'conflicting type "{conflicting_type.identifier}"', - Subsystem.MODEL, - Severity.INFO, - conflicting_type.location, + error.extend( + [ + ( + f'literal "{literal.name}" conflicts with type declaration', + Subsystem.MODEL, + Severity.ERROR, + literal.location, + ), + ( + f'conflicting type "{conflicting_type.identifier}"', + Subsystem.MODEL, + Severity.INFO, + conflicting_type.location, + ), + ], ) return error diff --git a/rflx/model/session.py b/rflx/model/session.py index ae1f42e5e..7702121d5 100644 --- a/rflx/model/session.py +++ b/rflx/model/session.py @@ -164,11 +164,15 @@ def __init__( } if len(self.identifier.parts) != 2: - self.error.append( - f'invalid session name "{self.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - self.identifier.location, + self.error.extend( + [ + ( + f'invalid session name "{self.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + self.identifier.location, + ) + ], ) self._literals = { @@ -217,11 +221,8 @@ def __init__( def __validate_states(self) -> None: if not self.states: - self.error.append( - "empty states", - Subsystem.MODEL, - Severity.ERROR, - self.location, + self.error.extend( + [("empty states", Subsystem.MODEL, Severity.ERROR, self.location)], ) self.__validate_state_existence() @@ -231,28 +232,40 @@ def __validate_states(self) -> None: def __validate_state_existence(self) -> None: state_identifiers = [s.identifier for s in self.states] if self.initial not in state_identifiers: - self.error.append( - f'initial state "{self.initial}" does not exist in "{self.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - self.initial.location, + self.error.extend( + [ + ( + f'initial state "{self.initial}" does not exist in "{self.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + 
self.initial.location, + ) + ], ) if self.final not in state_identifiers: - self.error.append( - f'final state "{self.final}" does not exist in "{self.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - self.final.location, + self.error.extend( + [ + ( + f'final state "{self.final}" does not exist in "{self.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + self.final.location, + ) + ], ) for s in self.states: for t in s.transitions: if t.target not in state_identifiers: - self.error.append( - f'transition from state "{s.identifier}" to non-existent state' - f' "{t.target}" in "{self.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - t.target.location, + self.error.extend( + [ + ( + f'transition from state "{s.identifier}" to non-existent state' + f' "{t.target}" in "{self.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + t.target.location, + ) + ], ) def __validate_duplicate_states(self) -> None: @@ -263,17 +276,21 @@ def __validate_duplicate_states(self) -> None: for identifier, states in identifier_states.items(): if len(states) >= 2: for s in states[1:]: - self.error.append( - f'duplicate state "{identifier}"', - Subsystem.MODEL, - Severity.ERROR, - s.location, - ) - self.error.append( - f'previous definition of state "{identifier}"', - Subsystem.MODEL, - Severity.INFO, - states[0].location, + self.error.extend( + [ + ( + f'duplicate state "{identifier}"', + Subsystem.MODEL, + Severity.ERROR, + s.location, + ), + ( + f'previous definition of state "{identifier}"', + Subsystem.MODEL, + Severity.INFO, + states[0].location, + ), + ], ) def __validate_state_reachability(self) -> None: @@ -289,19 +306,27 @@ def __validate_state_reachability(self) -> None: inputs[t.target] = [s.identifier] if s.identifier != self.initial and s.identifier not in inputs: - self.error.append( - f'unreachable state "{s.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - s.location, + self.error.extend( + [ + ( + f'unreachable state "{s.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + s.location, + ) + ], ) if s.identifier != self.final and not s.transitions: - self.error.append( - f'detached state "{s.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - s.location, + self.error.extend( + [ + ( + f'detached state "{s.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + s.location, + ) + ], ) def __validate_declarations( @@ -312,26 +337,34 @@ def __validate_declarations( visible_declarations = dict(visible_declarations) def undefined_type(type_identifier: StrID, location: Optional[Location]) -> None: - self.error.append( - f'undefined type "{type_identifier}"', - Subsystem.MODEL, - Severity.ERROR, - location, + self.error.extend( + [ + ( + f'undefined type "{type_identifier}"', + Subsystem.MODEL, + Severity.ERROR, + location, + ) + ], ) for k, d in declarations.items(): if k in visible_declarations: - self.error.append( - f'local variable "{k}" shadows previous declaration', - Subsystem.MODEL, - Severity.ERROR, - d.location, - ) - self.error.append( - f'previous declaration of variable "{k}"', - Subsystem.MODEL, - Severity.INFO, - visible_declarations[k].location, + self.error.extend( + [ + ( + f'local variable "{k}" shadows previous declaration', + Subsystem.MODEL, + Severity.ERROR, + d.location, + ), + ( + f'previous declaration of variable "{k}"', + Subsystem.MODEL, + Severity.INFO, + visible_declarations[k].location, + ), + ], ) self.__reference_variable_declaration(d.variables(), visible_declarations) @@ -339,11 +372,8 @@ def undefined_type(type_identifier: StrID, location: 
Optional[Location]) -> None if isinstance(d, decl.TypeDeclaration): type_identifier = mty.qualified_type_identifier(k, self.package) if type_identifier in self.types: - self.error.append( - f'type "{k}" shadows type', - Subsystem.MODEL, - Severity.ERROR, - d.location, + self.error.extend( + [(f'type "{k}" shadows type', Subsystem.MODEL, Severity.ERROR, d.location)], ) self.types[type_identifier] = d.type_definition @@ -403,19 +433,27 @@ def __validate_transitions( self.__reference_variable_declaration(t.condition.variables(), declarations) if not state.exception_transition and state.has_exceptions: - self.error.append( - f'missing exception transition in state "{state.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - state.location, + self.error.extend( + [ + ( + f'missing exception transition in state "{state.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + state.location, + ) + ], ) if state.exception_transition and not state.has_exceptions: - self.error.append( - f'unnecessary exception transition in state "{state.identifier}"', - Subsystem.MODEL, - Severity.ERROR, - state.exception_transition.location, + self.error.extend( + [ + ( + f'unnecessary exception transition in state "{state.identifier}"', + Subsystem.MODEL, + Severity.ERROR, + state.exception_transition.location, + ) + ], ) def __validate_usage(self) -> None: @@ -423,11 +461,15 @@ def __validate_usage(self) -> None: local_declarations = ((k, d) for s in self.states for k, d in s.declarations.items()) for k, d in itertools.chain(global_declarations, local_declarations): if not d.is_referenced: - self.error.append( - f'unused {d.DESCRIPTIVE_NAME} "{k}"', - Subsystem.MODEL, - Severity.ERROR, - d.location, + self.error.extend( + [ + ( + f'unused {d.DESCRIPTIVE_NAME} "{k}"', + Subsystem.MODEL, + Severity.ERROR, + d.location, + ) + ], ) def __typify_variable( diff --git a/rflx/model/type_.py b/rflx/model/type_.py index 93bc2edb7..92b1c3850 100644 --- a/rflx/model/type_.py +++ b/rflx/model/type_.py @@ -19,11 +19,15 @@ def __init__( self.error = error or RecordFluxError() if len(identifier.parts) != 2: - self.error.append( - f'invalid format of type identifier "{identifier}"', - Subsystem.MODEL, - Severity.ERROR, - location, + self.error.extend( + [ + ( + f'invalid format of type identifier "{identifier}"', + Subsystem.MODEL, + Severity.ERROR, + location, + ) + ], ) self.identifier = identifier @@ -111,29 +115,41 @@ def __init__(self, identifier: StrID, modulus: expr.Expr, location: Location = N modulus_num = modulus.simplified() if not isinstance(modulus_num, expr.Number): - self.error.append( - f'modulus of "{self.name}" contains variable', - Subsystem.MODEL, - Severity.ERROR, - self.location, + self.error.extend( + [ + ( + f'modulus of "{self.name}" contains variable', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) return modulus_int = int(modulus_num) if modulus_int > 2 ** 64: - self.error.append( - f'modulus of "{self.name}" exceeds limit (2**64)', - Subsystem.MODEL, - Severity.ERROR, - modulus.location, + self.error.extend( + [ + ( + f'modulus of "{self.name}" exceeds limit (2**64)', + Subsystem.MODEL, + Severity.ERROR, + modulus.location, + ) + ], ) if modulus_int == 0 or (modulus_int & (modulus_int - 1)) != 0: - self.error.append( - f'modulus of "{self.name}" not power of two', - Subsystem.MODEL, - Severity.ERROR, - self.location, + self.error.extend( + [ + ( + f'modulus of "{self.name}" not power of two', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) self.__modulus = modulus @@ 
-187,35 +203,51 @@ def __init__( size_num = size.simplified() if not isinstance(first_num, expr.Number): - self.error.append( - f'first of "{self.name}" contains variable', - Subsystem.MODEL, - Severity.ERROR, - self.location, + self.error.extend( + [ + ( + f'first of "{self.name}" contains variable', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) if not isinstance(last_num, expr.Number): - self.error.append( - f'last of "{self.name}" contains variable', - Subsystem.MODEL, - Severity.ERROR, - self.location, + self.error.extend( + [ + ( + f'last of "{self.name}" contains variable', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) return if int(last_num) >= 2 ** 63: - self.error.append( - f'last of "{self.name}" exceeds limit (2**63 - 1)', - Subsystem.MODEL, - Severity.ERROR, - self.location, + self.error.extend( + [ + ( + f'last of "{self.name}" exceeds limit (2**63 - 1)', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) if not isinstance(size_num, expr.Number): - self.error.append( - f'size of "{self.name}" contains variable', - Subsystem.MODEL, - Severity.ERROR, - self.location, + self.error.extend( + [ + ( + f'size of "{self.name}" contains variable', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) if self.error.check(): @@ -226,33 +258,49 @@ def __init__( assert isinstance(size_num, expr.Number) if first_num < expr.Number(0): - self.error.append( - f'first of "{self.name}" negative', - Subsystem.MODEL, - Severity.ERROR, - self.location, + self.error.extend( + [ + ( + f'first of "{self.name}" negative', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) if first_num > last_num: - self.error.append( - f'range of "{self.name}" negative', - Subsystem.MODEL, - Severity.ERROR, - self.location, + self.error.extend( + [ + ( + f'range of "{self.name}" negative', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) if int(last_num).bit_length() > int(size_num): - self.error.append( - f'size of "{self.name}" too small', - Subsystem.MODEL, - Severity.ERROR, - self.location, + self.error.extend( + [ + ( + f'size of "{self.name}" too small', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) if int(size_num) > 64: - self.error.append( - f'size of "{self.name}" exceeds limit (2**64)', - Subsystem.MODEL, - Severity.ERROR, - self.location, + self.error.extend( + [ + ( + f'size of "{self.name}" exceeds limit (2**64)', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) self.__first_expr = first @@ -329,27 +377,35 @@ def __init__( for i1, e1 in enumerate(literals): for i2, e2 in enumerate(literals): if i2 < i1 and e1[0] == e2[0]: - self.error.append( - f'duplicate literal "{e1[0]}"', - Subsystem.MODEL, - Severity.ERROR, - e1[0].location if isinstance(e1[0], ID) else self.location, - ) - self.error.append( - "previous occurrence", - Subsystem.MODEL, - Severity.INFO, - e2[0].location if isinstance(e2[0], ID) else self.location, + self.error.extend( + [ + ( + f'duplicate literal "{e1[0]}"', + Subsystem.MODEL, + Severity.ERROR, + e1[0].location if isinstance(e1[0], ID) else self.location, + ), + ( + "previous occurrence", + Subsystem.MODEL, + Severity.INFO, + e2[0].location if isinstance(e2[0], ID) else self.location, + ), + ], ) self.literals = {} for k, v in literals: if " " in str(k) or "." 
in str(k): - self.error.append( - f'invalid literal name "{k}" in "{self.name}"', - Subsystem.MODEL, - Severity.ERROR, - self.location, + self.error.extend( + [ + ( + f'invalid literal name "{k}" in "{self.name}"', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) continue self.literals[ID(k)] = v @@ -357,11 +413,15 @@ def __init__( size_num = size.simplified() if not isinstance(size_num, expr.Number): - self.error.append( - f'size of "{self.name}" contains variable', - Subsystem.MODEL, - Severity.ERROR, - self.location, + self.error.extend( + [ + ( + f'size of "{self.name}" contains variable', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) return @@ -369,46 +429,64 @@ def __init__( min_literal_value = min(map(int, self.literals.values())) max_literal_value = max(map(int, self.literals.values())) if min_literal_value < 0 or max_literal_value > 2 ** 63 - 1: - self.error.append( - f'enumeration value of "{self.name}"' - " outside of permitted range (0 .. 2**63 - 1)", - Subsystem.MODEL, - Severity.ERROR, - self.location, + self.error.extend( + [ + ( + f'enumeration value of "{self.name}"' + " outside of permitted range (0 .. 2**63 - 1)", + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) if max_literal_value.bit_length() > int(size_num): - self.error.append( - f'size of "{self.name}" too small', - Subsystem.MODEL, - Severity.ERROR, - self.location, + self.error.extend( + [ + ( + f'size of "{self.name}" too small', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) if int(size_num) > 64: - self.error.append( - f'size of "{self.name}" exceeds limit (2**64)', - Subsystem.MODEL, - Severity.ERROR, - self.location, + self.error.extend( + [ + ( + f'size of "{self.name}" exceeds limit (2**64)', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) for i1, v1 in enumerate(self.literals.values()): for i2, v2 in enumerate(self.literals.values()): if i1 < i2 and v1 == v2: - self.error.append( - f'duplicate enumeration value "{v1}" in "{self.name}"', - Subsystem.MODEL, - Severity.ERROR, - v2.location, - ) - self.error.append( - "previous occurrence", Subsystem.MODEL, Severity.INFO, v1.location + self.error.extend( + [ + ( + f'duplicate enumeration value "{v1}" in "{self.name}"', + Subsystem.MODEL, + Severity.ERROR, + v2.location, + ), + ("previous occurrence", Subsystem.MODEL, Severity.INFO, v1.location), + ] ) if always_valid and len(self.literals) == 2 ** int(size_num): - self.error.append( - f'unnecessary always-valid aspect on "{self.name}"', - Subsystem.MODEL, - Severity.ERROR, - self.location, + self.error.extend( + [ + ( + f'unnecessary always-valid aspect on "{self.name}"', + Subsystem.MODEL, + Severity.ERROR, + self.location, + ) + ], ) self.always_valid = always_valid @@ -476,34 +554,42 @@ def __init__(self, identifier: StrID, element_type: Type, location: Location = N if not isinstance(element_type, Scalar) and not ( isinstance(element_type, message.Message) and element_type.structure ): - self.error.append( - f'invalid element type of sequence "{self.name}"', - Subsystem.MODEL, - Severity.ERROR, - location, - ) - self.error.append( - f'type "{element_type.name}" must be scalar or non-null message', - Subsystem.MODEL, - Severity.INFO, - element_type.location, + self.error.extend( + [ + ( + f'invalid element type of sequence "{self.name}"', + Subsystem.MODEL, + Severity.ERROR, + location, + ), + ( + f'type "{element_type.name}" must be scalar or non-null message', + Subsystem.MODEL, + Severity.INFO, + element_type.location, 
+ ), + ], ) if isinstance(element_type, Scalar): element_type_size = element_type.size.simplified() if not isinstance(element_type_size, expr.Number) or int(element_type_size) % 8 != 0: - self.error.append( - f'unsupported element type size of sequence "{self.name}"', - Subsystem.MODEL, - Severity.ERROR, - location, - ) - self.error.append( - f'type "{element_type.name}" has size {element_type_size},' - r" must be multiple of 8", - Subsystem.MODEL, - Severity.INFO, - element_type.location, + self.error.extend( + [ + ( + f'unsupported element type size of sequence "{self.name}"', + Subsystem.MODEL, + Severity.ERROR, + location, + ), + ( + f'type "{element_type.name}" has size {element_type_size},' + r" must be multiple of 8", + Subsystem.MODEL, + Severity.INFO, + element_type.location, + ), + ], ) def __repr__(self) -> str: diff --git a/rflx/pyrflx/error.py b/rflx/pyrflx/error.py index 6914b4a3f..a4245c5a1 100644 --- a/rflx/pyrflx/error.py +++ b/rflx/pyrflx/error.py @@ -4,4 +4,4 @@ class PyRFLXError(RecordFluxError): def __init__(self, message: str) -> None: super().__init__() - self.append(message, Subsystem.PYRFLX, Severity.ERROR) + self.extend([(message, Subsystem.PYRFLX, Severity.ERROR, None)]) diff --git a/rflx/pyrflx/typevalue.py b/rflx/pyrflx/typevalue.py index 82b3b9c7a..7e3b4ce49 100644 --- a/rflx/pyrflx/typevalue.py +++ b/rflx/pyrflx/typevalue.py @@ -40,7 +40,7 @@ Type, ) from rflx.pyrflx.bitstring import Bitstring -from rflx.pyrflx.error import PyRFLXError, Severity, Subsystem +from rflx.pyrflx.error import PyRFLXError class TypeValue(Base): @@ -353,12 +353,11 @@ def parse(self, value: ty.Union[Bitstring, bytes], check: bool = True) -> None: try: nested_msg.parse(value, check) except PyRFLXError as e: - e.appendleft( - f"Error while parsing nested message {self._refinement_message.identifier}", - Subsystem.PYRFLX, - Severity.ERROR, + new_exception = PyRFLXError( + f"Error while parsing nested message {self._refinement_message.identifier}" ) - raise e + new_exception.extend(e) + raise new_exception from e assert nested_msg.valid_message self._nested_message = nested_msg self._value = nested_msg.bytestring @@ -452,13 +451,12 @@ def parse(self, value: ty.Union[Bitstring, bytes], check: bool = True) -> None: try: nested_message.parse(value, check) except PyRFLXError as e: - e.appendleft( + new_exception = PyRFLXError( f"cannot parse nested messages in sequence of type " - f"{self._element_type.full_name}", - Subsystem.PYRFLX, - Severity.ERROR, + f"{self._element_type.full_name}" ) - raise e + new_exception.extend(e) + raise new_exception from e assert nested_message.valid_message self._value.append(nested_message) value = value[len(nested_message.bitstring) :] @@ -877,12 +875,9 @@ def check_outgoing_condition_satisfied() -> None: f" != {type(value).__name__}" ) except PyRFLXError as e: - e.appendleft( - f"cannot set value for field {field_name}", - Subsystem.PYRFLX, - Severity.ERROR, - ) - raise e + new_exception = PyRFLXError(f"cannot set value for field {field_name}") + new_exception.extend(e) + raise new_exception from e else: raise PyRFLXError(f"cannot access field {field_name}") diff --git a/rflx/specification/parser.py b/rflx/specification/parser.py index c60f50bfe..31fa5aabc 100644 --- a/rflx/specification/parser.py +++ b/rflx/specification/parser.py @@ -88,15 +88,19 @@ def diagnostics_to_error( for diag in diagnostics: loc = diag.sloc_range - error.append( - diag.message, - Subsystem.PARSER, - Severity.ERROR, - Location( - start=(loc.start.line, loc.start.column), - 
source=filename, - end=(loc.end.line, loc.end.column), - ), + error.extend( + [ + ( + diag.message, + Subsystem.PARSER, + Severity.ERROR, + Location( + start=(loc.start.line, loc.start.column), + source=filename, + end=(loc.end.line, loc.end.column), + ), + ) + ], ) return True @@ -823,11 +827,15 @@ def extract_aspect(aspects: List[Aspect]) -> Tuple[expr.Expr, expr.Expr]: elif aspect.f_identifier.text == "First": first = create_math_expression(aspect.f_value, filename) else: - error.append( - f'invalid aspect "{aspect.f_identifier.text}"', - Subsystem.PARSER, - Severity.ERROR, - node_location(aspect.f_identifier, filename), + error.extend( + [ + ( + f'invalid aspect "{aspect.f_identifier.text}"', + Subsystem.PARSER, + Severity.ERROR, + node_location(aspect.f_identifier, filename), + ) + ], ) return size, first @@ -867,11 +875,15 @@ def extract_then( ) component_identifier = create_id(component.f_identifier, filename) if component.f_identifier.text.lower() == "message": - error.append( - 'reserved word "Message" used as identifier', - Subsystem.PARSER, - Severity.ERROR, - component_identifier.location, + error.extend( + [ + ( + 'reserved word "Message" used as identifier', + Subsystem.PARSER, + Severity.ERROR, + component_identifier.location, + ) + ], ) continue @@ -888,11 +900,15 @@ def extract_then( if then.f_target.kind_name != "NullID" and not any( then.f_target.text == c.f_identifier.text for c in components.f_components ): - error.append( - f'undefined field "{then.f_target.text}"', - Subsystem.PARSER, - Severity.ERROR, - node_location(then.f_target, filename) if then.f_target else None, + error.extend( + [ + ( + f'undefined field "{then.f_target.text}"', + Subsystem.PARSER, + Severity.ERROR, + node_location(then.f_target, filename) if then.f_target else None, + ) + ], ) continue structure.append(model.Link(source_node, *extract_then(then))) @@ -924,37 +940,45 @@ def merge_component_aspects( if l.first == expr.UNDEFINED: l.first = first else: - error.append( - f'first aspect of field "{component_identifier}"' - " conflicts with previous" - " specification", - Subsystem.MODEL, - Severity.ERROR, - first.location, - ) - error.append( - "previous specification of first", - Subsystem.MODEL, - Severity.INFO, - l.first.location, + error.extend( + [ + ( + f'first aspect of field "{component_identifier}"' + " conflicts with previous" + " specification", + Subsystem.MODEL, + Severity.ERROR, + first.location, + ), + ( + "previous specification of first", + Subsystem.MODEL, + Severity.INFO, + l.first.location, + ), + ], ) if size != expr.UNDEFINED: if l.size == expr.UNDEFINED: l.size = size else: - error.append( - f'size aspect of field "{component_identifier}" conflicts with previous' - " specification", - Subsystem.MODEL, - Severity.ERROR, - size.location, - ) - error.append( - "previous specification of size", - Subsystem.MODEL, - Severity.INFO, - l.size.location, + error.extend( + [ + ( + f'size aspect of field "{component_identifier}" conflicts with' + " previous specification", + Subsystem.MODEL, + Severity.ERROR, + size.location, + ), + ( + "previous specification of size", + Subsystem.MODEL, + Severity.INFO, + l.size.location, + ), + ], ) @@ -1020,17 +1044,21 @@ def create_derived_message( if not base_messages: error = RecordFluxError() - error.append( - f'illegal derivation "{identifier}"', - Subsystem.PARSER, - Severity.ERROR, - identifier.location, - ) - error.append( - f'invalid base message type "{base_name}"', - Subsystem.PARSER, - Severity.INFO, - base_types[0].identifier.location, 
+ error.extend( + [ + ( + f'illegal derivation "{identifier}"', + Subsystem.PARSER, + Severity.ERROR, + identifier.location, + ), + ( + f'invalid base message type "{base_name}"', + Subsystem.PARSER, + Severity.INFO, + base_types[0].identifier.location, + ), + ], ) error.propagate() @@ -1068,20 +1096,28 @@ def create_aspects(aspects: List[Aspect]) -> Tuple[expr.Expr, bool]: elif av_expr == expr.Variable("False"): always_valid = False else: - error.append( - f"invalid Always_Valid expression: {av_expr}", - Subsystem.PARSER, - Severity.ERROR, - node_location(a.f_value, filename), + error.extend( + [ + ( + f"invalid Always_Valid expression: {av_expr}", + Subsystem.PARSER, + Severity.ERROR, + node_location(a.f_value, filename), + ) + ], ) else: always_valid = True if not size: - error.append( - f'no size set for "{identifier}"', - Subsystem.PARSER, - Severity.ERROR, - identifier.location, + error.extend( + [ + ( + f'no size set for "{identifier}"', + Subsystem.PARSER, + Severity.ERROR, + identifier.location, + ) + ], ) error.propagate() assert size @@ -1163,43 +1199,58 @@ def create_refinement( def check_naming(error: RecordFluxError, package: PackageNode, name: Path) -> None: identifier = package.f_identifier.text if identifier.startswith("RFLX"): - error.append( - f'illegal prefix "RFLX" in package identifier "{identifier}"', - Subsystem.PARSER, - Severity.ERROR, - node_location(package.f_identifier, name), + error.extend( + [ + ( + f'illegal prefix "RFLX" in package identifier "{identifier}"', + Subsystem.PARSER, + Severity.ERROR, + node_location(package.f_identifier, name), + ) + ], ) if identifier != package.f_end_identifier.text: - error.append( - f'inconsistent package identifier "{package.f_end_identifier.text}"', - Subsystem.PARSER, - Severity.ERROR, - node_location(package.f_end_identifier, name), - ) - error.append( - f'previous identifier was "{identifier}"', - Subsystem.PARSER, - Severity.INFO, - node_location(package.f_identifier, name), + error.extend( + [ + ( + f'inconsistent package identifier "{package.f_end_identifier.text}"', + Subsystem.PARSER, + Severity.ERROR, + node_location(package.f_end_identifier, name), + ), + ( + f'previous identifier was "{identifier}"', + Subsystem.PARSER, + Severity.INFO, + node_location(package.f_identifier, name), + ), + ], ) if name != STDIN: expected_filename = f"{identifier.lower()}.rflx" if name.name != expected_filename: - - error.append( - f'file name does not match unit name "{identifier}",' - f' should be "{expected_filename}"', - Subsystem.PARSER, - Severity.ERROR, - node_location(package.f_identifier, name), + error.extend( + [ + ( + f'file name does not match unit name "{identifier}",' + f' should be "{expected_filename}"', + Subsystem.PARSER, + Severity.ERROR, + node_location(package.f_identifier, name), + ) + ], ) for t in package.f_declarations: if isinstance(t, TypeDecl) and model.is_builtin_type(create_id(t.f_identifier, name).name): - error.append( - f'illegal redefinition of built-in type "{t.f_identifier.text}"', - Subsystem.MODEL, - Severity.ERROR, - node_location(t, name), + error.extend( + [ + ( + f'illegal redefinition of built-in type "{t.f_identifier.text}"', + Subsystem.MODEL, + Severity.ERROR, + node_location(t, name), + ) + ], ) @@ -1239,22 +1290,24 @@ def __convert_unit( for context in spec.f_context_clause: item = create_id(context.f_item, filename) if item in transitions: - error.append( - f'dependency cycle when including "{transitions[0]}"', - Subsystem.PARSER, - Severity.ERROR, - transitions[0].location, - ) 
error.extend( [ ( - f'when including "{i}"', + f'dependency cycle when including "{transitions[0]}"', Subsystem.PARSER, - Severity.INFO, - i.location, - ) - for i in transitions[1:] + [item] - ] + Severity.ERROR, + transitions[0].location, + ), + *[ + ( + f'when including "{i}"', + Subsystem.PARSER, + Severity.INFO, + i.location, + ) + for i in transitions[1:] + [item] + ], + ], ) continue withed_file = filename.parent / f"{str(item).lower()}.rflx" @@ -1266,20 +1319,26 @@ def __convert_unit( packagefile in self.__specifications and filename != self.__specifications[packagefile].filename ): - error.append( - "duplicate specification", - Subsystem.PARSER, - Severity.ERROR, - node_location(spec.f_package_declaration.f_identifier, filename), - ) - error.append( - "previous specification", - Subsystem.PARSER, - Severity.INFO, - node_location( - self.__specifications[packagefile].spec.f_package_declaration.f_identifier, - self.__specifications[packagefile].filename, - ), + error.extend( + [ + ( + "duplicate specification", + Subsystem.PARSER, + Severity.ERROR, + node_location(spec.f_package_declaration.f_identifier, filename), + ), + ( + "previous specification", + Subsystem.PARSER, + Severity.INFO, + node_location( + self.__specifications[ + packagefile + ].spec.f_package_declaration.f_identifier, + self.__specifications[packagefile].filename, + ), + ), + ], ) self.__specifications[packagefile] = SpecificationNode(filename, spec, withed_files) diff --git a/rflx/statement.py b/rflx/statement.py index 95d264f53..0ca361d82 100644 --- a/rflx/statement.py +++ b/rflx/statement.py @@ -106,17 +106,21 @@ def check_type( if isinstance(statement_type.element, rty.Message) and isinstance( self.parameter, Variable ): - error.append( - "appending independently created message not supported", - Subsystem.MODEL, - Severity.ERROR, - self.parameter.location, - ) - error.append( - "message aggregate should be used instead", - Subsystem.MODEL, - Severity.INFO, - self.parameter.location, + error.extend( + [ + ( + "appending independently created message not supported", + Subsystem.MODEL, + Severity.ERROR, + self.parameter.location, + ), + ( + "message aggregate should be used instead", + Subsystem.MODEL, + Severity.INFO, + self.parameter.location, + ), + ], ) return error diff --git a/rflx/typing_.py b/rflx/typing_.py index 3dd8fb532..43cabf451 100644 --- a/rflx/typing_.py +++ b/rflx/typing_.py @@ -346,17 +346,11 @@ def check_type( desc = ( " or ".join(map(str, expected_types)) if isinstance(expected, tuple) else str(expected) ) - error.append( - f"expected {desc}", - Subsystem.MODEL, - Severity.ERROR, - location, - ) - error.append( - f"found {actual}", - Subsystem.MODEL, - Severity.INFO, - location, + error.extend( + [ + (f"expected {desc}", Subsystem.MODEL, Severity.ERROR, location), + (f"found {actual}", Subsystem.MODEL, Severity.INFO, location), + ], ) return error @@ -381,17 +375,11 @@ def check_type_instance( if isinstance(expected, tuple) else expected.DESCRIPTIVE_NAME ) - error.append( - f"expected {desc}", - Subsystem.MODEL, - Severity.ERROR, - location, - ) - error.append( - f"found {actual}", - Subsystem.MODEL, - Severity.INFO, - location, + error.extend( + [ + (f"expected {desc}", Subsystem.MODEL, Severity.ERROR, location), + (f"found {actual}", Subsystem.MODEL, Severity.INFO, location), + ], ) return error @@ -399,10 +387,14 @@ def check_type_instance( def _undefined_type(location: ty.Optional[Location], description: str = "") -> RecordFluxError: error = RecordFluxError() - error.append( - "undefined" 
+ (f" {description}" if description else ""), - Subsystem.MODEL, - Severity.ERROR, - location, + error.extend( + [ + ( + "undefined" + (f" {description}" if description else ""), + Subsystem.MODEL, + Severity.ERROR, + location, + ) + ], ) return error diff --git a/tests/property/strategies.py b/tests/property/strategies.py index 916a2c9c8..fefc7077d 100644 --- a/tests/property/strategies.py +++ b/tests/property/strategies.py @@ -301,10 +301,15 @@ def outgoing(field: Field) -> ty.Sequence[Link]: try: return message.proven() except error.RecordFluxError as e: - e.append( - f"incorrectly generated message:\n {message!r}", - error.Subsystem.MODEL, - error.Severity.INFO, + e.extend( + [ + ( + f"incorrectly generated message:\n {message!r}", + error.Subsystem.MODEL, + error.Severity.INFO, + None, + ) + ], ) raise e