/usr/local/bin/python3.9 -m pytest test
================================== test session starts ===================================
platform freebsd13 -- Python 3.9.16, pytest-7.2.1, pluggy-1.0.0
rootdir: /usr/ports/math/py-mathics-scanner/work/Mathics_Scanner-1.3.0
plugins: cov-2.9.0
collected 54 items

test/test_ascii.py .                                                                [  1%]
test/test_feed.py ...                                                               [  7%]
test/test_general_yaml_sanity.py ..........                                         [ 25%]
test/test_has_unicode_inverse_sanity.py .                                           [ 27%]
test/test_letterlikes_sanity.py .                                                   [ 29%]
test/test_prescanner.py .............                                               [ 53%]
test/test_table_consistency.py ..                                                   [ 57%]
test/test_tokeniser.py FF....F.........F...                                         [ 94%]
test/test_translation_regressions.py .                                              [ 96%]
test/test_unicode_equivalent.py F                                                   [ 98%]
test/test_wl_to_ascii.py .                                                          [100%]

========================================= FAILURES ========================================
________________________________________ test_apply _______________________________________

    def test_apply():
>       assert tokens("f // x") == [
            Token("Symbol", "f", 0),
            Token("Postfix", "//", 2),
            Token("Symbol", "x", 5),
        ]
E       assert [Token(Symbol...Symbol, x, 5)] == [Token(Symbol...Symbol, x, 5)]
E         At index 0 diff: Token(Symbol, f , 0) != Token(Symbol, f, 0)
E         Use -v to get more diff

test/test_tokeniser.py:69: AssertionError
_____________________________________ test_association ____________________________________

    def test_association():
>       assert tokens("<|x -> m|>") == [
            Token("RawLeftAssociation", "<|", 0),
            Token("Symbol", "x", 2),
            Token("Rule", "->", 4),
            Token("Symbol", "m", 7),
            Token("RawRightAssociation", "|>", 8),
        ]
E       assert [Token(RawLef...ation, |>, 8)] == [Token(RawLef...ation, |>, 8)]
E         At index 1 diff: Token(Symbol, x , 2) != Token(Symbol, x, 2)
E         Use -v to get more diff

test/test_tokeniser.py:87: AssertionError
______________________________________ test_integeral _____________________________________

    def test_integeral():
>       assert tokens("\u222B x \uf74c y") == [
            Token("Integral", "\u222B", 0),
            Token("Symbol", "x", 2),
            Token("DifferentialD", "\uf74c", 4),
            Token("Symbol", "y", 6),
        ]
E       assert [Token(Integr...Symbol, y, 6)] == [Token(Integr...Symbol, y, 6)]
E         At index 1 diff: Token(Symbol, x , 2) != Token(Symbol, x, 2)
E         Use -v to get more diff

test/test_tokeniser.py:128: AssertionError
_________________________________________ test_set ________________________________________

    def test_set():
>       assert tokens("x = y") == [
            Token("Symbol", "x", 0),
            Token("Set", "=", 2),
            Token("Symbol", "y", 4),
        ]
E       assert [Token(Symbol...Symbol, y, 4)] == [Token(Symbol...Symbol, y, 4)]
E         At index 0 diff: Token(Symbol, x , 0) != Token(Symbol, x, 0)
E         Use -v to get more diff

test/test_tokeniser.py:196: AssertionError
________________________________ test_has_unicode_equivalent ______________________________

    def test_has_unicode_equivalent():
        for k, v in yaml_data.items():
            unicode_equivalent = v.get("unicode-equivalent", None)
            if unicode_equivalent is not None:
>               assert unicode_equivalent != v.get(
                    "ascii"
                ), f"In {k} - remove add unicode equivalent"
E               AssertionError: In Alternative - remove add unicode equivalent
E               assert '|' != '|'
E                +  where '|' = ('ascii')
E                +    where  = {'ascii': '|', 'has-unicode-inverse': False, 'is-letter-like': False, 'operator-name': 'Alternative', ...}.get

test/test_unicode_equivalent.py:16: AssertionError
================================== short test summary info ================================
FAILED test/test_tokeniser.py::test_apply - assert [Token(Symbol...Symbol, x, 5)] == [Token(Symbol...Symbol, x, 5)]
FAILED test/test_tokeniser.py::test_association - assert [Token(RawLef...ation, |>, 8)] == [Token(RawLef...ation, |>, 8)]
FAILED test/test_tokeniser.py::test_integeral - assert [Token(Integr...Symbol, y, 6)] == [Token(Integr...Symbol, y, 6)]
FAILED test/test_tokeniser.py::test_set - assert [Token(Symbol...Symbol, y, 4)] == [Token(Symbol...Symbol, y, 4)]
FAILED test/test_unicode_equivalent.py::test_has_unicode_equivalent - AssertionError: In Alternative - remove add unicode equivalent
================================ 5 failed, 49 passed in 10.51s ============================