From e23f6d8c1ae5c66c73c7eb4d89580f5f68703074 Mon Sep 17 00:00:00 2001 From: Brujo Benavides Date: Wed, 25 May 2022 09:11:02 +0200 Subject: [PATCH 1/4] Include all the new tools --- .github/ISSUE_TEMPLATE/bug_report.md | 25 +++ .github/ISSUE_TEMPLATE/feature_request.md | 20 ++ .github/ISSUE_TEMPLATE/other-issues.md | 10 + .gitignore | 20 +- CHANGELOG.md | 228 +--------------------- elvis.config | 58 +----- rebar.config | 91 ++++----- src/katana_code.app.src | 12 +- 8 files changed, 115 insertions(+), 349 deletions(-) create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/feature_request.md create mode 100644 .github/ISSUE_TEMPLATE/other-issues.md diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..d56a6b6 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,25 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: bug +assignees: '' + +--- + +## Bug Description +A clear and concise description of what the bug is. + +## To Reproduce +Steps to reproduce the behavior + +## Expected Behavior +A clear and concise description of what you expected to happen. + +## `rebar3` Logs +If applicable, run `rebar3` with `DIAGNOSTIC=1` and attach all the logs to your report. + +## Additional Context + - OS: [e.g. MacOS] + - Erlang version + - rebar3 version diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000..219ef58 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: enhancement +assignees: '' + +--- + +## Is your feature request related to a problem? Please describe. +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +## Describe the solution you'd like +A clear and concise description of what you want to happen. + +## Describe alternatives you've considered +A clear and concise description of any alternative solutions or features you've considered. + +## Additional Context +Add any other context or screenshots about the feature request here. 
diff --git a/.github/ISSUE_TEMPLATE/other-issues.md b/.github/ISSUE_TEMPLATE/other-issues.md new file mode 100644 index 0000000..683f930 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/other-issues.md @@ -0,0 +1,10 @@ +--- +name: Other Issues +about: Something that's not covered by the other categories +title: '' +labels: '' +assignees: '' + +--- + + diff --git a/.gitignore b/.gitignore index 9198e3f..a52a814 100644 --- a/.gitignore +++ b/.gitignore @@ -1,18 +1,12 @@ -katana_code.d -.erlang.mk/ +rebar3.crashdump +.rebar3 +_* .eunit -deps *.o *.beam *.plt erl_crash.dump -log -logs -bin -ebin -deps -.erlang.mk.* -hexer -hexer.config -doc -_build \ No newline at end of file +rel/ +_build +_checkouts +doc/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 3967667..c09ecc7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,227 +1 @@ -# Changelog - -## [1.2.0](https://github.com/inaka/katana-code/tree/1.2.0) (2021-09-18) - -[Full Changelog](https://github.com/inaka/katana-code/compare/1.1.2...1.2.0) - -**Merged pull requests:** - -- Add pre\_fixer and post\_fixer options to the dodger [\#67](https://github.com/inaka/katana-code/pull/67) ([elbrujohalcon](https://github.com/elbrujohalcon)) -- Allow for analysis under rebar3\_hank [\#65](https://github.com/inaka/katana-code/pull/65) ([paulo-ferraz-oliveira](https://github.com/paulo-ferraz-oliveira)) -- Increase consumer confidence [\#64](https://github.com/inaka/katana-code/pull/64) ([paulo-ferraz-oliveira](https://github.com/paulo-ferraz-oliveira)) -- Move to a GitHub action \(instead of container-based\) CI approach [\#63](https://github.com/inaka/katana-code/pull/63) ([paulo-ferraz-oliveira](https://github.com/paulo-ferraz-oliveira)) - -## [1.1.2](https://github.com/inaka/katana-code/tree/1.1.2) (2021-02-23) - -[Full Changelog](https://github.com/inaka/katana-code/compare/1.1.1...1.1.2) - -**Merged pull requests:** - -- Remove discrepancies. Thanks, dialyzer! 
[\#62](https://github.com/inaka/katana-code/pull/62) ([elbrujohalcon](https://github.com/elbrujohalcon)) - -## [1.1.1](https://github.com/inaka/katana-code/tree/1.1.1) (2021-02-18) - -[Full Changelog](https://github.com/inaka/katana-code/compare/1.1.0...1.1.1) - -**Closed issues:** - -- Parse non-module files [\#60](https://github.com/inaka/katana-code/issues/60) - -**Merged pull requests:** - -- Fix \#60: Handle parsing of none-module files [\#61](https://github.com/inaka/katana-code/pull/61) ([elbrujohalcon](https://github.com/elbrujohalcon)) - -## [1.1.0](https://github.com/inaka/katana-code/tree/1.1.0) (2021-02-02) - -[Full Changelog](https://github.com/inaka/katana-code/compare/1.0.3...1.1.0) - -**Closed issues:** - -- Missing attribute definition elements for ktn\_code:type [\#53](https://github.com/inaka/katana-code/issues/53) -- `ktn\_code:parse\_tree/1` issue while parsing for module attributes [\#43](https://github.com/inaka/katana-code/issues/43) - -**Merged pull requests:** - -- Compact strings directly when parsing, to preserve original formatting [\#59](https://github.com/inaka/katana-code/pull/59) ([elbrujohalcon](https://github.com/elbrujohalcon)) -- Fix broken CI [\#58](https://github.com/inaka/katana-code/pull/58) ([paulo-ferraz-oliveira](https://github.com/paulo-ferraz-oliveira)) -- Add GitHub Actions for CI [\#57](https://github.com/inaka/katana-code/pull/57) ([paulo-ferraz-oliveira](https://github.com/paulo-ferraz-oliveira)) -- Add some missing tree node types [\#56](https://github.com/inaka/katana-code/pull/56) ([paulo-ferraz-oliveira](https://github.com/paulo-ferraz-oliveira)) - -## [1.0.3](https://github.com/inaka/katana-code/tree/1.0.3) (2020-11-25) - -[Full Changelog](https://github.com/inaka/katana-code/compare/1.0.2...1.0.3) - -**Merged pull requests:** - -- Fix escript parsing [\#55](https://github.com/inaka/katana-code/pull/55) ([elbrujohalcon](https://github.com/elbrujohalcon)) - -## [1.0.2](https://github.com/inaka/katana-code/tree/1.0.2) (2020-11-25) - -[Full Changelog](https://github.com/inaka/katana-code/compare/1.0.1...1.0.2) - -**Fixed bugs:** - -- Improve formatting in stringyfied macro definitions [\#52](https://github.com/inaka/katana-code/issues/52) - -**Merged pull requests:** - -- Be far more specific on how to stringify stuff [\#54](https://github.com/inaka/katana-code/pull/54) ([elbrujohalcon](https://github.com/elbrujohalcon)) - -## [1.0.1](https://github.com/inaka/katana-code/tree/1.0.1) (2020-11-19) - -[Full Changelog](https://github.com/inaka/katana-code/compare/1.0.0...1.0.1) - -**Fixed bugs:** - -- Using macros in macro definitions breaks them [\#49](https://github.com/inaka/katana-code/issues/49) -- Remove discrepancies [\#48](https://github.com/inaka/katana-code/issues/48) - -**Merged pull requests:** - -- \[Fix \#49\] Don't preprocess macros if we're not going to parse them later [\#51](https://github.com/inaka/katana-code/pull/51) ([elbrujohalcon](https://github.com/elbrujohalcon)) -- Fix \#48 by properly running dialyzer, xref and then linter [\#50](https://github.com/inaka/katana-code/pull/50) ([elbrujohalcon](https://github.com/elbrujohalcon)) - -## [1.0.0](https://github.com/inaka/katana-code/tree/1.0.0) (2020-11-19) - -[Full Changelog](https://github.com/inaka/katana-code/compare/0.2.1...1.0.0) - -**Fixed bugs:** - -- ktn\_dodger can't parse stringifyied macro arguments [\#41](https://github.com/inaka/katana-code/issues/41) -- ktn\_dodger can't parse macros in specs [\#38](https://github.com/inaka/katana-code/issues/38) -- 
ktn\_dodger can't parse macros surrounded with parentheses [\#37](https://github.com/inaka/katana-code/issues/37) - -**Closed issues:** - -- Allow ktn\_dodger to parse escripts [\#42](https://github.com/inaka/katana-code/issues/42) -- We need a way to retrieve macro text verbatim from ktn\_dodger [\#40](https://github.com/inaka/katana-code/issues/40) - -**Merged pull requests:** - -- Don't parse \(i.e. stringify\) macro definitions by default. [\#47](https://github.com/inaka/katana-code/pull/47) ([elbrujohalcon](https://github.com/elbrujohalcon)) -- \[Fix \#41\] Parse stringyfied macros [\#46](https://github.com/inaka/katana-code/pull/46) ([elbrujohalcon](https://github.com/elbrujohalcon)) -- Allow ktn\_dodger to parse escripts [\#45](https://github.com/inaka/katana-code/pull/45) ([elbrujohalcon](https://github.com/elbrujohalcon)) -- Prettify the stringification a bit [\#44](https://github.com/inaka/katana-code/pull/44) ([elbrujohalcon](https://github.com/elbrujohalcon)) - -## [0.2.1](https://github.com/inaka/katana-code/tree/0.2.1) (2019-12-03) - -[Full Changelog](https://github.com/inaka/katana-code/compare/0.2.0...0.2.1) - -**Merged pull requests:** - -- Prepare release 0.2.1 [\#36](https://github.com/inaka/katana-code/pull/36) ([jfacorro](https://github.com/jfacorro)) -- \[\#26\] Remove aleppo from .app.src [\#35](https://github.com/inaka/katana-code/pull/35) ([jfacorro](https://github.com/jfacorro)) - -## [0.2.0](https://github.com/inaka/katana-code/tree/0.2.0) (2019-12-02) - -[Full Changelog](https://github.com/inaka/katana-code/compare/0.1.3...0.2.0) - -**Closed issues:** - -- Published documentation on hexdocs.pm contains private functions [\#30](https://github.com/inaka/katana-code/issues/30) -- ‘Ignored variable is being used’ reported for non-variables [\#26](https://github.com/inaka/katana-code/issues/26) - -**Merged pull requests:** - -- Bump version to 0.2.0 [\#33](https://github.com/inaka/katana-code/pull/33) ([elbrujohalcon](https://github.com/elbrujohalcon)) -- \[\#30\] Don't show private functions in documentation [\#32](https://github.com/inaka/katana-code/pull/32) ([juanbono](https://github.com/juanbono)) -- \[\#26\] Drop aleppo and use a forked epp\_dodger [\#31](https://github.com/inaka/katana-code/pull/31) ([jfacorro](https://github.com/jfacorro)) - -## [0.1.3](https://github.com/inaka/katana-code/tree/0.1.3) (2019-06-24) - -[Full Changelog](https://github.com/inaka/katana-code/compare/0.1.2...0.1.3) - -**Closed issues:** - -- Unknown type `ktn\_code:tree\_node/0` [\#27](https://github.com/inaka/katana-code/issues/27) - -**Merged pull requests:** - -- Bump Version to 0.1.3 [\#29](https://github.com/inaka/katana-code/pull/29) ([elbrujohalcon](https://github.com/elbrujohalcon)) -- fix error with exporting types [\#28](https://github.com/inaka/katana-code/pull/28) ([NobbZ](https://github.com/NobbZ)) - -## [0.1.2](https://github.com/inaka/katana-code/tree/0.1.2) (2018-06-29) - -[Full Changelog](https://github.com/inaka/katana-code/compare/0.1.0...0.1.2) - -**Fixed bugs:** - -- ktn\_code:type/1 returns 'var' for ?MODULE\_STRING [\#15](https://github.com/inaka/katana-code/issues/15) - -**Closed issues:** - -- Replace ktn\_xref\_SUITE by ktn\_meta\_SUITE [\#4](https://github.com/inaka/katana-code/issues/4) -- Fulfill the open-source checklist [\#2](https://github.com/inaka/katana-code/issues/2) - -**Merged pull requests:** - -- Update deps and bump version to 0.1.2 [\#25](https://github.com/inaka/katana-code/pull/25) ([elbrujohalcon](https://github.com/elbrujohalcon)) -- 
Update Dependencies [\#24](https://github.com/inaka/katana-code/pull/24) ([elbrujohalcon](https://github.com/elbrujohalcon)) -- Bump Version to 0.1.1 [\#23](https://github.com/inaka/katana-code/pull/23) ([elbrujohalcon](https://github.com/elbrujohalcon)) -- Update aleppo to 1.0.1 [\#22](https://github.com/inaka/katana-code/pull/22) ([jfacorro](https://github.com/jfacorro)) -- Add API to provide file name for `parse\_tree` [\#21](https://github.com/inaka/katana-code/pull/21) ([seriyps](https://github.com/seriyps)) -- Remove dead hipchat link [\#20](https://github.com/inaka/katana-code/pull/20) ([elbrujohalcon](https://github.com/elbrujohalcon)) - -## [0.1.0](https://github.com/inaka/katana-code/tree/0.1.0) (2016-06-14) - -[Full Changelog](https://github.com/inaka/katana-code/compare/0.0.4...0.1.0) - -**Closed issues:** - -- Version Bump to 0.1.0 [\#18](https://github.com/inaka/katana-code/issues/18) -- Move from erlang.mk to rebar3 [\#16](https://github.com/inaka/katana-code/issues/16) -- Version Bump 0.0.4 [\#13](https://github.com/inaka/katana-code/issues/13) - -**Merged pull requests:** - -- \[Close \#18\] version bump to 0.1.0 [\#19](https://github.com/inaka/katana-code/pull/19) ([Euen](https://github.com/Euen)) -- \[Close \#16\] rebar3 support [\#17](https://github.com/inaka/katana-code/pull/17) ([Euen](https://github.com/Euen)) - -## [0.0.4](https://github.com/inaka/katana-code/tree/0.0.4) (2016-04-26) - -[Full Changelog](https://github.com/inaka/katana-code/compare/0.0.3...0.0.4) - -**Merged pull requests:** - -- \[\#13\] Version Bump 0.0.4 [\#14](https://github.com/inaka/katana-code/pull/14) ([davecaos](https://github.com/davecaos)) -- Add support for latin1 encoded source files [\#12](https://github.com/inaka/katana-code/pull/12) ([tjarvstrand](https://github.com/tjarvstrand)) - -## [0.0.3](https://github.com/inaka/katana-code/tree/0.0.3) (2016-04-07) - -[Full Changelog](https://github.com/inaka/katana-code/compare/0.0.2...0.0.3) - -**Fixed bugs:** - -- Add deps to the app.src file [\#10](https://github.com/inaka/katana-code/pull/10) ([elbrujohalcon](https://github.com/elbrujohalcon)) - -**Merged pull requests:** - -- Version Bump to 0.0.3 [\#11](https://github.com/inaka/katana-code/pull/11) ([elbrujohalcon](https://github.com/elbrujohalcon)) - -## [0.0.2](https://github.com/inaka/katana-code/tree/0.0.2) (2016-03-30) - -[Full Changelog](https://github.com/inaka/katana-code/compare/0.0.1...0.0.2) - -**Fixed bugs:** - -- katana-code.app.src should be called katana\_code.app.src [\#6](https://github.com/inaka/katana-code/pull/6) ([elbrujohalcon](https://github.com/elbrujohalcon)) - -**Merged pull requests:** - -- \[\#quick\] Add katana\_code.d to .gitignore [\#9](https://github.com/inaka/katana-code/pull/9) ([elbrujohalcon](https://github.com/elbrujohalcon)) -- \[\#quick\] Fix project name [\#8](https://github.com/inaka/katana-code/pull/8) ([elbrujohalcon](https://github.com/elbrujohalcon)) -- Version Bump to 0.0.2 [\#7](https://github.com/inaka/katana-code/pull/7) ([elbrujohalcon](https://github.com/elbrujohalcon)) - -## [0.0.1](https://github.com/inaka/katana-code/tree/0.0.1) (2016-03-01) - -[Full Changelog](https://github.com/inaka/katana-code/compare/d4c5b63916ff692ff1fedc3f31787e4cedb70cf8...0.0.1) - -**Merged pull requests:** - -- Igaray.version bump [\#5](https://github.com/inaka/katana-code/pull/5) ([igaray](https://github.com/igaray)) -- Initial commit [\#3](https://github.com/inaka/katana-code/pull/3) ([igaray](https://github.com/igaray)) -- Update LICENSE 
[\#1](https://github.com/inaka/katana-code/pull/1) ([elbrujohalcon](https://github.com/elbrujohalcon)) - - - -\* *This Changelog was automatically generated by [github_changelog_generator](https://github.com/github-changelog-generator/github-changelog-generator)* +See the [Releases](../../releases) page. diff --git a/elvis.config b/elvis.config index 9b25e36..4d39c8e 100644 --- a/elvis.config +++ b/elvis.config @@ -1,49 +1,9 @@ -[ - { - elvis, - [ - {config, - [#{dirs => ["src", "test"], - filter => "*.erl", - rules => [{elvis_text_style, line_length, #{limit => 80, - skip_comments => false}}, - {elvis_text_style, no_tabs}, - {elvis_text_style, no_trailing_whitespace}, - {elvis_style, macro_names}, - {elvis_style, macro_module_names}, - {elvis_style, operator_spaces, #{rules => [{right, ","}, - {right, "++"}, - {left, "++"}]}}, - {elvis_style, nesting_level, #{level => 3}}, - {elvis_style, god_modules, #{limit => 25}}, - {elvis_style, no_if_expression}, - {elvis_style, invalid_dynamic_call, #{ignore => [ktn_recipe_verify]}}, - {elvis_style, used_ignored_variable}, - {elvis_style, no_behavior_info}, - { - elvis_style, - module_naming_convention, - #{regex => "^([a-z][a-z0-9]*_?)*(_SUITE)?$", - ignore => []} - }, - { - elvis_style, - function_naming_convention, - #{regex => "^([a-z$][a-z0-9]*_?)*$"} - }, - {elvis_style, state_record_and_type}, - {elvis_style, no_spec_with_records}, - {elvis_style, dont_repeat_yourself, #{min_complexity => 15}}, - {elvis_style, no_debug_call, #{debug_functions => [{ct, pal}]}} - ], - ignore => [ktn_dodger] - }, - #{dirs => ["."], - filter => "elvis.config", - rules => [{elvis_project, old_configuration_format}] - } - ] - } - ] - } -]. +[{elvis, + [{config, + [#{dirs => ["src", "src/**"], + filter => "*.erl", + ruleset => erl_files}, + #{dirs => ["test"], + filter => "*.erl", + ruleset => erl_files, + rules => [{elvis_style, no_debug_call, disable}]}]}]}]. diff --git a/rebar.config b/rebar.config index c49b577..ccb348c 100644 --- a/rebar.config +++ b/rebar.config @@ -1,62 +1,47 @@ %% -*- mode: erlang;erlang-indent-level: 2;indent-tabs-mode: nil -*- %% ex: ts=4 sw=4 ft=erlang et -{erl_opts, [ warn_unused_vars - , warn_export_all - , warn_shadow_vars - , warn_unused_import - , warn_unused_function - , warn_bif_clash - , warn_unused_record - , warn_deprecated_function - , warn_obsolete_guard - , strict_validation - , warn_export_vars - , warn_exported_vars - , warn_missing_spec - , warn_untyped_record - , debug_info]}. +{erl_opts, + [warn_unused_import, warn_export_vars, warnings_as_errors, verbose, report, debug_info]}. {minimum_otp_vsn, "21"}. +{ex_doc, + [{source_url, <<"https://github.com/inaka/katana-code">>}, + {extras, [<<"README.md">>, <<"LICENSE">>]}, + {main, <<"readme">>}]}. + +{hex, [{doc, #{provider => ex_doc}}]}. + +{project_plugins, + [{rebar3_hex, "~> 7.0.1"}, + {rebar3_format, "~> 1.1.0"}, + {rebar3_lint, "~> 1.0.2"}, + {rebar3_ex_doc, "~> 0.2.9"}]}. + +{dialyzer, [{warnings, [no_return, unmatched_returns, error_handling, underspecs]}]}. + +{edoc_opts, + [{todo, true}, + {title, "Hank"}, + {overview, "priv/overview.edoc"}, + {packages, true}, + {subpackages, true}, + {source_path, "src"}, + {application, rebar3_hank}, + {new, true}, + report_missing_types]}. + +{xref_checks, + [undefined_function_calls, + locals_not_used, + deprecated_function_calls, + deprecated_functions]}. + {cover_enabled, true}. {cover_opts, [verbose]}. -{edoc_opts, [ {private, false} - , {hidden, false} - , {todo, true} - ]}. 
- -{dialyzer, [ {warnings, [ no_return - , unmatched_returns - , error_handling - , underspecs - ]} - , {plt_apps, top_level_deps} - , {plt_extra_apps, [tools, syntax_tools]} - , {plt_location, local} - , {base_plt_apps, [stdlib, kernel]} - , {base_plt_location, global} - ]}. - -{xref_checks,[ undefined_function_calls - , locals_not_used - , deprecated_function_calls - , deprecated_functions - ]}. - -{project_plugins, [ - rebar3_hex, - rebar3_lint, - rebar3_hank -]}. - -{alias, [{test, [xref, dialyzer, lint, hank, ct, cover, edoc]}]}. - -%% == hank == - -{hank, [ - {ignore, [ - {"example/**", unnecessary_function_arguments} - ]} -]}. +{alias, + [{test, [compile, format, spellcheck, lint, dialyzer, {ct, "--verbose"}, cover, edoc]}]}. + +{hex, [{doc, #{provider => edoc}}]}. diff --git a/src/katana_code.app.src b/src/katana_code.app.src index ae3fc8b..b95fab1 100644 --- a/src/katana_code.app.src +++ b/src/katana_code.app.src @@ -1,12 +1,10 @@ -{application, katana_code, - [ - {description, "Functions useful for processing Erlang code."}, +{application, + katana_code, + [{description, "Functions useful for processing Erlang code."}, {vsn, git}, {applications, [kernel, stdlib]}, {modules, []}, {registered, []}, {licenses, ["Apache 2.0"]}, - {links, [{"Github", "https://github.com/inaka/katana-code"}]}, - {build_tools, ["rebar3"]} - ] -}. + {links, [{"github", "https://github.com/inaka/katana-code"}]}, + {build_tools, ["rebar3"]}]}. From b7b0d84409c77a0160189b4ccc226f18aa15928f Mon Sep 17 00:00:00 2001 From: Brujo Benavides Date: Wed, 25 May 2022 09:16:55 +0200 Subject: [PATCH 2/4] Format almost everything --- .github/workflows/ci.yml | 55 +-- rebar.config | 5 +- src/ktn_code.erl | 732 ++++++++++++++++----------------------- src/ktn_dodger.erl | 2 + src/ktn_io_string.erl | 23 +- test/ktn_code_SUITE.erl | 104 +++--- 6 files changed, 387 insertions(+), 534 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 26d3d14..5878af8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,24 +1,39 @@ ---- -name: build -on: - push: - branches: - - master - pull_request: - branches: - - master +name: Erlang CI + +on: [push, pull_request] + jobs: - ci: - name: Run checks and tests over ${{matrix.otp_vsn}} and ${{matrix.os}} - runs-on: ${{matrix.os}} + + build: + + runs-on: ubuntu-latest + strategy: matrix: - otp_vsn: [21, 22, 23, 24] - os: [ubuntu-latest] + otp: ['22.3', '23.3', '24.3'] + rebar: ['3.18.0'] + steps: - - uses: actions/checkout@v2 - - uses: erlef/setup-beam@v1 - with: - otp-version: ${{matrix.otp_vsn}} - rebar3-version: '3.14' - - run: rebar3 test + - uses: actions/checkout@v2 + - uses: erlef/setup-beam@v1 + id: setup-beam + with: + otp-version: ${{matrix.otp}} + rebar3-version: ${{matrix.rebar}} + - name: Restore _build + uses: actions/cache@v2 + with: + path: _build + key: _build-cache-for-os-${{runner.os}}-otp-${{steps.setup-beam.outputs.otp-version}}-rebar3-${{steps.setup-beam.outputs.rebar3-version}}-hash-${{hashFiles('rebar.lock')}} + - name: Restore rebar3's cache + uses: actions/cache@v2 + with: + path: ~/.cache/rebar3 + key: rebar3-cache-for-os-${{runner.os}}-otp-${{steps.setup-beam.outputs.otp-version}}-rebar3-${{steps.setup-beam.outputs.rebar3-version}}-hash-${{hashFiles('rebar.lock')}} + - name: Compile + run: rebar3 compile + - name: Format check + run: rebar3 format --verify + - name: Run tests and verifications + run: rebar3 test + diff --git a/rebar.config b/rebar.config index ccb348c..db4cded 100644 --- a/rebar.config +++ 
b/rebar.config @@ -3,7 +3,7 @@ {erl_opts, [warn_unused_import, warn_export_vars, warnings_as_errors, verbose, report, debug_info]}. -{minimum_otp_vsn, "21"}. +{minimum_otp_vsn, "23"}. {ex_doc, [{source_url, <<"https://github.com/inaka/katana-code">>}, @@ -41,7 +41,6 @@ {cover_opts, [verbose]}. -{alias, - [{test, [compile, format, spellcheck, lint, dialyzer, {ct, "--verbose"}, cover, edoc]}]}. +{alias, [{test, [compile, format, lint, dialyzer, {ct, "--verbose"}, cover, edoc]}]}. {hex, [{doc, #{provider => edoc}}]}. diff --git a/src/ktn_code.erl b/src/ktn_code.erl index a1bfc3e..7b47832 100644 --- a/src/ktn_code.erl +++ b/src/ktn_code.erl @@ -3,60 +3,22 @@ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -module(ktn_code). --export([ - beam_to_string/1, - beam_to_erl/2, - parse_tree/1, - eval/1, - consult/1, - to_str/1 - ]). - +-export([beam_to_string/1, beam_to_erl/2, parse_tree/1, eval/1, consult/1, to_str/1]). %% Getters --export([ - type/1, - attr/2, - node_attr/2, - content/1 - ]). +-export([type/1, attr/2, node_attr/2, content/1]). -export_type([tree_node/0, tree_node_type/0]). -type tree_node_type() :: - root | function | clause | match | tuple - | atom | integer | float | string | char - | binary | binary_element | var - | call | remote - | 'case' | case_expr | case_clauses - | 'fun' | named_fun - | 'query' - | 'try' | try_catch | try_case | try_after - | 'if' | 'catch' - | 'receive' | receive_after | receive_case - | nil | cons - | map | map_field_assoc | map_field_exact - | lc | lc_expr | generate - | bc | bc_expr | b_generate - | op - | record_field | record_index - | block - %% Pre-Defined Module Attributes - | module | export | import | compile | vsn | on_load - %% Behaviour Module Attribute - | behaviour | behavior | callback - %% Record Definitions - | record - %% Preprocessor - | include | include_lib | define - %% Flow Control in Macros - | undef | ifdef | ifndef | else | endif | elif | error | warning - %% Setting File and Line - | file | line - %% Other attributes - | type | opaque - | export_type - | remote_type | ann_type | paren_type - | any. + root | function | clause | match | tuple | atom | integer | float | string | char | + binary | binary_element | var | call | remote | 'case' | case_expr | case_clauses | + 'fun' | named_fun | query | 'try' | try_catch | try_case | try_after | 'if' | 'catch' | + 'receive' | receive_after | receive_case | nil | cons | map | map_field_assoc | + map_field_exact | lc | lc_expr | generate | bc | bc_expr | b_generate | op | + record_field | record_index | block | module | export | import | compile | vsn | on_load | + behaviour | behavior | callback | record | include | include_lib | define | undef | + ifdef | ifndef | else | endif | elif | error | warning | file | line | type | opaque | + export_type | remote_type | ann_type | paren_type | any. -type tree_node() :: #{type => tree_node_type(), @@ -71,16 +33,16 @@ %% @doc If the beam was not compiled with debug_info %% the code generated by this function will look really ugly %% @end --spec beam_to_string(beam_lib:beam()) -> - {ok, string()} | {error, beam_lib, term()}. +-spec beam_to_string(beam_lib:beam()) -> {ok, string()} | {error, beam_lib, term()}. beam_to_string(BeamPath) -> - case beam_lib:chunks(BeamPath, [abstract_code]) of - {ok, {_, [{abstract_code, {raw_abstract_v1, Forms}}]}} -> - Src = erl_prettypr:format(erl_syntax:form_list(tl(Forms))), - {ok, Src}; - Error -> - Error - end. 
+ case beam_lib:chunks(BeamPath, [abstract_code]) of + {ok, {_, [{abstract_code, {raw_abstract_v1, Forms}}]}} -> + Src = erl_prettypr:format( + erl_syntax:form_list(tl(Forms))), + {ok, Src}; + Error -> + Error + end. %% @doc If the beam was not compiled with debug_info %% the code generated by this function will look really ugly @@ -88,73 +50,76 @@ beam_to_string(BeamPath) -> -spec beam_to_erl(beam_lib:beam(), string()) -> ok. beam_to_erl(BeamPath, ErlPath) -> case beam_to_string(BeamPath) of - {ok, Src} -> - {ok, Fd} = file:open(ErlPath, [write]), - io:fwrite(Fd, "~s~n", [Src]), - file:close(Fd); - Error -> - Error + {ok, Src} -> + {ok, Fd} = file:open(ErlPath, [write]), + io:fwrite(Fd, "~s~n", [Src]), + file:close(Fd); + Error -> + Error end. %% @doc Parses code in a string or binary format and returns the parse tree. -spec parse_tree(string() | binary()) -> tree_node(). parse_tree(Source) -> - SourceStr = to_str(Source), - ScanOpts = [text, return_comments], + SourceStr = to_str(Source), + ScanOpts = [text, return_comments], {ok, Tokens, _} = erl_scan:string(SourceStr, {1, 1}, ScanOpts), - IoString = ktn_io_string:new(SourceStr), - {ok, Forms} = ktn_dodger:parse( IoString - , {1, 1} - , [{scan_opts, [text]}] - ), - ok = file:close(IoString), + IoString = ktn_io_string:new(SourceStr), + {ok, Forms} = ktn_dodger:parse(IoString, {1, 1}, [{scan_opts, [text]}]), + ok = file:close(IoString), Comments = lists:filter(fun is_comment/1, Tokens), - Children = [ to_map(Form) - || Form <- Forms, - %% filter forms that couldn't be parsed - element(1, Form) =/= error - ], - - #{ type => root - , attrs => #{tokens => lists:map(fun token_to_map/1, Tokens)} - , content => to_map(Comments) ++ Children - }. + Children = + [to_map(Form) + || Form <- Forms, + %% filter forms that couldn't be parsed + element(1, Form) =/= error], + #{type => root, + attrs => #{tokens => lists:map(fun token_to_map/1, Tokens)}, + content => to_map(Comments) ++ Children}. -spec is_comment(erl_scan:token()) -> boolean(). -is_comment({comment, _, _}) -> true; -is_comment(_) -> false. +is_comment({comment, _, _}) -> + true; +is_comment(_) -> + false. -spec revert(erl_syntax:syntaxTree()) -> erl_parse:foo(). revert(Form) -> - MaybeReverted = try erl_syntax:revert(Form) - catch _:_ -> Form - end, + MaybeReverted = + try + erl_syntax:revert(Form) + catch + _:_ -> + Form + end, case erl_syntax:is_tree(MaybeReverted) of - true -> revert(erl_syntax:type(Form), Form); - false -> MaybeReverted + true -> + revert(erl_syntax:type(Form), Form); + false -> + MaybeReverted end. -spec revert(atom(), erl_syntax:syntaxTree()) -> erl_parse:foo(). 
revert(attribute, Node0) -> Subs = erl_syntax:subtrees(Node0), - Gs = [[erl_syntax:revert(X) || X <- L] || L <- Subs], + Gs = [[erl_syntax:revert(X) || X <- L] || L <- Subs], Node = erl_syntax:update_tree(Node0, Gs), Name = erl_syntax:attribute_name(Node), Args = erl_syntax:attribute_arguments(Node), - Pos = erl_syntax:get_pos(Node), + Pos = erl_syntax:get_pos(Node), {attribute, Pos, Name, Args}; revert(macro, Node0) -> Subs = erl_syntax:subtrees(Node0), - Gs = [[erl_syntax:revert(X) || X <- L] || L <- Subs], + Gs = [[erl_syntax:revert(X) || X <- L] || L <- Subs], Node = erl_syntax:update_tree(Node0, Gs), Name = erl_syntax:macro_name(Node), Args = erl_syntax:macro_arguments(Node), - Pos = erl_syntax:get_pos(Node), + Pos = erl_syntax:get_pos(Node), {macro, Pos, Name, Args}; revert(_, Node) -> %% When a node can't be reverted we avoid failing by returning @@ -162,9 +127,7 @@ revert(_, Node) -> {atom, [{node, Node}], non_reversible_form}. token_to_map({Type, Attrs}) -> - #{type => Type, - attrs => #{text => get_text(Attrs), - location => get_location(Attrs)}}; + #{type => Type, attrs => #{text => get_text(Attrs), location => get_location(Attrs)}}; token_to_map({Type, Attrs, Value}) -> Map = token_to_map({Type, Attrs}), Map#{value => Value}. @@ -188,15 +151,17 @@ consult(Source) -> SourceStr = to_str(Source), {ok, Tokens, _} = erl_scan:string(SourceStr), Forms = split_when(fun is_dot/1, Tokens), - ParseFun = fun (Form) -> - {ok, Expr} = erl_parse:parse_exprs(Form), - Expr - end, + ParseFun = + fun(Form) -> + {ok, Expr} = erl_parse:parse_exprs(Form), + Expr + end, Parsed = lists:map(ParseFun, Forms), - ExprsFun = fun(P) -> - {value, Value, _} = erl_eval:exprs(P, []), - Value - end, + ExprsFun = + fun(P) -> + {value, Value, _} = erl_eval:exprs(P, []), + Value + end, lists:map(ExprsFun, Parsed). %% Getters @@ -210,8 +175,10 @@ type(undefined) -> -spec attr(term(), tree_node()) -> term() | undefined. attr(Key, #{attrs := Attrs}) -> case maps:is_key(Key, Attrs) of - true -> maps:get(Key, Attrs); - false -> undefined + true -> + maps:get(Key, Attrs); + false -> + undefined end; attr(_Key, Node) when is_map(Node) -> undefined; @@ -220,12 +187,14 @@ attr(_Key, undefined) -> -spec node_attr(term(), tree_node()) -> term() | undefined. node_attr(Key, #{node_attrs := Attrs}) -> - case maps:is_key(Key, Attrs) of - true -> maps:get(Key, Attrs); - false -> undefined - end; + case maps:is_key(Key, Attrs) of + true -> + maps:get(Key, Attrs); + false -> + undefined + end; node_attr(_Key, Node) when is_map(Node) -> - undefined; + undefined; node_attr(_Key, undefined) -> undefined. @@ -267,8 +236,10 @@ source_encoding(Source) -> end. -spec is_dot(tuple()) -> boolean(). -is_dot({dot, _}) -> true; -is_dot(_) -> false. +is_dot({dot, _}) -> + true; +is_dot(_) -> + false. %% @private get_location(Attrs) when is_integer(Attrs) -> @@ -301,571 +272,465 @@ get_text(_Attrs) -> -spec to_map(term()) -> tree_node() | [tree_node()]. 
to_map(ListParsed) when is_list(ListParsed) -> lists:map(fun to_map/1, ListParsed); - to_map({function, Attrs, Name, Arity, Clauses}) -> #{type => function, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - name => Name, - arity => Arity}, + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + name => Name, + arity => Arity}, content => to_map(Clauses)}; to_map({function, Name, Arity}) -> - #{type => function, - attrs => #{name => Name, - arity => Arity}}; + #{type => function, attrs => #{name => Name, arity => Arity}}; to_map({function, Module, Name, Arity}) -> #{type => function, - attrs => #{module => Module, - name => Name, - arity => Arity}}; - + attrs => + #{module => Module, + name => Name, + arity => Arity}}; to_map({clause, Attrs, Patterns, Guards, Body}) -> #{type => clause, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, - node_attrs => #{pattern => to_map(Patterns), - guards => to_map(Guards)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, + node_attrs => #{pattern => to_map(Patterns), guards => to_map(Guards)}, content => to_map(Body)}; - to_map({match, Attrs, Left, Right}) -> #{type => match, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => to_map([Left, Right])}; - to_map({maybe_match, Attrs, Left, Right}) -> #{type => maybe_match, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => to_map([Left, Right])}; - to_map({tuple, Attrs, Elements}) -> #{type => tuple, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => to_map(Elements)}; - %% Literals - -to_map({Type, Attrs, Value}) when - Type == atom; - Type == integer; - Type == float; - Type == string; - Type == char -> +to_map({Type, Attrs, Value}) + when Type == atom; Type == integer; Type == float; Type == string; Type == char -> #{type => Type, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - value => Value}}; - + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + value => Value}}; to_map({bin, Attrs, Elements}) -> #{type => binary, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => to_map(Elements)}; - to_map({bin_element, Attrs, Value, Size, TSL}) -> #{type => binary_element, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - type_spec_list => TSL}, - node_attrs => #{value => to_map(Value), - size => case Size of - default -> #{type => default}; - _ -> to_map(Size) - end }}; - + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + type_spec_list => TSL}, + node_attrs => + #{value => to_map(Value), + size => + case Size of + default -> + #{type => default}; + _ -> + to_map(Size) + end}}; %% Variables - to_map({var, Attrs, Name}) -> #{type => var, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - name => Name}}; - + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + name => Name}}; %% Function call - to_map({call, Attrs, Function, Arguments}) -> #{type => call, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => 
get_location(Attrs), text => get_text(Attrs)}, node_attrs => #{function => to_map(Function)}, content => to_map(Arguments)}; - to_map({remote, Attrs, Module, Function}) -> #{type => remote, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, - node_attrs => #{module => to_map(Module), - function => to_map(Function)}}; - + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, + node_attrs => #{module => to_map(Module), function => to_map(Function)}}; %% case - to_map({'case', Attrs, Expr, Clauses}) -> CaseExpr = to_map({case_expr, Attrs, Expr}), CaseClauses = to_map({case_clauses, Attrs, Clauses}), #{type => 'case', - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, node_attrs => #{expression => to_map(Expr)}, content => [CaseExpr, CaseClauses]}; to_map({case_expr, Attrs, Expr}) -> #{type => case_expr, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => [to_map(Expr)]}; to_map({case_clauses, Attrs, Clauses}) -> #{type => case_clauses, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => to_map(Clauses)}; - %% fun - to_map({'fun', Attrs, {function, Name, Arity}}) -> - #{type => 'fun', - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - name => Name, - arity => Arity}}; - + #{type => 'fun', + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + name => Name, + arity => Arity}}; to_map({'fun', Attrs, {function, Module, Name, Arity}}) -> #{type => 'fun', - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - module => Module, - name => Name, - arity => Arity}}; - + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + module => Module, + name => Name, + arity => Arity}}; to_map({'fun', Attrs, {clauses, Clauses}}) -> #{type => 'fun', - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => to_map(Clauses)}; - to_map({named_fun, Attrs, Name, Clauses}) -> #{type => named_fun, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - name => Name}, + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + name => Name}, content => to_map(Clauses)}; - %% query - deprecated, implemented for completion. 
- -to_map({'query', Attrs, ListCompr}) -> - #{type => 'query', - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, +to_map({query, Attrs, ListCompr}) -> + #{type => query, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => to_map(ListCompr)}; - %% try..catch..after - to_map({'try', Attrs, Body, [], CatchClauses, AfterBody}) -> TryBody = to_map(Body), TryCatch = to_map({try_catch, Attrs, CatchClauses}), TryAfter = to_map({try_after, Attrs, AfterBody}), #{type => 'try', - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, - node_attrs => #{catch_clauses => to_map(CatchClauses), - after_body => to_map(AfterBody)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, + node_attrs => #{catch_clauses => to_map(CatchClauses), after_body => to_map(AfterBody)}, content => TryBody ++ [TryCatch, TryAfter]}; - %% try..of..catch..after - to_map({'try', Attrs, Expr, CaseClauses, CatchClauses, AfterBody}) -> TryCase = to_map({try_case, Attrs, Expr, CaseClauses}), TryCatch = to_map({try_catch, Attrs, CatchClauses}), TryAfter = to_map({try_after, Attrs, AfterBody}), #{type => 'try', - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => [TryCase, TryCatch, TryAfter]}; - to_map({try_case, Attrs, Expr, Clauses}) -> #{type => try_case, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, node_attrs => #{expression => to_map(Expr)}, content => to_map(Clauses)}; - to_map({try_catch, Attrs, Clauses}) -> #{type => try_catch, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => to_map(Clauses)}; - to_map({try_after, Attrs, AfterBody}) -> #{type => try_after, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => to_map(AfterBody)}; - %% maybe..else..end - -to_map({'maybe', Attrs, Body, Else}) -> +to_map({maybe, Attrs, Body, Else}) -> MaybeBody = to_map(Body), MaybeElse = to_map(Else), - #{type => 'maybe', - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + #{type => maybe, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => MaybeBody ++ [MaybeElse]}; - -to_map({'else', Attrs, Clauses}) -> - #{type => 'else', - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, +to_map({else, Attrs, Clauses}) -> + #{type => else, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => to_map(Clauses)}; - %% if - to_map({'if', Attrs, IfClauses}) -> #{type => 'if', - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => to_map(IfClauses)}; - %% catch - to_map({'catch', Attrs, Expr}) -> #{type => 'catch', - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => [to_map(Expr)]}; - %% receive - to_map({'receive', Attrs, Clauses}) -> RecClauses = to_map({receive_case, Attrs, Clauses}), #{type => 'receive', - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, 
content => [RecClauses]}; - to_map({'receive', Attrs, Clauses, AfterExpr, AfterBody}) -> RecClauses = to_map({receive_case, Attrs, Clauses}), RecAfter = to_map({receive_after, Attrs, AfterExpr, AfterBody}), #{type => 'receive', - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => [RecClauses, RecAfter]}; - to_map({receive_case, Attrs, Clauses}) -> #{type => receive_case, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => to_map(Clauses)}; - to_map({receive_after, Attrs, Expr, Body}) -> #{type => receive_after, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, node_attrs => #{expression => to_map(Expr)}, content => to_map(Body)}; - %% List - to_map({nil, Attrs}) -> - #{type => nil, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}}; - + #{type => nil, attrs => #{location => get_location(Attrs), text => get_text(Attrs)}}; to_map({cons, Attrs, Head, Tail}) -> #{type => cons, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => [to_map(Head), to_map(Tail)]}; - %% Map - to_map({map, Attrs, Pairs}) -> #{type => map, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => to_map(Pairs)}; to_map({map, Attrs, Var, Pairs}) -> #{type => map, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, node_attrs => #{var => to_map(Var)}, content => to_map(Pairs)}; - -to_map({Type, Attrs, Key, Value}) when - map_field_exact == Type; - map_field_assoc == Type -> +to_map({Type, Attrs, Key, Value}) when map_field_exact == Type; map_field_assoc == Type -> #{type => Type, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, - node_attrs => #{key => to_map(Key), - value => to_map(Value)}}; - + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, + node_attrs => #{key => to_map(Key), value => to_map(Value)}}; %% List Comprehension - to_map({lc, Attrs, Expr, GeneratorsFilters}) -> LcExpr = to_map({lc_expr, Attrs, Expr}), LcGenerators = to_map(GeneratorsFilters), #{type => lc, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => [LcExpr | LcGenerators]}; - to_map({generate, Attrs, Pattern, Expr}) -> #{type => generate, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, - node_attrs => #{pattern => to_map(Pattern), - expression => to_map(Expr)}}; + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, + node_attrs => #{pattern => to_map(Pattern), expression => to_map(Expr)}}; to_map({lc_expr, Attrs, Expr}) -> #{type => lc_expr, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => [to_map(Expr)]}; - %% Binary Comprehension - to_map({bc, Attrs, Expr, GeneratorsFilters}) -> BcExpr = to_map({bc_expr, Attrs, Expr}), BcGenerators = to_map(GeneratorsFilters), #{type => bc, - attrs => #{location => get_location(Attrs), - text => 
get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => [BcExpr | BcGenerators]}; to_map({b_generate, Attrs, Pattern, Expr}) -> #{type => b_generate, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, - node_attrs => #{pattern => to_map(Pattern), - expression => to_map(Expr)}}; + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, + node_attrs => #{pattern => to_map(Pattern), expression => to_map(Expr)}}; to_map({bc_expr, Attrs, Expr}) -> #{type => bc_expr, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => [to_map(Expr)]}; - %% Operation - to_map({op, Attrs, Operation, Left, Right}) -> #{type => op, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - operation => Operation}, + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + operation => Operation}, content => to_map([Left, Right])}; - to_map({op, Attrs, Operation, Single}) -> #{type => op, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - operation => Operation}, + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + operation => Operation}, content => to_map([Single])}; - %% Record - to_map({record, Attrs, Name, Fields}) -> #{type => record, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - name => Name}, + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + name => Name}, content => to_map(Fields)}; to_map({record, Attrs, Var, Name, Fields}) -> #{type => record, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - name => Name}, + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + name => Name}, node_attrs => #{variable => to_map(Var)}, content => to_map(Fields)}; - to_map({record_index, Attrs, Name, Field}) -> #{type => record_index, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - name => Name}, + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + name => Name}, content => [to_map(Field)]}; - to_map({record_field, Attrs, Name}) -> #{type => record_field, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, node_attrs => #{name => to_map(Name)}}; to_map({record_field, Attrs, Name, Default}) -> #{type => record_field, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, - node_attrs => #{default => to_map(Default), - name => to_map(Name)}}; + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, + node_attrs => #{default => to_map(Default), name => to_map(Name)}}; to_map({record_field, Attrs, Var, Name, Field}) -> #{type => record_field, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - name => Name}, + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + name => Name}, node_attrs => #{variable => to_map(Var)}, content => [to_map(Field)]}; - %% Block - to_map({block, Attrs, Body}) -> #{type => block, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => to_map(Body)}; - %% Record Attribute - to_map({attribute, Attrs, record, {Name, Fields}}) -> #{type => record_attr, - attrs => #{location => get_location(Attrs), - text => 
get_text(Attrs), - name => Name}, + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + name => Name}, content => to_map(Fields)}; to_map({typed_record_field, Field, Type}) -> FieldMap = to_map(Field), #{type => typed_record_field, - attrs => #{location => attr(location, FieldMap), - text => attr(text, FieldMap), - field => FieldMap}, + attrs => + #{location => attr(location, FieldMap), + text => attr(text, FieldMap), + field => FieldMap}, node_attrs => #{type => to_map(Type)}}; - %% Type - to_map({type, Attrs, 'fun', Types}) -> #{type => type, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - name => 'fun'}, + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + name => 'fun'}, content => to_map(Types)}; to_map({type, Attrs, constraint, [Sub, SubType]}) -> #{type => type, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - name => constraint, - subtype => Sub}, + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + name => constraint, + subtype => Sub}, content => to_map(SubType)}; to_map({type, Attrs, bounded_fun, [FunType, Defs]}) -> #{type => type, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - name => bounded_fun}, + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + name => bounded_fun}, node_attrs => #{'fun' => to_map(FunType)}, content => to_map(Defs)}; to_map({type, Attrs, Name, any}) -> to_map({type, Attrs, Name, [any]}); to_map({type, Attrs, any}) -> #{type => type, - attrs => #{location => get_location(Attrs), - text => "...", - name => '...'}}; + attrs => + #{location => get_location(Attrs), + text => "...", + name => '...'}}; to_map({type, Attrs, Name, Types}) -> #{type => type, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - name => Name}, + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + name => Name}, content => to_map(Types)}; to_map({user_type, Attrs, Name, Types}) -> %% any() #{type => user_type, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - name => Name}, + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + name => Name}, content => to_map(Types)}; - to_map({type, Attrs, map_field_assoc, Name, Type}) -> {Location, Text} = case Attrs of Line when is_integer(Attrs) -> {{Line, Line}, undefined}; Attrs -> - {get_location(Attrs), - get_text(Attrs)} + {get_location(Attrs), get_text(Attrs)} end, #{type => type_map_field, - attrs => #{location => Location, - text => Text}, - node_attrs => #{key => to_map(Name), - type => to_map(Type)}}; + attrs => #{location => Location, text => Text}, + node_attrs => #{key => to_map(Name), type => to_map(Type)}}; to_map({remote_type, Attrs, [Module, Function, Args]}) -> #{type => remote_type, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, - node_attrs => #{module => to_map(Module), - function => to_map(Function), - args => to_map(Args)}}; + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, + node_attrs => + #{module => to_map(Module), + function => to_map(Function), + args => to_map(Args)}}; to_map({ann_type, Attrs, [Var, Type]}) -> #{type => record_field, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, - node_attrs => #{var => to_map(Var), - type => to_map(Type)}}; + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, + node_attrs => #{var => to_map(Var), type => to_map(Type)}}; 
to_map({paren_type, Attrs, [Type]}) -> #{type => record_field, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}, + attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, node_attrs => #{type => to_map(Type)}}; to_map(any) -> %% any() #{type => any}; - %% Other Attributes - to_map({attribute, Attrs, type, {Name, Type, Args}}) -> #{type => type_attr, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - name => Name}, - node_attrs => #{args => to_map(Args), - type => to_map(Type)}}; + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + name => Name}, + node_attrs => #{args => to_map(Args), type => to_map(Type)}}; to_map({attribute, Attrs, spec, {{Name, Arity}, Types}}) -> #{type => spec, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - name => Name, - arity => Arity}, + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + name => Name, + arity => Arity}, node_attrs => #{types => to_map(Types)}}; to_map({attribute, Attrs, Type, Value}) -> #{type => Type, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs), - value => Value}}; - + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs), + value => Value}}; %% Comments - to_map({comment, Attrs, _Text}) -> - #{type => comment, - attrs => #{location => get_location(Attrs), - text => get_text(Attrs)}}; - + #{type => comment, attrs => #{location => get_location(Attrs), text => get_text(Attrs)}}; %% Macro - to_map({macro, Attrs, Name, Args}) -> - Args1 = case Args of - none -> []; - _ -> Args - end, + Args1 = + case Args of + none -> + []; + _ -> + Args + end, NameStr = macro_name(Name), - #{ type => macro - , attrs => #{ location => get_location(Attrs) - , text => get_text(Attrs) ++ NameStr - , name => NameStr - } - , content => to_map(Args1) - }; - + #{type => macro, + attrs => + #{location => get_location(Attrs), + text => get_text(Attrs) ++ NameStr, + name => NameStr}, + content => to_map(Args1)}; %% Unhandled forms - to_map(Parsed) when is_tuple(Parsed) -> case erl_syntax:is_tree(Parsed) of - true -> to_map(revert(Parsed)); - false -> throw({unhandled_abstract_form, Parsed}) + true -> + to_map(revert(Parsed)); + false -> + throw({unhandled_abstract_form, Parsed}) end; to_map(Parsed) -> throw({unexpected_abstract_form, Parsed}). -spec macro_name(erl_syntax:syntaxTree()) -> string(). macro_name(Name) -> - case erl_syntax:type(Name) of - atom -> - erl_syntax:atom_name(Name); - variable -> - erl_syntax:variable_literal(Name) - end. + case erl_syntax:type(Name) of + atom -> + erl_syntax:atom_name(Name); + variable -> + erl_syntax:variable_literal(Name) + end. %% @doc Splits a list whenever an element satisfies the When predicate. %% Returns a list of lists where each list includes the matched element @@ -888,10 +753,11 @@ split_when(_When, [], Results) -> lists:reverse(Reversed); split_when(When, [Head | Tail], [Current0 | Rest]) -> Current = [Head | Current0], - Result = case When(Head) of - true -> - [[], Current | Rest]; - false -> - [Current | Rest] - end, + Result = + case When(Head) of + true -> + [[], Current | Rest]; + false -> + [Current | Rest] + end, split_when(When, Tail, Result). diff --git a/src/ktn_dodger.erl b/src/ktn_dodger.erl index 8791ca3..ba62be5 100644 --- a/src/ktn_dodger.erl +++ b/src/ktn_dodger.erl @@ -70,6 +70,8 @@ -module(ktn_dodger). +-format ignore. 
+ -export([parse_file/1, quick_parse_file/1, parse_file/2, quick_parse_file/2, parse/1, quick_parse/1, parse/2, quick_parse/2, parse/3, quick_parse/3, parse_form/2, diff --git a/src/ktn_io_string.erl b/src/ktn_io_string.erl index db07c21..b0356da 100644 --- a/src/ktn_io_string.erl +++ b/src/ktn_io_string.erl @@ -1,19 +1,11 @@ -module(ktn_io_string). -export([new/1]). +-export([start_link/1, init/1, loop/1, skip/2, skip/3]). --export([ start_link/1 - , init/1 - , loop/1 - , skip/2 - , skip/3 - ]). +-type state() :: #{buffer := string(), original := string()}. --type state() :: #{ buffer := string() - , original := string() - }. - --hank([{unnecessary_function_arguments, [skip/3]}]). +-hank([{unnecessary_function_arguments, [{skip, 3}]}]). %%------------------------------------------------------------------------------ %% API @@ -115,13 +107,12 @@ do_get_line("\r" ++ RestStr, Result) -> do_get_line([Ch | RestStr], Result) -> do_get_line(RestStr, [Result, Ch]). --spec get_until(module(), atom(), list(), eof | string()) -> - {term(), string()}. +-spec get_until(module(), atom(), list(), eof | string()) -> {term(), string()}. get_until(Module, Function, XArgs, Str) -> apply_get_until(Module, Function, [], Str, XArgs). -spec apply_get_until(module(), atom(), any(), string() | eof, list()) -> - {term(), string()}. + {term(), string()}. apply_get_until(Module, Function, State, String, XArgs) -> case apply(Module, Function, [State, String | XArgs]) of {done, Result, NewStr} -> @@ -131,12 +122,12 @@ apply_get_until(Module, Function, State, String, XArgs) -> end. -spec skip(string() | {cont, integer(), string()}, term(), integer()) -> - {more, {cont, integer(), string()}} | {done, integer(), string()}. + {more, {cont, integer(), string()}} | {done, integer(), string()}. skip(Str, _Data, Length) -> skip(Str, Length). -spec skip(string() | {cont, integer(), string()}, integer()) -> - {more, {cont, integer(), string()}} | {done, integer(), string()}. + {more, {cont, integer(), string()}} | {done, integer(), string()}. skip(Str, Length) when is_list(Str) -> {more, {cont, Length, Str}}; skip({cont, 0, Str}, Length) -> diff --git a/test/ktn_code_SUITE.erl b/test/ktn_code_SUITE.erl index fa44e53..8424e97 100644 --- a/test/ktn_code_SUITE.erl +++ b/test/ktn_code_SUITE.erl @@ -1,39 +1,23 @@ -module(ktn_code_SUITE). --export([ - all/0, - init_per_suite/1, - end_per_suite/1 - ]). - --export([ - consult/1, - beam_to_string/1, - parse_tree/1, - parse_tree_otp/1, - latin1_parse_tree/1, - to_string/1 - ]). +-export([all/0, init_per_suite/1, end_per_suite/1]). +-export([consult/1, beam_to_string/1, parse_tree/1, parse_tree_otp/1, latin1_parse_tree/1, + to_string/1]). -if(?OTP_RELEASE >= 25). --export([ - parse_maybe/1 - ]). + +-export([parse_maybe/1]). + -endif. --define(EXCLUDED_FUNS, - [ - module_info, - all, - test, - init_per_suite, - end_per_suite - ]). +-define(EXCLUDED_FUNS, [module_info, all, test, init_per_suite, end_per_suite]). -type config() :: [{atom(), term()}]. -if(?OTP_RELEASE >= 23). + -behaviour(ct_suite). + -endif. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% @@ -79,48 +63,47 @@ beam_to_string(_Config) -> -spec parse_tree(config()) -> ok. 
parse_tree(_Config) -> - ModuleNode = #{ type => module - , attrs => #{ location => {1, 2} - , text => "module" - , value => x - } - }, + ModuleNode = + #{type => module, + attrs => + #{location => {1, 2}, + text => "module", + value => x}}, - #{ type := root - , content := _ - } = ktn_code:parse_tree("-module(x)."), + #{type := root, content := _} = ktn_code:parse_tree("-module(x)."), - #{ type := root - , content := [ModuleNode] - } = ktn_code:parse_tree("-module(x)."), + #{type := root, content := [ModuleNode]} = ktn_code:parse_tree("-module(x)."), ok. %% @doc Parse a 100 random OTP modules -spec parse_tree_otp(config()) -> ok. parse_tree_otp(_Config) -> - OTP = code:root_dir(), + OTP = code:root_dir(), Paths = filelib:wildcard(OTP ++ "/**/*.erl"), ShuffledPaths = shuffle(Paths), - _ = [ begin - {ok, Source} = file:read_file(Path), - ktn_code:parse_tree(Source) - end || Path <- lists:sublist(ShuffledPaths, 1, 100) - ], + _ = [begin + {ok, Source} = file:read_file(Path), + ktn_code:parse_tree(Source) + end + || Path <- lists:sublist(ShuffledPaths, 1, 100)], ok. -spec latin1_parse_tree(config()) -> ok. latin1_parse_tree(_Config) -> - error = try ktn_code:parse_tree(<<"%% �\n-module(x).">>) - catch error:_ -> error - end, - #{ type := root - , content := _ - } = ktn_code:parse_tree(<<"%% -*- coding: latin-1 -*-\n" - "%% �" - "-module(x).">>), + error = + try + ktn_code:parse_tree(<<"%% �\n-module(x).">>) + catch + error:_ -> + error + end, + #{type := root, content := _} = + ktn_code:parse_tree(<<"%% -*- coding: latin-1 -*-\n" + "%% �" + "-module(x).">>), ok. @@ -133,20 +116,17 @@ to_string(_Config) -> ok. -if(?OTP_RELEASE >= 25). + -spec parse_maybe(config()) -> ok. parse_maybe(_Config) -> %% Note that to pass this test case, the 'maybe_expr' feature must be enabled. - - #{ type := root - , content := - [#{ type := function - , content := - [#{ type := clause - , content := [#{type := 'maybe'}]}] - }] - } = ktn_code:parse_tree(<<"foo() -> maybe ok ?= ok else _ -> ng end.">>), + #{type := root, + content := + [#{type := function, content := [#{type := clause, content := [#{type := maybe}]}]}]} = + ktn_code:parse_tree(<<"foo() -> maybe ok ?= ok else _ -> ng end.">>), ok. + -endif. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% @@ -155,5 +135,5 @@ parse_maybe(_Config) -> -spec shuffle(any()) -> [any()]. shuffle(List) -> - Items = [{rand:uniform(), X} || X <- List], - [X || {_, X} <- lists:sort(Items)]. + Items = [{rand:uniform(), X} || X <- List], + [X || {_, X} <- lists:sort(Items)]. 
From 43a196e090dfe4aa21cb47bef0bf4f575719ff15 Mon Sep 17 00:00:00 2001 From: Brujo Benavides Date: Wed, 25 May 2022 09:41:07 +0200 Subject: [PATCH 3/4] Ready for OTP 25.0 --- .github/workflows/ci.yml | 2 +- elvis.config | 5 +- rebar.config | 5 +- src/ktn_code.erl | 3 +- src/ktn_dodger.erl | 556 +++++++++++++++++++++------------------ test/ktn_code_SUITE.erl | 5 +- 6 files changed, 304 insertions(+), 272 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5878af8..6bc489f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,7 +10,7 @@ jobs: strategy: matrix: - otp: ['22.3', '23.3', '24.3'] + otp: ['23.3', '24.3', '25.0'] rebar: ['3.18.0'] steps: diff --git a/elvis.config b/elvis.config index 4d39c8e..7f97b4f 100644 --- a/elvis.config +++ b/elvis.config @@ -1,8 +1,9 @@ [{elvis, [{config, - [#{dirs => ["src", "src/**"], + [#{dirs => ["src"], filter => "*.erl", - ruleset => erl_files}, + ruleset => erl_files, + rules => [{elvis_style, atom_naming_convention, #{regex => "^([a-z][A-Za-z0-9]*_?)*$"}}]}, #{dirs => ["test"], filter => "*.erl", ruleset => erl_files, diff --git a/rebar.config b/rebar.config index db4cded..73c6748 100644 --- a/rebar.config +++ b/rebar.config @@ -23,13 +23,12 @@ {edoc_opts, [{todo, true}, {title, "Hank"}, - {overview, "priv/overview.edoc"}, {packages, true}, {subpackages, true}, {source_path, "src"}, {application, rebar3_hank}, {new, true}, - report_missing_types]}. + {report_missing_types, true}]}. {xref_checks, [undefined_function_calls, @@ -42,5 +41,3 @@ {cover_opts, [verbose]}. {alias, [{test, [compile, format, lint, dialyzer, {ct, "--verbose"}, cover, edoc]}]}. - -{hex, [{doc, #{provider => edoc}}]}. diff --git a/src/ktn_code.erl b/src/ktn_code.erl index 7b47832..df89843 100644 --- a/src/ktn_code.erl +++ b/src/ktn_code.erl @@ -3,6 +3,8 @@ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -module(ktn_code). +-elvis([{elvis_style, dont_repeat_yourself, #{min_complexity => 25}}]). + -export([beam_to_string/1, beam_to_erl/2, parse_tree/1, eval/1, consult/1, to_str/1]). %% Getters -export([type/1, attr/2, node_attr/2, content/1]). @@ -19,7 +21,6 @@ behaviour | behavior | callback | record | include | include_lib | define | undef | ifdef | ifndef | else | endif | elif | error | warning | file | line | type | opaque | export_type | remote_type | ann_type | paren_type | any. - -type tree_node() :: #{type => tree_node_type(), attrs => map(), diff --git a/src/ktn_dodger.erl b/src/ktn_dodger.erl index ba62be5..54d88d6 100644 --- a/src/ktn_dodger.erl +++ b/src/ktn_dodger.erl @@ -72,12 +72,12 @@ -format ignore. --export([parse_file/1, quick_parse_file/1, parse_file/2, - quick_parse_file/2, parse/1, quick_parse/1, parse/2, - quick_parse/2, parse/3, quick_parse/3, parse_form/2, - parse_form/3, quick_parse_form/2, quick_parse_form/3, - format_error/1, tokens_to_string/1]). +%% We have snake_case macros here +-elvis([{elvis_style, macro_names, disable}]). +-export([parse_file/1, quick_parse_file/1, parse_file/2, quick_parse_file/2, parse/1, + quick_parse/1, parse/2, quick_parse/2, parse/3, quick_parse/3, parse_form/2, parse_form/3, + quick_parse_form/2, quick_parse_form/3, format_error/1, tokens_to_string/1]). %% The following should be: 1) pseudo-uniquely identifiable, and 2) %% cause nice looking error messages when the parser has to give up. @@ -95,17 +95,12 @@ -type option() :: atom() | {atom(), term()}. --hank([{unnecessary_function_arguments, [ - no_fix/1, - quick_parser/2 - ]}]). 
+-hank([{unnecessary_function_arguments, [{no_fix, 1}, {quick_parser, 2}]}]). %% ===================================================================== %% @equiv parse_file(File, []) --spec parse_file(file:filename()) -> - {'ok', erl_syntax:forms()} | {'error', errorinfo()}. - +-spec parse_file(file:filename()) -> {ok, erl_syntax:forms()} | {error, errorinfo()}. parse_file(File) -> parse_file(File, []). @@ -139,16 +134,14 @@ parse_file(File) -> %% @see erl_syntax:is_form/1 -spec parse_file(file:filename(), [option()]) -> - {'ok', erl_syntax:forms()} | {'error', errorinfo()}. - + {ok, erl_syntax:forms()} | {error, errorinfo()}. parse_file(File, Options) -> parse_file(File, fun parse/3, Options). %% @equiv quick_parse_file(File, []) -spec quick_parse_file(file:filename()) -> - {'ok', erl_syntax:forms()} | {'error', errorinfo()}. - + {ok, erl_syntax:forms()} | {error, errorinfo()}. quick_parse_file(File) -> quick_parse_file(File, []). @@ -167,14 +160,13 @@ quick_parse_file(File) -> %% @see parse_file/2 -spec quick_parse_file(file:filename(), [option()]) -> - {'ok', erl_syntax:forms()} | {'error', errorinfo()}. - + {ok, erl_syntax:forms()} | {error, errorinfo()}. quick_parse_file(File, Options) -> parse_file(File, fun quick_parse/3, Options ++ [no_fail]). parse_file(File, Parser, Options) -> case do_parse_file(utf8, File, Parser, Options) of - {ok, Forms}=Ret -> + {ok, Forms} = Ret -> case find_invalid_unicode(Forms) of none -> Ret; @@ -194,35 +186,36 @@ do_parse_file(DefEncoding, File, Parser, Options) -> case file:open(File, [read]) of {ok, Dev} -> _ = epp:set_encoding(Dev, DefEncoding), - try Parser(Dev, 1, Options) - after ok = file:close(Dev) + try + Parser(Dev, 1, Options) + after + ok = file:close(Dev) end; {error, Error} -> {error, {0, file, Error}} % defer to file:format_error/1 end. -find_invalid_unicode([H|T]) -> +find_invalid_unicode([H | T]) -> case H of {error, {_Line, file_io_server, invalid_unicode}} -> invalid_unicode; _Other -> find_invalid_unicode(T) end; -find_invalid_unicode([]) -> none. +find_invalid_unicode([]) -> + none. %% ===================================================================== %% @equiv parse(IODevice, 1) --spec parse(file:io_device()) -> {'ok', erl_syntax:forms()}. - +-spec parse(file:io_device()) -> {ok, erl_syntax:forms()}. parse(Dev) -> parse(Dev, 1). %% @equiv parse(IODevice, StartLine, []) %% @see parse/1 --spec parse(file:io_device(), integer()) -> {'ok', erl_syntax:forms()}. - +-spec parse(file:io_device(), integer()) -> {ok, erl_syntax:forms()}. parse(Dev, L) -> parse(Dev, L, []). @@ -237,25 +230,20 @@ parse(Dev, L) -> %% @see quick_parse/3 -spec parse(file:io_device(), erl_anno:location(), [option()]) -> - {'ok', erl_syntax:forms()}. - + {ok, erl_syntax:forms()}. parse(Dev, L0, Options) -> parse(Dev, L0, fun parse_form/3, Options). %% @equiv quick_parse(IODevice, 1) --spec quick_parse(file:io_device()) -> - {'ok', erl_syntax:forms()}. - +-spec quick_parse(file:io_device()) -> {ok, erl_syntax:forms()}. quick_parse(Dev) -> quick_parse(Dev, 1). %% @equiv quick_parse(IODevice, StartLine, []) %% @see quick_parse/1 --spec quick_parse(file:io_device(), integer()) -> - {'ok', erl_syntax:forms()}. - +-spec quick_parse(file:io_device(), integer()) -> {ok, erl_syntax:forms()}. quick_parse(Dev, L) -> quick_parse(Dev, L, []). @@ -267,9 +255,7 @@ quick_parse(Dev, L) -> %% @see quick_parse_form/2 %% @see parse/3 --spec quick_parse(file:io_device(), integer(), [option()]) -> - {'ok', erl_syntax:forms()}. 
- +-spec quick_parse(file:io_device(), integer(), [option()]) -> {ok, erl_syntax:forms()}. quick_parse(Dev, L0, Options) -> parse(Dev, L0, fun quick_parse_form/3, Options). @@ -294,10 +280,10 @@ parse(Dev, L0, Fs, Parser, Options) -> %% %% @see quick_parse_form/2 --spec parse_form(file:io_device(), integer()) -> - {'ok', erl_syntax:forms(), integer()} - | {'eof', integer()} | {'error', errorinfo(), integer()}. - +-spec parse_form(file:io_device(), non_neg_integer()) -> + {ok, erl_syntax:forms(), non_neg_integer()} | + {eof, non_neg_integer()} | + {error, errorinfo(), non_neg_integer()}. parse_form(Dev, L0) -> parse_form(Dev, L0, []). @@ -314,9 +300,9 @@ parse_form(Dev, L0) -> %% @see quick_parse_form/3 -spec parse_form(file:io_device(), integer(), [option()]) -> - {'ok', erl_syntax:forms(), integer()} - | {'eof', integer()} | {'error', errorinfo(), integer()}. - + {ok, erl_syntax:forms(), integer()} | + {eof, integer()} | + {error, errorinfo(), integer()}. parse_form(Dev, L0, Options) -> parse_form(Dev, L0, fun normal_parser/2, Options). @@ -324,10 +310,10 @@ parse_form(Dev, L0, Options) -> %% %% @see parse_form/2 --spec quick_parse_form(file:io_device(), integer()) -> - {'ok', erl_syntax:forms(), integer()} - | {'eof', integer()} | {'error', errorinfo(), integer()}. - +-spec quick_parse_form(file:io_device(), non_neg_integer()) -> + {ok, erl_syntax:forms(), non_neg_integer()} | + {eof, non_neg_integer()} | + {error, errorinfo(), non_neg_integer()}. quick_parse_form(Dev, L0) -> quick_parse_form(Dev, L0, []). @@ -339,63 +325,62 @@ quick_parse_form(Dev, L0) -> %% @see parse_form/3 -spec quick_parse_form(file:io_device(), integer(), [option()]) -> - {'ok', erl_syntax:forms(), integer()} - | {'eof', integer()} | {'error', errorinfo(), integer()}. - + {ok, erl_syntax:forms(), integer()} | + {eof, integer()} | + {error, errorinfo(), integer()}. quick_parse_form(Dev, L0, Options) -> parse_form(Dev, L0, fun quick_parser/2, Options). -type pre_fixer() :: fun((erl_scan:tokens()) -> no_fix | {retry, erl_scan:tokens()}). --type post_fixer() :: fun((erl_parse:abstract_form()) -> no_fix | {form, erl_parse:abstract_form()}). +-type post_fixer() :: + fun((erl_parse:abstract_form()) -> no_fix | {form, erl_parse:abstract_form()}). --record(opt, { - clever = false :: boolean(), - parse_macro_definitions = false :: boolean(), - compact_strings = false :: boolean(), - pre_fixer = fun no_fix/1 :: pre_fixer(), - post_fixer = fun no_fix/1 :: post_fixer() -}). +-record(opt, + {clever = false :: boolean(), + parse_macro_definitions = false :: boolean(), + compact_strings = false :: boolean(), + pre_fixer = fun no_fix/1 :: pre_fixer(), + post_fixer = fun no_fix/1 :: post_fixer()}). 
parse_form(Dev, L0, Parser, Options) -> NoFail = proplists:get_bool(no_fail, Options), - Opt = #opt{ - clever = proplists:get_bool(clever, Options), - parse_macro_definitions = proplists:get_bool(parse_macro_definitions, Options), - compact_strings = proplists:get_bool(compact_strings, Options), - pre_fixer = proplists:get_value(pre_fixer, Options, fun no_fix/1), - post_fixer = proplists:get_value(post_fixer, Options, fun no_fix/1) - }, + Opt = #opt{clever = proplists:get_bool(clever, Options), + parse_macro_definitions = proplists:get_bool(parse_macro_definitions, Options), + compact_strings = proplists:get_bool(compact_strings, Options), + pre_fixer = proplists:get_value(pre_fixer, Options, fun no_fix/1), + post_fixer = proplists:get_value(post_fixer, Options, fun no_fix/1)}, ScanOpts = proplists:get_value(scan_opts, Options, []), case io:scan_erl_form(Dev, "", L0, ScanOpts) of {ok, Ts, L1} -> case extract_escript_header(Ts) of - no_header -> parse_form(Parser, Ts, L1, NoFail, Opt); + no_header -> + parse_form(Parser, Ts, L1, NoFail, Opt); {LineNo, {Header, Rest}} -> case parse_form(Parser, Rest, L1, NoFail, Opt) of {ok, Form, L2} -> - {ok, erl_syntax:form_list([ - erl_syntax:set_pos( - erl_syntax:text(tokens_to_string(Header)), - LineNo - ), - Form - ]), L2}; + {ok, + erl_syntax:form_list([erl_syntax:set_pos( + erl_syntax:text(tokens_to_string(Header)), + LineNo), + Form]), + L2}; Error -> Error end end; - {error, _IoErr, _L1} = Err -> Err; - {error, _Reason} -> {eof, L0}; % This is probably encoding problem - {eof, _L1} = Eof -> Eof + {error, _IoErr, _L1} = Err -> + Err; + {error, _Reason} -> + {eof, L0}; % This is probably encoding problem + {eof, _L1} = Eof -> + Eof end. extract_escript_header([{'#', Anno}, {'!', _} | _] = Ts) -> LineNo = erl_anno:line(Anno), - { - LineNo, - lists:splitwith(fun(Token) -> erl_scan:line(Token) == LineNo end, Ts) - }; -extract_escript_header(_) -> no_header. + {LineNo, lists:splitwith(fun(Token) -> erl_scan:line(Token) == LineNo end, Ts)}; +extract_escript_header(_) -> + no_header. parse_form(Parser, Ts, L1, NoFail, Opt) -> case catch {ok, Parser(Ts, Opt)} of @@ -405,9 +390,9 @@ parse_form(Parser, Ts, L1, NoFail, Opt) -> IoErr = io_error(L1, Term), {error, IoErr, L1}; {parse_error, _IoErr} when NoFail -> - {ok, erl_syntax:set_pos( - erl_syntax:text(tokens_to_string(Ts)), - start_pos(Ts, L1)), + {ok, + erl_syntax:set_pos( + erl_syntax:text(tokens_to_string(Ts)), start_pos(Ts, L1)), L1}; {parse_error, IoErr} -> {error, IoErr, L1}; @@ -485,7 +470,8 @@ parse_tokens_as_terms(Ts, PreFix, FormFix) -> end. expression_dot() -> - erl_syntax:set_ann(erl_syntax:text("."), [expression_dot]). + erl_syntax:set_ann( + erl_syntax:text("."), [expression_dot]). %% --------------------------------------------------------------------- %% Quick scanning/parsing - deletes macro definitions and other @@ -514,59 +500,60 @@ quickscan_form([{'-', _L}, {atom, La, else} | _Ts]) -> kill_form(La); quickscan_form([{'-', _L}, {atom, La, endif} | _Ts]) -> kill_form(La); -quickscan_form([{'-', L}, {'?', _}, {Type, _, _}=N | [{'(', _} | _]=Ts]) - when Type =:= atom; Type =:= var -> +quickscan_form([{'-', L}, {'?', _}, {Type, _, _} = N | [{'(', _} | _] = Ts]) + when Type =:= atom; Type =:= var -> %% minus, macro and open parenthesis at start of form - assume that %% the macro takes no arguments; e.g. `-?foo(...).' 
- quickscan_macros_1(N, Ts, [{'-', L}]); -quickscan_form([{'?', _L}, {Type, _, _}=N | [{'(', _} | _]=Ts]) - when Type =:= atom; Type =:= var -> + do_quickscan_macros(N, Ts, [{'-', L}]); +quickscan_form([{'?', _L}, {Type, _, _} = N | [{'(', _} | _] = Ts]) + when Type =:= atom; Type =:= var -> %% macro and open parenthesis at start of form - assume that the %% macro takes no arguments (see scan_macros for details) - quickscan_macros_1(N, Ts, []); + do_quickscan_macros(N, Ts, []); quickscan_form(Ts) -> quickscan_macros(Ts). kill_form(L) -> - [{atom, L, ?pp_form}, {'(', L}, {')', L}, {'->', L}, {atom, L, kill}, - {dot, L}]. + [{atom, L, ?pp_form}, {'(', L}, {')', L}, {'->', L}, {atom, L, kill}, {dot, L}]. quickscan_macros(Ts) -> quickscan_macros(Ts, []). -quickscan_macros([{'?',_}, {Type, _, A} | Ts], [{string, L, S} | As]) +quickscan_macros([{'?', _}, {Type, _, A} | Ts], [{string, L, S} | As]) when Type =:= atom; Type =:= var -> %% macro after a string literal: change to a single string {_, Ts1} = skip_macro_args(Ts), S1 = S ++ quick_macro_string(A), quickscan_macros(Ts1, [{string, L, S1} | As]); -quickscan_macros([{'?',_}, {Type, _, _}=N | [{'(',_}|_]=Ts], - [{':',_}|_]=As) - when Type =:= atom; Type =:= var -> +quickscan_macros([{'?', _}, {Type, _, _} = N | [{'(', _} | _] = Ts], [{':', _} | _] = As) + when Type =:= atom; Type =:= var -> %% macro and open parenthesis after colon - check the token %% following the arguments (see scan_macros for details) Ts1 = case skip_macro_args(Ts) of - {_, [{'->',_} | _] = Ts2} -> Ts2; - {_, [{'when',_} | _] = Ts2} -> Ts2; - _ -> Ts %% assume macro without arguments + {_, [{'->', _} | _] = Ts2} -> + Ts2; + {_, [{'when', _} | _] = Ts2} -> + Ts2; + _ -> + Ts %% assume macro without arguments end, - quickscan_macros_1(N, Ts1, As); -quickscan_macros([{'?',_}, {Type, _, _}=N | Ts], As) - when Type =:= atom; Type =:= var -> + do_quickscan_macros(N, Ts1, As); +quickscan_macros([{'?', _}, {Type, _, _} = N | Ts], As) + when Type =:= atom; Type =:= var -> %% macro with or without arguments {_, Ts1} = skip_macro_args(Ts), - quickscan_macros_1(N, Ts1, As); + do_quickscan_macros(N, Ts1, As); quickscan_macros([T | Ts], As) -> quickscan_macros(Ts, [T | As]); quickscan_macros([], As) -> lists:reverse(As). %% (after a macro has been found and the arglist skipped, if any) -quickscan_macros_1({_Type, _, A}, [{string, L, S} | Ts], As) -> +do_quickscan_macros({_Type, _, A}, [{string, L, S} | Ts], As) -> %% string literal following macro: change to single string S1 = quick_macro_string(A) ++ S, quickscan_macros(Ts, [{string, L, S1} | As]); -quickscan_macros_1({_Type, L, A}, Ts, As) -> +do_quickscan_macros({_Type, L, A}, Ts, As) -> %% normal case - just replace the macro with an atom quickscan_macros(Ts, [{atom, L, quick_macro_atom(A)} | As]). @@ -578,42 +565,45 @@ quick_macro_string(A) -> %% Skipping to the end of a macro call, tracking open/close constructs. -skip_macro_args([{'(',_}=T | Ts]) -> +skip_macro_args([{'(', _} = T | Ts]) -> skip_macro_args(Ts, [')'], [T]); skip_macro_args(Ts) -> {[], Ts}. 
-skip_macro_args([{'(',_}=T | Ts], Es, As) -> +skip_macro_args([{'(', _} = T | Ts], Es, As) -> skip_macro_args(Ts, [')' | Es], [T | As]); -skip_macro_args([{'{',_}=T | Ts], Es, As) -> +skip_macro_args([{'{', _} = T | Ts], Es, As) -> skip_macro_args(Ts, ['}' | Es], [T | As]); -skip_macro_args([{'[',_}=T | Ts], Es, As) -> +skip_macro_args([{'[', _} = T | Ts], Es, As) -> skip_macro_args(Ts, [']' | Es], [T | As]); -skip_macro_args([{'<<',_}=T | Ts], Es, As) -> +skip_macro_args([{'<<', _} = T | Ts], Es, As) -> skip_macro_args(Ts, ['>>' | Es], [T | As]); -skip_macro_args([{'begin',_}=T | Ts], Es, As) -> +skip_macro_args([{'begin', _} = T | Ts], Es, As) -> skip_macro_args(Ts, ['end' | Es], [T | As]); -skip_macro_args([{'if',_}=T | Ts], Es, As) -> +skip_macro_args([{'if', _} = T | Ts], Es, As) -> skip_macro_args(Ts, ['end' | Es], [T | As]); -skip_macro_args([{'case',_}=T | Ts], Es, As) -> +skip_macro_args([{'case', _} = T | Ts], Es, As) -> skip_macro_args(Ts, ['end' | Es], [T | As]); -skip_macro_args([{'receive',_}=T | Ts], Es, As) -> +skip_macro_args([{'receive', _} = T | Ts], Es, As) -> skip_macro_args(Ts, ['end' | Es], [T | As]); -skip_macro_args([{'try',_}=T | Ts], Es, As) -> +skip_macro_args([{'try', _} = T | Ts], Es, As) -> skip_macro_args(Ts, ['end' | Es], [T | As]); -skip_macro_args([{'cond',_}=T | Ts], Es, As) -> +skip_macro_args([{'cond', _} = T | Ts], Es, As) -> skip_macro_args(Ts, ['end' | Es], [T | As]); -skip_macro_args([{E,_}=T | Ts], [E], As) -> %final close +skip_macro_args([{E, _} = T | Ts], + [E], + As) -> %final close {lists:reverse([T | As]), Ts}; -skip_macro_args([{E,_}=T | Ts], [E | Es], As) -> %matching close +skip_macro_args([{E, _} = T | Ts], + [E | Es], + As) -> %matching close skip_macro_args(Ts, Es, [T | As]); skip_macro_args([T | Ts], Es, As) -> skip_macro_args(Ts, Es, [T | As]); skip_macro_args([], _Es, _As) -> throw({error, macro_args}). -filter_form({function, _, ?pp_form, _, - [{clause, _, [], [], [{atom, _, kill}]}]}) -> +filter_form({function, _, ?pp_form, _, [{clause, _, [], [], [{atom, _, kill}]}]}) -> none; filter_form(T) -> T. @@ -625,14 +615,10 @@ filter_form(T) -> normal_parser(Ts0, Opt) -> case scan_form(Ts0, Opt) of Ts when is_list(Ts) -> - rewrite_form( - parse_tokens( - Ts, - normal_parser_prefix(Opt), - fun fix_form/1, - Opt#opt.post_fixer - ) - ); + rewrite_form(parse_tokens(Ts, + normal_parser_prefix(Opt), + fun fix_form/1, + Opt#opt.post_fixer)); Node -> Node end. @@ -640,68 +626,72 @@ normal_parser(Ts0, Opt) -> normal_parser_prefix(#opt{pre_fixer = PreFixer} = Opt) -> DefaultPrefix = default_prefix(Opt), fun(Ts) -> - case PreFixer(Ts) of - no_fix -> DefaultPrefix(Ts); - {retry, Ts1} -> - case DefaultPrefix(Ts1) of - no_fix -> - {retry, Ts1}; - Other -> - Other - end - end + case PreFixer(Ts) of + no_fix -> + DefaultPrefix(Ts); + {retry, Ts1} -> + case DefaultPrefix(Ts1) of + no_fix -> + {retry, Ts1}; + Other -> + Other + end + end end. 
-default_prefix(#opt{parse_macro_definitions = true, compact_strings = false}) -> fun no_fix/1; -default_prefix(#opt{parse_macro_definitions = true, compact_strings = true}) -> fun fix_contiguous_strings/1; -default_prefix(#opt{parse_macro_definitions = false, compact_strings = false}) -> fun fix_define/1; +default_prefix(#opt{parse_macro_definitions = true, compact_strings = false}) -> + fun no_fix/1; +default_prefix(#opt{parse_macro_definitions = true, compact_strings = true}) -> + fun fix_contiguous_strings/1; +default_prefix(#opt{parse_macro_definitions = false, compact_strings = false}) -> + fun fix_define/1; default_prefix(#opt{parse_macro_definitions = false, compact_strings = true}) -> fun(Ts) -> - case fix_contiguous_strings(Ts) of - no_fix -> fix_define(Ts); - {retry, Ts1} -> - case fix_define(Ts1) of - no_fix -> - {retry, Ts1}; - Other -> - Other - end - end + case fix_contiguous_strings(Ts) of + no_fix -> + fix_define(Ts); + {retry, Ts1} -> + case fix_define(Ts1) of + no_fix -> + {retry, Ts1}; + Other -> + Other + end + end end. scan_form([{'-', _L}, {atom, La, define} | Ts], #opt{parse_macro_definitions = false}) -> - [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, - {atom, La, define} | Ts]; + [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, {atom, La, define} | Ts]; scan_form([{'-', _L}, {atom, La, define} | Ts], Opt) -> - [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, - {atom, La, define} | scan_macros(Ts, Opt)]; + [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, {atom, La, define} + | scan_macros(Ts, Opt)]; scan_form([{'-', _L}, {atom, La, undef} | Ts], Opt) -> - [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, - {atom, La, undef} | scan_macros(Ts, Opt)]; + [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, {atom, La, undef} + | scan_macros(Ts, Opt)]; scan_form([{'-', _L}, {atom, La, include} | Ts], Opt) -> - [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, - {atom, La, include} | scan_macros(Ts, Opt)]; + [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, {atom, La, include} + | scan_macros(Ts, Opt)]; scan_form([{'-', _L}, {atom, La, include_lib} | Ts], Opt) -> - [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, - {atom, La, include_lib} | scan_macros(Ts, Opt)]; + [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, {atom, La, include_lib} + | scan_macros(Ts, Opt)]; scan_form([{'-', _L}, {atom, La, ifdef} | Ts], Opt) -> - [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, - {atom, La, ifdef} | scan_macros(Ts, Opt)]; + [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, {atom, La, ifdef} + | scan_macros(Ts, Opt)]; scan_form([{'-', _L}, {atom, La, ifndef} | Ts], Opt) -> - [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, - {atom, La, ifndef} | scan_macros(Ts, Opt)]; + [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, {atom, La, ifndef} + | scan_macros(Ts, Opt)]; scan_form([{'-', _L}, {'if', La} | Ts], Opt) -> - [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, - {atom, La, 'if'} | scan_macros(Ts, Opt)]; + [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, {atom, La, 'if'} + | scan_macros(Ts, Opt)]; scan_form([{'-', _L}, {atom, La, elif} | Ts], Opt) -> - [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, - {atom, La, 'elif'} | scan_macros(Ts, Opt)]; + [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, {atom, La, elif} + | scan_macros(Ts, Opt)]; scan_form([{'-', _L}, {atom, La, else} | Ts], Opt) -> - [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', 
La}, - {atom, La, else} | scan_macros(Ts, Opt)]; + [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, {atom, La, else} + | scan_macros(Ts, Opt)]; scan_form([{'-', _L}, {atom, La, endif} | Ts], Opt) -> - [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, - {atom, La, endif} | scan_macros(Ts, Opt)]; + [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, {atom, La, endif} + | scan_macros(Ts, Opt)]; scan_form([{'-', _L}, {atom, La, error} | Ts], _Opt) -> Desc = build_info_string("-error", Ts), ErrorInfo = {La, ?MODULE, {error, Desc}}, @@ -710,13 +700,13 @@ scan_form([{'-', _L}, {atom, La, warning} | Ts], _Opt) -> Desc = build_info_string("-warning", Ts), ErrorInfo = {La, ?MODULE, {warning, Desc}}, erl_syntax:error_marker(ErrorInfo); -scan_form([{'-', L}, {'?', L1}, {Type, _, _}=N | [{'(', _} | _]=Ts], Opt) - when Type =:= atom; Type =:= var -> +scan_form([{'-', L}, {'?', L1}, {Type, _, _} = N | [{'(', _} | _] = Ts], Opt) + when Type =:= atom; Type =:= var -> %% minus, macro and open parenthesis at start of form - assume that %% the macro takes no arguments; e.g. `-?foo(...).' macro(L1, N, Ts, [{'-', L}], Opt); -scan_form([{'?', L}, {Type, _, _}=N | [{'(', _} | _]=Ts], Opt) - when Type =:= atom; Type =:= var -> +scan_form([{'?', L}, {Type, _, _} = N | [{'(', _} | _] = Ts], Opt) + when Type =:= atom; Type =:= var -> %% macro and open parenthesis at start of form - assume that the %% macro takes no arguments; probably a function declaration on the %% form `?m(...) -> ...', which will not parse if it is rewritten as @@ -733,14 +723,14 @@ build_info_string(Prefix, Ts0) -> scan_macros(Ts, Opt) -> scan_macros(Ts, [], Opt). -scan_macros([{'?', _}=M, {Type, _, _}=N | Ts], [{string, L, _}=S | As], - #opt{clever = true}=Opt) - when Type =:= atom; Type =:= var -> +scan_macros([{'?', _} = M, {Type, _, _} = N | Ts], + [{string, L, _} = S | As], + #opt{clever = true} = Opt) + when Type =:= atom; Type =:= var -> %% macro after a string literal: be clever and insert ++ scan_macros([M, N | Ts], [{'++', L}, S | As], Opt); -scan_macros([{'?', L}, {Type, _, _}=N | [{'(',_}|_]=Ts], - [{':',_}|_]=As, Opt) - when Type =:= atom; Type =:= var -> +scan_macros([{'?', L}, {Type, _, _} = N | [{'(', _} | _] = Ts], [{':', _} | _] = As, Opt) + when Type =:= atom; Type =:= var -> %% macro and open parentheses after colon - probably a call %% `m:?F(...)' so the argument list might belong to the call, not %% the macro - but it could also be a try-clause pattern @@ -748,20 +738,20 @@ scan_macros([{'?', L}, {Type, _, _}=N | [{'(',_}|_]=Ts], %% arguments to decide {Args, Rest} = skip_macro_args(Ts), case Rest of - [{'->',_} | _] -> + [{'->', _} | _] -> macro_call(Args, L, N, Rest, As, Opt); - [{'when',_} | _] -> + [{'when', _} | _] -> macro_call(Args, L, N, Rest, As, Opt); _ -> macro(L, N, Ts, As, Opt) end; -scan_macros([{'?', L}, {Type, _, _}=N | [{'(',_}|_]=Ts], As, Opt) - when Type =:= atom; Type =:= var -> +scan_macros([{'?', L}, {Type, _, _} = N | [{'(', _} | _] = Ts], As, Opt) + when Type =:= atom; Type =:= var -> %% macro with arguments {Args, Rest} = skip_macro_args(Ts), macro_call(Args, L, N, Rest, As, Opt); -scan_macros([{'?', L }, {Type, _, _}=N | Ts], As, Opt) - when Type =:= atom; Type =:= var -> +scan_macros([{'?', L}, {Type, _, _} = N | Ts], As, Opt) + when Type =:= atom; Type =:= var -> %% macro without arguments macro(L, N, Ts, As, Opt); scan_macros([T | Ts], As, Opt) -> @@ -773,22 +763,22 @@ scan_macros([], As, _Opt) -> %% (we insert parentheses to preserve the precedences when parsing). 
macro(L, {Type, _, A}, Rest, As, Opt) -> - scan_macros_1([], Rest, [{atom,L,macro_atom(Type,A)} | As], Opt). + do_scan_macros([], Rest, [{atom, L, macro_atom(Type, A)} | As], Opt). -macro_call([{'(',_}, {')',_}], L, {_, Ln, _}=N, Rest, As, Opt) -> +macro_call([{'(', _}, {')', _}], L, {_, Ln, _} = N, Rest, As, Opt) -> {Open, Close} = parentheses(As), - scan_macros_1([], Rest, - lists:reverse(Open ++ [{atom,L,?macro_call}, - {'(',L}, N, {')',Ln}] ++ Close, - As), Opt); -macro_call([{'(',_} | Args], L, {_, Ln, _}=N, Rest, As, Opt) -> + do_scan_macros([], + Rest, + lists:reverse(Open ++ [{atom, L, ?macro_call}, {'(', L}, N, {')', Ln}] ++ Close, + As), + Opt); +macro_call([{'(', _} | Args], L, {_, Ln, _} = N, Rest, As, Opt) -> {Open, Close} = parentheses(As), %% note that we must scan the argument list; it may not be skipped - scan_macros_1(Args ++ Close, + do_scan_macros(Args ++ Close, Rest, - lists:reverse(Open ++ [{atom,L,?macro_call}, - {'(',L}, N, {',',Ln}], - As), Opt). + lists:reverse(Open ++ [{atom, L, ?macro_call}, {'(', L}, N, {',', Ln}], As), + Opt). macro_atom(atom, A) -> list_to_atom(?atom_prefix ++ atom_to_list(A)); @@ -801,22 +791,22 @@ macro_atom(var, A) -> parentheses([{string, _, _} | _]) -> {[], []}; parentheses(_) -> - {[{'(',0}], [{')',0}]}. + {[{'(', 0}], [{')', 0}]}. %% (after a macro has been found and the arglist skipped, if any) -scan_macros_1(Args, [{string, L, _} | _]=Rest, As, - #opt{clever = true}=Opt) -> +do_scan_macros(Args, [{string, L, _} | _] = Rest, As, #opt{clever = true} = Opt) -> %% string literal following macro: be clever and insert ++ scan_macros(Args ++ [{'++', L} | Rest], As, Opt); -scan_macros_1(Args, Rest, As, Opt) -> +do_scan_macros(Args, Rest, As, Opt) -> %% normal case - continue scanning scan_macros(Args ++ Rest, As, Opt). -rewrite_form({function, L, ?pp_form, _, - [{clause, _, [], [], [{call, _, A, As}]}]}) -> - erl_syntax:set_pos(erl_syntax:attribute(A, rewrite_list(As)), L); +rewrite_form({function, L, ?pp_form, _, [{clause, _, [], [], [{call, _, A, As}]}]}) -> + erl_syntax:set_pos( + erl_syntax:attribute(A, rewrite_list(As)), L); rewrite_form({function, L, ?pp_form, _, [{clause, _, [], [], [A]}]}) -> - erl_syntax:set_pos(erl_syntax:attribute(A), L); + erl_syntax:set_pos( + erl_syntax:attribute(A), L); rewrite_form(T) -> rewrite(T). @@ -855,31 +845,32 @@ rewrite(Node) -> M = erl_syntax:macro(A, rewrite_list(As)), erl_syntax:copy_pos(Node, M); _ -> - rewrite_1(Node) + do_rewrite(Node) end; _ -> - rewrite_1(Node) + do_rewrite(Node) end; _ -> - rewrite_1(Node) + do_rewrite(Node) end. -rewrite_1(Node) -> +do_rewrite(Node) -> case erl_syntax:subtrees(Node) of [] -> Node; Gs -> - Node1 = erl_syntax:make_tree(erl_syntax:type(Node), - [[rewrite(T) || T <- Ts] - || Ts <- Gs]), + Node1 = + erl_syntax:make_tree( + erl_syntax:type(Node), [[rewrite(T) || T <- Ts] || Ts <- Gs]), erl_syntax:copy_pos(Node, Node1) end. %% attempting a rescue operation on a token sequence for a single form %% if it could not be parsed after the normal treatment -fix_form([{atom, _, ?pp_form}, {'(', _}, {')', _}, {'->', _}, - {atom, _, define}, {'(', _} | _]=Ts) -> +fix_form([{atom, _, ?pp_form}, {'(', _}, {')', _}, {'->', _}, {atom, _, define}, {'(', _} + | _] = + Ts) -> case lists:reverse(Ts) of [{dot, _}, {')', _} | _] -> {retry, Ts, fun fix_stringyfied_macros/1}; @@ -895,26 +886,38 @@ fix_form(_Ts) -> fix_stringyfied_macros(Ts) -> {retry, fix_stringyfied_macros(Ts, []), fun fix_define/1}. 
-fix_stringyfied_macros([], Ts) -> lists:reverse(Ts); +fix_stringyfied_macros([], Ts) -> + lists:reverse(Ts); fix_stringyfied_macros([{'?', Pos}, {atom, Pos, MacroName} | Rest], Ts) -> NextTs = case atom_to_list(MacroName) of ?var_prefix ++ Name -> - [{atom, Pos, list_to_atom(?var_prefix ++ [$?|Name])} | Ts]; + [{atom, Pos, list_to_atom(?var_prefix ++ [$? | Name])} | Ts]; _ -> [{atom, Pos, MacroName}, {'?', Pos} | Ts] end, fix_stringyfied_macros(Rest, NextTs); -fix_stringyfied_macros([Other|Rest], Ts) -> - fix_stringyfied_macros(Rest, [Other|Ts]). - -fix_define([{atom, L, ?pp_form}, {'(', _}, {')', _}, {'->', _}, - {atom, La, define}, {'(', _}, N, {',', _} | Ts]) -> +fix_stringyfied_macros([Other | Rest], Ts) -> + fix_stringyfied_macros(Rest, [Other | Ts]). + +fix_define([{atom, L, ?pp_form}, + {'(', _}, + {')', _}, + {'->', _}, + {atom, La, define}, + {'(', _}, + N, + {',', _} + | Ts]) -> [{dot, _}, {')', _} | Ts1] = lists:reverse(Ts), S = tokens_to_string(lists:reverse(Ts1)), - A = erl_syntax:set_pos(erl_syntax:atom(define), La), - Txt = erl_syntax:set_pos(erl_syntax:text(S), La), - {form, erl_syntax:set_pos(erl_syntax:attribute(A, [N, Txt]), L)}; + A = erl_syntax:set_pos( + erl_syntax:atom(define), La), + Txt = erl_syntax:set_pos( + erl_syntax:text(S), La), + {form, + erl_syntax:set_pos( + erl_syntax:attribute(A, [N, Txt]), L)}; fix_define(_Ts) -> no_fix. @@ -926,24 +929,29 @@ fix_contiguous_strings(Ts) -> {retry, NextTs} end. -fix_contiguous_strings([], Ts) -> lists:reverse(Ts); -fix_contiguous_strings([{string, L1, S1} = First, {string, L2, S2} = Second | Rest], Ts) -> +fix_contiguous_strings([], Ts) -> + lists:reverse(Ts); +fix_contiguous_strings([{string, L1, S1} = First, {string, L2, S2} = Second | Rest], + Ts) -> case {erl_anno:text(L1), erl_anno:text(L2)} of {T1, T2} when is_list(T1), is_list(T2) -> Separator = case {erl_anno:location(L1), erl_anno:location(L2)} of - {L, L} -> $\s; - {_, _} -> $\n % different lines + {L, L} -> + $\s; + {_, _} -> + $\n % different lines end, - NewL = erl_anno:set_text(T1 ++ [Separator|T2], L1), + NewL = erl_anno:set_text(T1 ++ [Separator | T2], L1), fix_contiguous_strings([{string, NewL, S1 ++ S2} | Rest], Ts); _ -> - fix_contiguous_strings([Second|Rest], [First|Ts]) + fix_contiguous_strings([Second | Rest], [First | Ts]) end; -fix_contiguous_strings([Other|Rest], Ts) -> - fix_contiguous_strings(Rest, [Other|Ts]). +fix_contiguous_strings([Other | Rest], Ts) -> + fix_contiguous_strings(Rest, [Other | Ts]). -no_fix(_) -> no_fix. +no_fix(_) -> + no_fix. %% %% @doc Generates a string corresponding to the given token sequence. @@ -956,14 +964,24 @@ token_to_string(T) -> Text end. -token_to_string(atom, A) -> io_lib:write_atom(A); -token_to_string(string, S) -> io_lib:write_string(S); -token_to_string(char, C) -> io_lib:write_char(C); -token_to_string(float, F) -> lists:flatten(io_lib:format("~p", [F])); -token_to_string(integer, N) -> lists:flatten(io_lib:format("~p", [N])); -token_to_string(var, A) -> atom_to_list(A); -token_to_string(dot, dot) -> ".\n"; -token_to_string(Same, Same) -> atom_to_list(Same). 
+token_to_string(atom, A) -> + io_lib:write_atom(A); +token_to_string(string, S) -> + io_lib:write_string(S); +token_to_string(char, C) -> + io_lib:write_char(C); +token_to_string(float, F) -> + lists:flatten( + io_lib:format("~p", [F])); +token_to_string(integer, N) -> + lists:flatten( + io_lib:format("~p", [N])); +token_to_string(var, A) -> + atom_to_list(A); +token_to_string(dot, dot) -> + ".\n"; +token_to_string(Same, Same) -> + atom_to_list(Same). -spec tokens_to_string([term()]) -> string(). tokens_to_string([T | Ts]) -> @@ -971,21 +989,35 @@ tokens_to_string([T | Ts]) -> tokens_to_string([]) -> "". -maybe_space(_, []) -> ""; -maybe_space(C, [T|_]) -> maybe_space_between(C, erl_scan:category(T)). - -maybe_space_between(dot, _) -> ""; % No space at the end -maybe_space_between('#', '!') -> ""; % \ -maybe_space_between('!', '/') -> ""; % \_ No space for escript headers -maybe_space_between('/', atom) -> ""; % / -maybe_space_between(atom, '/') -> ""; % / -maybe_space_between('#', _) -> ""; % No space for records and maps -maybe_space_between(atom, '{') -> ""; % No space for records -maybe_space_between('?', _) -> ""; % No space for macro names -maybe_space_between('-', atom) -> ""; % No space for attributes -maybe_space_between(atom, '(') -> ""; % No space for function calls -maybe_space_between(var, '(') -> ""; % No space for function calls -maybe_space_between(_, _) -> " ". % Space between anything else +maybe_space(_, []) -> + ""; +maybe_space(C, [T | _]) -> + maybe_space_between(C, erl_scan:category(T)). + +maybe_space_between(dot, _) -> + ""; % No space at the end +maybe_space_between('#', '!') -> + ""; % \ +maybe_space_between('!', '/') -> + ""; % \_ No space for escript headers +maybe_space_between('/', atom) -> + ""; % / +maybe_space_between(atom, '/') -> + ""; % / +maybe_space_between('#', _) -> + ""; % No space for records and maps +maybe_space_between(atom, '{') -> + ""; % No space for records +maybe_space_between('?', _) -> + ""; % No space for macro names +maybe_space_between('-', atom) -> + ""; % No space for attributes +maybe_space_between(atom, '(') -> + ""; % No space for function calls +maybe_space_between(var, '(') -> + ""; % No space for function calls +maybe_space_between(_, _) -> + " ". % Space between anything else %% @hidden %% @doc Callback function for formatting error descriptors. Not for diff --git a/test/ktn_code_SUITE.erl b/test/ktn_code_SUITE.erl index 8424e97..8586df7 100644 --- a/test/ktn_code_SUITE.erl +++ b/test/ktn_code_SUITE.erl @@ -45,8 +45,9 @@ end_per_suite(Config) -> consult(_Config) -> [{a}, {b}] = ktn_code:consult("{a}. {b}."), [] = ktn_code:consult(""), - [{a}, {b}, {c, d, e}] = ktn_code:consult("{a}. {b}. {c, d, e}."), - [{a}, {b}, {c, d, e}] = ktn_code:consult("{a}.\r\n{b}.\r\n{c, d, e}."), + Expected = [{a}, {b}, {c, d, e}], + Expected = ktn_code:consult("{a}. {b}. 
{c, d, e}."), + Expected = ktn_code:consult("{a}.\r\n{b}.\r\n{c, d, e}."), [{'.'}] = ktn_code:consult("{'.'}.\n"), [{<<"ble.bla">>}, {"github.com"}] = From ed927fb20edb8ff01c7c57953ae082ab55daf149 Mon Sep 17 00:00:00 2001 From: Brujo Benavides Date: Wed, 25 May 2022 10:22:55 +0200 Subject: [PATCH 4/4] Properly use the new feature maybe_expr in OTP25 --- .github/workflows/ci.yml | 6 +++--- rebar.config | 10 +++++++++- src/ktn_code.erl | 10 +++++----- src/ktn_dodger.erl | 6 +++--- test/ktn_code_SUITE.erl | 6 +----- 5 files changed, 21 insertions(+), 17 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6bc489f..2c5bd53 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -31,9 +31,9 @@ jobs: path: ~/.cache/rebar3 key: rebar3-cache-for-os-${{runner.os}}-otp-${{steps.setup-beam.outputs.otp-version}}-rebar3-${{steps.setup-beam.outputs.rebar3-version}}-hash-${{hashFiles('rebar.lock')}} - name: Compile - run: rebar3 compile + run: ERL_FLAGS="-enable-feature all" rebar3 compile - name: Format check - run: rebar3 format --verify + run: ERL_FLAGS="-enable-feature all" rebar3 format --verify - name: Run tests and verifications - run: rebar3 test + run: ERL_FLAGS="-enable-feature all" rebar3 test diff --git a/rebar.config b/rebar.config index 73c6748..41cf892 100644 --- a/rebar.config +++ b/rebar.config @@ -1,7 +1,13 @@ %% -*- mode: erlang;erlang-indent-level: 2;indent-tabs-mode: nil -*- %% ex: ts=4 sw=4 ft=erlang et {erl_opts, - [warn_unused_import, warn_export_vars, warnings_as_errors, verbose, report, debug_info]}. + [warn_unused_import, + warn_export_vars, + warnings_as_errors, + verbose, + report, + debug_info, + {feature, maybe_expr, enable}]}. {minimum_otp_vsn, "23"}. @@ -40,4 +46,6 @@ {cover_opts, [verbose]}. +{format, [{options, #{unquote_atoms => false}}]}. + {alias, [{test, [compile, format, lint, dialyzer, {ct, "--verbose"}, cover, edoc]}]}. diff --git a/src/ktn_code.erl b/src/ktn_code.erl index df89843..fdb316d 100644 --- a/src/ktn_code.erl +++ b/src/ktn_code.erl @@ -19,7 +19,7 @@ map_field_exact | lc | lc_expr | generate | bc | bc_expr | b_generate | op | record_field | record_index | block | module | export | import | compile | vsn | on_load | behaviour | behavior | callback | record | include | include_lib | define | undef | - ifdef | ifndef | else | endif | elif | error | warning | file | line | type | opaque | + ifdef | ifndef | 'else' | endif | elif | error | warning | file | line | type | opaque | export_type | remote_type | ann_type | paren_type | any. 
-type tree_node() :: #{type => tree_node_type(), @@ -431,14 +431,14 @@ to_map({try_after, Attrs, AfterBody}) -> attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => to_map(AfterBody)}; %% maybe..else..end -to_map({maybe, Attrs, Body, Else}) -> +to_map({'maybe', Attrs, Body, Else}) -> MaybeBody = to_map(Body), MaybeElse = to_map(Else), - #{type => maybe, + #{type => 'maybe', attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => MaybeBody ++ [MaybeElse]}; -to_map({else, Attrs, Clauses}) -> - #{type => else, +to_map({'else', Attrs, Clauses}) -> + #{type => 'else', attrs => #{location => get_location(Attrs), text => get_text(Attrs)}, content => to_map(Clauses)}; %% if diff --git a/src/ktn_dodger.erl b/src/ktn_dodger.erl index 54d88d6..f34bdee 100644 --- a/src/ktn_dodger.erl +++ b/src/ktn_dodger.erl @@ -496,7 +496,7 @@ quickscan_form([{'-', _L}, {'if', La} | _Ts]) -> kill_form(La); quickscan_form([{'-', _L}, {atom, La, elif} | _Ts]) -> kill_form(La); -quickscan_form([{'-', _L}, {atom, La, else} | _Ts]) -> +quickscan_form([{'-', _L}, {atom, La, 'else'} | _Ts]) -> kill_form(La); quickscan_form([{'-', _L}, {atom, La, endif} | _Ts]) -> kill_form(La); @@ -686,8 +686,8 @@ scan_form([{'-', _L}, {'if', La} | Ts], Opt) -> scan_form([{'-', _L}, {atom, La, elif} | Ts], Opt) -> [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, {atom, La, elif} | scan_macros(Ts, Opt)]; -scan_form([{'-', _L}, {atom, La, else} | Ts], Opt) -> - [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, {atom, La, else} +scan_form([{'-', _L}, {atom, La, 'else'} | Ts], Opt) -> + [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, {atom, La, 'else'} | scan_macros(Ts, Opt)]; scan_form([{'-', _L}, {atom, La, endif} | Ts], Opt) -> [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La}, {atom, La, endif} diff --git a/test/ktn_code_SUITE.erl b/test/ktn_code_SUITE.erl index 8586df7..1aa1499 100644 --- a/test/ktn_code_SUITE.erl +++ b/test/ktn_code_SUITE.erl @@ -14,12 +14,8 @@ -type config() :: [{atom(), term()}]. --if(?OTP_RELEASE >= 23). - -behaviour(ct_suite). --endif. - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% Common test %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% @@ -123,7 +119,7 @@ parse_maybe(_Config) -> %% Note that to pass this test case, the 'maybe_expr' feature must be enabled. #{type := root, content := - [#{type := function, content := [#{type := clause, content := [#{type := maybe}]}]}]} = + [#{type := function, content := [#{type := clause, content := [#{type := 'maybe'}]}]}]} = ktn_code:parse_tree(<<"foo() -> maybe ok ?= ok else _ -> ng end.">>), ok.
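
As a quick illustration of what the last patch enables (this note and the module below are not part of the patch series): with {feature, maybe_expr, enable} in erl_opts and the emulator started with ERL_FLAGS="-enable-feature all", a maybe ... else ... end expression parses into a node whose type is the quoted atom 'maybe', as the parse_maybe test above asserts. The module name ktn_maybe_example is purely illustrative; ktn_code:parse_tree/1, ktn_code:content/1 and ktn_code:type/1 are the katana-code functions exercised by the suite.

%% Hypothetical consumer sketch; requires OTP 25 with the maybe_expr
%% feature enabled, e.g. ERL_FLAGS="-enable-feature all" rebar3 shell
-module(ktn_maybe_example).

-export([maybe_node_types/0]).

%% Parses a maybe..else..end expression and returns the types of the
%% nodes in the clause body; expected result: ['maybe']
maybe_node_types() ->
    Root = ktn_code:parse_tree(<<"foo() -> maybe ok ?= ok else _ -> ng end.">>),
    [Function] = ktn_code:content(Root),
    [Clause] = ktn_code:content(Function),
    [ktn_code:type(Node) || Node <- ktn_code:content(Clause)].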