diff --git a/.github/images.sh b/.github/images.sh index 736647899..0b01839e7 100755 --- a/.github/images.sh +++ b/.github/images.sh @@ -10,7 +10,7 @@ docker build \ --target vunit \ -t "vunit/dev:${TAG}" \ - <<-EOF -$(curl -fsSL https://raw.githubusercontent.com/ghdl/docker/master/dockerfiles/run_debian) +$(curl -fsSL https://raw.githubusercontent.com/ghdl/docker/master/run_debian.dockerfile) FROM $TAG AS vunit COPY --from=ghdl/pkg:buster-$PKG / / diff --git a/.github/run.sh b/.github/run.sh new file mode 100755 index 000000000..da3e2e6c1 --- /dev/null +++ b/.github/run.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env sh + +docker run --rm -t \ + -v $(pwd):/src \ + -w /src \ + -e PYTHONPATH=/src \ + "$IMAGE" \ + "$@" diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml new file mode 100644 index 000000000..4bb3fb844 --- /dev/null +++ b/.github/workflows/coverage.yml @@ -0,0 +1,44 @@ +name: 'coverage' + +on: + push: + schedule: + - cron: '0 0 * * 5' + +jobs: + + coverage: + runs-on: ubuntu-latest + env: + DOCKER_REGISTRY: docker.pkg.github.com + IMAGE: docker.pkg.github.com/vunit/vunit/dev:llvm + steps: + + - uses: actions/checkout@v2 + with: + submodules: recursive + + - uses: actions/setup-python@v2 + with: + python-version: 3.7 + + - name: Docker login + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + echo "$GITHUB_TOKEN" | docker login -u vunit-gha --password-stdin "$DOCKER_REGISTRY" + docker pull $IMAGE + docker logout "$DOCKER_REGISTRY" + + - name: Run coverage + run: | + ./.github/run.sh tox -e coverage + ./.github/run.sh coverage html --directory=htmlcov + + - name: Report coverage + run: ./.github/run.sh coverage report -m --skip-covered + + - uses: actions/upload-artifact@v2 + with: + name: VUnit_coverage + path: htmlcov diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index e5cedc201..86ccd1b8e 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -11,19 +11,30 @@ jobs: docs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 + + - uses: actions/checkout@v2 with: submodules: recursive - - uses: actions/setup-python@v1 + fetch-depth: 0 + + - uses: actions/setup-python@v2 with: python-version: 3.8 - - name: install dependencies + + - name: Install dependencies run: | pip install -U pip --progress-bar off pip install -U virtualenv tox --progress-bar off - - name: build docs + + - name: Build docs run: tox -e py38-docs -- --color - - name: 'publish site to gh-pages' + + - uses: Actions/upload-artifact@v2 + with: + name: VUnit-site + path: .tox/py38-docs/tmp/docsbuild/ + + - name: Publish site to gh-pages if: github.event_name != 'pull_request' && github.repository == 'VUnit/vunit' env: GH_DEPKEY: ${{ secrets.VUNIT_GITHUB_IO_DEPLOY_KEY }} diff --git a/.github/workflows/images.yml b/.github/workflows/images.yml index 9f77892a2..cc3d0df25 100644 --- a/.github/workflows/images.yml +++ b/.github/workflows/images.yml @@ -25,22 +25,27 @@ jobs: env: DOCKER_REGISTRY: docker.pkg.github.com steps: - - uses: actions/checkout@v1 - - name: build image + + - uses: actions/checkout@v2 + + - name: Build image env: TAG: ${{ matrix.task.tag }} PKG: ${{ matrix.task.pkg }} run: | ./.github/images.sh - - name: docker login + + - name: Docker login run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login -u vunit-gha --password-stdin "$DOCKER_REGISTRY" - - name: docker push + + - name: Docker push if: github.repository == 'VUnit/vunit' run: | DIMG="vunit/dev:${{ matrix.task.tag }}" GHIMG="${DOCKER_REGISTRY}/vunit/$DIMG" docker tag 
"$DIMG" "$GHIMG" docker push "$GHIMG" - - name: docker logout + + - name: Docker logout run: docker logout "$DOCKER_REGISTRY" if: always() diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 61bd6c2e6..f04f7d6f0 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -13,21 +13,34 @@ env: jobs: +# +# Python code format +# + fmt: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 - - uses: actions/setup-python@v1 + + - uses: actions/checkout@v2 + + - uses: actions/setup-python@v2 with: python-version: 3.8 - - name: install dependencies + + - name: Install dependencies run: | pip install -U pip --progress-bar off pip install -U virtualenv tox --progress-bar off - - name: run 'black' + + - name: Run 'black' run: tox -e py38-fmt -- --check +# +# Linux linting and unit tests +# + lin: + runs-on: ubuntu-latest strategy: fail-fast: false max-parallel: 2 @@ -37,21 +50,28 @@ jobs: 36-unit, 38-unit, ] - runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 - - uses: actions/setup-python@v1 + + - uses: actions/checkout@v2 + + - uses: actions/setup-python@v2 with: python-version: 3.8 - - name: install dependencies + + - name: Install dependencies run: | pip install -U pip --progress-bar off pip install -U virtualenv tox --progress-bar off - - name: run job - run: | - tox -e py${{ matrix.task }} -- --color=yes + + - name: Run job + run: tox -e py${{ matrix.task }} -- --color=yes + +# +# Docker (Linux) acceptance tests +# docker: + runs-on: ubuntu-latest strategy: fail-fast: false max-parallel: 2 @@ -60,24 +80,31 @@ jobs: {do: 38-acceptance, tag: llvm}, {do: 38-vcomponents, tag: mcode}, ] - runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 + + - uses: actions/checkout@v2 with: submodules: recursive - - name: docker login + + - name: Docker login run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login -u vunit-gha --password-stdin "$DOCKER_REGISTRY" - - name: run job - run: | - docker run --rm -tv $(pwd):/src -w /src "$DOCKER_REGISTRY"/vunit/vunit/dev:${{ matrix.task.tag }} tox -e py${{ matrix.task.do }}-ghdl - - name: docker logout + + - name: Run job + run: docker run --rm -tv $(pwd):/src -w /src "$DOCKER_REGISTRY"/vunit/vunit/dev:${{ matrix.task.tag }} tox -e py${{ matrix.task.do }}-ghdl + + - name: Docker logout run: docker logout "$DOCKER_REGISTRY" if: always() +# +# Windows with latest stable GHDL +# + win: + runs-on: windows-latest strategy: fail-fast: false - max-parallel: 4 + max-parallel: 3 matrix: task: [ 38-acceptance-ghdl, @@ -86,49 +113,60 @@ jobs: 36-unit, 38-unit, ] - runs-on: windows-latest steps: - - uses: actions/checkout@v1 - - name: git submodule update - run: git submodule update --init --recursive - if: (endsWith( matrix.task, '-lint' ) || endsWith( matrix.task, '-unit' )) == false - - uses: actions/setup-python@v1 + + - uses: actions/checkout@v2 + with: + submodules: recursive + + - uses: actions/setup-python@v2 with: python-version: 3.8 - - name: install dependencies + + - name: Install dependencies run: | pip install -U pip --progress-bar off pip install -U virtualenv tox --progress-bar off - - name: install GHDL + + - name: Install GHDL if: endsWith( matrix.task, '-ghdl' ) shell: bash run: | - curl -fsSL -o ghdl.zip https://github.com/ghdl/ghdl/releases/download/v0.36/ghdl-0.36-mingw32-mcode.zip + curl -fsSL -o ghdl.zip https://github.com/ghdl/ghdl/releases/download/v0.37/ghdl-0.37-mingw32-mcode.zip 7z x ghdl.zip "-o../ghdl" -y - mv ../ghdl/GHDL/0.36-mingw32-mcode/ ../ghdl-v0.36 + mv 
../ghdl/GHDL/0.37-mingw32-mcode/ ../ghdl-v0.37 rm -rf ../ghdl ghdl.zip - - name: run job + + - name: Run job shell: bash run: | - export PATH=$PATH:$(pwd)/../ghdl-v0.36/bin + export PATH=$PATH:$(pwd)/../ghdl-v0.37/bin tox -e py${{ matrix.task }} -- --color=yes +# +# Deploy to PyPI +# + deploy: runs-on: ubuntu-latest needs: [ fmt, lin, docker, win ] - if: github.event_name == 'release' && github.event.action == 'created' + if: github.event_name == 'push' && github.event.created == 'true' steps: - - uses: actions/checkout@v1 + + - uses: actions/checkout@v2 with: submodules: recursive - - uses: actions/setup-python@v1 + + - uses: actions/setup-python@v2 with: python-version: 3.8 - - name: install dependencies + + - name: Install dependencies run: | pip install -U pip pip install -U setuptools wheel twine - - name: build and deploy to PyPI + + - name: Build and deploy to PyPI env: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.VUNIT_HDL_PYPI_DEPLOY_TOKEN }} diff --git a/.gitignore b/.gitignore index c8fb98bd3..fc6e29d05 100644 --- a/.gitignore +++ b/.gitignore @@ -7,16 +7,19 @@ .devcontainer .tox .vscode -docs/_build -docs/examples.rst -docs/release_notes.rst env/ -tests/acceptance/*_out -tests/unit/test_report_output.txt venv/ -vunit/vhdl/check/test/tb_check_equal.vhd -vunit/vhdl/check/test/tb_check_match.vhd error.csv log.csv my_logger.csv -examples/vhdl/array_axis_vcs/src/test/data/out.csv +/build/ +/dist/ +/docs/_build +/docs/_theme +/docs/examples.rst +/docs/release_notes.rst +/examples/vhdl/array_axis_vcs/src/test/data/out.csv +/tests/acceptance/*_out +/tests/unit/test_report_output.txt +/vunit/vhdl/check/test/tb_check_equal.vhd +/vunit/vhdl/check/test/tb_check_match.vhd diff --git a/LICENSE.rst b/LICENSE.rst new file mode 100644 index 000000000..1fd49f8e4 --- /dev/null +++ b/LICENSE.rst @@ -0,0 +1,15 @@ +**VUnit**, except for the projects below, is released under the terms of +`Mozilla Public License, v. 2.0`_. |copy| 2014-2020 Lars Asplund, lars.anders.asplund@gmail.com. + +The following libraries are `redistributed`_ with VUnit for your convenience: + +* **OSVVM** (``vunit/vhdl/osvvm``): these files are licensed under the terms of `Apache License, v 2.0`_, |copy| 2010 - 2020 by `SynthWorks Design Inc`_. All rights reserved. + +* **JSON-for-VHDL** (``vunit/vhdl/JSON-for-VHDL``): these files are licensed under the terms of `Apache License, v 2.0`_, |copy| 2015 - 2020 Patrick Lehmann. + +.. |copy| unicode:: U+000A9 .. COPYRIGHT SIGN +.. _redistributed: https://github.com/VUnit/vunit/blob/master/.gitmodules +.. _Mozilla Public License, v. 2.0: http://mozilla.org/MPL/2.0/ +.. _ARTISTIC License: http://www.perlfoundation.org/artistic_license_2_0 +.. _Apache License, v 2.0: http://www.apache.org/licenses/LICENSE-2.0 +.. _SynthWorks Design Inc: http://www.synthworks.com diff --git a/LICENSE.txt b/LICENSE.txt deleted file mode 100644 index bcdca799e..000000000 --- a/LICENSE.txt +++ /dev/null @@ -1,15 +0,0 @@ -VUnit ------ - -VUnit except for OSVVM (see below) is released under the terms of -Mozilla Public License, v. 2.0. - -Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com - -OSVVM ------ - -OSVVM is redistributed as a submodule to VUnit for your convenience. OSVVM and derivative work -located under examples/vhdl/osvvm_integration/src are licensed under the terms of Artistic License 2.0. 
- -Copyright (c) 2006-2016, SynthWorks Design Inc http://www.synthworks.com diff --git a/README.md b/README.md new file mode 100644 index 000000000..1f6c3914f --- /dev/null +++ b/README.md @@ -0,0 +1,60 @@ +

+<!-- VUnit logo and workflow status badges: 'docs', 'images', 'push', 'coverage' -->
+ +**VUnit** is an [open source](LICENSE.rst) unit testing framework for VHDL/SystemVerilog. It features the functionality +needed to realize continuous and automated testing of your HDL code. VUnit doesn't replace but rather complements +traditional testing methodologies by supporting a *test early and often* approach through automation. +**Read more** [about VUnit](http://vunit.github.io/about.html). + +Contributing in the form of code, docs, feedback, ideas or bug reports is welcome. +Read our [contribution guide](https://vunit.github.io/contributing.html) to get started. + +


diff --git a/README.rst b/README.rst deleted file mode 100644 index 439a58602..000000000 --- a/README.rst +++ /dev/null @@ -1,35 +0,0 @@ -What is VUnit? -============== - -VUnit is an open source unit testing framework for VHDL/SystemVerilog -released under the terms of Mozilla Public License, v. 2.0. It -features the functionality needed to realize continuous and automated -testing of your HDL code. VUnit doesn't replace but rather complements -traditional testing methodologies by supporting a "test early and -often" approach through automation. - -**Read more on our** `Website `__ - -Contributing -============ -Contributing in the form of code, feedback, ideas or bug reports are -welcome. Read our `contribution guide -`__ to get started. - -License -======= -VUnit ------ - -VUnit except for OSVVM (see below) is released under the terms of -Mozilla Public License, v. 2.0. - -Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com - -OSVVM ------ - -OSVVM is redistributed as a submodule to VUnit for your convenience. OSVVM and derivative work -located under examples/vhdl/osvvm_integration/src are licensed under the terms of Artistic License 2.0. - -Copyright (c) 2006-2016, SynthWorks Design Inc http://www.synthworks.com diff --git a/docs/_static/VUnit_logo_175x175.png b/docs/_static/VUnit_logo_175x175.png new file mode 100644 index 000000000..3836371fa Binary files /dev/null and b/docs/_static/VUnit_logo_175x175.png differ diff --git a/docs/_static/VUnit_logo_420x420.png b/docs/_static/VUnit_logo_420x420.png index 6736f55df..0fe53f477 100644 Binary files a/docs/_static/VUnit_logo_420x420.png and b/docs/_static/VUnit_logo_420x420.png differ diff --git a/docs/_static/style.css b/docs/_static/style.css deleted file mode 100644 index 05126cf43..000000000 --- a/docs/_static/style.css +++ /dev/null @@ -1,24 +0,0 @@ -div.sphinxsidebar a.icon { - text-decoration:none; - border-bottom:none; -} - -.fa-twitter { - color:#00aced; -} - -.fa-youtube { - color:#bb0000; -} - -.fa-envelope { - color:#c4c4b0; -} - -.fa-weixin { - color:#8cdd81; -} - -img.logo { - border-radius: 6%; -} \ No newline at end of file diff --git a/docs/vunit_demo.gif b/docs/_static/vunit_demo.gif similarity index 100% rename from docs/vunit_demo.gif rename to docs/_static/vunit_demo.gif diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html deleted file mode 100644 index 73882fd9d..000000000 --- a/docs/_templates/layout.html +++ /dev/null @@ -1,15 +0,0 @@ -{% extends "!layout.html" %} - -{%- block extrahead %} -{{ super() }} - - - - -{% endblock %} diff --git a/docs/_templates/quicklinks.html b/docs/_templates/quicklinks.html deleted file mode 100644 index 95c9484ee..000000000 --- a/docs/_templates/quicklinks.html +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/docs/about.rst b/docs/about.rst index c07ba155f..75ebd960b 100644 --- a/docs/about.rst +++ b/docs/about.rst @@ -4,11 +4,11 @@ What is VUnit? ============== VUnit is an open source unit testing framework for VHDL/SystemVerilog -released under the terms of Mozilla Public License, v. 2.0. It +released under the terms of `Mozilla Public License, v. 2.0`_. It features the functionality needed to realize continuous and automated testing of your HDL code. VUnit doesn't replace but rather complements -traditional testing methodologies by supporting a "test early and -often" approach through automation. +traditional testing methodologies by supporting a *"test early and +often"* approach through automation. 
VUnit reduces the overhead of testing by supporting automatic discovery of test benches and compilation order as well as including @@ -54,77 +54,17 @@ Main Features - Outputs JUnit report files for better `Jenkins`_ :ref:`integration `. - Builds on the commonly used `xUnit`_ architecture. -Requirements ------------- - -VUnit depends on a number of components as listed below. Full VUnit -functionality requires Python and a simulator supported by the VUnit -Python test runner. However, VUnit can run with limited functionality -entirely within VHDL using the :doc:`VHDL test runner -<./run/user_guide>`. - - -Languages -********* - -- VHDL-93 -- VHDL-2002 -- VHDL-2008 -- VHDL-2019 -- Verilog -- SystemVerilog (Support is experimental) - -Operating systems -***************** - -- Windows -- Linux -- Mac OS X - -Python -****** - -- Python 3.6 or higher - -Simulators -********** - -- `Aldec Riviera-PRO`_ - - - Tested with Riviera-PRO 2015.06, 2015.10, 2016.02, 2016.10 (x64/x86). - - Only VHDL -- `Aldec Active-HDL`_ - - - Tested with Active-HDL 9.3, 10.1, 10.2, 10.3 (x64/x86) - - Only VHDL -- `Mentor Graphics ModelSim/Questa`_ - - - Tested with 10.1 - 10.5 -- `GHDL`_ - - - Only VHDL - - Works with versions >= 0.33 - - Tested with LLVM and mcode backends, gcc backend might work aswell. - - Integrated support for using `GTKWave`_ to view waveforms. -- `Cadence Incisive`_ (**Experimental**) - - - Community contribution by `Colin Marquardt - `_. VUnit maintainers does not have - access to this simulator to verify the functionality. - - Run ``incisive_vhdl_fixup.py`` to remove VHDL constructs that are - not compatible with Incisive - Getting Started --------------- There are a number of ways to get started. -- The :ref:`VUnit User Guide ` will guide users on how to use start using +- :ref:`VUnit User Guide ` will guide users on how to use start using the basic features of VUnit but also provides information about more specific and advanced usage. -- The :ref:`Run Library User Guide ` presents the run packages. -- The :ref:`Check Library User Guide ` presents the check packages. -- The :ref:`Logging Library User Guide ` presents the log packages. +- :ref:`Run Library User Guide ` presents the run packages. +- :ref:`Check Library User Guide ` presents the check packages. +- :ref:`Logging Library User Guide ` presents the log packages. - There are also various presentations of VUnit on `YouTube`_. For example `an introduction to unit testing (6 min)`_ and a `short introduction to VUnit (12 min)`_. @@ -135,55 +75,36 @@ Support Any bug reports, feature requests or questions about the usage of VUnit can be made by creating a `new issue`_. -Credits -------- +Credits and License +------------------- -Founders -******** -- `Lars Asplund `_ -- `Olof Kraigher `_ +- Founders: -Notable contributors -******************** -- `Colin Marquardt `_: Cadence Incisive support -- `Sławomir Siluk `_: Verification Components such as Avalon and Wishbone + - `Lars Asplund `_ + - `Olof Kraigher `_ -License -------- +- Notable contributors: + + - `Colin Marquardt `_: -.. |copy| unicode:: U+000A9 .. COPYRIGHT SIGN + - Cadence Incisive support -VUnit -***** + - `Sławomir Siluk `_: -VUnit except for OSVVM (see below) is released under the terms of -`Mozilla Public License, v. 2.0`_. + - Verification Components (such as Avalon and Wishbone) -|copy| 2014-2018 Lars Asplund, lars.anders.asplund@gmail.com. 
+ - `Unai Martinez-Corral `_: -OSVVM -***** + - Co-simulation with GHDL's VHPIDIRECT interface (`VUnit/cosim `_, based on `ghdl/ghdl-cosim `_) -OSVVM is `redistributed`_ with VUnit for your convenience. These -files are licensed under the terms of `ARTISTIC License`_. + - Continuous Integration (CI) -|copy| 2010 - 2017 by SynthWorks Design Inc. All rights reserved. +.. include:: license.rst .. _xUnit: http://en.wikipedia.org/wiki/XUnit .. _Jenkins: http://jenkins-ci.org/ -.. _Aldec Riviera-PRO: https://www.aldec.com/en/products/functional_verification/riviera-pro -.. _Aldec Active-HDL: https://www.aldec.com/en/products/fpga_simulation/active-hdl -.. _Mentor Graphics ModelSim/Questa: http://www.mentor.com/products/fv/modelsim/ -.. _Cadence Incisive: https://www.cadence.com/content/cadence-www/global/en_US/home/tools/system-design-and-verification/simulation-and-testbench-verification/incisive-enterprise-simulator.html -.. _GHDL: https://github.com/ghdl/ghdl -.. _GTKWave: http://gtkwave.sourceforge.net/ .. _YouTube: https://www.youtube.com/channel/UCCPVCaeWkz6C95aRUTbIwdg .. _an introduction to unit testing (6 min): https://www.youtube.com/watch?v=PZuBqcxS8t4 .. _short introduction to VUnit (12 min): https://www.youtube.com/watch?v=D8s_VLD91tw -.. _Development document: https://github.com/VUnit/vunit/blob/master/developing.md .. _new issue: https://github.com/VUnit/vunit/issues/new -.. _Mozilla Public License, v. 2.0: http://mozilla.org/MPL/2.0/ -.. _redistributed: https://github.com/VUnit/vunit/blob/master/vunit/vhdl/osvvm -.. _modifications: https://github.com/VUnit/vunit/commit/25fce1b3700e746c3fa23bd7157777dd4f20f0d6 -.. _ARTISTIC License: http://www.perlfoundation.org/artistic_license_2_0 diff --git a/docs/posts/2015_09_24_short_introduction_to_vunit/post.rst b/docs/blog/2015_09_24_short_introduction_to_vunit.rst similarity index 98% rename from docs/posts/2015_09_24_short_introduction_to_vunit/post.rst rename to docs/blog/2015_09_24_short_introduction_to_vunit.rst index d59607d8d..bb6ca9ae1 100644 --- a/docs/posts/2015_09_24_short_introduction_to_vunit/post.rst +++ b/docs/blog/2015_09_24_short_introduction_to_vunit.rst @@ -1,7 +1,6 @@ -.. post:: Sep 24, 2015 - :tags: VUnit - :author: lasplund - :excerpt: 2 +:tags: VUnit +:author: lasplund +:excerpt: 2 .. _short_introduction_to_vunit_post: @@ -10,7 +9,7 @@ Short Introduction to VUnit **Originally posted and commented on** `LinkedIn `_. -.. image:: 4x.png +.. image:: img/4x.png `VUnit `__ is a free and open source unit testing framework for VHDL that was released in November last year. diff --git a/docs/posts/2015_10_08_who_is_using_UVM/post.rst b/docs/blog/2015_10_08_who_is_using_UVM.rst similarity index 93% rename from docs/posts/2015_10_08_who_is_using_UVM/post.rst rename to docs/blog/2015_10_08_who_is_using_UVM.rst index ff5439852..95082d309 100644 --- a/docs/posts/2015_10_08_who_is_using_UVM/post.rst +++ b/docs/blog/2015_10_08_who_is_using_UVM.rst @@ -1,7 +1,6 @@ -.. post:: Oct 10, 2015 - :tags: VUnit - :author: lasplund - :excerpt: 2 +:tags: VUnit +:author: lasplund +:excerpt: 2 Who's Using UVM (or Not) for FPGA Development, and Why? ======================================================= @@ -9,7 +8,7 @@ Who's Using UVM (or Not) for FPGA Development, and Why? **Originally posted and commented on** `LinkedIn `__. -.. image:: uvm.png +.. image:: img/uvm.png Over the last few years a number of open source test solutions have emerged. 
I'm talking about tools like our diff --git a/docs/posts/2015_12_15_free_and_open_source_verification_with_vunit_and_ghdl/post.rst b/docs/blog/2015_12_15_free_and_open_source_verification_with_vunit_and_ghdl.rst similarity index 94% rename from docs/posts/2015_12_15_free_and_open_source_verification_with_vunit_and_ghdl/post.rst rename to docs/blog/2015_12_15_free_and_open_source_verification_with_vunit_and_ghdl.rst index 7c7887676..89cdb0f32 100644 --- a/docs/posts/2015_12_15_free_and_open_source_verification_with_vunit_and_ghdl/post.rst +++ b/docs/blog/2015_12_15_free_and_open_source_verification_with_vunit_and_ghdl.rst @@ -1,7 +1,6 @@ -.. post:: Dec 15, 2015 - :tags: VUnit - :author: lasplund - :excerpt: 2 +:tags: VUnit +:author: lasplund +:excerpt: 2 Free and Open Source Verification with VUnit and GHDL @@ -9,7 +8,7 @@ Free and Open Source Verification with VUnit and GHDL **Originally posted and commented on** `LinkedIn `__. -.. image:: CPU_load.png +.. image:: img/CPU_load.png `VUnit `__ is a free and open source (FOSS) unit testing framework for VHDL that supports ModelSim, diff --git a/docs/posts/2016_01_26_welcome_to_our_new_website/post.rst b/docs/blog/2016_01_26_welcome_to_our_new_website.rst similarity index 91% rename from docs/posts/2016_01_26_welcome_to_our_new_website/post.rst rename to docs/blog/2016_01_26_welcome_to_our_new_website.rst index 5402b83b1..ce60c9fe7 100644 --- a/docs/posts/2016_01_26_welcome_to_our_new_website/post.rst +++ b/docs/blog/2016_01_26_welcome_to_our_new_website.rst @@ -1,11 +1,10 @@ -.. post:: Jan 26, 2016 - :tags: VUnit - :author: lasplund - :excerpt: 1 +:tags: VUnit +:author: lasplund +:excerpt: 1 Welcome to Our New Website ========================== -.. image:: vunit_github_io.png +.. image:: img/vunit_github_io.png Today I moved VUnit from my personal GitHub account to a dedicated `VUnit account `__. Links to the diff --git a/docs/posts/2016_01_29_chat_with_vunit_users_and_developers/post.rst b/docs/blog/2016_01_29_chat_with_vunit_users_and_developers.rst similarity index 72% rename from docs/posts/2016_01_29_chat_with_vunit_users_and_developers/post.rst rename to docs/blog/2016_01_29_chat_with_vunit_users_and_developers.rst index 97dc1df51..03a976530 100644 --- a/docs/posts/2016_01_29_chat_with_vunit_users_and_developers/post.rst +++ b/docs/blog/2016_01_29_chat_with_vunit_users_and_developers.rst @@ -1,7 +1,6 @@ -.. post:: Jan 29, 2016 - :tags: VUnit - :author: lasplund - :excerpt: 1 +:tags: VUnit +:author: lasplund +:excerpt: 1 Chat with VUnit Users and Developers ==================================== diff --git a/docs/posts/2016_02_01_website_updates/post.rst b/docs/blog/2016_02_01_website_updates.rst similarity index 85% rename from docs/posts/2016_02_01_website_updates/post.rst rename to docs/blog/2016_02_01_website_updates.rst index 4ea4af2e8..23d5b7c53 100644 --- a/docs/posts/2016_02_01_website_updates/post.rst +++ b/docs/blog/2016_02_01_website_updates.rst @@ -1,7 +1,6 @@ -.. post:: Feb 1, 2016 - :tags: VUnit - :author: lasplund - :excerpt: 2 +:tags: VUnit +:author: lasplund +:excerpt: 2 .. _vunit_website_updates: @@ -9,7 +8,7 @@ Website Updates =============== -.. figure:: blogging_chat_room_documentation.png +.. 
figure:: img/blogging_chat_room_documentation.png Our website has been updated with a `Gitter `__-based diff --git a/docs/posts/2016_02_21_improving_vhdl_testbench_design_with_message_passing/post.rst b/docs/blog/2016_02_21_improving_vhdl_testbench_design_with_message_passing.rst similarity index 99% rename from docs/posts/2016_02_21_improving_vhdl_testbench_design_with_message_passing/post.rst rename to docs/blog/2016_02_21_improving_vhdl_testbench_design_with_message_passing.rst index 74a9ef2eb..7aa563e6c 100644 --- a/docs/posts/2016_02_21_improving_vhdl_testbench_design_with_message_passing/post.rst +++ b/docs/blog/2016_02_21_improving_vhdl_testbench_design_with_message_passing.rst @@ -1,13 +1,12 @@ -.. post:: Feb 21, 2016 - :tags: VUnit - :author: lasplund - :excerpt: 1 - :image: 1 +:tags: VUnit +:author: lasplund +:excerpt: 1 +:image: 1 Improving VHDL Testbench Design with Message Passing ==================================================== -.. figure:: orange_man.jpeg +.. figure:: img/orange_man.jpeg :alt: message passing :align: center @@ -149,7 +148,7 @@ alphabet. This shift value equals 1 in my example so that *a* is encrypted to *b*, *b* is encrypted to *c* and so on. The latency is three clock cycles. -.. figure:: caesar_encoder.png +.. figure:: img/caesar_encoder.png :alt: Caesar Encoder :align: center @@ -227,7 +226,7 @@ below, and we never verify the situation in the hello world example shown previously where the sentences are separated with a single clock cycle. -.. figure:: low_throughput.png +.. figure:: img/low_throughput.png :alt: Low Throughput :align: center @@ -254,7 +253,7 @@ model `__) which communicates with each other using *messages* sent over an abstract communication medium called the *net*. -.. figure:: message_passing_model.png +.. figure:: img/message_passing_model.png :alt: Message Passing Model :align: center @@ -332,7 +331,7 @@ remain. To refine this design pattern I will also have a dedicated still the entry point for understanding the testbench as it will continue to coordinate the actions of the others. -.. figure:: third_interface.png +.. figure:: img/third_interface.png :alt: Third Interface :align: center diff --git a/docs/posts/2016_08_08_making_osvvm_a_submodule/post.rst b/docs/blog/2016_08_08_making_osvvm_a_submodule.rst similarity index 93% rename from docs/posts/2016_08_08_making_osvvm_a_submodule/post.rst rename to docs/blog/2016_08_08_making_osvvm_a_submodule.rst index 1d6da5de8..394ef7e46 100644 --- a/docs/posts/2016_08_08_making_osvvm_a_submodule/post.rst +++ b/docs/blog/2016_08_08_making_osvvm_a_submodule.rst @@ -1,7 +1,6 @@ -.. post:: Aug 8, 2016 - :tags: VUnit, OSVVM - :author: lasplund - :excerpt: 1 +:tags: VUnit, OSVVM +:author: lasplund +:excerpt: 1 Making OSVVM a Git Submodule ==================================================== diff --git a/docs/posts/2016_11_15_vunit_the_best_value_for_initial_effort_part 1/post.rst b/docs/blog/2016_11_15_vunit_the_best_value_for_initial_effort_part1.rst similarity index 93% rename from docs/posts/2016_11_15_vunit_the_best_value_for_initial_effort_part 1/post.rst rename to docs/blog/2016_11_15_vunit_the_best_value_for_initial_effort_part1.rst index 859bb4b5f..75d7934eb 100644 --- a/docs/posts/2016_11_15_vunit_the_best_value_for_initial_effort_part 1/post.rst +++ b/docs/blog/2016_11_15_vunit_the_best_value_for_initial_effort_part1.rst @@ -1,20 +1,17 @@ -.. 
post:: Nov 15, 2016 - :tags: VUnit - :author: lasplund - :image: 1 - :excerpt: 1 +:tags: VUnit +:author: lasplund +:excerpt: 1 VUnit - The Best Value for Initial Effort - Part 1 ================================================== -.. figure:: image.jpg +.. NOTE:: This article was originally posted on `LinkedIn `__ + where you may find some comments on its contents. + +.. figure:: img/bestvalue1.jpg :alt: Best Value Part 1 :align: center -This article was originally posted on `LinkedIn -`__ -where you may find some comments on its contents. - In the book Effective Coding with VHDL published this summer VUnit was presented as the most advanced testing framework of its kind. diff --git a/docs/posts/2016_11_16_vunit_the_best_value_for_initial_effort_part 2/post.rst b/docs/blog/2016_11_16_vunit_the_best_value_for_initial_effort_part2.rst similarity index 83% rename from docs/posts/2016_11_16_vunit_the_best_value_for_initial_effort_part 2/post.rst rename to docs/blog/2016_11_16_vunit_the_best_value_for_initial_effort_part2.rst index 32f3469db..d3fcd5f7c 100644 --- a/docs/posts/2016_11_16_vunit_the_best_value_for_initial_effort_part 2/post.rst +++ b/docs/blog/2016_11_16_vunit_the_best_value_for_initial_effort_part2.rst @@ -1,20 +1,17 @@ -.. post:: Nov 16, 2016 - :tags: VUnit - :author: lasplund - :image: 1 - :excerpt: 1 +:tags: VUnit +:author: lasplund +:excerpt: 1 VUnit - The Best Value for Initial Effort - Part 2 ================================================== -.. figure:: image.jpg +.. NOTE:: This article was originally posted on `LinkedIn `__ + where you may find some comments on its contents. + +.. figure:: img/bestvalue2.jpg :alt: Best Value Part 2 :align: center -This article was originally posted on `LinkedIn -`__ -where you may find some comments on its contents. - In the previous `blog `__ I showed how `VUnit `__ can be installed in less than @@ -28,7 +25,7 @@ another minute. In my second `video clip `__ I will show how you -can create a compile +can create a compile script for your project within a minute. This script provides incremental compilation, meaning that it will find your source files, figure out their dependencies to create a compile order, and then @@ -38,7 +35,7 @@ files can normally be added and removed without modifications. Another minute of work but this time some real added value. The only requirement is that you are using one of the supported `simulators -`__, +`__, currently ModelSim/Questa, Riviera-Pro, Active-HDL, GHDL, and Cadence Incisive. Support for other simulators is `planned `__. diff --git a/docs/posts/2016_11_22_vunit_the_best_value_for_initial_effort_part 3/post.rst b/docs/blog/2016_11_22_vunit_the_best_value_for_initial_effort_part3.rst similarity index 81% rename from docs/posts/2016_11_22_vunit_the_best_value_for_initial_effort_part 3/post.rst rename to docs/blog/2016_11_22_vunit_the_best_value_for_initial_effort_part3.rst index 0e7edd8b6..bbe40d168 100644 --- a/docs/posts/2016_11_22_vunit_the_best_value_for_initial_effort_part 3/post.rst +++ b/docs/blog/2016_11_22_vunit_the_best_value_for_initial_effort_part3.rst @@ -1,34 +1,31 @@ -.. post:: Nov 22, 2016 - :tags: VUnit - :author: lasplund - :image: 1 - :excerpt: 1 +:tags: VUnit +:author: lasplund +:excerpt: 1 VUnit - The Best Value for Initial Effort - Part 3 ================================================== -.. figure:: image.jpg +.. NOTE:: This article was originally posted on `LinkedIn `__ + where you may find some comments on its contents. + +.. 
figure:: img/bestvalue3.jpg :alt: Best Value Part 3 :align: center -This article was originally posted on `LinkedIn -`__ -where you may find some comments on its contents. - After spending one minute on `installing VUnit `__ and one minute on `creating a run script `__ for incremental compilation it is time to go full automation. Five lines of code or roughly 30 seconds of work for each testbench is what -it takes to get the following added values: +it takes to get the following added values: - A single command to verify your project designs or, which I will - show in the next blog, part of the designs. + show in the next blog, part of the designs. - Support for distributing the simulations over many CPU cores. If you have a quad core CPU you can have a 4x speed-up. This requires more simulator licenses but if you use a free version of a commercial - simulator or the open source GHDL that is no problem. + simulator or the open source GHDL that is no problem. - Continuous integration with the help of tools like `Jenkins `__ and `Travis `__. Did you notice the build status @@ -40,7 +37,7 @@ it takes to get the following added values: This `video `__ clip will show the details for this step and the resulting code is in my `repository -`__. +`__. All in all I've spent less than ten minutes to convert this project but I haven't really made any changes to how the testbenches are @@ -51,7 +48,7 @@ VUnit must be able to handle legacy asserts in order to be the low-effort solution we're striving for. A better approach is to continuously improve on the code you have, for example when developing tests for new functionality, making manual testbenches self-checking, -or when debugging designs. +or when debugging designs. This was all for now. In the next blog I will talk about test cases in VUnit and how they bring clarity and extra speed to your diff --git a/docs/posts/2017_01_12_vunit_getting_started_1_2_3/post.rst b/docs/blog/2017_01_12_vunit_getting_started_1_2_3.rst similarity index 90% rename from docs/posts/2017_01_12_vunit_getting_started_1_2_3/post.rst rename to docs/blog/2017_01_12_vunit_getting_started_1_2_3.rst index 647be0782..a98f601aa 100644 --- a/docs/posts/2017_01_12_vunit_getting_started_1_2_3/post.rst +++ b/docs/blog/2017_01_12_vunit_getting_started_1_2_3.rst @@ -1,7 +1,6 @@ -.. post:: Jan 12, 2017 - :tags: VUnit - :author: lasplund - :excerpt: 1 +:tags: VUnit +:author: lasplund +:excerpt: 1 VUnit - Getting Started 1-2-3 ============================= diff --git a/docs/posts/2017_06_03_enable_your_simulator_to_handle_complex_top_level_generics/post.rst b/docs/blog/2017_06_03_enable_your_simulator_to_handle_complex_top_level_generics.rst similarity index 99% rename from docs/posts/2017_06_03_enable_your_simulator_to_handle_complex_top_level_generics/post.rst rename to docs/blog/2017_06_03_enable_your_simulator_to_handle_complex_top_level_generics.rst index 846ec7167..630b7dbd7 100644 --- a/docs/posts/2017_06_03_enable_your_simulator_to_handle_complex_top_level_generics/post.rst +++ b/docs/blog/2017_06_03_enable_your_simulator_to_handle_complex_top_level_generics.rst @@ -1,7 +1,6 @@ -.. 
post:: Jun 03, 2017 - :tags: VUnit - :author: lasplund - :excerpt: 2 +:tags: VUnit +:author: lasplund +:excerpt: 2 Enable Your Simulator to Handle Complex Top-Level Generics ========================================================== diff --git a/docs/posts/2017_09_28_sigasi_adds_support_for_vunit_testing_framework/post.rst b/docs/blog/2017_09_28_sigasi_adds_support_for_vunit_testing_framework.rst similarity index 82% rename from docs/posts/2017_09_28_sigasi_adds_support_for_vunit_testing_framework/post.rst rename to docs/blog/2017_09_28_sigasi_adds_support_for_vunit_testing_framework.rst index aaa5fb790..5c7d0ab97 100644 --- a/docs/posts/2017_09_28_sigasi_adds_support_for_vunit_testing_framework/post.rst +++ b/docs/blog/2017_09_28_sigasi_adds_support_for_vunit_testing_framework.rst @@ -1,20 +1,17 @@ -.. post:: Sep 22, 2017 - :tags: VUnit - :author: lasplund - :image: 1 - :excerpt: 1 +:tags: VUnit +:author: lasplund +:excerpt: 1 Sigasi Adds Support for VUnit Testing Framework =============================================== -.. figure:: image.jpg +.. NOTE:: This article was originally posted on `LinkedIn `__ + where you may find some comments on its contents. + +.. figure:: img/vunit_sigasistudio.jpg :alt: Sigasi Support :align: center -This article was originally posted on `LinkedIn -`__ -where you may find some comments on its contents. - `VUnit `__ was born out of frustration over the lack of an efficient test framework. The continuous and automated approach to testing I used for software @@ -41,7 +38,7 @@ collect the results. All error feedback in one place. This is actually very common when using Eclipse with software languages and with the new Sigasi Studio 3.6 release the company is taking the `first steps `__ towards such -an integration by recognizing VUnit testbenches and +an integration by recognizing VUnit testbenches and supporting their creation. I'm very much looking forward to the coming releases of Sigasi Studio! - + diff --git a/docs/posts/2017_10_31_vunit_3_0_color_logging/post.rst b/docs/blog/2017_10_31_vunit_3_0_color_logging.rst similarity index 83% rename from docs/posts/2017_10_31_vunit_3_0_color_logging/post.rst rename to docs/blog/2017_10_31_vunit_3_0_color_logging.rst index 95a3eeb50..99641f250 100644 --- a/docs/posts/2017_10_31_vunit_3_0_color_logging/post.rst +++ b/docs/blog/2017_10_31_vunit_3_0_color_logging.rst @@ -1,20 +1,17 @@ -.. post:: Oct 31, 2017 - :tags: VUnit - :author: lasplund - :image: 1 - :excerpt: 1 +:tags: VUnit +:author: lasplund +:excerpt: 1 VUnit 3.0 Color Logging ======================= -.. figure:: image.jpg +.. NOTE:: This article was originally posted on `LinkedIn `__ + where you may find some comments on its contents. + +.. figure:: img/color_logging.jpg :alt: VUnit 3.0 Color Logging :align: center -This article was originally posted on `LinkedIn -`__ -where you may find some comments on its contents. - VUnit 3.0, our next major release, is around the corner and with it comes a number of updates and additions. This first preview post will demonstrate color logging, one of the updates we made to our logging @@ -23,7 +20,7 @@ framework. Color logging is exactly what it sounds like, the addition of colors to simplify browsing of logs. For example -.. figure:: log_example.jpg +.. 
figure:: img/log_example.jpg :alt: Log Example :align: center diff --git a/docs/posts/2017_11_07_vunit_3_0_while_waiting_for_vhdl_2017/post.rst b/docs/blog/2017_11_07_vunit_3_0_while_waiting_for_vhdl_2017.rst similarity index 94% rename from docs/posts/2017_11_07_vunit_3_0_while_waiting_for_vhdl_2017/post.rst rename to docs/blog/2017_11_07_vunit_3_0_while_waiting_for_vhdl_2017.rst index 66e822cf4..3a1e8b43c 100644 --- a/docs/posts/2017_11_07_vunit_3_0_while_waiting_for_vhdl_2017/post.rst +++ b/docs/blog/2017_11_07_vunit_3_0_while_waiting_for_vhdl_2017.rst @@ -1,20 +1,17 @@ -.. post:: Nov 7, 2017 - :tags: VUnit - :author: lasplund - :image: 1 - :excerpt: 1 +:tags: VUnit +:author: lasplund +:excerpt: 1 VUnit 3.0 - While Waiting for VHDL-2017 ======================================= -.. figure:: image.jpg +.. NOTE:: This article was originally posted on `LinkedIn `__ + where you may find some comments on its contents. + +.. figure:: img/vunit_waiting.jpg :alt: VUnit 3.0 - While Waiting for VHDL-2017 :align: center -This article was originally posted on `LinkedIn -`__ -where you may find some comments on its contents. - Background ---------- @@ -140,11 +137,11 @@ messages to a custom logger named after the process path name. test_runner_cleanup(runner); end process; - end architecture; + end architecture; The resulting output is -.. figure:: log1.jpg +.. figure:: img/log1.jpg :align: center It may look like we created a logger named `tb:main` but the colon in @@ -153,7 +150,7 @@ loggers with parent/child relationships. So the single call to `get_logger` will create two loggers if they don't already exist. One logger is named `main` and is the child of the other logger named `tb`. Note that the debug message isn't visible. By default the `log -level` is set not to include such details. +level` is set not to include such details. Now let's create a dummy verification component. It will just take a logger as a generic (not possible prior to VUnit 3.0) and then do some @@ -163,14 +160,14 @@ logging on that logger. Here is the entity declaration. entity verification_component is generic (logger : logger_t := verification_component_logger); - end entity; + end entity; If this component is instantiated without assigning the `logger` generic it will use `verification_component_logger` instead. This is a logger defined by the verification component itself and placed in an -associated package. +associated package. -.. figure:: log2.jpg +.. figure:: img/log2.jpg :align: center To make the log more readable and the example more interesting I'm @@ -189,7 +186,7 @@ provide them with their own loggers. beta : entity work.verification_component generic map (logger => beta_logger); - end block; + end block; What I've done here is to collect all my verification components in a separate block labelled `vc`. `vc` has its own `vc_logger` based on the path @@ -198,9 +195,9 @@ name just like I did for `main_logger`. The loggers for the `alpha` and than providing a complete hierarchical name to `get_logger` I just provide a simple name and the parent logger. -My log output will now look like this +My log output will now look like this -.. figure:: log3.jpg +.. figure:: img/log3.jpg :align: center Now that we have our hierarchy of loggers we can start controlling @@ -215,7 +212,7 @@ being logged on file is handled separately by the file handler. 
constant main_logger : logger_t := get_logger(main'path_name); begin test_runner_setup(runner, runner_cfg); - + show(main_logger, display_handler, debug); info(main_logger, "Starting testbench"); @@ -226,13 +223,13 @@ being logged on file is handled separately by the file handler. The result is -.. figure:: log4.jpg +.. figure:: img/log4.jpg :align: center I can also control the loggers for `alpha` and `beta` individually but it's also possible to address them collectively by controlling a shared ancestor in the hierarchy. Let's add a configuration process -to `vc`. +to `vc`. .. code-block:: vhdl @@ -257,7 +254,7 @@ to `vc`. The visibility setting applied to `vc_logger` will also be inherited and applied to all its descendants, in this case `alpha` and `beta`. -.. figure:: log5.jpg +.. figure:: img/log5.jpg :align: center That's all for now. Hopefully you've learned something new about diff --git a/docs/posts/2017_11_23_vunit_matlab_integration/post.rst b/docs/blog/2017_11_23_vunit_matlab_integration.rst similarity index 96% rename from docs/posts/2017_11_23_vunit_matlab_integration/post.rst rename to docs/blog/2017_11_23_vunit_matlab_integration.rst index 189681ccc..7de043c2d 100644 --- a/docs/posts/2017_11_23_vunit_matlab_integration/post.rst +++ b/docs/blog/2017_11_23_vunit_matlab_integration.rst @@ -1,26 +1,23 @@ -.. post:: Nov 23, 2017 - :tags: VUnit - :author: lasplund - :image: 1 - :excerpt: 1 +:tags: VUnit +:author: lasplund +:excerpt: 1 VUnit Matlab Integration ======================== -.. figure:: image.jpg +.. NOTE:: This article was originally posted on `LinkedIn `__ + where you may find some comments on its contents. + +.. figure:: img/vunit_matlab.jpg :alt: VUnit Matlab Integration :align: center -This article was originally posted on `LinkedIn -`__ -where you may find some comments on its contents. - Recently I got a question from an ASIC team if it is possible to integrate their VUnit simulations with Matlab. I've been getting this question several times lately so this post will show you how it can be done. It will be based on their use case but hopefully it will serve as inspiration if you have other Matlab use cases or want to integrate -VUnit with some other program. +VUnit with some other program. The Testbench ------------- @@ -35,14 +32,14 @@ samples are generated by repeatedly calling a function loop. The samples generated form a simple ramp function as shown in the figure below. -.. figure:: figure.jpg +.. figure:: img/matlab_figure.jpg :alt: Testbench Progress :align: center Here is the main loop of my VUnit testbench. .. code-block:: vhdl - + test_runner: process is variable data_set : integer_array_t; begin @@ -79,20 +76,20 @@ were discussed in my previous `post In this case I have a one-dimensional array (a vector) created by .. code-block:: vhdl - + data_set := new_1d; The created vecor is empty by default and grows dynamically with every new sample I append. .. code-block:: vhdl - + append(data_set, get_output_sample); Once I have a complete data set I save that to a CSV file and then deallocate the data set such that I can repeat the procedure. .. code-block:: vhdl - + save_csv(data_set, file_name => join(output_path(runner_cfg), "data_set_" & to_string(set) & ".csv")); deallocate(data_set); @@ -104,7 +101,7 @@ The Run Script The run script is similar to most run scripts with a few additions .. 
code-block:: python - + prj = VUnit.from_argv() prj.add_array_util() @@ -125,20 +122,20 @@ The VUnit array support is an add-on not compiled into `vunit_lib` by default. To include it you have to add .. code-block:: vhdl - + prj.add_array_util() Next I want to create a configuration for my testbench. A VUnit configuration allows me to run my testbench with several different settings. In this example my testbench entity is called `tb_octave` and I get the testbench, compiled into `lib`, with the line .. code-block:: python - + tb_octave = lib.entity("tb_octave") Using the `add_config` method I can now add a configuration to the testbench which I named `Passing test`. .. code-block:: python - + tb_octave.add_config(name="Passing test", generics=dict(size_of_data_set=10, num_of_data_sets=10, @@ -154,7 +151,7 @@ called a `pre_config` function. This is a function that VUnit calls before starting the simulation. .. code-block:: python - + def pre_config(output_path): p = run(["octave", join(root, "octave", "visualize.m"), output_path, plot_title, str(num_of_data_sets)]) @@ -165,7 +162,7 @@ same directory we saw in the testbench before. Note that the `output_path` name doesn't mean that it can't be used for simulation input. A `pre_config` function can for example be used to generate and store an input data file in `output_path` and let the testbench read -that data. +that data. In this example I use `pre_config` to call Matlab (or rather Octave which is a free Matlab clone) using the Python `run` function. Octave @@ -178,7 +175,7 @@ Octave should expect. But where are `plot_title` and `num_of_data_sets` defined? `pre_config` is called by VUnit and it can only provide arguments it knows about. VUnit knows about the `output_path` it created but doesn't know anything about the purpose of the `pre_config` function and what it needs to fulfill that purpose. I can hardcode these values but what if I want to reuse `pre_config` with different values? The trick is to generate the `pre_config` function. .. code-block:: python - + def make_pre_config(plot_title, num_of_data_sets): def pre_config(output_path): p = run(["octave", join(root, "octave", "visualize.m"), output_path, plot_title, str(num_of_data_sets)]) @@ -309,11 +306,11 @@ still a number of flaws with this solution. For example split the data into sets and store them in separate files. I would prefer writing and reading a single open file. - Copying and modifying code is not good reuse. I need to raise the - abstraction and remove details. + abstraction and remove details. - Responsibility for the plot is all over the place. The testbench is in charge of the data, the title is set by the Python script, axis labels are controlled by the M script, and some properties are - hardcoded. + hardcoded. It seems that I will have to revisit this post. Until then... diff --git a/docs/posts/2017_12_14_vunit_bfms _as_simple_as_emailing/post.rst b/docs/blog/2017_12_14_vunit_bfms_as_simple_as_emailing.rst similarity index 95% rename from docs/posts/2017_12_14_vunit_bfms _as_simple_as_emailing/post.rst rename to docs/blog/2017_12_14_vunit_bfms_as_simple_as_emailing.rst index e240f1f3f..36e948541 100644 --- a/docs/posts/2017_12_14_vunit_bfms _as_simple_as_emailing/post.rst +++ b/docs/blog/2017_12_14_vunit_bfms_as_simple_as_emailing.rst @@ -1,18 +1,17 @@ -.. post:: Dec 14, 2017 - :tags: VUnit - :author: lasplund - :image: 1 - :excerpt: 1 +:tags: VUnit +:author: lasplund +:excerpt: 1 VUnit BFMs - as Simple as Emailing ================================== -.. 
figure:: image.jpg +.. NOTE:: This article was originally posted on `LinkedIn `__ + where you may find some comments on its contents. + +.. figure:: img/vunit_emailing.jpg :alt: VUnit BFMs - as Simple as Emailing :align: center -This article was originally posted on `LinkedIn `__ where you may find some comments on its contents. - VUnit 3.0, our next major release, is around the corner and with it comes a number of updates and additions. One area which we have improved is our support for creating advanced bus functional models @@ -42,7 +41,7 @@ the blue interface, then some data is put on the green port, then there is a write access on the red bus interface. Crisp and clear as a recipe. -.. figure:: sequential_recipe.jpg +.. figure:: img/sequential_recipe.jpg :alt: Sequential Recipe :align: center @@ -52,7 +51,7 @@ However, looking at a real recipe we can see an important difference 2. Split vanilla bean in half and scrape out the pulp. 3. While the oven is preheating, place the cream, vanilla bean and its pulp into a saucepan set over medium-high heat and bring to a - boil. + boil. 4. ... The recipe is sequential in nature but actually describes concurrent @@ -83,7 +82,7 @@ time. This allows the concurrent execution of the `put` and `write` transactions. Procedures like `init`, which await completion, will be blocking and consume simulation time. -.. figure:: parallel_recipe.jpg +.. figure:: img/parallel_recipe.jpg :alt: Parallel Recipe :align: center @@ -93,7 +92,7 @@ selecting what basic BFM procedure to run based on the type of command it receives, read or write for example. The challenging part, and the key to make the advanced BFM approach tractable, is the communication system connecting the test sequencer procedure calls with the BFM -components. +components. No Need to Reinvent the Wheel ----------------------------- @@ -118,7 +117,7 @@ in an email thread. In this example my test sequencer is communicating with a BFM connected to a bus of a pure memory device. For reasons that will appear later I'm calling this device *The Brain*. -.. figure:: the_brain.jpg +.. figure:: img/the_brain.jpg :alt: The Brain :align: center @@ -133,14 +132,14 @@ really communicating with the BFM but that's just a technical detail which I've excluded from the naming. .. code-block:: vhdl - + constant brain : actor_t := new_actor("The Brain"); The test sequencer may have direct access to the brain "email address" but it can also figure it out. .. code-block:: vhdl - + brain := find("The Brain"); Just like searching the contacts list in your email client. @@ -150,12 +149,12 @@ Sending an Email Now that the basics are covered we can start communicating. This is how you send a message to the brain BFM instructing it to start a -write transaction. +write transaction. .. code-block:: vhdl - + write_msg := new_msg(brain_write_msg); - push_integer(write_msg, address); + push_integer(write_msg, address); push_std_ulogic_vector(write_msg, data); send(net, brain, write_msg); @@ -176,7 +175,7 @@ allowed to have the same message type. To handle this a BFM can register its message types and get unique identifiers in return. .. code-block:: vhdl - + constant brain_write_msg : msg_type_t := new_msg_type("write"); When writing an email you add text but you can also attach pictures, @@ -206,14 +205,14 @@ Receiving an Email ------------------ The BFM receiving the test sequencer messages would have a process -with a body starting like this. +with a body starting like this. .. 
code-block:: vhdl - + begin receive(net, brain, command_msg); msg_type := message_type(command_msg); - + if msg_type = brain_write_msg then address := pop_integer(command_msg); data := pop_std_ulogic_vector(command_msg); @@ -237,7 +236,7 @@ continuation of the if statement above showing how the BFM replies to a read command. .. code-block:: vhdl - + elsif msg_type = brain_read_msg then address := pop_integer(command_msg); read(a, number, of, bus, interface, signals, address, data); @@ -258,7 +257,7 @@ reply of *that* message and ignore everything else. Messages ignored are not deleted but remain in the inbox until you are ready to read them. .. code-block:: vhdl - + -- Initiate a read transaction read_msg := new_msg(brain_read_msg); @@ -269,7 +268,7 @@ not deleted but remain in the inbox until you are ready to read them. receive_reply(net, read_msg, reply_msg); data := pop_std_ulogic_vector(reply_msg); - + Again, these are details that the test sequencer doesn't have to see. The first three lines can be encapsulated in a non-blocking read procedure `brain_read` that lets you do other things while waiting for @@ -281,7 +280,7 @@ promise of future data and `get` retrieves that data and may block if the data is yet to be received. .. code-block:: vhdl - + brain_read(net, address, future); get(net, future, data); @@ -289,7 +288,7 @@ Sometimes you just need a blocking read so a procedure bundling these two should also be provided. .. code-block:: vhdl - + brain_read(net, address, data); The difference between the two `brain_read` procedures is the type of @@ -312,7 +311,7 @@ part of normal emailing. You can make it more like emailing by signing your messages with a `test_sequencer` actor. .. code-block:: vhdl - + msg := new_msg(brain_write_msg, test_sequencer); If you do, all reply messages sent by `brain` will end up in the @@ -325,7 +324,7 @@ it will look for the message in the `brain` outbox. No privacy but we don't really need that. .. code-block:: vhdl - + receive_reply(net, read_msg, reply_msg); Sending Real Emails from within a Testbench @@ -350,7 +349,7 @@ receiver. Instead the message type was pushed/popped to/from the message just like any other message content. This is still possible but not the recommended way of doing it. The newer approach provides better debugging support as described in the `user guide -`__. +`__. .. raw:: html diff --git a/docs/posts/2018_02_12_vunit3/post.rst b/docs/blog/2018_02_12_vunit3.rst similarity index 97% rename from docs/posts/2018_02_12_vunit3/post.rst rename to docs/blog/2018_02_12_vunit3.rst index b6479d802..99aa7f2b5 100644 --- a/docs/posts/2018_02_12_vunit3/post.rst +++ b/docs/blog/2018_02_12_vunit3.rst @@ -1,13 +1,12 @@ -.. post:: Feb 12, 2018 - :tags: VUnit - :author: kraigher, lasplund - :image: 1 - :excerpt: 1 +:tags: VUnit +:author: kraigher, lasplund +:image: 1 +:excerpt: 1 VUnit 3.0 ========= -.. figure:: VUnit3.0.png +.. figure:: img/VUnit3.0.png :alt: VUnit 3.0 :align: center @@ -102,7 +101,7 @@ VUnit 3.0 contains a number of logging framework enhancements that goes hand in hand with the verification components. We have improved the log source hierarchy support which allows loggers to be arranged in a tree structure. -.. figure:: logging_hierarchy.png +.. 
figure:: img/logging_hierarchy.png :alt: logging hierarchy :align: center @@ -143,7 +142,7 @@ tree depicted above and changes the visibilty for debug messages in the vc logge The result is that debug messages from the `main_logger` is hidden while debug messages from the alpha and beta components are visible. -.. figure:: log_output.png +.. figure:: img/log_output.png :alt: log output :align: center diff --git a/docs/posts/2018_03_22_vunit_community_developed_bfms/post.rst b/docs/blog/2018_03_22_vunit_community_developed_bfms.rst similarity index 75% rename from docs/posts/2018_03_22_vunit_community_developed_bfms/post.rst rename to docs/blog/2018_03_22_vunit_community_developed_bfms.rst index 8c9c506cb..98f7a3bc0 100644 --- a/docs/posts/2018_03_22_vunit_community_developed_bfms/post.rst +++ b/docs/blog/2018_03_22_vunit_community_developed_bfms.rst @@ -1,20 +1,17 @@ -.. post:: Mar 22, 2018 - :tags: VUnit - :author: lasplund - :image: 1 - :excerpt: 1 +:tags: VUnit +:author: lasplund +:excerpt: 1 VUnit Community Developed BFMs ============================== -.. figure:: image.png +.. NOTE:: This article was originally posted on `LinkedIn `__ + where you may find some comments on its contents. + +.. figure:: img/vunit_wishbone.png :alt: VUnit Community Developed BFMs :align: center -This article was originally posted on `LinkedIn -`__ -where you may find some comments on its contents. - One month ago we released VUnit 3.0 which was a release focused on our BFM support. Our previous BFM building blocks were extended and further simplified, we added new building blocks, and also a number of diff --git a/docs/posts/2018_07_22_sigasi_deepens_its_commitment_to_the_vunit_testing_framework/post.rst b/docs/blog/2018_07_22_sigasi_deepens_its_commitment_to_the_vunit_testing_framework.rst similarity index 91% rename from docs/posts/2018_07_22_sigasi_deepens_its_commitment_to_the_vunit_testing_framework/post.rst rename to docs/blog/2018_07_22_sigasi_deepens_its_commitment_to_the_vunit_testing_framework.rst index 351d51cfb..6f5db5ced 100644 --- a/docs/posts/2018_07_22_sigasi_deepens_its_commitment_to_the_vunit_testing_framework/post.rst +++ b/docs/blog/2018_07_22_sigasi_deepens_its_commitment_to_the_vunit_testing_framework.rst @@ -1,20 +1,17 @@ -.. post:: Jul 22, 2018 - :tags: VUnit - :author: lasplund - :image: 1 - :excerpt: 1 +:tags: VUnit +:author: lasplund +:excerpt: 1 Sigasi Deepens Its Commitment to the VUnit Testing Framework ============================================================ -.. figure:: image.png +.. NOTE:: This article was originally posted on `LinkedIn `__ + where you may find some comments on its contents. + +.. figure:: img/sigasi_deep.png :alt: Sigasi Deepens Its Commitment to the VUnit Testing Framework :align: center -This article was originally posted on `LinkedIn -`__ -where you may find some comments on its contents. - Sigasi started to support the VUnit testing framework when their Sigasi Studio IDE became VUnit-aware in the 3.6 release. That release introduced a feature to conveniently configure the VUnit library for @@ -26,7 +23,7 @@ integrated such that tests can be executed with a mouse click and verification progress and results can be monitored in a dedicated VUnit view. -.. figure:: vunit_view.png +.. 
figure:: img/vunit_view.png :alt: VUnit View :align: center @@ -42,7 +39,7 @@ have started to use VUnit to fully automate their `test environments Sigasi Studio is not the first tool to go beyond convenience support and start building on top of VUnit (see `Eksen `__ for an -example) but it +example) but it is the first commercial tool. This follows a line of logical steps I've seen over the years where the first wave of VUnit adopters are individuals spread around the world (see below), the second wave are @@ -50,7 +47,7 @@ the organizations for which these individuals are working, and the third wave are the tool providers acting to support these organizations. -.. figure:: world.png +.. figure:: img/world.png :alt: Global Activity :align: center @@ -67,7 +64,7 @@ announced VUnit 3.0 which was a release focusing on `BFMs and verification components in general `__. -.. figure:: vunit_popularity.png +.. figure:: img/vunit_popularity.png :alt: VUnit Popularity :align: center @@ -82,7 +79,7 @@ when looking for new employees. Universities include VUnit in their `courses `__ and their `textbooks `__. Tool -providers +providers starts adding convenience support like Sigasi did with the VUnit awareness in Sigasi Studio 3.6 or they bundle VUnit with their tools. Bundling is not something we've pursued due to our continuous @@ -104,7 +101,7 @@ like VUnit. It's the open many-to-many discussions that make sure that the tool stays relevant. Close to 150 users have been active on our website in this way: -.. figure:: contributors.png +.. figure:: img/contributors.png :alt: Contributors :align: center @@ -114,13 +111,13 @@ phase is when the users start investing time and money to contribute to the VUnit ecosystem. It can be anything from `fixing documentation typos `__ to `contributing a BFM `__ or something -independent like building +independent like building tools on top of VUnit in the way Sigasi is doing now. A lot of activity in the `pull request area `__ is a sign -that the project has entered this phase. +that the project has entered this phase. -.. figure:: activity.png +.. figure:: img/activity.png :alt: Active Community :align: center @@ -128,12 +125,12 @@ Having many users actively and publicly involved in maintaining and expanding the code base is something we highly encourage as it improves the pace and continuity of the project. So far 27 users have `contributed `__ -to the code. +to the code. That's all for now. If you're interested in a test drive of Sigasi Studio 4.1 I recommend that you have a look at the early preview which can be downloaded from the `Sigasi preview channel -`__. Make sure that you +`__. Make sure that you have the `latest VUnit version installed `__. Currently only VHDL is supported but SystemVerilog support is also planned. This is a release diff --git a/docs/posts/2018_09_22_sigasi_adds_full_vunit_support/post.rst b/docs/blog/2018_09_22_sigasi_adds_full_vunit_support.rst similarity index 74% rename from docs/posts/2018_09_22_sigasi_adds_full_vunit_support/post.rst rename to docs/blog/2018_09_22_sigasi_adds_full_vunit_support.rst index 3079625ac..3d9839194 100644 --- a/docs/posts/2018_09_22_sigasi_adds_full_vunit_support/post.rst +++ b/docs/blog/2018_09_22_sigasi_adds_full_vunit_support.rst @@ -1,20 +1,17 @@ -.. post:: Sep 12, 2018 - :tags: VUnit - :author: lasplund - :image: 1 - :excerpt: 1 +:tags: VUnit +:author: lasplund +:excerpt: 1 Sigasi Adds Full VUnit Support ============================== -.. figure:: image.png +.. 
NOTE:: This article was originally posted on `LinkedIn `__ + where you may find some comments on its contents. + +.. figure:: img/sigasi_full.png :alt: Sigasi Adds Full VUnit Support :align: center -This article was originally posted on `LinkedIn -`__ -where you may find some comments on its contents. - Some time ago I gave a `preview `__ of this update of Sigasi Studio and diff --git a/docs/blog/2020_08_12_continuous_integration_with_vunit_action_in_10_lines_of_code.rst b/docs/blog/2020_08_12_continuous_integration_with_vunit_action_in_10_lines_of_code.rst new file mode 100644 index 000000000..89c2089cb --- /dev/null +++ b/docs/blog/2020_08_12_continuous_integration_with_vunit_action_in_10_lines_of_code.rst @@ -0,0 +1,119 @@ +:tags: VUnit +:author: lasplund +:excerpt: 1 + +Continuous Integration With VUnit Action in 10 Lines of Code +============================================================ + +.. figure:: img/gha_top_image.png + :align: center + +The other week, `semiengineering.com `__ published an +`article `__ on open-source verification. +It had one, rather obvious, conclusion. + + *Verification is required to answer the question, 'Do you trust the piece of hardware you received?'* + + -- Neil Hand, director of marketing for design verification technology at Mentor, a Siemens Business + +Despite being obvious, IP providers often make it hard to gain that trust. + + *When you buy IP, you usually get a very simple verification environment. This enables you to run a + few demo tests or check configurations. You do not usually get the entire verification environment.* + + -- Olivera Stojanovic, senior verification manager for Vtool + +This is not unique to commercial IPs. Our `study `__ +of VHDL projects on GitHub shows that less than half of all projects provide tests at all, and the trend +is declining (see Figure 1). + +.. figure:: img/repositories_providing_tests.png + :align: center + + Figure 1. Repositories providing tests. + +So, what are the reasons for not providing tests with the IPs? + + *With complex IPs, they don’t want to provide you with the verification environment, which is too + complicated and potentially may provide insights that they might want to keep from you.* + + -- Olivera Stojanovic, senior verification manager for Vtool + +Keeping secrets is not a reason for not providing tests with public projects on GitHub, as everything is +open/public. However, it can be complex to create a user-friendly online verification environment that +clearly shows what has been tested and the status of those tests. Thanks to +`VUnit Action `__ this is now much simpler, as it +provides a continuous integration flow with just 10 lines of code. + +If you're not familiar with VUnit, the following reading will set you up for the VUnit Action described +in the next section. + +1. `Installing VUnit in 1 minute `__ +2. `Compiling your project in 1 minute `__ +3. `Fully automating your testbench with 5 lines of code `__ + +VUnit Action +------------ + +GitHub’s continuous integration/continuous deployment (CI/CD) service is named +`GitHub Actions `__ (GHA). It allows to create automated workflows +for your repositories, which are defined through `YAML `__ files. +Workflows can be triggered by any event, such as push, issue creation or publication of releases. + +GHA provides virtual machines with GNU/Linux (Ubuntu), Windows or macOS. Hence, it is possible to create +a custom CI/CD workflow using bash, powershell, Python etc. 
However, there are also predefined workflow +tasks named Actions. Some Actions are provided by GitHub +(see `github.com/actions `__), and some are published in the +`GitHub marketplace `__. Nevertheless, any GitHub repository +can contain Actions. + +`VUnit Action `__ is a reusable Action, built on the +`GHDL simulator `__, and available in the marketplace +(`github.com/marketplace/actions/vunit-action `__). +It helps you build a workflow for running your HDL testbenches, and then presenting the results. + +To use VUnit Action for your project, you need to create a `YAML `__ file +(`some_name.yml`) and place that in a directory named `.github/workflows` (located directly under your project root), +in the default branch of your repository. The YAML file should contain, at least, the following piece of code. + +.. code-block:: yaml + + name: VUnit Tests + on: + push: + pull_request: + jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: VUnit/vunit_action@v0.1.0 + +Whenever someone pushes code to the project or makes a pull request, this workflow is triggered. First, the code +is checked out using the `checkout action `__. Then, the VUnit +Action is triggered to run the `run.py` script located in the root of your repository. If the VUnit run script is +located elsewhere, you specify it in the YAML file: + +.. code-block:: yaml + + - uses: VUnit/vunit_action@v0.1.0 + with: + run_file: path/to/vunit_run_script.py + +To build trust with the user community by clearly showing that you have tests up and running, we recommend that +you add a badge/shield to the `README.md` of your project. It will show the latest status of your tests: + +.. code-block:: md + + [![](https://github.com///workflows/VUnit%20Tests/badge.svg)](https://github.com///actions) + +Clicking the badge/shield will take you to a list of workflow runs, and then further to the results of those runs: + +.. figure:: img/gha_flow.png + :align: center + + Figure 2. Presenting Test Results. + +The simple solution presented here will get you started, and you can read more about the details in our +`documentation `__. Once you have that working, there are a +number of extra steps you can take; those will be the topic of the next post on continuous integration. 
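+If you don't have a ``run.py`` yet, a minimal sketch is shown below. It is only an illustration of what the Action expects
+to find: the library name ``lib`` and the ``test/*.vhd`` location are assumptions for this example, not requirements of
+VUnit Action.
+
+.. code-block:: python
+
+    # run.py -- minimal VUnit run script (illustrative sketch)
+    from pathlib import Path
+    from vunit import VUnit
+
+    # Parse command line arguments (e.g. --xunit-xml, test name patterns)
+    vu = VUnit.from_argv()
+
+    # Compile all VHDL sources and testbenches into a library named 'lib';
+    # the library name and the 'test' folder are assumptions for this sketch.
+    lib = vu.add_library("lib")
+    lib.add_source_files(str(Path(__file__).parent / "test" / "*.vhd"))
+
+    # Discover and run all testbenches, reporting pass/fail per test
+    vu.main()
+
+With such a file in the repository root, the 10-line workflow above is all the CI configuration you need.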
diff --git a/docs/posts/2015_09_24_short_introduction_to_vunit/4x.png b/docs/blog/img/4x.png similarity index 100% rename from docs/posts/2015_09_24_short_introduction_to_vunit/4x.png rename to docs/blog/img/4x.png diff --git a/docs/posts/2015_12_15_free_and_open_source_verification_with_vunit_and_ghdl/CPU_load.png b/docs/blog/img/CPU_load.png similarity index 100% rename from docs/posts/2015_12_15_free_and_open_source_verification_with_vunit_and_ghdl/CPU_load.png rename to docs/blog/img/CPU_load.png diff --git a/docs/posts/2018_02_12_vunit3/VUnit3.0.png b/docs/blog/img/VUnit3.0.png similarity index 100% rename from docs/posts/2018_02_12_vunit3/VUnit3.0.png rename to docs/blog/img/VUnit3.0.png diff --git a/docs/posts/2018_07_22_sigasi_deepens_its_commitment_to_the_vunit_testing_framework/activity.png b/docs/blog/img/activity.png similarity index 100% rename from docs/posts/2018_07_22_sigasi_deepens_its_commitment_to_the_vunit_testing_framework/activity.png rename to docs/blog/img/activity.png diff --git a/docs/posts/2016_11_15_vunit_the_best_value_for_initial_effort_part 1/image.jpg b/docs/blog/img/bestvalue1.jpg similarity index 100% rename from docs/posts/2016_11_15_vunit_the_best_value_for_initial_effort_part 1/image.jpg rename to docs/blog/img/bestvalue1.jpg diff --git a/docs/posts/2016_11_16_vunit_the_best_value_for_initial_effort_part 2/image.jpg b/docs/blog/img/bestvalue2.jpg similarity index 100% rename from docs/posts/2016_11_16_vunit_the_best_value_for_initial_effort_part 2/image.jpg rename to docs/blog/img/bestvalue2.jpg diff --git a/docs/posts/2016_11_22_vunit_the_best_value_for_initial_effort_part 3/image.jpg b/docs/blog/img/bestvalue3.jpg similarity index 100% rename from docs/posts/2016_11_22_vunit_the_best_value_for_initial_effort_part 3/image.jpg rename to docs/blog/img/bestvalue3.jpg diff --git a/docs/posts/2016_02_01_website_updates/blogging_chat_room_documentation.png b/docs/blog/img/blogging_chat_room_documentation.png similarity index 100% rename from docs/posts/2016_02_01_website_updates/blogging_chat_room_documentation.png rename to docs/blog/img/blogging_chat_room_documentation.png diff --git a/docs/posts/2016_02_21_improving_vhdl_testbench_design_with_message_passing/caesar_encoder.png b/docs/blog/img/caesar_encoder.png similarity index 100% rename from docs/posts/2016_02_21_improving_vhdl_testbench_design_with_message_passing/caesar_encoder.png rename to docs/blog/img/caesar_encoder.png diff --git a/docs/posts/2017_10_31_vunit_3_0_color_logging/image.jpg b/docs/blog/img/color_logging.jpg similarity index 100% rename from docs/posts/2017_10_31_vunit_3_0_color_logging/image.jpg rename to docs/blog/img/color_logging.jpg diff --git a/docs/posts/2018_07_22_sigasi_deepens_its_commitment_to_the_vunit_testing_framework/contributors.png b/docs/blog/img/contributors.png similarity index 100% rename from docs/posts/2018_07_22_sigasi_deepens_its_commitment_to_the_vunit_testing_framework/contributors.png rename to docs/blog/img/contributors.png diff --git a/docs/blog/img/gha_flow.png b/docs/blog/img/gha_flow.png new file mode 100644 index 000000000..de5de856b Binary files /dev/null and b/docs/blog/img/gha_flow.png differ diff --git a/docs/blog/img/gha_top_image.png b/docs/blog/img/gha_top_image.png new file mode 100644 index 000000000..d54dd8d35 Binary files /dev/null and b/docs/blog/img/gha_top_image.png differ diff --git a/docs/posts/2017_11_07_vunit_3_0_while_waiting_for_vhdl_2017/log1.jpg b/docs/blog/img/log1.jpg similarity index 100% rename from 
docs/posts/2017_11_07_vunit_3_0_while_waiting_for_vhdl_2017/log1.jpg rename to docs/blog/img/log1.jpg diff --git a/docs/posts/2017_11_07_vunit_3_0_while_waiting_for_vhdl_2017/log2.jpg b/docs/blog/img/log2.jpg similarity index 100% rename from docs/posts/2017_11_07_vunit_3_0_while_waiting_for_vhdl_2017/log2.jpg rename to docs/blog/img/log2.jpg diff --git a/docs/posts/2017_11_07_vunit_3_0_while_waiting_for_vhdl_2017/log3.jpg b/docs/blog/img/log3.jpg similarity index 100% rename from docs/posts/2017_11_07_vunit_3_0_while_waiting_for_vhdl_2017/log3.jpg rename to docs/blog/img/log3.jpg diff --git a/docs/posts/2017_11_07_vunit_3_0_while_waiting_for_vhdl_2017/log4.jpg b/docs/blog/img/log4.jpg similarity index 100% rename from docs/posts/2017_11_07_vunit_3_0_while_waiting_for_vhdl_2017/log4.jpg rename to docs/blog/img/log4.jpg diff --git a/docs/posts/2017_11_07_vunit_3_0_while_waiting_for_vhdl_2017/log5.jpg b/docs/blog/img/log5.jpg similarity index 100% rename from docs/posts/2017_11_07_vunit_3_0_while_waiting_for_vhdl_2017/log5.jpg rename to docs/blog/img/log5.jpg diff --git a/docs/posts/2017_10_31_vunit_3_0_color_logging/log_example.jpg b/docs/blog/img/log_example.jpg similarity index 100% rename from docs/posts/2017_10_31_vunit_3_0_color_logging/log_example.jpg rename to docs/blog/img/log_example.jpg diff --git a/docs/posts/2018_02_12_vunit3/log_output.png b/docs/blog/img/log_output.png similarity index 100% rename from docs/posts/2018_02_12_vunit3/log_output.png rename to docs/blog/img/log_output.png diff --git a/docs/posts/2018_02_12_vunit3/logging_hierarchy.png b/docs/blog/img/logging_hierarchy.png similarity index 100% rename from docs/posts/2018_02_12_vunit3/logging_hierarchy.png rename to docs/blog/img/logging_hierarchy.png diff --git a/docs/posts/2016_02_21_improving_vhdl_testbench_design_with_message_passing/low_throughput.png b/docs/blog/img/low_throughput.png similarity index 100% rename from docs/posts/2016_02_21_improving_vhdl_testbench_design_with_message_passing/low_throughput.png rename to docs/blog/img/low_throughput.png diff --git a/docs/posts/2017_11_23_vunit_matlab_integration/figure.jpg b/docs/blog/img/matlab_figure.jpg similarity index 100% rename from docs/posts/2017_11_23_vunit_matlab_integration/figure.jpg rename to docs/blog/img/matlab_figure.jpg diff --git a/docs/posts/2016_02_21_improving_vhdl_testbench_design_with_message_passing/message_passing_model.png b/docs/blog/img/message_passing_model.png similarity index 100% rename from docs/posts/2016_02_21_improving_vhdl_testbench_design_with_message_passing/message_passing_model.png rename to docs/blog/img/message_passing_model.png diff --git a/docs/posts/2016_02_21_improving_vhdl_testbench_design_with_message_passing/orange_man.jpeg b/docs/blog/img/orange_man.jpeg similarity index 100% rename from docs/posts/2016_02_21_improving_vhdl_testbench_design_with_message_passing/orange_man.jpeg rename to docs/blog/img/orange_man.jpeg diff --git a/docs/posts/2017_12_14_vunit_bfms _as_simple_as_emailing/parallel_recipe.jpg b/docs/blog/img/parallel_recipe.jpg similarity index 100% rename from docs/posts/2017_12_14_vunit_bfms _as_simple_as_emailing/parallel_recipe.jpg rename to docs/blog/img/parallel_recipe.jpg diff --git a/docs/blog/img/repositories_providing_tests.png b/docs/blog/img/repositories_providing_tests.png new file mode 100644 index 000000000..919e95e58 Binary files /dev/null and b/docs/blog/img/repositories_providing_tests.png differ diff --git a/docs/posts/2017_12_14_vunit_bfms 
_as_simple_as_emailing/sequential_recipe.jpg b/docs/blog/img/sequential_recipe.jpg similarity index 100% rename from docs/posts/2017_12_14_vunit_bfms _as_simple_as_emailing/sequential_recipe.jpg rename to docs/blog/img/sequential_recipe.jpg diff --git a/docs/posts/2018_07_22_sigasi_deepens_its_commitment_to_the_vunit_testing_framework/image.png b/docs/blog/img/sigasi_deep.png similarity index 100% rename from docs/posts/2018_07_22_sigasi_deepens_its_commitment_to_the_vunit_testing_framework/image.png rename to docs/blog/img/sigasi_deep.png diff --git a/docs/posts/2018_09_22_sigasi_adds_full_vunit_support/image.png b/docs/blog/img/sigasi_full.png similarity index 100% rename from docs/posts/2018_09_22_sigasi_adds_full_vunit_support/image.png rename to docs/blog/img/sigasi_full.png diff --git a/docs/posts/2017_12_14_vunit_bfms _as_simple_as_emailing/the_brain.jpg b/docs/blog/img/the_brain.jpg similarity index 100% rename from docs/posts/2017_12_14_vunit_bfms _as_simple_as_emailing/the_brain.jpg rename to docs/blog/img/the_brain.jpg diff --git a/docs/posts/2016_02_21_improving_vhdl_testbench_design_with_message_passing/third_interface.png b/docs/blog/img/third_interface.png similarity index 100% rename from docs/posts/2016_02_21_improving_vhdl_testbench_design_with_message_passing/third_interface.png rename to docs/blog/img/third_interface.png diff --git a/docs/posts/2015_10_08_who_is_using_UVM/uvm.png b/docs/blog/img/uvm.png similarity index 100% rename from docs/posts/2015_10_08_who_is_using_UVM/uvm.png rename to docs/blog/img/uvm.png diff --git a/docs/posts/2017_12_14_vunit_bfms _as_simple_as_emailing/image.jpg b/docs/blog/img/vunit_emailing.jpg similarity index 100% rename from docs/posts/2017_12_14_vunit_bfms _as_simple_as_emailing/image.jpg rename to docs/blog/img/vunit_emailing.jpg diff --git a/docs/posts/2016_01_26_welcome_to_our_new_website/vunit_github_io.png b/docs/blog/img/vunit_github_io.png similarity index 100% rename from docs/posts/2016_01_26_welcome_to_our_new_website/vunit_github_io.png rename to docs/blog/img/vunit_github_io.png diff --git a/docs/posts/2017_11_23_vunit_matlab_integration/image.jpg b/docs/blog/img/vunit_matlab.jpg similarity index 100% rename from docs/posts/2017_11_23_vunit_matlab_integration/image.jpg rename to docs/blog/img/vunit_matlab.jpg diff --git a/docs/posts/2018_07_22_sigasi_deepens_its_commitment_to_the_vunit_testing_framework/vunit_popularity.png b/docs/blog/img/vunit_popularity.png similarity index 100% rename from docs/posts/2018_07_22_sigasi_deepens_its_commitment_to_the_vunit_testing_framework/vunit_popularity.png rename to docs/blog/img/vunit_popularity.png diff --git a/docs/posts/2017_09_28_sigasi_adds_support_for_vunit_testing_framework/image.jpg b/docs/blog/img/vunit_sigasistudio.jpg similarity index 100% rename from docs/posts/2017_09_28_sigasi_adds_support_for_vunit_testing_framework/image.jpg rename to docs/blog/img/vunit_sigasistudio.jpg diff --git a/docs/posts/2018_07_22_sigasi_deepens_its_commitment_to_the_vunit_testing_framework/vunit_view.png b/docs/blog/img/vunit_view.png similarity index 100% rename from docs/posts/2018_07_22_sigasi_deepens_its_commitment_to_the_vunit_testing_framework/vunit_view.png rename to docs/blog/img/vunit_view.png diff --git a/docs/posts/2017_11_07_vunit_3_0_while_waiting_for_vhdl_2017/image.jpg b/docs/blog/img/vunit_waiting.jpg similarity index 100% rename from docs/posts/2017_11_07_vunit_3_0_while_waiting_for_vhdl_2017/image.jpg rename to docs/blog/img/vunit_waiting.jpg diff --git 
a/docs/posts/2018_03_22_vunit_community_developed_bfms/image.png b/docs/blog/img/vunit_wishbone.png similarity index 100% rename from docs/posts/2018_03_22_vunit_community_developed_bfms/image.png rename to docs/blog/img/vunit_wishbone.png diff --git a/docs/posts/2018_07_22_sigasi_deepens_its_commitment_to_the_vunit_testing_framework/world.png b/docs/blog/img/world.png similarity index 100% rename from docs/posts/2018_07_22_sigasi_deepens_its_commitment_to_the_vunit_testing_framework/world.png rename to docs/blog/img/world.png diff --git a/docs/blog/index.rst b/docs/blog/index.rst new file mode 100644 index 000000000..4dbe37f04 --- /dev/null +++ b/docs/blog/index.rst @@ -0,0 +1,12 @@ +.. _blog: + +Blog +================ +The Blog + +.. toctree:: + :titlesonly: + :glob: + :reversed: + + * \ No newline at end of file diff --git a/docs/ci/container.rst b/docs/ci/container.rst new file mode 100644 index 000000000..2b8d0e716 --- /dev/null +++ b/docs/ci/container.rst @@ -0,0 +1,115 @@ +.. _continuous_integration:container: + +Containers and/or Virtual Machines +################################## + +The 'classic' approach to virtual machines is through tools such as `VirtualBox `_, +`QEMU `_ or `VMware `_. However, for most use cases sharing complete system +images is overkill. Here, `containerization or operating-system-level virtualization `_ +comes into the game. Without going into technical details, containers are a kind of lightweight virtual machines, and the +most known product that uses the technology is `Docker `_. + +.. HINT:: Products such as `Vagrant `_ are meant to simplify the usage of virtual machines and/or + containers by providing a common (black) box approach. In the end, there are enough open/non-open and free/non-free + solutions for each user/company to choose the one that best fits their needs. From the hardware designer point-of-view, + we 'just' need a box (no matter the exact underlying technology) that includes VUnit and a simulator. + +Contributors of project `GHDL `_ provide ready-to-use docker images at `hub.docker.com/u/ghdl/dashboard `_. +Some of these include not only GHDL but also VUnit. Precisely, ``ghdl/vunit:{mcode|llvm|gcc}`` are images based on Debian +Buster image, with GHDL built from the latest commit of the master branch, and the latest release of VUnit installed through +``pip``. ``ghdl/vunit:{mcode|llvm|gcc}-master`` images include the latest commit of VUnit from the master branch. There are +other ready-to-use images with additional tools. For example, ``ghdl/ext`` includes `GTKWave `_. + +As a result, the burden for the adoption of continuous integration for VUnit users is significantly reduced by using +containers; which are available in GNU/Linux, FreeBSD, Windows and macOS, and are supported on most cloud services +(`GitHub Actions `_, `Travis CI `_, +`AWS `_, `Codefresh `_, etc.) or CI frameworks +(`Jenkins `_, `Drone `_, `GitLab Runner `_, etc.). + +For example, script :vunit_file:`examples/vhdl/docker_runall.sh ` shows how to run all the +:ref:`VHDL examples ` on any x86 platform: + +.. code-block:: bash + + docker run --rm \ + -v /$(pwd)://work \ + -w //work \ + ghdl/vunit:llvm-master sh -c ' \ + VUNIT_SIMULATOR=ghdl; \ + for f in $(find ./ -name 'run.py'); do python3 $f; done \ + ' + +where: + +* ``run``: create and start a container. +* ``--rm``: automatically remove the container when it exits. +* ``-v``: bind mount a volume, to share a folder between the host and the container. 
In this example the current path in the + host is used (``$(pwd)``), and it is bound to `/work` inside the container. Note that both paths must be absolute. +* ``-w``: sets the working directory inside the container, i.e. where the commands we provide as arguments are executed. +* ``ghdl/vunit:llvm-master``: the image we want to create a container from. +* ``sh -c``: the command that is executed as soon as the container is created. + +Note that the arguments to ``sh -c`` are the same commands that you would execute locally, provided that all the dependencies are +installed on the host: + +.. code-block:: bash + + VUNIT_SIMULATOR=ghdl + for f in $(find ./ -name 'run.py'); do python3 $f; done + +.. HINT:: The leading slashes in ``/$(pwd)`` and ``//work`` are only required for the paths to be properly handled in MINGW64 + shells, and are ignored in other shells. See `docker/for-win#1509 `_. + +.. NOTE:: Docker offers two variants: Community Edition (CE) and Enterprise Edition (EE). Any of them can be used. Moreover, + part of Docker is being split into the `Moby project `_. + + * `Announcing Docker Enterprise Edition `_ + * `Introducing Moby Project: a new open-source project to advance the software containerization movement `_ + +.. HINT:: If you don't want to or cannot install Docker, you can still use it online. `Play with Docker `_ + (PWD) *"is a Docker playground which allows users to run Docker commands in a matter of seconds. It provides a free Alpine + Linux Virtual Machine in browser, where you can build and run Docker containers and even create clusters"*. + +.. NOTE:: Both GHDL and VUnit are free software. Docker is almost fully open source, but it depends on the host platform. + See `Is Docker still free and open source? `_. + +.. NOTE:: + + * `What is a container `_ + * `What is docker `_ + * `docs.docker.com/engine/reference `_ + * `run `_ + * `commandline/run `_ + +.. _continuous_integration:container:customizing: + +Customizing existing images +*************************** + +All the (automated) flow to generate images in `ghdl/docker `_ is open source and public. +Hence, any user can learn and extend it. However, many users will want to just add a few dependencies to an existing image, +without the hassle of handling credentials to access `hub.docker.com `_. That can be achieved with +a short ``Dockerfile``. For instance: + +.. code-block:: Dockerfile + + FROM ghdl/vunit:llvm-master + + RUN pip install pytest matplotlib + +Then, in the CI workflow: + +.. code-block:: bash + + docker build -t imageName - < path/to/Dockerfile + docker run ... imageName ... + +Packaging non-FLOSS simulators +****************************** + +Although the licenses of most commercial simulators do not allow sharing ready-to-use docker images, it is straightforward +to mimic the process for in-house usage. Unlike GHDL, many commercial simulators provide a GUI and/or require a GUI for executing +the installer. In those contexts, `mviereck/x11docker `_ and +`mviereck/runx `_ can be useful. +See `mviereck/x11docker#201 `_. + diff --git a/docs/ci/intro.rst b/docs/ci/intro.rst new file mode 100644 index 000000000..7b4d68425 --- /dev/null +++ b/docs/ci/intro.rst @@ -0,0 +1,28 @@ +.. _continuous_integration: + +Introduction +############ + +Because VUnit features the functionality needed to realize continuous and automated testing of HDL code, it is a very valuable +resource in Continuous Integration (CI) environments. Once a project ``run.py`` has been set up, tests can be run in a headless +environment. 
Optionally, a standardized `Xunit `_ style output +can be saved to a file, which allows dynamic interpretation of results and avoids custom (and error-prone) parsing of the logs. +After tests have finished running, the ``test_output.xml`` file can be parsed using standard xUnit test parsers such as +`Jenkins xUnit Plugin `_. + +.. code-block:: console + :caption: Execute VUnit tests on CI server with XML output + + python run.py --xunit-xml test_output.xml + +Furthermore, VUnit can be easily executed on many different platforms (either operating systems or architectures), because it +is written in Python, which is an interpreted language. However, besides your own HDL sources and VUnit, an +`HDL compiler/simulator `_ is required in order to run the tests. Since +most HDL simulators are written using compiled languages, releases are typically platform specific. Hence, installation +and setup might be non-trivial. This is especially so with non-free tools that require license servers to be configured. This +might represent a burden for the adoption of continuous integration in hardware development teams, as it falls into the +category of DevOps. + +Nevertheless, thanks to free and public CI/CD services, along with recent progress on portable development +environment solutions, there are several alternatives to ease the path. In this section, solutions are grouped into three +categories: :ref:`continuous_integration:script`, :ref:`continuous_integration:container` and :ref:`continuous_integration:manual`. diff --git a/docs/ci/manual.rst b/docs/ci/manual.rst new file mode 100644 index 000000000..c164aaa3e --- /dev/null +++ b/docs/ci/manual.rst @@ -0,0 +1,29 @@ +.. _continuous_integration:manual: + +Manual setup +############ + +Since CI/CD services typically provide full-featured Ubuntu/Debian, Windows and/or macOS environments, regular installation +procedures can be used (see :ref:`installing`). That is, an HDL simulator and Python need to be installed by some means. + +.. IMPORTANT:: When installing the development version of VUnit, remember to install the dependencies (see :vunit_file:`requirements.txt`). + +Since GHDL is the only supported open source simulator, most users on GitHub are likely to install it along with VUnit. +There are six possible procedures to set up GHDL: + +* `ghdl.rtfd.io: Releases and sources `_: + + * Use a package manager, such as ``apt`` or ``pacman``. + + * Get and extract a tarball/zipfile from the *latest stable* release: `github.com/ghdl/ghdl/releases/latest `_. + + * Get and extract a tarball/zipfile from the *nightly* pre-release: `github.com/ghdl/ghdl/releases/nightly `_. + + * (On GitHub Actions only) Use Action `ghdl/setup-ghdl-ci `_. + +* Use one of the Docker/OCI images provided in `ghdl/docker `_. + +* Build it from sources: `ghdl.rtfd.io: Building GHDL from Sources `_. + +.. HINT:: Since building GHDL each time is time-consuming, it is recommended to use pre-built tarballs/zipfiles or Docker/OCI + images. Images/containers usually provide the fastest startup time, because all the dependencies can already be pre-installed. diff --git a/docs/ci/script.rst b/docs/ci/script.rst new file mode 100644 index 000000000..db100838a --- /dev/null +++ b/docs/ci/script.rst @@ -0,0 +1,116 @@ +.. _continuous_integration:script: + +Setup/configuration scripts +########################### + +Keeping testing environments up to date with rapidly evolving tools can be time-consuming and can lead to frustration. 
In this +section, scripts and configuration tools to automate the setup and/or configuration of simulators and VUnit are presented. + +.. _continuous_integration:gha: + +GitHub Actions +************** + +GitHub's CI/CD service is named `GitHub Actions `_ (GHA). It allows you to create automated +*workflows* for your repositories, which are defined through `YAML `_ files. Workflows +can be triggered by any event, such as push, issue creation or publication of releases. + +GHA provides virtual machines with GNU/Linux (Ubuntu), Windows or macOS. Hence, it is possible to write the steps/tasks using +the default shells/terminals (*bash*, *PowerShell*, etc.), as in any other CI/CD service. By the same token, any language can +be used (Python, JavaScript, Ruby, Go, Rust, etc.). However, there are also predefined tasks named *Actions*. Those are +written either in *JavaScript/TypeScript* (for any OS) or packaged in a *Container Action* (GNU/Linux only). Some Actions are +provided by GitHub (see `github.com/actions `_), and some are published in the +`GitHub marketplace `_. Nevertheless, any GitHub repository can contain *Actions*. + +Hence, the recommended procedure to create workflows is to pick and reuse existing Actions +(see `docs.github.com/actions `_). + +However, when further customization is required, the procedures explained in sections *Containers and/or Virtual Machines* and/or +*Manual setup* can also be used in GHA workflows. + +.. NOTE:: Implementation differences between *JavaScript* and *Container* actions can be found at + `docs.github.com/actions/creating-actions `_. + +.. NOTE:: GitHub Actions is free (as in *free beer*) for public (open source) repositories. For private repositories, + 2000-3000 minutes are included per month. See section "*Simple, pay-as-you-go pricing*" at `GitHub Actions `_. + +.. _continuous_integration:gha:vunit: + +VUnit Action +============ + +`VUnit Action `_ is a reusable *Action*, which is published in the marketplace +(`github.com/marketplace/actions/vunit-action `_). It helps you build a +workflow for running your HDL testbenches, and then presenting the results. + +To use *VUnit Action* for your project, you need to create a `YAML `_ file (``some_name.yml``) +and place that in a directory named ``.github/workflows`` (located directly under your project root), in the default branch +of your repository. The YAML file should contain, at least, the following piece of code. + +.. code-block:: yaml + + name: VUnit Tests + + on: + push: + pull_request: + + jobs: + + test: + runs-on: ubuntu-latest + steps: + + - uses: actions/checkout@v2 + + - uses: VUnit/vunit_action@v0.1.0 + +.. IMPORTANT:: Currently, VUnit Action is implemented as a *Container Action*. As a result, tests are executed in a Docker/OCI + container, regardless of the CI/CD *host* being `ubuntu-latest`. + +Whenever someone pushes code to the project or makes a pull request, this workflow is triggered. First, the code is checked +out using the `checkout action `_. Then, the *VUnit Action* is triggered +to run the ``run.py`` script located in the root of your repository. If the VUnit run script is located elsewhere, you specify +it in the YAML file: + +.. code-block:: yaml + + - uses: VUnit/vunit_action@v0.1.0 + with: + run_file: path/to/vunit_run_script.py + +To build trust with the user community by clearly showing that you have tests up and running, we recommend that you add a +badge/shield to the README of your project. It will show the latest status of your tests: + +.. 
code-block:: md + + [![](https://github.com///workflows/VUnit%20Tests/badge.svg)](https://github.com///actions) + +.. HINT:: `shields.io `_ is another badge/shield provider which allows customizing some characteristics, + such as shape, color, labels, icons, etc. When combining shields corresponding to different services, it is suggested to + use *shields.io* in order to get a homogeneous result. Moreover, *shields.io* provides ready-to-copy snippets for multiple + languages (Markdown, reStructuredText, HTML, etc.). + +Clicking the badge/shield will take you to a list of workflow runs, and then further to the results of those runs: + +.. figure:: ../blog/img/gha_flow.png + :align: center + :alt: Presenting GHA test results. + + Presenting GHA test results. + +Self-hosted runners +=================== + +By default, GitHub Actions workflows are executed on GitHub's servers. However, it is possible to set up so-called +*self-hosted runners*. Those are machines owned by users/developers/organizations/companies, where a client service is +executed. Then, users can assign specific workflows to be executed on self-hosted runners. See `docs.github.com/actions/hosting-your-own-runners `_. + +As explained in `docs.github.com/actions/hosting-your-own-runners: Self-hosted runner security with public repositories `_, +it is strongly discouraged to use self-hosted runners with public repositories, in order to avoid PRs executing potentially +dangerous code. That is mainly because self-hosted runners have access to the tools available on the host. Yet, for that same +reason, using self-hosted runners is a suitable solution for having CI with non-FLOSS simulators. + +.. IMPORTANT:: VUnit is currently tested in CI with GHDL only. Specific companies provide a limited set of licenses for + non-FLOSS simulators, which some developers can use locally. Ideally, companies interested in supporting VUnit would + provide a machine to serve a self-hosted runner in a private fork. If you want to contribute, get in touch! diff --git a/docs/ci/usecases.rst b/docs/ci/usecases.rst new file mode 100644 index 000000000..78dc547ce --- /dev/null +++ b/docs/ci/usecases.rst @@ -0,0 +1,66 @@ +.. _continuous_integration:usecases: + +Practical use cases +################### + +Workflow `tests.yml `_ from repo `VUnit/tdd-intro `_ +showcases five procedures to set up continuous integration in GitHub Actions, using GHDL and VUnit as a regression framework. +The entrypoint to all the jobs is the same `pytest `_ script (`test.py `_); +thus, all jobs are equivalent solutions. Tests called through pytest can be defined in any language: VUnit run.py scripts, +bash/shell scripts, makefiles, etc. + +It is suggested for new users to clone/fork this template repository, and then remove the jobs they don't want to use. Since +all are equivalent, using a single job is enough to have HDL designs tested. However, it might be useful to have designs +tested on different platforms. + +lin-vunit +********* + +Uses *Docker Action* `VUnit/vunit_action `_, based on image ``ghdl/vunit:llvm`` (see +`ghdl/docker: VUnit `_). It takes a +single optional argument: the path to the ``run.py``. See `VUnit/vunit_action: README.md `_ +for further info. + +This is the most straightforward solution, and the one with the fastest startup. + +lin-docker +********** + +Docker-based job, which can be used in any CI system. An (optional) `Dockerfile `_ is used to add some packages on top of image ``ghdl/vunit:llvm`` (see :ref:`continuous_integration:container:customizing`). 
However, the same procedure can be used with any other image. + +This is equivalent to *lin-vunit*, but it is slightly more verbose. + +lin-setup +********* + +Uses *JavaScript Action* `ghdl/setup-ghdl-ci `_ to install GHDL on the Ubuntu host/VM. +Then, additional system packages and Python packages are installed explicitly. + +Compared to previous approaches, in this case runtime dependencies are not pre-installed. As a result, startup is slightly +slower. + +win-setup +********* + +Uses Actions `ghdl/setup-ghdl-ci `_ and `msys2/setup-msys2 `_ +to install latest *nightly* GHDL, other MSYS2 packages and Python packages in a *clean* MINGW64 environment. + +This is the recommended approach to run tests on Windows. Action setup-msys2 caches installed packages/dependencies +automatically. + +win-stable +********** + +The *traditional* procedure of downloading a tarball/zipfile from GHDL's latest *stable* release. Additional Python packages +are installed explicitly. + +This is more verbose than the previous approach, but it's currently the only solution to use latest *stable* GHDL without +building it from sources. + +Repositories using VUnit for CI +******************************* + +This is a non-exhaustive list of projects where VUnit is used for testing HDL designs: + +* `VUnit/vunit `_ +* `ghdl/ghdl-cosim `_ diff --git a/docs/cli.rst b/docs/cli.rst index 81799dda8..c326a7739 100644 --- a/docs/cli.rst +++ b/docs/cli.rst @@ -2,6 +2,7 @@ Command Line Interface ====================== + A :class:`VUnit ` object can be created from command line arguments by using the :meth:`from_argv ` method effectively creating a custom @@ -194,87 +195,6 @@ Test Output Path Length Environment Variables output path margin on Windows. By default the test output path is shortened to allow a 100 character margin. -.. _continuous_integration: - -Continuous Integration (CI) Environment ---------------------------------------- - -Because VUnit features the functionality needed to realize continuous and automated testing of HDL code, it is a very valuable resource in continuous integration environments. Once a project ``run.py`` has been setup, tests can be run in a headless environment with standardized `Xunit `_ style output to a file; which allows dynamic interpretation of results avoiding custom (and error-prone) parsing of the logs. - -.. code-block:: console - :caption: Execute VUnit tests on CI server with XML output - - python run.py --xunit-xml test_output.xml - -After tests have finished running, the ``test_output.xml`` file can be parsed -using standard xUnit test parsers such as `Jenkins xUnit Plugin `_. - -Furthermore, VUnit can be easily executed in many different platforms (either operating systems or architectures), because it is written in Python, which is an interpreted language. However, besides the sources and VUnit, a `HDL compiler/simulator `_ is required in order to run the tests. Due to performance, all the HDL simulators are written in compiled languages, which makes the releases platform specific. I.e., each simulator needs to be specifically compiled for a given architecture and operating system. This might represent a burden for the adoption of continuous integration in hardware development teams, as it falls into the category of dev ops. - -Nevertheless, thanks to the striking research about portable development environment solutions in the last decade, there are a bunch of alternatives to ease the path. 
The 'classic' approach is to use virtual machines with tools -such as `VirtualBox `_, `QEMU `_ or `VMware `_. This is still an option, but for most use cases sharing complete system images is overkill. Here, `containerization or operating-system-level virtualization `_ comes into the game. Without going into technical details, containers are a kind of lightweight virtual machines, and the most known product that uses such a technology is `Docker `_. Indeed, products such as `Vagrant `_ are meant to simplify the usage of virtual machines and/or containers by providing a common (black) box approach. In the end, there are enough open/non-open and free/non-free solutions for each user/company to choose the one that best fits their needs. From the hardware designer point-of-view, we 'just' need a box (no matter the exact underlying technology) that includes VUnit and a simulator. - -Fortunately, contributors of project `GHDL `_ provide ready-to-use docker images at `hub.docker.com/u/ghdl/dashboard `_. Some of these include not only GHDL but also VUnit: - -* ``ghdl/ext:vunit``: Debian Stretch image with GHDL built from the latest commit of the master branch, and the latest release of VUnit installed through ``pip``. -* ``ghdl/ext:vunit-master``: Debian Stretch with GHDL built from the latest commit of the master branch, and the latest commit of VUnit from the master branch. - -As a result, the burden for the adoption of continuous integration for VUnit users is reduced to using docker; which is available in GNU/Linux, FreeBSD, Windows and macOS, and is supported in most cloud services (`Travis CI `_, `AWS `_, `Codefresh `_, etc.) or CI frameworks (`Jenkins `_, `Drone `_, `GitLab Runner `_, etc.). - -For example, script :vunit_file:`examples/vhdl/docker_runall.sh ` shows how to run all the VHDL examples in any x86 platform: - -.. code-block:: bash - - docker run --rm -t \ - -v /$(pwd)://work \ - -w //work \ - ghdl/ext:vunit-master sh -c ' \ - VUNIT_SIMULATOR=ghdl; \ - for f in $(find ./ -name 'run.py'); do python3 $f; done \ - ' - -where: - -* ``run``: create and start a container. -* ``--rm``: automatically remove the container when it exits. -* ``-t``: allocate a pseudo-TTY, to get the stdout of the container forwarded. -* ``-v``: bind mount a volume, to share a folder between the host and the container. In this example the current path in the host is used (``$(pwd)``), and it is bind to `/work` inside the container. Note that both paths must be absolute. -* ``-w``: sets the working directory inside the container, i.e. where the commands we provide as arguments are executed. -* ``ghdl/ext:vunit-master``: the image we want to create a container from. -* ``sh -c``: the command that is executed as soon as the container is created. - -Note that: - -* The arguments to ``sh -c`` are the same commands that you would execute locally, shall all the dependencies be installed in the host: - - .. code-block:: bash - - VUNIT_SIMULATOR=ghdl - for f in $(find ./ -name 'run.py'); do python3 $f; done - -* The leading slashes in ``/$(pwd)`` and ``//work`` are only required for the paths to be properly handled in MINGW shells, and are ignored in other shells. See `docker/for-win#1509 `_. - -Final comments: - -* All the (automated) flow to generate ``ghdl`` docker images is open source and public, in order to let any user learn and extend it. You can easily replicate it to build you own images with other development dependencies you use. 
- * There are ready-to-use images available with additional tools on top of GHDL and VUnit. For example, ``ghdl/ext:vunit-gtkwave`` includes `GTKWave `_. -* Although the licenses of most commercial simulators do not allow to share ready-to-use docker images, it is straightforward to mimic the process. - * If the installation of a tool needs to be executed with a GUI, a slightly different approach is required. See `Propietary applications inside a docker container `_ -* Both GHDL and VUnit are free software. Docker is almost fully open source, but this depends on the host platform. See `Is Docker still free and open source? `_. - -Further info: - -* `What is a container `_ -* `What is docker `_ -* `docs.docker.com/engine/reference `_ - * `run `_ - * `commandline/run `_ -* Docker offers two variants Community Edition (CE) and Enterprise Edition (EE). Any of them can be used. Moreover, part of Docker is being split to `Moby project `_. - * `Announcing Docker Enterprise Edition `_ - * `Introducing Moby Project: a new open-source project to advance the software containerization movement `_ -* If you don't want or cannot install docker, you can still use it online. `Play with Docker `_ (PWD) *"is a Docker playground which allows users to run Docker commands in a matter of seconds. It gives the experience of having a free Alpine Linux Virtual Machine in browser, where you can build and run Docker containers and even create clusters"*. - - .. _json_export: JSON Export diff --git a/docs/conf.py b/docs/conf.py index 64c6eb1cb..2de32d8bf 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,360 +1,86 @@ # -*- coding: utf-8 -*- -# vunit build configuration file, created by -# `ablog start` on Fri Jan 29 21:15:19 2016. -# -# Note that not all possible configuration values are present in this file. -# All configuration values have a default; values that are commented out -# serve to show the default. - import os import sys -import ablog -import alabaster -from os.path import join - -# -- General ABlog Options ---------------------------------------------------- - -# A path relative to the configuration directory for blog archive pages. -# blog_path = 'blog' - -# The “title” for the blog, used in acthive pages. Default is ``'Blog'``. -blog_title = u"VUnit Blog" - -# Base URL for the website, required for generating feeds. -# e.g. blog_baseurl = "http://example.com/" -blog_baseurl = "http://vunit.github.io" - -# Choose to archive only post titles. Archiving only titles can speed -# up project building. -# blog_archive_titles = False - -# -- Blog Authors, Languages, and Locations ----------------------------------- - -# A dictionary of author names mapping to author full display names and -# links. Dictionary keys are what should be used in ``post`` directive -# to refer to the author. Default is ``{}``. -blog_authors = {"Olof Kraigher": ("kraigher", None), "Lars Asplund": ("lasplund", None)} - - -# A dictionary of language code names mapping to full display names and -# links of these languages. Similar to :confval:`blog_authors`, dictionary -# keys should be used in ``post`` directive to refer to the locations. -# Default is ``{}``. -# blog_languages = { -# 'en': ('English', None), -# } - - -# A dictionary of location names mapping to full display names and -# links of these locations. Similar to :confval:`blog_authors`, dictionary -# keys should be used in ``post`` directive to refer to the locations. -# Default is ``{}``. 
-# blog_locations = { -# 'Earth': ('The Blue Planet', 'http://en.wikipedia.org/wiki/Earth), -# } - - -# -- Blog Post Related -------------------------------------------------------- - -# post_date_format = '%b %d, %Y' - - -# Number of paragraphs (default is ``1``) that will be displayed as an excerpt -# from the post. Setting this ``0`` will result in displaying no post excerpt -# in archive pages. This option can be set on a per post basis using -# post_auto_excerpt = 1 - -# Index of the image that will be displayed in the excerpt of the post. -# Default is ``0``, meaning no image. Setting this to ``1`` will include -# the first image, when available, to the excerpt. This option can be set -# on a per post basis using :rst:dir:`post` directive option ``image``. -# post_auto_image = 0 - -# Number of seconds (default is ``5``) that a redirect page waits before -# refreshing the page to redirect to the post. -# post_redirect_refresh = 5 - -# When ``True``, post title and excerpt is always taken from the section that -# contains the :rst:dir:`post` directive, instead of the document. This is the -# behavior when :rst:dir:`post` is used multiple times in a document. Default -# is ``False``. -# post_always_section = False - -# -- ABlog Sidebars ------------------------------------------------------- - -# There are seven sidebars you can include in your HTML output. -# postcard.html provides information regarding the current post. -# recentposts.html lists most recent five posts. Others provide -# a link to a archive pages generated for each tag, category, and year. -# In addition, there are authors.html, languages.html, and locations.html -# sidebars that link to author and location archive pages. -html_sidebars = { - "**": [ - "about.html", - "quicklinks.html", - "postcard.html", - "navigation.html", - "recentposts.html", - "tagcloud.html", - "categories.html", - "archives.html", - "searchbox.html", - ] -} - -# -- Blog Feed Options -------------------------------------------------------- - -# Turn feeds by setting :confval:`blog_baseurl` configuration variable. -# Choose to create feeds per author, location, tag, category, and year, -# default is ``False``. -# blog_feed_archives = False - -# Choose to display full text in blog feeds, default is ``False``. -# blog_feed_fulltext = False - -# Blog feed subtitle, default is ``None``. -# blog_feed_subtitle = None - -# Choose to feed only post titles, default is ``False``. -# blog_feed_titles = False - -# Specify number of recent posts to include in feeds, default is ``None`` -# for all posts. -# blog_feed_length = None - -# -- Font-Awesome Options ----------------------------------------------------- - -# ABlog templates will use of Font Awesome icons if one of the following -# is ``True`` - -# Link to `Font Awesome`_ at `Bootstrap CDN`_ and use icons in sidebars -# and post footers. Default: ``False`` -fontawesome_link_cdn = ( - "https://maxcdn.bootstrapcdn.com/font-awesome/4.5.0/css/font-awesome.min.css" -) - -# Sphinx_ theme already links to `Font Awesome`_. Default: ``False`` -# fontawesome_included = False - -# Alternatively, you can provide the path to `Font Awesome`_ :file:`.css` -# with the configuration option: fontawesome_css_file -# Path to `Font Awesome`_ :file:`.css` (default is ``None``) that will -# be linked to in HTML output by ABlog. -# fontawesome_css_file = None - -# -- Disqus Integration ------------------------------------------------------- - -# You can enable Disqus_ by setting ``disqus_shortname`` variable. 
-# Disqus_ short name for the blog. -disqus_shortname = "vunitframework" - -# Choose to disqus pages that are not posts, default is ``False``. -# disqus_pages = False - -# Choose to disqus posts that are drafts (without a published date), -# default is ``False``. -# disqus_drafts = False +from pathlib import Path # -- Sphinx Options ----------------------------------------------------------- # If your project needs a minimal Sphinx version, state it here. -needs_sphinx = "1.2" +needs_sphinx = "3.0" -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. extensions = [ "sphinx.ext.autodoc", "sphinx.ext.extlinks", "sphinx.ext.intersphinx", "sphinx.ext.todo", "sphinxarg.ext", # Automatic argparse command line argument documentation - "alabaster", - "ablog", ] -autodoc_default_flags = ["members"] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates", ablog.get_html_templates_path()] +autodoc_default_options = { + "members": True, +} # The suffix(es) of source filenames. -source_suffix = ".rst" - -# The encoding of source files. -# source_encoding = 'utf-8-sig' +source_suffix = { + ".rst": "restructuredtext", + # '.txt': 'markdown', + # '.md': 'markdown', +} -# The master toctree document. master_doc = "index" -# General information about the project. project = u"VUnit" -copyright = u"2014-2018, Lars Asplund" -author = u"lasplund" +copyright = u"2014-2020, Lars Asplund" +author = u"LarsAsplund, kraigher and contributors" -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. version = "" -# The full version, including alpha/beta/rc tags. release = "" -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. language = None -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. exclude_patterns = ["release_notes/*.*"] -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. pygments_style = "sphinx" -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. 
todo_include_todos = False - # -- Options for HTML output ---------------------------------------------- -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" +html_theme_path = ["."] +html_theme = "_theme" -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. html_theme_options = { - "github_button": True, - "github_type": "star", + "analytics_id": "UA-112393863-1", + "logo_only": True, + "vcs_pageview_mode": "blob", + "style_nav_header_background": "#0c479d", + "home_breadcrumbs": False, +} +html_context = { + "conf_py_path": "%s/" % Path(__file__).parent.name, + "display_github": True, "github_user": "VUnit", "github_repo": "vunit", - "description": "A test framework for HDL", - "logo": "VUnit_logo_420x420.png", - "logo_name": True, - "travis_button": True, - "page_width": "75%", + "github_version": "master/", } -# Add any paths that contain custom themes here, relative to this directory. -html_theme_path = [alabaster.get_path()] - -html_context = {"css_files": ["_static/style.css"]} -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -html_favicon = join(html_static_path[0], "vunit.ico") - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True +html_logo = str(Path(html_static_path[0]) / "VUnit_logo_175x175.png") -# If true, the index is split into individual pages for each letter. -# html_split_index = False +html_favicon = str(Path(html_static_path[0]) / "vunit.ico") -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. 
The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' +# Output file base name for HTML help builder. +htmlhelp_basename = "VUnitDoc" -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} +# -- InterSphinx ---------------------------------------------------------- -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' +intersphinx_mapping = { + "python": ("https://docs.python.org/3.8/", None), +} -# Output file base name for HTML help builder. -htmlhelp_basename = "vunitdoc" +# -- ExtLinks ------------------------------------------------------------- extlinks = { "vunit_example": ("https://github.com/VUnit/vunit/tree/master/examples/%s/", ""), diff --git a/docs/contributing.rst b/docs/contributing.rst index 6d100fb07..674ea752e 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -24,7 +24,7 @@ Copyright is given by adding the copyright notice to the beginning of each file. # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. # - # Copyright (c) 2014-2018, Lars Asplund lars.anders.asplund@gmail.com + # Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com Python related @@ -132,9 +132,10 @@ style. For example :vunit_file:`examples/vhdl/uart/src/uart_tx.vhd` Continous Integration --------------------- -VUnit runs all test and lint checks on both Windows using AppVeyor and -Linux using Travis CI with several versions of Python. GHDL is used to -run the VHDL tests of all our libraries and examples. +VUnit runs all test and lint checks on both GNU/Linux and Windows +with several versions of Python (typically, the oldest and newest +supported by both VUnit and the CI environment). `GHDL `_ +is used to run the VHDL tests of all our libraries and examples. All tests will be automatically run on any pull request and they are expected to pass for us to approve the merge. @@ -204,7 +205,7 @@ To create a new tagged release commit: commits have to be merged into origin/master. -Travic CI makes a release by uploading a new package to PyPI when a tag +The CI service makes a release by uploading a new package to PyPI when a tag named ``vX.Y.Z`` is found in Git. A new release will not be made if: - The ``X.Y.Z`` release is already on PyPI. diff --git a/docs/data_types/cosim.png b/docs/data_types/cosim.png new file mode 100644 index 000000000..87eadc889 Binary files /dev/null and b/docs/data_types/cosim.png differ diff --git a/docs/data_types/ext_integer_vector.rst b/docs/data_types/ext_integer_vector.rst new file mode 100644 index 000000000..3f8a5967b --- /dev/null +++ b/docs/data_types/ext_integer_vector.rst @@ -0,0 +1,7 @@ +.. _ext_integer_vector_pkg: + +*external integer vector* package +--------------------------------- +.. 
literalinclude:: ../../vunit/vhdl/data_types/src/api/external_integer_vector_pkg.vhd + :language: vhdl + :lines: 7- diff --git a/docs/data_types/ext_string.rst b/docs/data_types/ext_string.rst new file mode 100644 index 000000000..0f5216d2c --- /dev/null +++ b/docs/data_types/ext_string.rst @@ -0,0 +1,7 @@ +.. _ext_string_pkg: + +*external string* package +------------------------- +.. literalinclude:: ../../vunit/vhdl/data_types/src/api/external_string_pkg.vhd + :language: vhdl + :lines: 7- \ No newline at end of file diff --git a/docs/data_types/external_api.rst b/docs/data_types/external_api.rst new file mode 100644 index 000000000..f3f467e3d --- /dev/null +++ b/docs/data_types/external_api.rst @@ -0,0 +1,26 @@ +.. figure:: cosim.png + :alt: Interfacing VHDL and foreign languages with VUnit + :align: center + +In version 4.3.0 the prototype of an external VHDL API was introduced. This experimental feature exposes a subset of resources from some of VUnit's internal data types. Creation functions for these data types accept two new optional parameters: + +* **mode**: selects between *internal* (default), external access (*extacc*) or external function (*extfnc*). +* **eid**: when the selected mode is external, this parameter is to be used between VHDL and the foreign language as a unique identifier for the vector. + +In mode *extacc*, VHDL retrieves a pointer to an already allocated buffer, and then data is directly read/written from VHDL. Hence, it is required for the pointer, that must be allocated externally, to be accesible to both the VHDL and the foreign language. The expected use case for this mode is to wrap the VHDL simulation in C/C++ or Python, so that the simulation is a child of the main process. + +Conversely, mode *extfnc* is to be used when data is not available in the same memory space where the VHDL simulation is executed. Each time a value needs to be read/written, a function callback is executed (``read_*``/``write_*``), providing ``eid`` as an argument. + +List of types that are currently in the external VHDL API: + +* **string_ptr**, and **byte_vector_ptr** as an alias (:ref:`External string API `) +* **integer_vector_ptr** (:ref:`External integer vector API `) + +.. important:: By default, bodies of the external API functions/procedures include forced failure assertions. Hence, using ``mode/=internal`` without providing a *bridge* to a foreign language will make tests fail. Bridges must be provided through `vu.add_builtins(external=)`, where `` defaults to ``{"string": False, "integer_vector": False}``. Each field should contain a list of VHDL files to replace the *dummy* default. See `VUnit/cosim `_ for reference implementations of bridges and examples. + +.. toctree:: + :hidden: + + ext_string + ext_integer_vector + diff --git a/docs/data_types/integer_array.rst b/docs/data_types/integer_array.rst new file mode 100644 index 000000000..24da1eb42 --- /dev/null +++ b/docs/data_types/integer_array.rst @@ -0,0 +1,8 @@ +.. _integer_array_pkg: + +*integer_array* package +----------------------- + +.. literalinclude:: ../../vunit/vhdl/data_types/src/integer_array_pkg.vhd + :language: vhdl + :lines: 7- \ No newline at end of file diff --git a/docs/data_types/queue.rst b/docs/data_types/queue.rst new file mode 100644 index 000000000..cbec0bc67 --- /dev/null +++ b/docs/data_types/queue.rst @@ -0,0 +1,8 @@ +.. _queue_pkg: + +*queue* package +--------------- + +.. 
literalinclude:: ../../vunit/vhdl/data_types/src/queue_pkg.vhd + :language: vhdl + :lines: 7- \ No newline at end of file diff --git a/docs/data_types/user_guide.rst b/docs/data_types/user_guide.rst index f952e9a22..017812f00 100644 --- a/docs/data_types/user_guide.rst +++ b/docs/data_types/user_guide.rst @@ -1,35 +1,30 @@ .. _data_types_library: Data Types -========== +########## -Introduction ------------- VUnit comes with a number of convenient data types included: -:queue_t: A queue (fifo) which to which any VHDL primitive data type - can be pushed and popped by serializing the data to bytes - internally. This queue can be used to for example push - expected data from a driver process to a checker process in - a test bench. :ref:`Queue API ` +* **queue_t** (:ref:`Queue API `) + Queue (FIFO) to which any VHDL primitive data type can be pushed and + popped (by serializing data to bytes internally). This queue can be + used to, for example, push expected data from a driver process to a + checker process in a test bench. -:integer_array_t: An dynamic array of integers in up to 3 dimensions. - Supports dynamic append and reshape operations. - Supports reading and writing data to/from *.csv* or *.raw* byte files. - :ref:`Integer array API ` +* **integer_array_t** (:ref:`Integer array API `) + Dynamic array of integers in up to 3 dimensions. Supports dynamic + append and reshape operations, and reading/writing data to/from + *.csv* or *.raw* byte files. -.. _queue_pkg: +.. toctree:: + :hidden: -queue package -------------- -.. literalinclude:: ../../vunit/vhdl/data_types/src/queue_pkg.vhd - :language: vhdl - :lines: 7- + queue + integer_array -.. _integer_array_pkg: +.. _data_types_library:external: -integer_array package ----------------------- -.. literalinclude:: ../../vunit/vhdl/data_types/src/integer_array_pkg.vhd - :language: vhdl - :lines: 7- +External VHDL API +================= + +.. include:: external_api.rst diff --git a/docs/documentation.rst b/docs/documentation.rst deleted file mode 100644 index e4255abc2..000000000 --- a/docs/documentation.rst +++ /dev/null @@ -1,12 +0,0 @@ -Documentation -============= -.. toctree:: - :maxdepth: 2 - - user_guide - cli - py/ui - vhdl_libraries - examples - -* :ref:`genindex` diff --git a/docs/genindex.rst b/docs/genindex.rst new file mode 100644 index 000000000..398e88470 --- /dev/null +++ b/docs/genindex.rst @@ -0,0 +1,6 @@ +.. # This file is a placeholder and will be replaced + +.. _genindex: + +Index +##### diff --git a/docs/index.rst b/docs/index.rst index 624727327..f5d25eb0d 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,42 +1,67 @@ -.. vunit index file, created by `ablog start` on Fri Jan 29 21:15:19 2016. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. +.. centered:: |shieldRepo|_ |shieldPyPI|_ |shieldGitter|_ |shieldTwitter|_ + +.. |shieldRepo| image:: https://img.shields.io/badge/VUnit/vunit-0c479d.svg?longCache=true&style=flat-square&logo=github +.. _shieldRepo: https://github.com/VUnit/vunit + +.. |shieldPyPI| image:: https://img.shields.io/pypi/v/vunit_hdl?longCache=true&style=flat-square&label=PyPI&logo=PyPI&logoColor=FFF +.. _shieldPyPI: https://pypi.org/project/vunit-hdl/ + +.. |shieldGitter| image:: https://img.shields.io/gitter/room/VUnit/vunit.svg?longCache=true&style=flat-square&logo=gitter&logoColor=4db797&color=4db797 +.. _shieldGitter: https://gitter.im/VUnit/vunit + +.. 
|shieldTwitter| image:: https://img.shields.io/twitter/follow/VUnitFramework.svg?longCache=true&style=flat-square&color=1DA1F2&label=%40VUnitFramework&logo=twitter&logoColor=fff +.. _shieldTwitter: https://www.twitter.com/VUnitFramework + +VUnit: a test framework for HDL +=============================== -VUnit -===== VUnit is an open source unit testing framework for VHDL/SystemVerilog released under the terms of Mozilla Public License, v. 2.0. It features the functionality needed to realize continuous and automated testing of your HDL code. VUnit doesn't replace but rather complements -traditional testing methodologies by supporting a "test early and -often" approach through automation. :ref:`Read more ` - -.. image:: vunit_demo.gif +traditional testing methodologies by supporting a *"test early and +often"* approach through automation. :ref:`Read more ` -Latest Posts ------------- +.. image:: _static/vunit_demo.gif -.. postlist:: 5 - :date: %B %d, %Y - :format: {title} by {author} on {date} - :list-style: disk - :excerpts: - - -.. `toctree` directive, below, contains list of non-post `.rst` files. - This is how they appear in Navigation sidebar. Note that directive - also contains `:hidden:` option so that it is not included inside the page. +.. toctree:: + :hidden: - Posts are excluded from this directive so that they aren't double listed - in the sidebar both under Navigation and Recent Posts. + blog/index .. toctree:: + :caption: About :hidden: about installing - documentation testimonials/testimonials - contributing + +.. toctree:: + :caption: Documentation + :hidden: + + user_guide + cli + py/ui + vhdl_libraries + examples + +.. toctree:: + :caption: Continuous Integration + :hidden: + + ci/intro + ci/script + ci/container + ci/manual + ci/usecases + +.. toctree:: + :caption: Appendix + :hidden: + release_notes + contributing + genindex diff --git a/docs/installing.rst b/docs/installing.rst index 6c4750922..c18982b93 100644 --- a/docs/installing.rst +++ b/docs/installing.rst @@ -2,6 +2,50 @@ Installing ========== + +Requirements +------------ + +VUnit supports VHDL (93, 2002, 2008 and 2019), Verilog and (experimentally) SystemVerilog; and it is known to work on +GNU/Linux, Windows and Mac OS; on x86, x64, armv7 and aarch64. Full VUnit functionality requires Python (3.6 or higher) and +a simulator supported by the VUnit Python test runner (see list below). However, VUnit can run with limited functionality +entirely within VHDL using the :doc:`VHDL test runner <./run/user_guide>`. + +Simulators: + +.. admonition:: Only VHDL + + - `Aldec Riviera-PRO`_: Tested with Riviera-PRO 2015.06, 2015.10, 2016.02, 2016.10 (x64/x86). + - `Aldec Active-HDL`_: Tested with Active-HDL 9.3, 10.1, 10.2, 10.3 (x64/x86) + - `GHDL`_ + + - Works with versions >= 0.33 + - Tested with LLVM and mcode backends, gcc backend might work aswell. + - Integrated support for using `GTKWave`_ to view waveforms. + +.. admonition:: VHDL or SystemVerilog + + - `Mentor Graphics ModelSim/Questa`_: Tested with 10.1 - 10.5 + +.. CAUTION:: + + - `Cadence Incisive`_ (**Experimental**) + + - Community contribution by `Colin Marquardt `_. + VUnit maintainers do not have access to this simulator to verify the functionality. + + - Run ``incisive_vhdl_fixup.py`` to remove VHDL constructs that are + not compatible with Incisive + +.. _Aldec Riviera-PRO: https://www.aldec.com/en/products/functional_verification/riviera-pro +.. _Aldec Active-HDL: https://www.aldec.com/en/products/fpga_simulation/active-hdl +.. 
_Mentor Graphics ModelSim/Questa: http://www.mentor.com/products/fv/modelsim/ +.. _Cadence Incisive: https://www.cadence.com/content/cadence-www/global/en_US/home/tools/system-design-and-verification/simulation-and-testbench-verification/incisive-enterprise-simulator.html +.. _GHDL: https://github.com/ghdl/ghdl +.. _GTKWave: http://gtkwave.sourceforge.net/ + +.. _installing_pypi: + Using the Python Package Manager -------------------------------- The recommended way to get VUnit is to install the :ref:`latest stable release ` via `pip `__: @@ -16,6 +60,7 @@ Once installed, VUnit may be updated to new versions via a similar method: > pip install -U vunit_hdl +.. _installing_master: Using the Development Version ----------------------------- @@ -23,9 +68,9 @@ Start by cloning our `GIT repository on GitHub .. code-block:: console - git clone --recursive https://github.com/VUnit/vunit.git + git clone --recurse-submodules https://github.com/VUnit/vunit.git -The ``--recursive`` option initializes `OSVVM `__ which is included as a submodule in the VUnit repository. +The ``--recurse-submodules`` option initializes `OSVVM `__ which is included as a submodule in the VUnit repository. To be able to import :class:`VUnit ` in your ``run.py`` script you need to make it visible to Python or else the following error @@ -59,8 +104,11 @@ There are three methods to make VUnit importable in your ``run.py`` script.: sys.path.append("/path/to/vunit_repo_root/") import vunit +.. _installing_dev: + For VUnit Developers -~~~~~~~~~~~~~~~~~~~~ +-------------------- + For those interested in development of VUnit, it is best to install VUnit so that the sources from git are installed in-place instead of to the Python site-packages directory. This can be achieved by using diff --git a/docs/py/opts.rst b/docs/py/opts.rst index e52e9a8eb..8646c401c 100644 --- a/docs/py/opts.rst +++ b/docs/py/opts.rst @@ -7,10 +7,13 @@ Compilation options allow customization of compilation behavior. Since simulator differing options available, generic options may be specified through this interface. The following compilation options are known. -``ghdl.flags`` +``ghdl.a_flags`` Extra arguments passed to ``ghdl -a`` command during compilation. Must be a list of strings. +``ghdl.flags`` + Deprecated alias of ``ghdl.a_flags``. It will be removed in future releases. + ``incisive.irun_vhdl_flags`` Extra arguments passed to the Incisive ``irun`` command when compiling VHDL files. Must be a list of strings. @@ -43,6 +46,11 @@ The following compilation options are known. Extra arguments passed to Active HDL ``vcom`` command. Must be a list of strings. +``enable_coverage`` + Enables compilation flags needed for code coverage and tells VUnit to handle + the coverage files created at compilation. Only used for coverage with GHDL. + Must be a boolean value. Default is False. + .. note:: Only affects source files added *before* the option is set. @@ -70,17 +78,22 @@ The following simulation options are known. Must be a boolean value. Default is False. When coverage is enabled VUnit only takes the minimal steps required - to make the simulator creates an unique coverage file for the - simulation run. The VUnit users must still set :ref:`sim + to make the simulator create a unique coverage file for the + simulation run. + + For RiverieraPRO and Modelsim/Questa, the VUnit users must still set :ref:`sim ` and :ref:`compile ` options to configure the simulator specific coverage options they want. 
The reason for this to allow the VUnit users maximum control of their coverage settings. + For GHDL with GCC backend there is less configurability for coverage, and all + necessary flags are set by the the ``enable_coverage`` sim and compile options. + An example of a ``run.py`` file using coverage can be found :vunit_example:`here `. - .. note: Supported by RivieraPRO and Modelsim/Questa simulators. + .. note: Supported by GHDL with GCC backend, RivieraPRO and Modelsim/Questa simulators. ``pli`` diff --git a/docs/py/vunit.rst b/docs/py/vunit.rst index 230788481..96256f6e7 100644 --- a/docs/py/vunit.rst +++ b/docs/py/vunit.rst @@ -1,5 +1,5 @@ vunit.ui -============== +======== .. autoclass:: vunit.ui.VUnit() :exclude-members: add_preprocessor, diff --git a/docs/release_notes/4.4.0.rst b/docs/release_notes/4.4.0.rst new file mode 100644 index 000000000..078eea7ec --- /dev/null +++ b/docs/release_notes/4.4.0.rst @@ -0,0 +1,20 @@ +- Update year and update license test to 2020. +- Bump OSVVM to latest version. +- Add possibility to configure random stalls for AXI Stream. :vunit_issue:`557` +- JSON-for-VHDL: use base16 encodings. :vunit_issue:`595` +- First release requiring Python 3.6 or higher. Python 2.7, 3.4 and 3.5 are not supported anymore. :vunit_issue:`596` :vunit_issue:`601` +- Start adding type annotations to the Python sources; add mypy (a static type checker) to the list of linters. :vunit_issue:`601` :vunit_issue:`626` +- Move co-simulation (VHPIDIRECT) sources (implementation and example) to `VUnit/cosim `_. :vunit_issue:`606` +- ghdl interface: with ``ghdl_e``, save runtime args to JSON file. :vunit_issue:`606` +- Add missing mode assertions to ``-93`` sources of ``integer_vector_ptr`` and ``string_ptr``. :vunit_issue:`607` +- Add method ``get_simulator_name()`` to public Python API. :vunit_issue:`610` +- Start replacing ``join``, ``dirname``, etc. with ``pathlib``. :vunit_issue:`612` :vunit_issue:`626` :vunit_issue:`632` +- Fix parsing adjacent hyphens in a literal. :vunit_issue:`616` +- Fix ``ghdl.flags`` error in documentation. :vunit_issue:`620` +- Rename compile option ``ghdl.flags`` to ``ghdl.a_flags``. :vunit_issue:`624` +- Move ``project.Library`` to separate file. +- Remove Travis CI and AppVeyor, use GitHub Actions only. +- Remove Sphinx extension ABlog; handle posts as regular pages in subdir ``blog``. +- Update GHDL to v0.37 in Windows CI jobs. +- Fix regression in GHDL (``prefix of array attribute must be an object name``). :vunit_issue:`631` :vunit_issue:`635` +- Add code coverage support for GHDL. :vunit_issue:`627` diff --git a/docs/user_guide.rst b/docs/user_guide.rst index 3a2df0c3e..ffbc30884 100644 --- a/docs/user_guide.rst +++ b/docs/user_guide.rst @@ -5,6 +5,7 @@ User Guide Introduction ------------ + VUnit is invoked by a user-defined project specified in a Python script. At minimum, a VUnit project consists of a set of HDL source files mapped to libraries. The project serves as single point of entry @@ -66,6 +67,11 @@ how to choose which one to use :ref:`here `. VHDL Test Benches ----------------- + +.. HINT:: + + Example code available at :vunit_example:`vhdl/user_guide`. + In its simplest form a VUnit VHDL test bench looks like this: .. literalinclude:: ../examples/vhdl/user_guide/tb_example.vhd @@ -101,13 +107,15 @@ From ``tb_example_many.vhd``'s ``run()`` calls, two test cases are created: * ``lib.tb_example_many.test_pass`` * ``lib.tb_example_many.test_fail`` - -The above example code can be found in :vunit_example:`vhdl/user_guide`. - .. 
_sv_test_benches: SystemVerilog Test Benches -------------------------- + +.. HINT:: + + Example code available at :vunit_example:`verilog/user_guide`. + In its simplest form a VUnit SystemVerilog test bench looks like this: .. literalinclude:: ../examples/verilog/user_guide/tb_example.sv @@ -125,44 +133,39 @@ Each test is run in an individual simulation. Putting multiple tests in the same test bench is a good way to share a common test environment. -The above example code can be found in :vunit_example:`verilog/user_guide`. - .. _test_bench_scanning: Scanning for Test Benches ------------------------- + VUnit will recognize a module or entity as a test bench and run it if it has a ``runner_cfg`` generic or parameter. A SystemVerilog test bench using the ``TEST_SUITE`` macro will have a ``runner_cfg`` parameter created by the macro and thus match the criteria. -A warning will be given if the test bench entity or module name does -not match the pattern ``tb_*`` or ``*_tb``. +.. WARNING:: A warning will be given if: + + * The test bench entity or module name **does not match** the pattern + ``tb_*`` or ``*_tb``. -A warning will be given if the name *does* match the above pattern but -lacks a ``runner_cfg`` generic or parameter preventing it to be run -by VUnit. + * The name **does match** the above pattern **but lacks** a ``runner_cfg`` + generic or parameter preventing it to be run by VUnit. .. _special_generics: Special generics/parameters --------------------------- -A VUnit test bench can have several special generics/parameters. -Optional generics are filled in automatically by VUnit if detected on -the test bench. -- ``runner_cfg : string`` +- [**required**] ``runner_cfg : string``, used by VUnit to pass private information + between Python and the HDL test runner. - Required by VUnit to pass private information between Python and the HDL test runner +- [**optional**] ``output_path : string``, path to the output directory of the + current test; this is useful to create additional output files that can + be checked after simulation by a **post_check** Python function. -- ``output_path : string`` - - Optional path to the output directory of the current test. - This is useful to create additional output files that can be checked - after simulation by a **post_check** Python function. - -- ``tb_path : string`` - - Optional path to the directory containing the test bench. - This is useful to read input data with a known location relative to +- [**optional**] ``tb_path : string``, path to the directory containing the test + bench; this is useful to read input data with a known location relative to the test bench location. + +.. HINT:: Optional generics/parameters are filled in automatically by VUnit if detected + on the test bench. diff --git a/docs/verification_components/user_guide.rst b/docs/verification_components/user_guide.rst index 8dc67ee0c..705e47b53 100644 --- a/docs/verification_components/user_guide.rst +++ b/docs/verification_components/user_guide.rst @@ -41,26 +41,15 @@ single VC typically implements several VCIs. For example an AXI-lite VC or RAM master VC can support the same generic bus master and synchronization VCI while also supporting their own bus specific VCIs. -The main benefit of generic VCIs is to reduce redundancy between VCs -and allow the user to write generic code that will work regardless of -the specific VC instance used. For example control registers might be -defined as a RAM-style bus in a submodule but be mapped to an AXI-lite -interface on the top level. 
The same testbench code for talking to the -submodule can be used in both the submodule test bench as well as the -top level test bench regardless of the fact that two different VCs -have been used. Without generic VCIs copy pasting the code and -changing the type of read/write procedure call would be required. - -Included verification component interfaces (VCIs): - -:ref:`Bus master ` - Generic read and write of bus with address and byte enable. - -:ref:`Stream ` - Push and pop of data stream without address. - -:ref:`Synchronization ` - Wait for time and events. +.. TIP:: The main benefit of generic VCIs is to reduce redundancy between + VCs and allow the user to write generic code that will work regardless + of the specific VC instance used. For example control registers might be + defined as a RAM-style bus in a submodule but be mapped to an AXI-lite + interface on the top level. The same testbench code for talking to the + submodule can be used in both the submodule test bench as well as the + top level test bench regardless of the fact that two different VCs + have been used. Without generic VCIs copy pasting the code and + changing the type of read/write procedure call would be required. Neither a VC or a VCI there is the :ref:`memory model ` which is a model of a memory space such as the DRAM address space in a @@ -75,11 +64,35 @@ reading and writing data. memory_model +.. _verification_component_interfaces: + +Verification Component Interfaces +--------------------------------- + +A verification component interface (VCI) is a procedural interface to +a VC. A VCI is defined as procedures in a package file. Several VCs can +support the same generic VCI to enable code re-use both for the users +and the VC-developers. + +List of VCIs included in the main repository: + +Included verification component interfaces (VCIs): + +* :ref:`Bus master `: generic read and write of bus with address and byte enable. +* :ref:`Stream `: push and pop of data stream without address. +* :ref:`Synchronization `: wait for time and events. + +.. toctree:: + :maxdepth: 1 + :hidden: + + vci .. _verification_components: Verification Components ----------------------- + A verification component (VC) is an entity that is normally connected to the DUT via a bus signal interface such as AXI-Lite. The main test sequence in the test bench sends messages to the VCs that will then @@ -92,21 +105,3 @@ sending to and receiving messages from the VC. Each VC instance is associated with a handle that is created in the test bench and set as a generic on the VC instantiation. The handle is given as and argument to the procedure calls to direct messages to the specfic VC instance. - -.. _verification_component_interfaces: - -Verification Component Interfaces ---------------------------------- -A verification component interface (VCI) is a procedural interface to -a VC. A VCI is defined as procedures in a package file. Several VC can -support the same generic VCI to enable code re-use both for the users -and the VC-developers. - - -.. toctree:: - :maxdepth: 1 - :hidden: - - vci/bus_master - vci/stream - vci/sync diff --git a/docs/verification_components/vci.rst b/docs/verification_components/vci.rst new file mode 100644 index 000000000..3fdf1de33 --- /dev/null +++ b/docs/verification_components/vci.rst @@ -0,0 +1,37 @@ +.. _bus_master_vci: + +Bus Master VCI +============== + +.. 
literalinclude:: ../../vunit/vhdl/verification_components/src/bus_master_pkg.vhd + :caption: Bus master verification component interface + :language: vhdl + :lines: 7- + +.. _stream_vci: + +Stream Master VCI +================= + +.. literalinclude:: ../../vunit/vhdl/verification_components/src/stream_master_pkg.vhd + :caption: Stream master verification component interface + :language: vhdl + :lines: 7- + +Stream Slave VCI +================ + +.. literalinclude:: ../../vunit/vhdl/verification_components/src/stream_slave_pkg.vhd + :caption: Stream slave verification component interface + :language: vhdl + :lines: 7- + +.. _sync_vci: + +Synchronization VCI +=================== + +.. literalinclude:: ../../vunit/vhdl/verification_components/src/sync_pkg.vhd + :caption: Synchronization verification component interface + :language: vhdl + :lines: 7- diff --git a/docs/verification_components/vci/bus_master.rst b/docs/verification_components/vci/bus_master.rst deleted file mode 100644 index cbc359631..000000000 --- a/docs/verification_components/vci/bus_master.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. _bus_master_vci: - -Bus Master VCI -============== - -.. literalinclude:: ../../../vunit/vhdl/verification_components/src/bus_master_pkg.vhd - :caption: Bus master verification component interface - :language: vhdl - :lines: 7- diff --git a/docs/verification_components/vci/stream.rst b/docs/verification_components/vci/stream.rst deleted file mode 100644 index f8a0a273b..000000000 --- a/docs/verification_components/vci/stream.rst +++ /dev/null @@ -1,17 +0,0 @@ -.. _stream_vci: - -Stream Master VCI -================= - -.. literalinclude:: ../../../vunit/vhdl/verification_components/src/stream_master_pkg.vhd - :caption: Stream master verification component interface - :language: vhdl - :lines: 7- - -Stream Slave VCI -================ - -.. literalinclude:: ../../../vunit/vhdl/verification_components/src/stream_slave_pkg.vhd - :caption: Stream slave verification component interface - :language: vhdl - :lines: 7- diff --git a/docs/verification_components/vci/sync.rst b/docs/verification_components/vci/sync.rst deleted file mode 100644 index 6cc0a0e81..000000000 --- a/docs/verification_components/vci/sync.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. _sync_vci: - -Synchronization VCI -=================== - -.. 
literalinclude:: ../../../vunit/vhdl/verification_components/src/sync_pkg.vhd - :caption: Synchronization verification component interface - :language: vhdl - :lines: 7- diff --git a/examples/vhdl/array_axis_vcs/src/fifo.vhd b/examples/vhdl/array_axis_vcs/src/fifo.vhd index 84a204d81..61e6ccb3d 100644 --- a/examples/vhdl/array_axis_vcs/src/fifo.vhd +++ b/examples/vhdl/array_axis_vcs/src/fifo.vhd @@ -35,27 +35,17 @@ architecture arch of fifo is begin --- Assertions - process(clkw, clkr) + PslChecks : block is constant dx : std_logic_vector(d'left downto 0) := (others => 'X'); constant du : std_logic_vector(d'left downto 0) := (others => 'U'); begin - if rising_edge(clkw) then - if ( wr and ( d?=dx or d?=du ) ) then - assert false report "wrote X|U to fIfO" severity failure; - end if; - if (f and wr) then - assert false report "wrote to fIfO while full" severity failure; - end if; - end if; - if rising_edge(clkr) then - if (e and rd) then - assert false report "Read from fIfO while empty" severity failure; - end if; - end if; - end process; - --- + assert always (not rst and wr -> not (d ?= dx or d ?= du))@rising_edge(clkw) + report "wrote X|U to FIFO"; + assert always (not rst and f -> not wr)@rising_edge(clkw) + report "Wrote to FIFO while full"; + assert always (not rst and e -> not rd)@rising_edge(clkr) + report "Read from FIFO while empty"; + end block PslChecks; process(clkw) begin if rising_edge(clkw) then diff --git a/examples/vhdl/array_axis_vcs/src/test/tb_axis_loop.vhd b/examples/vhdl/array_axis_vcs/src/test/tb_axis_loop.vhd index 301c8a38c..51379e104 100644 --- a/examples/vhdl/array_axis_vcs/src/test/tb_axis_loop.vhd +++ b/examples/vhdl/array_axis_vcs/src/test/tb_axis_loop.vhd @@ -39,11 +39,6 @@ architecture tb of tb_axis_loop is constant master_axi_stream : axi_stream_master_t := new_axi_stream_master(data_length => data_width); constant slave_axi_stream : axi_stream_slave_t := new_axi_stream_slave(data_length => data_width); - -- Signals to/from the UUT from/to the verification components - - signal m_valid, m_ready, m_last, s_valid, s_ready, s_last : std_logic; - signal m_data, s_data : std_logic_vector(data_length(master_axi_stream)-1 downto 0); - -- tb signals and variables signal clk, rst, rstn : std_logic := '0'; @@ -134,53 +129,15 @@ begin -- - vunit_axism: entity vunit_lib.axi_stream_master - generic map ( - master => master_axi_stream - ) - port map ( - aclk => clk, - tvalid => m_valid, - tready => m_ready, - tdata => m_data, - tlast => m_last - ); - - vunit_axiss: entity vunit_lib.axi_stream_slave - generic map ( - slave => slave_axi_stream - ) - port map ( - aclk => clk, - tvalid => s_valid, - tready => s_ready, - tdata => s_data, - tlast => s_last - ); - --- - - uut: entity work.axis_buffer + uut_vc: entity work.vc_axis generic map ( - data_width => data_width, - fifo_depth => 4 + m_axis => master_axi_stream, + s_axis => slave_axi_stream, + data_width => data_width ) port map ( - s_axis_clk => clk, - s_axis_rstn => rstn, - s_axis_rdy => m_ready, - s_axis_data => m_data, - s_axis_valid => m_valid, - s_axis_strb => "1111", - s_axis_last => m_last, - - m_axis_clk => clk, - m_axis_rstn => rstn, - m_axis_valid => s_valid, - m_axis_data => s_data, - m_axis_rdy => s_ready, - m_axis_strb => open, - m_axis_last => s_last + clk => clk, + rstn => rstn ); end architecture; diff --git a/examples/vhdl/array_axis_vcs/src/test/vc_axis.vhd b/examples/vhdl/array_axis_vcs/src/test/vc_axis.vhd new file mode 100644 index 000000000..4497eb3f0 --- /dev/null +++ 
b/examples/vhdl/array_axis_vcs/src/test/vc_axis.vhd @@ -0,0 +1,82 @@ +-- This Source Code Form is subject to the terms of the Mozilla Public +-- License, v. 2.0. If a copy of the MPL was not distributed with this file, +-- You can obtain one at http://mozilla.org/MPL/2.0/. +-- +-- Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com + +library ieee; +context ieee.ieee_std_context; + +library vunit_lib; +context vunit_lib.vunit_context; +context vunit_lib.vc_context; + +entity vc_axis is + generic ( + m_axis : axi_stream_master_t; + s_axis : axi_stream_slave_t; + data_width : natural := 32; + fifo_depth : natural := 4 + ); + port ( + clk, rstn: in std_logic + ); +end entity; + +architecture arch of vc_axis is + + signal m_valid, m_ready, m_last, s_valid, s_ready, s_last : std_logic; + signal m_data, s_data : std_logic_vector(data_length(m_axis)-1 downto 0); + +begin + + vunit_axism: entity vunit_lib.axi_stream_master + generic map ( + master => m_axis + ) + port map ( + aclk => clk, + tvalid => m_valid, + tready => m_ready, + tdata => m_data, + tlast => m_last + ); + + vunit_axiss: entity vunit_lib.axi_stream_slave + generic map ( + slave => s_axis + ) + port map ( + aclk => clk, + tvalid => s_valid, + tready => s_ready, + tdata => s_data, + tlast => s_last + ); + +-- + + uut: entity work.axis_buffer + generic map ( + data_width => data_width, + fifo_depth => fifo_depth + ) + port map ( + s_axis_clk => clk, + s_axis_rstn => rstn, + s_axis_rdy => m_ready, + s_axis_data => m_data, + s_axis_valid => m_valid, + s_axis_strb => "1111", + s_axis_last => m_last, + + m_axis_clk => clk, + m_axis_rstn => rstn, + m_axis_valid => s_valid, + m_axis_data => s_data, + m_axis_rdy => s_ready, + m_axis_strb => open, + m_axis_last => s_last + ); + +end architecture; diff --git a/examples/vhdl/coverage/run.py b/examples/vhdl/coverage/run.py index ca598a881..9fe002907 100644 --- a/examples/vhdl/coverage/run.py +++ b/examples/vhdl/coverage/run.py @@ -6,10 +6,13 @@ from pathlib import Path from vunit import VUnit +from subprocess import call def post_run(results): results.merge_coverage(file_name="coverage_data") + if VU.get_simulator_name() == "ghdl": + call(["gcovr", "coverage_data"]) VU = VUnit.from_argv() @@ -17,10 +20,12 @@ def post_run(results): LIB = VU.add_library("lib") LIB.add_source_files(Path(__file__).parent / "*.vhd") +LIB.set_sim_option("enable_coverage", True) + LIB.set_compile_option("rivierapro.vcom_flags", ["-coverage", "bs"]) LIB.set_compile_option("rivierapro.vlog_flags", ["-coverage", "bs"]) LIB.set_compile_option("modelsim.vcom_flags", ["+cover=bs"]) LIB.set_compile_option("modelsim.vlog_flags", ["+cover=bs"]) -LIB.set_sim_option("enable_coverage", True) +LIB.set_compile_option("enable_coverage", True) VU.main(post_run=post_run) diff --git a/examples/vhdl/external_buffer/cp.py b/examples/vhdl/external_buffer/cp.py deleted file mode 100644 index c8afd3ec7..000000000 --- a/examples/vhdl/external_buffer/cp.py +++ /dev/null @@ -1,28 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this file, -# You can obtain one at http://mozilla.org/MPL/2.0/. 
-# -# Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com - -from subprocess import check_call -from shutil import which -from pathlib import Path -from vunit import VUnit - -SRC_PATH = Path(__file__).parent / "src" - -C_OBJ = SRC_PATH / "cp.o" -# Compile C application to an object -check_call([which("gcc"), "-fPIC", "-c", str(SRC_PATH / "cp.c"), "-o", str(C_OBJ)]) - -# Enable the external feature for strings -VU = VUnit.from_argv(vhdl_standard="2008", compile_builtins=False) -VU.add_builtins({"string": True}) - -LIB = VU.add_library("lib") -LIB.add_source_files(SRC_PATH / "tb_extcp_*.vhd") - -# Add the C object to the elaboration of GHDL -VU.set_sim_option("ghdl.elab_flags", ["-Wl," + str(C_OBJ)]) - -VU.main() diff --git a/examples/vhdl/external_buffer/run.py b/examples/vhdl/external_buffer/run.py deleted file mode 100644 index 70f90120d..000000000 --- a/examples/vhdl/external_buffer/run.py +++ /dev/null @@ -1,76 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this file, -# You can obtain one at http://mozilla.org/MPL/2.0/. -# -# Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com - -""" -External Buffer ---------------- - -`Interfacing with foreign languages (C) through VHPIDIRECT `_ - -An array of type ``uint8_t`` is allocated in a C application and some values -are written to the first ``1/3`` positions. Then, the VHDL simulation is -executed, where the (external) array/buffer is used. - -In the VHDL testbenches, two vector pointers are created, each of them using -a different access mechanism (``extfunc`` or ``extacc``). One of them is used to copy -the first ``1/3`` elements to positions ``[1/3, 2/3)``, while incrementing each value -by one. The second one is used to copy elements from ``[1/3, 2/3)`` to ``[2/3, 3/3)``, -while incrementing each value by two. - -When the simulation is finished, the C application checks whether data was successfully -copied/modified. The content of the buffer is printed both before and after the -simulation. 
-""" - -from subprocess import check_call -from shutil import which -from pathlib import Path -from vunit import VUnit, ROOT - -SRC_PATH = Path(__file__).parent / "src" -EXT_SRCS = Path(ROOT) / "vunit" / "vhdl" / "data_types" / "src" / "external" / "ghdl" - -# Compile C applications to an objects -C_IOBJ = SRC_PATH / "imain.o" -C_BOBJ = SRC_PATH / "bmain.o" - -for val in [["int32_t", C_IOBJ], ["uint8_t", C_BOBJ]]: - check_call( - [ - which("gcc"), - "-fPIC", - "-DTYPE=" + val[0], - "-I", - EXT_SRCS, - "-c", - SRC_PATH / "main.c", - "-o", - val[1], - ] - ) - -# Enable the external feature for strings/byte_vectors and integer_vectors -VU = VUnit.from_argv(vhdl_standard="2008", compile_builtins=False) -VU.add_builtins({"string": True, "integer": True}) - -LIB = VU.add_library("lib") -LIB.add_source_files(SRC_PATH / "tb_ext_*.vhd") - -# Add the C object to the elaboration of GHDL -for tb in LIB.get_test_benches(pattern="*tb_ext*", allow_empty=False): - tb.set_sim_option( - "ghdl.elab_flags", - ["-Wl," + str(C_BOBJ), "-Wl,-Wl,--version-script=" + str(EXT_SRCS / "grt.ver")], - overwrite=True, - ) -for tb in LIB.get_test_benches(pattern="*tb_ext*_integer*", allow_empty=False): - tb.set_sim_option( - "ghdl.elab_flags", - ["-Wl," + str(C_IOBJ), "-Wl,-Wl,--version-script=" + str(EXT_SRCS / "grt.ver")], - overwrite=True, - ) - -VU.main() diff --git a/examples/vhdl/external_buffer/src/cp.c b/examples/vhdl/external_buffer/src/cp.c deleted file mode 100644 index 2db97ec66..000000000 --- a/examples/vhdl/external_buffer/src/cp.c +++ /dev/null @@ -1,94 +0,0 @@ -/* -External Buffer - -Interfacing with foreign languages (C) through VHPIDIRECT: -https://ghdl.readthedocs.io/en/latest/using/Foreign.html - -Two arrays of type uint8_t are allocated and some values are written to the first. -Then, the VHDL simulation is executed, where the (external) array/buffer -is used. When the simulation is finished, the results are checked. The content of -the buffer is printed both before and after the simulation. - -NOTE: This file is expected to be used along with tb_extcp_byte_vector.vhd or tb_extcp_string.vhd -*/ - -#include -#include -#include - -extern int ghdl_main (int argc, char **argv); - -uint8_t *D[2]; -const uint32_t length = 10; - -// Check procedure, to be executed when GHDL exits. -// The simulation is expected to copy the first 1/3 elements to positions [1/3, 2/3), -// while incrementing each value by one, and then copy elements from [1/3, 2/3) to -// [2/3, 3/3), while incrementing each value by two. -static void exit_handler(void) { - uint i; - for ( i=0; i -#include -#include -#include "vhpidirect_user.h" - -const uint32_t length = 5; - -/* - Check procedure, to be executed when GHDL exits. - The simulation is expected to copy the first 1/3 elements to positions [1/3, 2/3), - while incrementing each value by one, and then copy elements from [1/3, 2/3) to - [2/3, 3/3), while incrementing each value by two. 
-*/ -static void exit_handler(void) { - unsigned i, j, z, k; - TYPE expected, got; - k = 0; - for (j=0; j<3; j++) { - k += j; - for(i=0; i positive'value( jsonGetString(Content, "Image/0") ), - image_height => positive'value( jsonGetString(Content, "Image/1") ), - dump_debug_data => jsonGetBoolean(Content, "dump_debug_data") ); - end function decode; + -- Image dimensions as strings, get from the content from the JSON file + info("Image: " & jsonGetString(JSONContent, "Image/0") & ',' & jsonGetString(JSONContent, "Image/1")); - constant img : img_t := decode(JSONContent); + -- Some other content, deep in the JSON + info("Platform/ML505/FPGA: " & jsonGetString(JSONContent, "Platform/ML505/FPGA")); + info("Platform/KC705/IIC/0/Devices/0/Name: " & jsonGetString(JSONContent, "Platform/KC705/IIC/0/Devices/0/Name")); + end procedure; - -- get array of integers from JSON content - constant img_arr : integer_vector := jsonGetIntegerArray(JSONContent, "Image"); + procedure run_record_test(JSONContent : T_JSON) is + type img_t is record + image_width : positive; + image_height : positive; + dump_debug_data : boolean; + end record img_t; + + -- fill img_t with content extracted from a JSON input + constant img : img_t := ( + image_width => positive'value( jsonGetString(JSONContent, "Image/0") ), + image_height => positive'value( jsonGetString(JSONContent, "Image/1") ), + dump_debug_data => jsonGetBoolean(JSONContent, "dump_debug_data") + ); + begin + -- Image dimensions in a record, filled with data from the stringified generic + info("Image: " & integer'image(img.image_width) & ',' & integer'image(img.image_height)); + end procedure; + + variable JSONContent : T_JSON; -begin - main: process begin test_runner_setup(runner, runner_cfg); while test_suite loop - if run("test") then - -- Content extracted from the stringified generic - info("JSONContent: " & lf & JSONContent.Content); - - -- Full path of the JSON file, and extracted content - info("tb_path & tb_cfg_file: " & tb_path & tb_cfg_file); - info("JSONFileContent: " & lf & JSONFileContent.Content); - - -- Image dimensions in a record, filled by function decode with data from the stringified generic - info("Image: " & integer'image(img.image_width) & ',' & integer'image(img.image_height)); - - -- Integer array, extracted by function decode_array with data from the stringified generic - for i in 0 to img_arr'length-1 loop - info("Image array [" & integer'image(i) & "]: " & integer'image(img_arr(i))); - end loop; - - -- Image dimensions as strings, get from the content from the JSON file - info("Image: " & jsonGetString(JSONFileContent, "Image/0") & ',' & jsonGetString(JSONFileContent, "Image/1")); - - -- Some other content, deep in the JSON sources - info("Platform/ML505/FPGA: " & jsonGetString(JSONContent, "Platform/ML505/FPGA")); - info("Platform/ML505/FPGA: " & jsonGetString(JSONFileContent, "Platform/ML505/FPGA")); - - info("Platform/KC705/IIC/0/Devices/0/Name: " & jsonGetString(JSONContent, "Platform/KC705/IIC/0/Devices/0/Name")); - info("Platform/KC705/IIC/0/Devices/0/Name: " & jsonGetString(JSONFileContent, "Platform/KC705/IIC/0/Devices/0/Name")); + info("RAW generic: " & tb_cfg); + if run("stringified JSON generic") then + JSONContent := jsonLoad(tb_cfg); + run_test(JSONContent); + run_record_test(JSONContent); + elsif run("b16encoded stringified JSON generic") then + JSONContent := jsonLoad(tb_cfg); + run_test(JSONContent); + run_record_test(JSONContent); + elsif run("JSON file path generic") then + run_test(jsonLoad(tb_path & tb_cfg)); + 
elsif run("b16encoded JSON file path generic") then + run_test(jsonLoad(tb_cfg)); end if; end loop; test_runner_cleanup(runner); diff --git a/examples/vhdl/vivado/vivado_util.py b/examples/vhdl/vivado/vivado_util.py index 6ef8c5219..ac11b6e27 100644 --- a/examples/vhdl/vivado/vivado_util.py +++ b/examples/vhdl/vivado/vivado_util.py @@ -5,7 +5,7 @@ # Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com import sys -from os.path import join, exists, abspath, dirname +from pathlib import Path from vunit.sim_if.factory import SIMULATOR_FACTORY from vunit.vivado import ( run_vivado, @@ -19,14 +19,16 @@ def add_vivado_ip(vunit_obj, output_path, project_file): Add vivado (and compile if necessary) vivado ip to vunit project. """ - if not exists(project_file): + if not Path(project_file).exists(): print("Could not find vivado project %s" % project_file) sys.exit(1) - standard_library_path = join(output_path, "standard") + opath = Path(output_path) + + standard_library_path = str(opath / "standard") compile_standard_libraries(vunit_obj, standard_library_path) - project_ip_path = join(output_path, "project_ip") + project_ip_path = str(opath / "project_ip") add_project_ip(vunit_obj, project_file, project_ip_path) @@ -34,12 +36,15 @@ def compile_standard_libraries(vunit_obj, output_path): """ Compile Xilinx standard libraries using Vivado TCL command """ - done_token = join(output_path, "all_done.txt") + done_token = str(Path(output_path) / "all_done.txt") simulator_class = SIMULATOR_FACTORY.select_simulator() - if not exists(done_token): - print("Compiling standard libraries into %s ..." % abspath(output_path)) + if not Path(done_token).exists(): + print( + "Compiling standard libraries into %s ..." + % str(Path(output_path).resolve()) + ) simname = simulator_class.name # Vivado calls rivierapro for riviera @@ -47,7 +52,7 @@ def compile_standard_libraries(vunit_obj, output_path): simname = "riviera" run_vivado( - join(dirname(__file__), "tcl", "compile_standard_libs.tcl"), + str(Path(__file__).parent / "tcl" / "compile_standard_libs.tcl"), tcl_args=[ simname, simulator_class.find_prefix().replace("\\", "/"), @@ -57,12 +62,13 @@ def compile_standard_libraries(vunit_obj, output_path): else: print( - "Standard libraries already exists in %s, skipping" % abspath(output_path) + "Standard libraries already exists in %s, skipping" + % str(Path(output_path).resolve()) ) for library_name in ["unisim", "unimacro", "unifast", "secureip", "xpm"]: - path = join(output_path, library_name) - if exists(path): + path = str(Path(output_path) / library_name) + if Path(path).exists(): vunit_obj.add_external_library(library_name, path) with open(done_token, "w") as fptr: @@ -79,16 +85,16 @@ def add_project_ip(vunit_obj, project_file, output_path, vivado_path=None, clean returns the list of SourceFile objects added """ - compile_order_file = join(output_path, "compile_order.txt") + compile_order_file = str(Path(output_path) / "compile_order.txt") - if clean or not exists(compile_order_file): + if clean or not Path(compile_order_file).exists(): create_compile_order_file( project_file, compile_order_file, vivado_path=vivado_path ) else: print( "Vivado project Compile order already exists, re-using: %s" - % abspath(compile_order_file) + % str(Path(compile_order_file).resolve()) ) return add_from_compile_order_file(vunit_obj, compile_order_file) diff --git a/tox.ini b/pyproject.toml similarity index 55% rename from tox.ini rename to pyproject.toml index 778d76690..4ed847cb6 100644 --- a/tox.ini +++ 
b/pyproject.toml @@ -1,6 +1,16 @@ +[build-system] +requires = [ + "setuptools >= 35.0.2", + "setuptools_scm >= 2.0.0, <3" +] +build-backend = "setuptools.build_meta" + +[tool.tox] +legacy_tox_ini = """ [tox] -envlist = py{36,37,38}-fmt, py{36,37,38}-{unit}, py{36,37,38}-{lint,docs}, py{36,37,38}-{acceptance,vcomponents}-{activehdl,ghdl,modelsim,rivierapro} +envlist = py{36,37,38}-{fmt,unit,lint,docs}, py{36,37,38}-{acceptance,vcomponents}-{activehdl,ghdl,modelsim,rivierapro}, py{36,37,38}-coverage skip_missing_interpreters = True +isolated_build = True [testenv] recreate=True @@ -12,10 +22,13 @@ deps= lint: pycodestyle lint: pylint lint: mypy + coverage: coverage + coverage: pycodestyle + coverage: pylint + coverage: mypy docs: docutils docs: sphinx docs: sphinx-argparse - docs: ablog setenv= acceptance-activehdl: VUNIT_SIMULATOR=activehdl @@ -24,9 +37,11 @@ setenv= acceptance-rivierapro: VUNIT_SIMULATOR=rivierapro commands= - fmt: {envpython} -m black ./ --exclude 'vunit\/vhdl\/JSON-for-VHDL|\.eggs|\.git|\.hg|\.mypy_cache|\.nox|\.tox|\.venv|_build|buck-out|build|dist' {posargs} + fmt: {envpython} -m black ./ --exclude 'vunit/vhdl/JSON-for-VHDL|.eggs|.git|.hg|.mypy_cache|.nox|.tox|.venv|_build|buck-out|build|dist' {posargs} unit: {envpython} -m pytest -v -ra tests/unit {posargs} lint: {envpython} -m pytest -v -ra tests/lint {posargs} docs: {envpython} tools/build_docs.py {envtmpdir}/docsbuild {posargs} acceptance: {envpython} -m pytest -v -ra tests/acceptance {posargs} vcomponents: {envpython} vunit/vhdl/verification_components/run.py --clean + coverage: {envpython} -m coverage run --branch --source vunit/ -m unittest discover tests/ +""" diff --git a/setup.py b/setup.py index 159e45b33..2011fe539 100644 --- a/setup.py +++ b/setup.py @@ -9,10 +9,16 @@ """ import os +import sys +from pathlib import Path from logging import warning from setuptools import setup -from vunit.about import version, doc -from vunit.builtins import osvvm_is_installed + +# Ensure that the source tree is on the sys path +sys.path.insert(0, str(Path(__file__).parent.resolve())) + +from vunit.about import version, doc # pylint: disable=wrong-import-position +from vunit.builtins import osvvm_is_installed # pylint: disable=wrong-import-position def find_all_files(directory, endings=None): @@ -24,15 +30,15 @@ def find_all_files(directory, endings=None): for filename in filenames: ending = os.path.splitext(filename)[-1] if endings is None or ending in endings: - result.append(os.path.join(root, filename)) + result.append(str(Path(root) / filename)) return result DATA_FILES = [] -DATA_FILES += find_all_files(os.path.join("vunit"), endings=[".tcl"]) -DATA_FILES += find_all_files(os.path.join("vunit", "vhdl")) +DATA_FILES += find_all_files("vunit", endings=[".tcl"]) +DATA_FILES += find_all_files(str(Path("vunit") / "vhdl")) DATA_FILES += find_all_files( - os.path.join("vunit", "verilog"), endings=[".v", ".sv", ".svh"] + str(Path("vunit") / "verilog"), endings=[".v", ".sv", ".svh"] ) DATA_FILES = [os.path.relpath(file_name, "vunit") for file_name in DATA_FILES] diff --git a/tests/acceptance/artificial/verilog/run.py b/tests/acceptance/artificial/verilog/run.py index 7d36e8202..2852d418e 100644 --- a/tests/acceptance/artificial/verilog/run.py +++ b/tests/acceptance/artificial/verilog/run.py @@ -4,14 +4,14 @@ # # Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com -from os.path import join, dirname +from pathlib import Path from vunit.verilog import VUnit -root = dirname(__file__) +ROOT = Path(__file__).parent VU 
= VUnit.from_argv() LIB = VU.add_library("lib") -LIB.add_source_files(join(root, "*.sv"), defines={"DEFINE_FROM_RUN_PY": ""}) +LIB.add_source_files(ROOT / "*.sv", defines={"DEFINE_FROM_RUN_PY": ""}) def configure_tb_with_parameter_config(): @@ -35,7 +35,7 @@ def configure_tb_with_parameter_config(): ) def post_check(output_path): - with open(join(output_path, "post_check.txt"), "r") as fptr: + with (Path(output_path) / "post_check.txt").open("r") as fptr: return fptr.read() == "Test 4 was here" tests[4].add_config( @@ -49,7 +49,7 @@ def post_check(output_path): def configure_tb_same_sim_all_pass(ui): def post_check(output_path): - with open(join(output_path, "post_check.txt"), "r") as fptr: + with (Path(output_path) / "post_check.txt").open("r") as fptr: return fptr.read() == "Test 3 was here" module = ui.library("lib").module("tb_same_sim_all_pass") @@ -59,6 +59,6 @@ def post_check(output_path): configure_tb_with_parameter_config() configure_tb_same_sim_all_pass(VU) LIB.module("tb_other_file_tests").scan_tests_from_file( - join(root, "other_file_tests.sv") + str(ROOT / "other_file_tests.sv") ) VU.main() diff --git a/tests/acceptance/artificial/vhdl/run.py b/tests/acceptance/artificial/vhdl/run.py index 42cff8e43..274d10efb 100644 --- a/tests/acceptance/artificial/vhdl/run.py +++ b/tests/acceptance/artificial/vhdl/run.py @@ -4,14 +4,14 @@ # # Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com -from os.path import join, dirname +from pathlib import Path from vunit import VUnit -root = dirname(__file__) +ROOT = Path(__file__).parent VU = VUnit.from_argv() LIB = VU.add_library("lib") -LIB.add_source_files(join(root, "*.vhd")) +LIB.add_source_files(ROOT / "*.vhd") def configure_tb_with_generic_config(): @@ -33,7 +33,7 @@ def configure_tb_with_generic_config(): ) def post_check(output_path): - with open(join(output_path, "post_check.txt"), "r") as fptr: + with (Path(output_path) / "post_check.txt").open("r") as fptr: return "Test 4 was here" in fptr.read() tests[4].add_config( @@ -45,7 +45,7 @@ def post_check(output_path): def configure_tb_same_sim_all_pass(ui): def post_check(output_path): - with open(join(output_path, "post_check.txt"), "r") as fptr: + with (Path(output_path) / "post_check.txt").open("r") as fptr: return "Test 3 was here" in fptr.read() ent = ui.library("lib").entity("tb_same_sim_all_pass") @@ -67,6 +67,11 @@ def configure_tb_set_generic(ui): tb.set_generic("str_val", "4ns") tb.set_generic("str_space_val", "1 2 3") tb.set_generic("str_quote_val", 'a"b') + str_long_num = 512 + tb.set_generic("str_long_num", str_long_num) + tb.set_generic( + "str_long_val", "".join(["0123456789abcdef" for x in range(str_long_num)]) + ) def configure_tb_assert_stop_level(ui): @@ -88,6 +93,6 @@ def configure_tb_assert_stop_level(ui): LIB.entity("tb_no_generic_override").set_generic("g_val", False) LIB.entity("tb_ieee_warning").test("pass").set_sim_option("disable_ieee_warnings", True) LIB.entity("tb_other_file_tests").scan_tests_from_file( - join(root, "other_file_tests.vhd") + str(ROOT / "other_file_tests.vhd") ) VU.main() diff --git a/tests/acceptance/artificial/vhdl/tb_set_generic.vhd b/tests/acceptance/artificial/vhdl/tb_set_generic.vhd index 4173973f8..902407451 100644 --- a/tests/acceptance/artificial/vhdl/tb_set_generic.vhd +++ b/tests/acceptance/artificial/vhdl/tb_set_generic.vhd @@ -9,37 +9,48 @@ context vunit_lib.vunit_context; entity tb_set_generic is generic ( - runner_cfg : string; - is_ghdl : boolean; - true_boolean : boolean; - false_boolean : boolean; + runner_cfg 
: string; + is_ghdl : boolean; + true_boolean : boolean; + false_boolean : boolean; negative_integer : integer; positive_integer : integer; - negative_real : real := 0.0; - positive_real : real := 0.0; - time_val : time := 0 ns; - str_val : string; - str_space_val : string; - str_quote_val : string); + negative_real : real := 0.0; + positive_real : real := 0.0; + time_val : time := 0 ns; + str_val : string; + str_space_val : string; + str_quote_val : string; + str_long_num : integer := 64; + str_long_val : string); end entity; architecture tb of tb_set_generic is + impure function str_long(num: natural) return string is + variable str: string(1 to 16*num); + begin + for x in 1 to num loop + str((x-1)*16+1 to x*16) := "0123456789abcdef"; + end loop; + return str; + end; begin main : process begin test_runner_setup(runner, runner_cfg); - assert true_boolean = true; - assert false_boolean = false; + assert true_boolean = true; + assert false_boolean = false; assert negative_integer = -10000; assert positive_integer = 99999; if not is_ghdl then assert negative_real = -9999.9; assert positive_real = 2222.2; - assert time_val = 4 ns; + assert time_val = 4 ns; end if; - assert str_val = "4ns"; + assert str_val = "4ns"; assert str_space_val = "1 2 3"; assert str_quote_val = "a""b"; + assert str_long_val = str_long(str_long_num); test_runner_cleanup(runner); end process; end architecture; diff --git a/tests/acceptance/test_artificial.py b/tests/acceptance/test_artificial.py index bc30cc0cf..b6917f1f8 100644 --- a/tests/acceptance/test_artificial.py +++ b/tests/acceptance/test_artificial.py @@ -9,13 +9,15 @@ """ import unittest -from os.path import join, dirname +from pathlib import Path from os import environ from subprocess import call import sys from tests.common import check_report from vunit.sim_if.common import has_simulator, simulator_is +ROOT = Path(__file__).parent + @unittest.skipUnless(has_simulator(), "Requires simulator") class TestVunitArtificial(unittest.TestCase): @@ -26,18 +28,14 @@ class TestVunitArtificial(unittest.TestCase): def setUp(self): if simulator_is("activehdl"): - self.output_path = join(dirname(__file__), "artificial_out") + self.output_path = str(ROOT / "artificial_out") else: # Spaces in path intentional to verify that it is supported - self.output_path = join(dirname(__file__), "artificial _out") + self.output_path = str(ROOT / "artificial _out") - self.report_file = join(self.output_path, "xunit.xml") - self.artificial_run_vhdl = join( - dirname(__file__), "artificial", "vhdl", "run.py" - ) - self.artificial_run_verilog = join( - dirname(__file__), "artificial", "verilog", "run.py" - ) + self.report_file = str(Path(self.output_path) / "xunit.xml") + self.artificial_run_vhdl = str(ROOT / "artificial" / "vhdl" / "run.py") + self.artificial_run_verilog = str(ROOT / "artificial" / "verilog" / "run.py") @unittest.skipUnless( simulator_is("modelsim", "rivierapro"), diff --git a/tests/acceptance/test_dependencies.py b/tests/acceptance/test_dependencies.py index 7099a5d9c..4a89ab8b6 100644 --- a/tests/acceptance/test_dependencies.py +++ b/tests/acceptance/test_dependencies.py @@ -11,9 +11,11 @@ import unittest -from os.path import join, dirname +from pathlib import Path from vunit import VUnit +ROOT = Path(__file__).parent + class TestDependencies(unittest.TestCase): """ @@ -21,8 +23,8 @@ class TestDependencies(unittest.TestCase): """ def setUp(self): - self.data_path = join(dirname(__file__), "dependencies") - self.output_path = join(dirname(__file__), 
"dependencies_vunit_out") + self.data_path = str(ROOT / "dependencies") + self.output_path = str(ROOT / "dependencies_vunit_out") def test_package_body_dependencies(self): """ @@ -36,9 +38,11 @@ def run(value): Utility function to first run with pkg_body1 then pkg_body2 """ - tb_pkg_file_name = join(self.data_path, "tb_pkg.vhd") - pkg_file_name = join(self.data_path, "pkg.vhd") - pkg_body_file_name = join(self.data_path, "pkg_body%i.vhd" % value) + dpath = Path(self.data_path) + + tb_pkg_file_name = str(dpath / "tb_pkg.vhd") + pkg_file_name = str(dpath / "pkg.vhd") + pkg_body_file_name = str(dpath / ("pkg_body%i.vhd" % value)) argv = ["--output-path=%s" % self.output_path, "-v"] if value == 1: diff --git a/tests/acceptance/test_external_run_scripts.py b/tests/acceptance/test_external_run_scripts.py index 79f0a22ea..12e07d3b7 100644 --- a/tests/acceptance/test_external_run_scripts.py +++ b/tests/acceptance/test_external_run_scripts.py @@ -9,15 +9,17 @@ """ import unittest +from pathlib import Path from os import environ -from os.path import join, dirname from subprocess import call import sys from tests.common import check_report -from vunit import ROOT +from vunit import ROOT as RSTR from vunit.builtins import VHDL_PATH from vunit.sim_if.common import has_simulator, simulator_is, simulator_check +ROOT = Path(RSTR) + def simulator_supports_verilog(): """ @@ -34,15 +36,15 @@ class TestExternalRunScripts(unittest.TestCase): """ def test_vhdl_uart_example_project(self): - self.check(join(ROOT, "examples", "vhdl", "uart", "run.py")) + self.check(str(ROOT / "examples" / "vhdl" / "uart" / "run.py")) @unittest.skipUnless(simulator_supports_verilog(), "Verilog") def test_verilog_uart_example_project(self): - self.check(join(ROOT, "examples", "verilog", "uart", "run.py")) + self.check(str(ROOT / "examples" / "verilog" / "uart" / "run.py")) @unittest.skipUnless(simulator_supports_verilog(), "Verilog") def test_verilog_ams_example(self): - self.check(join(ROOT, "examples", "verilog", "verilog_ams", "run.py")) + self.check(str(ROOT / "examples" / "verilog" / "verilog_ams" / "run.py")) check_report( self.report_file, [ @@ -52,10 +54,10 @@ def test_verilog_ams_example(self): ) def test_vhdl_logging_example_project(self): - self.check(join(ROOT, "examples", "vhdl", "logging", "run.py")) + self.check(str(ROOT / "examples" / "vhdl" / "logging" / "run.py")) def test_vhdl_run_example_project(self): - self.check(join(ROOT, "examples", "vhdl", "run", "run.py"), exit_code=1) + self.check(str(ROOT / "examples" / "vhdl" / "run" / "run.py"), exit_code=1) check_report( self.report_file, [ @@ -98,7 +100,7 @@ def test_vhdl_run_example_project(self): def test_vhdl_third_party_integration_example_project(self): self.check( - join(ROOT, "examples", "vhdl", "third_party_integration", "run.py"), + str(ROOT / "examples" / "vhdl" / "third_party_integration" / "run.py"), exit_code=1, ) check_report( @@ -117,10 +119,17 @@ def test_vhdl_third_party_integration_example_project(self): ) def test_vhdl_check_example_project(self): - self.check(join(ROOT, "examples", "vhdl", "check", "run.py")) + self.check(str(ROOT / "examples" / "vhdl" / "check" / "run.py")) + + @unittest.skipIf( + simulator_check(lambda simclass: not simclass.supports_coverage()), + "This simulator/backend does not support coverage", + ) + def test_vhdl_coverage_example_project(self): + self.check(str(ROOT / "examples" / "vhdl" / "coverage" / "run.py")) def test_vhdl_generate_tests_example_project(self): - self.check(join(ROOT, "examples", "vhdl", 
"generate_tests", "run.py")) + self.check(str(ROOT / "examples" / "vhdl" / "generate_tests" / "run.py")) check_report( self.report_file, [ @@ -137,7 +146,7 @@ def test_vhdl_generate_tests_example_project(self): ) def test_vhdl_composite_generics_example_project(self): - self.check(join(ROOT, "examples", "vhdl", "composite_generics", "run.py")) + self.check(str(ROOT / "examples" / "vhdl" / "composite_generics" / "run.py")) check_report( self.report_file, [ @@ -150,27 +159,21 @@ def test_vhdl_composite_generics_example_project(self): simulator_is("ghdl"), "Support complex JSON strings as generic" ) def test_vhdl_json4vhdl_example_project(self): - self.check(join(ROOT, "examples", "vhdl", "json4vhdl", "run.py")) + self.check(str(ROOT / "examples" / "vhdl" / "json4vhdl" / "run.py")) def test_vhdl_array_example_project(self): - self.check(join(ROOT, "examples", "vhdl", "array", "run.py")) + self.check(str(ROOT / "examples" / "vhdl" / "array" / "run.py")) def test_vhdl_array_axis_vcs_example_project(self): - self.check(join(ROOT, "examples", "vhdl", "array_axis_vcs", "run.py")) + self.check(str(ROOT / "examples" / "vhdl" / "array_axis_vcs" / "run.py")) def test_vhdl_axi_dma_example_project(self): - self.check(join(ROOT, "examples", "vhdl", "axi_dma", "run.py")) - - @unittest.skipIf( - simulator_check(lambda simclass: not simclass.supports_vhpi()), - "This simulator/backend does not support interfacing with external C code", - ) - def test_vhdl_external_buffer_project(self): - self.check(join(ROOT, "examples", "vhdl", "external_buffer", "run.py")) - self.check(join(ROOT, "examples", "vhdl", "external_buffer", "cp.py")) + self.check(str(ROOT / "examples" / "vhdl" / "axi_dma" / "run.py")) def test_vhdl_user_guide_example_project(self): - self.check(join(ROOT, "examples", "vhdl", "user_guide", "run.py"), exit_code=1) + self.check( + str(ROOT / "examples" / "vhdl" / "user_guide" / "run.py"), exit_code=1 + ) check_report( self.report_file, [ @@ -183,7 +186,7 @@ def test_vhdl_user_guide_example_project(self): @unittest.skipUnless(simulator_supports_verilog(), "Verilog") def test_verilog_user_guide_example_project(self): self.check( - join(ROOT, "examples", "verilog", "user_guide", "run.py"), exit_code=1 + str(ROOT / "examples" / "verilog" / "user_guide" / "run.py"), exit_code=1 ) check_report( self.report_file, @@ -202,85 +205,85 @@ def test_verilog_user_guide_example_project(self): ) def test_vhdl_com_example_project(self): - self.check(join(ROOT, "examples", "vhdl", "com", "run.py")) + self.check(str(ROOT / "examples" / "vhdl" / "com" / "run.py")) def test_array_vhdl_2008(self): - self.check(join(VHDL_PATH, "array", "run.py")) + self.check(str(VHDL_PATH / "array" / "run.py")) def test_data_types_vhdl_2008(self): - self.check(join(VHDL_PATH, "data_types", "run.py")) + self.check(str(VHDL_PATH / "data_types" / "run.py")) def test_data_types_vhdl_2002(self): - self.check(join(VHDL_PATH, "data_types", "run.py"), vhdl_standard="2002") + self.check(str(VHDL_PATH / "data_types" / "run.py"), vhdl_standard="2002") def test_data_types_vhdl_93(self): - self.check(join(VHDL_PATH, "data_types", "run.py"), vhdl_standard="93") + self.check(str(VHDL_PATH / "data_types" / "run.py"), vhdl_standard="93") def test_random_vhdl_2008(self): - self.check(join(VHDL_PATH, "random", "run.py")) + self.check(str(VHDL_PATH / "random" / "run.py")) def test_check_vhdl_2008(self): - self.check(join(VHDL_PATH, "check", "run.py")) + self.check(str(VHDL_PATH / "check" / "run.py")) def test_check_vhdl_2002(self): - 
self.check(join(VHDL_PATH, "check", "run.py"), vhdl_standard="2002") + self.check(str(VHDL_PATH / "check" / "run.py"), vhdl_standard="2002") def test_check_vhdl_93(self): - self.check(join(VHDL_PATH, "check", "run.py"), vhdl_standard="93") + self.check(str(VHDL_PATH / "check" / "run.py"), vhdl_standard="93") def test_logging_vhdl_2008(self): - self.check(join(VHDL_PATH, "logging", "run.py")) + self.check(str(VHDL_PATH / "logging" / "run.py")) def test_logging_vhdl_2002(self): - self.check(join(VHDL_PATH, "logging", "run.py"), vhdl_standard="2002") + self.check(str(VHDL_PATH / "logging" / "run.py"), vhdl_standard="2002") def test_logging_vhdl_93(self): - self.check(join(VHDL_PATH, "logging", "run.py"), vhdl_standard="93") + self.check(str(VHDL_PATH / "logging" / "run.py"), vhdl_standard="93") def test_run_vhdl_2008(self): - self.check(join(VHDL_PATH, "run", "run.py")) + self.check(str(VHDL_PATH / "run" / "run.py")) def test_run_vhdl_2002(self): - self.check(join(VHDL_PATH, "run", "run.py"), vhdl_standard="2002") + self.check(str(VHDL_PATH / "run" / "run.py"), vhdl_standard="2002") def test_run_vhdl_93(self): - self.check(join(VHDL_PATH, "run", "run.py"), vhdl_standard="93") + self.check(str(VHDL_PATH / "run" / "run.py"), vhdl_standard="93") def test_string_ops_vhdl_2008(self): - self.check(join(VHDL_PATH, "string_ops", "run.py")) + self.check(str(VHDL_PATH / "string_ops" / "run.py")) def test_string_ops_vhdl_2002(self): - self.check(join(VHDL_PATH, "string_ops", "run.py"), vhdl_standard="2002") + self.check(str(VHDL_PATH / "string_ops" / "run.py"), vhdl_standard="2002") def test_string_ops_vhdl_93(self): - self.check(join(VHDL_PATH, "string_ops", "run.py"), vhdl_standard="93") + self.check(str(VHDL_PATH / "string_ops" / "run.py"), vhdl_standard="93") def test_dictionary_vhdl_2008(self): - self.check(join(VHDL_PATH, "dictionary", "run.py")) + self.check(str(VHDL_PATH / "dictionary" / "run.py")) def test_dictionary_vhdl_2002(self): - self.check(join(VHDL_PATH, "dictionary", "run.py"), vhdl_standard="2002") + self.check(str(VHDL_PATH / "dictionary" / "run.py"), vhdl_standard="2002") def test_dictionary_vhdl_93(self): - self.check(join(VHDL_PATH, "dictionary", "run.py"), vhdl_standard="93") + self.check(str(VHDL_PATH / "dictionary" / "run.py"), vhdl_standard="93") def test_path_vhdl_2008(self): - self.check(join(VHDL_PATH, "path", "run.py")) + self.check(str(VHDL_PATH / "path" / "run.py")) def test_path_vhdl_2002(self): - self.check(join(VHDL_PATH, "path", "run.py"), vhdl_standard="2002") + self.check(str(VHDL_PATH / "path" / "run.py"), vhdl_standard="2002") def test_path_vhdl_93(self): - self.check(join(VHDL_PATH, "path", "run.py"), vhdl_standard="93") + self.check(str(VHDL_PATH / "path" / "run.py"), vhdl_standard="93") def test_com_vhdl_2008(self): - self.check(join(VHDL_PATH, "com", "run.py")) + self.check(str(VHDL_PATH / "com" / "run.py")) def setUp(self): - self.output_path = join(dirname(__file__), "external_run_out") - self.report_file = join(self.output_path, "xunit.xml") + self.output_path = str(Path(__file__).parent / "external_run_out") + self.report_file = str(Path(self.output_path) / "xunit.xml") - def check(self, run_file, args=None, vhdl_standard="2008", exit_code=0): + def check(self, run_file: Path, args=None, vhdl_standard="2008", exit_code=0): """ Run external run file and verify exit code """ @@ -290,7 +293,7 @@ def check(self, run_file, args=None, vhdl_standard="2008", exit_code=0): retcode = call( [ sys.executable, - run_file, + str(run_file), "--clean", "--output-path=%s" 
% self.output_path, "--xunit-xml=%s" % self.report_file, diff --git a/tests/lint/test_license.py b/tests/lint/test_license.py index 33afcc374..548d965f0 100644 --- a/tests/lint/test_license.py +++ b/tests/lint/test_license.py @@ -10,14 +10,17 @@ import unittest from warnings import simplefilter, catch_warnings -from os.path import join, splitext, abspath, commonprefix +from pathlib import Path +from os.path import commonprefix from os import walk import re -from vunit import ROOT +from vunit import ROOT as RSTR from vunit.builtins import VHDL_PATH from vunit import ostools from vunit.about import license_text +ROOT = Path(RSTR) + RE_LICENSE_NOTICE = re.compile( r"(?P#|--|//) This Source Code Form is subject to the terms of the Mozilla Public" + "\n" @@ -48,13 +51,13 @@ def test_that_a_valid_license_exists_in_source_files_and_that_global_licensing_i for file_name in find_licensed_files(): code = ostools.read_file(file_name) self._check_license(code, file_name) - if splitext(file_name)[1] in (".vhd", ".vhdl", ".v", ".sv"): + if Path(file_name).suffix in (".vhd", ".vhdl", ".v", ".sv"): self._check_no_trailing_whitespace(code, file_name) def test_that_license_file_matches_vunit_license_text(self): with catch_warnings(): simplefilter("ignore", category=DeprecationWarning) - with open(join(ROOT, "LICENSE.txt"), "rU") as lic: + with (ROOT / "LICENSE.rst").open("rU") as lic: self.assertEqual(lic.read(), license_text()) def _check_license(self, code, file_name): @@ -128,34 +131,38 @@ def find_licensed_files(): Return all licensed files """ licensed_files = [] - osvvm_directory = abspath(join(VHDL_PATH, "osvvm")) - json4vhdl_directory = abspath(join(VHDL_PATH, "JSON-for-VHDL")) - for root, _, files in walk(ROOT): + for root, _, files in walk(RSTR): for file_name in files: if "preprocessed" in root: continue if "codecs" in root: continue - if root == join(ROOT, "docs"): + if root == str(ROOT / "docs"): continue - if join(ROOT, "venv") in root: + if str(ROOT / "venv") in root: continue - if join(ROOT, ".tox") in root: + if str(ROOT / ".tox") in root: continue - if is_prefix_of(osvvm_directory, abspath(join(root, file_name))): + if is_prefix_of( + (VHDL_PATH / "osvvm").resolve(), + (Path(root) / file_name).resolve(), + ): continue - if is_prefix_of(json4vhdl_directory, abspath(join(root, file_name))): + if is_prefix_of( + (VHDL_PATH / "JSON-for-VHDL").resolve(), + (Path(root) / file_name).resolve(), + ): continue - if splitext(file_name)[1] in (".vhd", ".vhdl", ".py", ".v", ".sv"): - licensed_files.append(join(root, file_name)) + if Path(file_name).suffix in (".vhd", ".vhdl", ".py", ".v", ".sv"): + licensed_files.append(str(Path(root) / file_name)) return licensed_files -def is_prefix_of(prefix, of_path): +def is_prefix_of(prefix: Path, of_path: Path): """ Return True if 'prefix' is a prefix of 'of_path' """ - return commonprefix([prefix, of_path]) == prefix + return commonprefix([str(prefix), str(of_path)]) == str(prefix) def main(): diff --git a/tests/lint/test_mypy.py b/tests/lint/test_mypy.py index a64b22f28..d98cf2f51 100644 --- a/tests/lint/test_mypy.py +++ b/tests/lint/test_mypy.py @@ -19,5 +19,5 @@ class TestMyPy(unittest.TestCase): """ @staticmethod - def test_pycodestyle(): + def test_mypy(): check_call([sys.executable, "-m", "mypy", "vunit"]) diff --git a/tests/lint/test_pycodestyle.py b/tests/lint/test_pycodestyle.py index b730c41ed..9e48feeaf 100644 --- a/tests/lint/test_pycodestyle.py +++ b/tests/lint/test_pycodestyle.py @@ -12,8 +12,10 @@ from subprocess import check_call import sys from 
glob import glob -from os.path import join -from vunit import ROOT +from pathlib import Path +from vunit import ROOT as RSTR + +ROOT = Path(RSTR) class TestPycodestyle(unittest.TestCase): @@ -43,7 +45,7 @@ def get_files_and_folders(): """ Return all files and folders which shall be arguments to pycodestyle and pylint """ - ret = [join(ROOT, "vunit")] - ret += list(glob(join(ROOT, "*.py"))) - ret += list(glob(join(ROOT, "tools", "*.py"))) + ret = [str(ROOT / "vunit")] + ret += list(glob(str(ROOT / "*.py"))) + ret += list(glob(str(ROOT / "tools" / "*.py"))) return ret diff --git a/tests/lint/test_pylint.py b/tests/lint/test_pylint.py index b9ccdc02d..d71ce3054 100644 --- a/tests/lint/test_pylint.py +++ b/tests/lint/test_pylint.py @@ -11,7 +11,7 @@ import unittest from subprocess import check_call -from os.path import join, dirname +from pathlib import Path import sys from tests.lint.test_pycodestyle import get_files_and_folders @@ -28,7 +28,7 @@ def test_pylint(): sys.executable, "-m", "pylint", - "--rcfile=" + join(dirname(__file__), "pylintrc"), + "--rcfile=" + str(Path(__file__).parent / "pylintrc"), ] + get_files_and_folders() ) diff --git a/tests/lint/test_readme.py b/tests/lint/test_readme.py deleted file mode 100644 index a7b5e2f52..000000000 --- a/tests/lint/test_readme.py +++ /dev/null @@ -1,27 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this file, -# You can obtain one at http://mozilla.org/MPL/2.0/. -# -# Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com - -""" -Check that README.rst matches VUnit docstring -""" - -import unittest -from warnings import simplefilter, catch_warnings -from os.path import join -from vunit import ROOT -from vunit.about import doc - - -class TestReadMe(unittest.TestCase): - """ - Check that README.rst matches VUnit docstring - """ - - def test_that_readme_file_matches_vunit_docstring(self): - with catch_warnings(): - simplefilter("ignore", category=DeprecationWarning) - with open(join(ROOT, "README.rst"), "rU") as readme: - self.assertEqual(readme.read(), doc()) diff --git a/tests/unit/test_activehdl_interface.py b/tests/unit/test_activehdl_interface.py index 0848df920..d01952fc2 100644 --- a/tests/unit/test_activehdl_interface.py +++ b/tests/unit/test_activehdl_interface.py @@ -10,7 +10,7 @@ import unittest -from os.path import join, dirname, exists +from pathlib import Path import os from shutil import rmtree from unittest import mock @@ -58,18 +58,18 @@ def test_compile_project_vhdl_2008(self, process, check_output): ) simif.compile_project(project) process.assert_any_call( - [join("prefix", "vlib"), "lib", "lib_path"], + [str(Path("prefix") / "vlib"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) process.assert_called_with( - [join("prefix", "vmap"), "lib", "lib_path"], + [str(Path("prefix") / "vmap"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) check_output.assert_called_once_with( [ - join("prefix", "vcom"), + str(Path("prefix") / "vcom"), "-quiet", "-j", self.output_path, @@ -93,18 +93,18 @@ def test_compile_project_vhdl_2002(self, process, check_output): ) simif.compile_project(project) process.assert_any_call( - [join("prefix", "vlib"), "lib", "lib_path"], + [str(Path("prefix") / "vlib"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) process.assert_called_with( - [join("prefix", "vmap"), "lib", "lib_path"], + [str(Path("prefix") / "vmap"), "lib", "lib_path"], 
cwd=self.output_path, env=simif.get_env(), ) check_output.assert_called_once_with( [ - join("prefix", "vcom"), + str(Path("prefix") / "vcom"), "-quiet", "-j", self.output_path, @@ -128,18 +128,18 @@ def test_compile_project_vhdl_93(self, process, check_output): ) simif.compile_project(project) process.assert_any_call( - [join("prefix", "vlib"), "lib", "lib_path"], + [str(Path("prefix") / "vlib"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) process.assert_called_with( - [join("prefix", "vmap"), "lib", "lib_path"], + [str(Path("prefix") / "vmap"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) check_output.assert_called_once_with( [ - join("prefix", "vcom"), + str(Path("prefix") / "vcom"), "-quiet", "-j", self.output_path, @@ -162,18 +162,18 @@ def test_compile_project_vhdl_extra_flags(self, process, check_output): source_file.set_compile_option("activehdl.vcom_flags", ["custom", "flags"]) simif.compile_project(project) process.assert_any_call( - [join("prefix", "vlib"), "lib", "lib_path"], + [str(Path("prefix") / "vlib"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) process.assert_called_with( - [join("prefix", "vmap"), "lib", "lib_path"], + [str(Path("prefix") / "vmap"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) check_output.assert_called_once_with( [ - join("prefix", "vcom"), + str(Path("prefix") / "vcom"), "-quiet", "-j", self.output_path, @@ -190,7 +190,7 @@ def test_compile_project_vhdl_extra_flags(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.activehdl.Process", autospec=True) def test_compile_project_verilog(self, process, check_output): - library_cfg = join(self.output_path, "library.cfg") + library_cfg = str(Path(self.output_path) / "library.cfg") simif = ActiveHDLInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") @@ -198,18 +198,18 @@ def test_compile_project_verilog(self, process, check_output): project.add_source_file("file.v", "lib", file_type="verilog") simif.compile_project(project) process.assert_any_call( - [join("prefix", "vlib"), "lib", "lib_path"], + [str(Path("prefix") / "vlib"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) process.assert_called_with( - [join("prefix", "vmap"), "lib", "lib_path"], + [str(Path("prefix") / "vmap"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) check_output.assert_called_once_with( [ - join("prefix", "vlog"), + str(Path("prefix") / "vlog"), "-quiet", "-lc", library_cfg, @@ -225,7 +225,7 @@ def test_compile_project_verilog(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.activehdl.Process", autospec=True) def test_compile_project_system_verilog(self, process, check_output): - library_cfg = join(self.output_path, "library.cfg") + library_cfg = str(Path(self.output_path) / "library.cfg") simif = ActiveHDLInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") @@ -233,18 +233,18 @@ def test_compile_project_system_verilog(self, process, check_output): project.add_source_file("file.sv", "lib", file_type="systemverilog") simif.compile_project(project) process.assert_any_call( - [join("prefix", "vlib"), "lib", "lib_path"], + [str(Path("prefix") / "vlib"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) process.assert_called_with( 
- [join("prefix", "vmap"), "lib", "lib_path"], + [str(Path("prefix") / "vmap"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) check_output.assert_called_once_with( [ - join("prefix", "vlog"), + str(Path("prefix") / "vlog"), "-quiet", "-lc", library_cfg, @@ -260,7 +260,7 @@ def test_compile_project_system_verilog(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.activehdl.Process", autospec=True) def test_compile_project_verilog_extra_flags(self, process, check_output): - library_cfg = join(self.output_path, "library.cfg") + library_cfg = str(Path(self.output_path) / "library.cfg") simif = ActiveHDLInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") @@ -269,18 +269,18 @@ def test_compile_project_verilog_extra_flags(self, process, check_output): source_file.set_compile_option("activehdl.vlog_flags", ["custom", "flags"]) simif.compile_project(project) process.assert_any_call( - [join("prefix", "vlib"), "lib", "lib_path"], + [str(Path("prefix") / "vlib"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) process.assert_called_with( - [join("prefix", "vmap"), "lib", "lib_path"], + [str(Path("prefix") / "vmap"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) check_output.assert_called_once_with( [ - join("prefix", "vlog"), + str(Path("prefix") / "vlog"), "-quiet", "-lc", library_cfg, @@ -298,7 +298,7 @@ def test_compile_project_verilog_extra_flags(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.activehdl.Process", autospec=True) def test_compile_project_verilog_include(self, process, check_output): - library_cfg = join(self.output_path, "library.cfg") + library_cfg = str(Path(self.output_path) / "library.cfg") simif = ActiveHDLInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") @@ -308,18 +308,18 @@ def test_compile_project_verilog_include(self, process, check_output): ) simif.compile_project(project) process.assert_any_call( - [join("prefix", "vlib"), "lib", "lib_path"], + [str(Path("prefix") / "vlib"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) process.assert_called_with( - [join("prefix", "vmap"), "lib", "lib_path"], + [str(Path("prefix") / "vmap"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) check_output.assert_called_once_with( [ - join("prefix", "vlog"), + str(Path("prefix") / "vlog"), "-quiet", "-lc", library_cfg, @@ -336,7 +336,7 @@ def test_compile_project_verilog_include(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.activehdl.Process", autospec=True) def test_compile_project_verilog_define(self, process, check_output): - library_cfg = join(self.output_path, "library.cfg") + library_cfg = str(Path(self.output_path) / "library.cfg") simif = ActiveHDLInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") @@ -346,18 +346,18 @@ def test_compile_project_verilog_define(self, process, check_output): ) simif.compile_project(project) process.assert_any_call( - [join("prefix", "vlib"), "lib", "lib_path"], + [str(Path("prefix") / "vlib"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) process.assert_called_with( - [join("prefix", "vmap"), "lib", "lib_path"], + 
[str(Path("prefix") / "vmap"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) check_output.assert_called_once_with( [ - join("prefix", "vlog"), + str(Path("prefix") / "vlog"), "-quiet", "-lc", library_cfg, @@ -390,7 +390,7 @@ def test_supports_vhdl_package_generics_false(self, find_prefix): self.assertFalse(simif.supports_vhdl_package_generics()) def setUp(self): - self.output_path = join(dirname(__file__), "test_activehdl_out") + self.output_path = str(Path(__file__).parent / "test_activehdl_out") renew_path(self.output_path) self.project = Project() self.cwd = os.getcwd() @@ -398,7 +398,7 @@ def setUp(self): def tearDown(self): os.chdir(self.cwd) - if exists(self.output_path): + if Path(self.output_path).exists(): rmtree(self.output_path) diff --git a/tests/unit/test_configuration.py b/tests/unit/test_configuration.py index da56de9cc..aa6b598ea 100644 --- a/tests/unit/test_configuration.py +++ b/tests/unit/test_configuration.py @@ -13,7 +13,7 @@ import unittest import contextlib -from os.path import join +from pathlib import Path from unittest import mock from tests.common import with_tempdir, create_tempdir from tests.unit.test_test_bench import Entity @@ -61,10 +61,12 @@ def test_does_not_add_tb_path_generic(self): @with_tempdir def test_adds_tb_path_generic(self, tempdir): design_unit_tb_path = Entity( - "tb_entity_without_tb_path", file_name=join(tempdir, "file.vhd") + "tb_entity_without_tb_path", file_name=str(Path(tempdir) / "file.vhd") + ) + tb_path = str(Path(tempdir) / "other_path") + design_unit_tb_path.original_file_name = str( + Path(tb_path) / "original_file.vhd" ) - tb_path = join(tempdir, "other_path") - design_unit_tb_path.original_file_name = join(tb_path, "original_file.vhd") design_unit_tb_path.generic_names = ["runner_cfg", "tb_path"] config_tb_path = Configuration("name", design_unit_tb_path) self.assertEqual( @@ -298,7 +300,7 @@ def _create_config(**kwargs): Helper function to create a config """ with create_tempdir() as tempdir: - design_unit = Entity("tb_entity", file_name=join(tempdir, "file.vhd")) + design_unit = Entity("tb_entity", file_name=str(Path(tempdir) / "file.vhd")) design_unit.generic_names = ["runner_cfg"] yield Configuration("name", design_unit, **kwargs) diff --git a/tests/unit/test_csv_logs.py b/tests/unit/test_csv_logs.py index 34fd1c9fb..cca5bd5a7 100644 --- a/tests/unit/test_csv_logs.py +++ b/tests/unit/test_csv_logs.py @@ -11,7 +11,7 @@ import unittest from shutil import rmtree from os import remove -from os.path import join +from pathlib import Path from tempfile import NamedTemporaryFile, mkdtemp from vunit.csv_logs import CsvLogs @@ -25,8 +25,8 @@ def setUp(self): self._log_files = [] self._all_fields_dir = mkdtemp() self._few_fields_dir = mkdtemp() - self._all_fields_files = join(self._all_fields_dir, "*.csv") - self._few_fields_files = join(self._few_fields_dir, "*.csv") + self._all_fields_files = str(Path(self._all_fields_dir) / "*.csv") + self._few_fields_files = str(Path(self._few_fields_dir) / "*.csv") def make_log(directory, contents): """ diff --git a/tests/unit/test_database.py b/tests/unit/test_database.py index f01bd47ae..f07104bd6 100644 --- a/tests/unit/test_database.py +++ b/tests/unit/test_database.py @@ -9,7 +9,7 @@ """ import unittest -from os.path import join +from pathlib import Path from tests.common import with_tempdir from vunit.database import DataBase, PickledDataBase @@ -26,7 +26,7 @@ class TestDataBase(unittest.TestCase): @staticmethod def create_database(tempdir, new=False): - return 
DataBase(join(tempdir, "database"), new=new) + return DataBase(str(Path(tempdir) / "database"), new=new) def _test_add_items(self, tempdir): """ diff --git a/tests/unit/test_ghdl_interface.py b/tests/unit/test_ghdl_interface.py index af8f41c55..2698f4d2d 100644 --- a/tests/unit/test_ghdl_interface.py +++ b/tests/unit/test_ghdl_interface.py @@ -9,7 +9,7 @@ """ import unittest -from os.path import join, dirname, exists +from pathlib import Path import os from shutil import rmtree from unittest import mock @@ -120,7 +120,7 @@ def test_compile_project_2008(self, check_output): simif.compile_project(project) check_output.assert_called_once_with( [ - join("prefix", "ghdl"), + str(Path("prefix") / "ghdl"), "-a", "--workdir=lib_path", "--work=lib", @@ -146,7 +146,7 @@ def test_compile_project_2002(self, check_output): simif.compile_project(project) check_output.assert_called_once_with( [ - join("prefix", "ghdl"), + str(Path("prefix") / "ghdl"), "-a", "--workdir=lib_path", "--work=lib", @@ -172,7 +172,7 @@ def test_compile_project_93(self, check_output): simif.compile_project(project) check_output.assert_called_once_with( [ - join("prefix", "ghdl"), + str(Path("prefix") / "ghdl"), "-a", "--workdir=lib_path", "--work=lib", @@ -197,7 +197,7 @@ def test_compile_project_extra_flags(self, check_output): simif.compile_project(project) check_output.assert_called_once_with( [ - join("prefix", "ghdl"), + str(Path("prefix") / "ghdl"), "-a", "--workdir=lib_path", "--work=lib", @@ -211,9 +211,9 @@ def test_compile_project_extra_flags(self, check_output): ) def test_elaborate_e_project(self): - design_unit = Entity("tb_entity", file_name=join("tempdir", "file.vhd")) - design_unit.original_file_name = join( - "tempdir", "other_path", "original_file.vhd" + design_unit = Entity("tb_entity", file_name=str(Path("tempdir") / "file.vhd")) + design_unit.original_file_name = str( + Path("tempdir") / "other_path" / "original_file.vhd" ) design_unit.generic_names = ["runner_cfg", "tb_path"] @@ -228,17 +228,17 @@ def test_elaborate_e_project(self): self.assertEqual( simif._get_command( # pylint: disable=protected-access - config, join("output_path", "ghdl"), True + config, str(Path("output_path") / "ghdl"), True, True, None ), [ - join("prefix", "ghdl"), + str(Path("prefix") / "ghdl"), "-e", "--std=08", "--work=lib", "--workdir=lib_path", "-Plib_path", "-o", - join("output_path", "ghdl", "tb_entity-arch"), + str(Path("output_path") / "ghdl" / "tb_entity-arch"), "tb_entity", "arch", ], @@ -254,7 +254,7 @@ def test_compile_project_verilog_error(self): self.assertRaises(CompileError, simif.compile_project, project) def setUp(self): - self.output_path = join(dirname(__file__), "test_ghdl_interface_out") + self.output_path = str(Path(__file__).parent / "test_ghdl_interface_out") renew_path(self.output_path) self.project = Project() self.cwd = os.getcwd() @@ -262,5 +262,5 @@ def setUp(self): def tearDown(self): os.chdir(self.cwd) - if exists(self.output_path): + if Path(self.output_path).exists(): rmtree(self.output_path) diff --git a/tests/unit/test_incisive_interface.py b/tests/unit/test_incisive_interface.py index c584f991a..7f6829c10 100644 --- a/tests/unit/test_incisive_interface.py +++ b/tests/unit/test_incisive_interface.py @@ -12,7 +12,7 @@ import unittest -from os.path import join, dirname, exists, basename +from pathlib import Path import os from shutil import rmtree from unittest import mock @@ -44,9 +44,9 @@ def test_compile_project_vhdl_2008( "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2008") ) 
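# ---------------------------------------------------------------------------
# Editor's note - illustrative sketch, not part of the patch: the expected
# command lines in these unit tests are rewritten from os.path.join(...) to
# str(Path(...) / ...). The rewrite is safe because both forms produce the
# same string for plain relative path segments on every platform. A small
# standalone self-check (the segment names are only examples taken from the
# nearby assertions, not values defined by this patch):

from os.path import join
from pathlib import Path

for parts in [("prefix", "vcom"), ("output_path", "ghdl", "tb_entity-arch")]:
    # str(Path(a) / b / ...) and os.path.join(a, b, ...) agree for these inputs
    assert str(Path(parts[0]).joinpath(*parts[1:])) == join(*parts)
# ---------------------------------------------------------------------------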
simif.compile_project(project) - args_file = join(self.output_path, "irun_compile_vhdl_file_lib.args") + args_file = str(Path(self.output_path) / "irun_compile_vhdl_file_lib.args") check_output.assert_called_once_with( - [join("prefix", "irun"), "-f", args_file], env=simif.get_env() + [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env() ) self.assertEqual( read_file(args_file).splitlines(), @@ -58,10 +58,11 @@ def test_compile_project_vhdl_2008( "-nowarn DLCVAR", "-v200x -extv200x", "-work work", - '-cdslib "%s"' % join(self.output_path, "cds.lib"), - '-log "%s"' % join(self.output_path, "irun_compile_vhdl_file_lib.log"), + '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), + '-log "%s"' + % str(Path(self.output_path) / "irun_compile_vhdl_file_lib.log"), "-quiet", - '-nclibdirname ""', + '-nclibdirname "."', "-makelib lib_path", '"file.vhd"', "-endlib", @@ -69,7 +70,7 @@ def test_compile_project_vhdl_2008( ) self.assertEqual( - read_file(join(self.output_path, "cds.lib")), + read_file(str(Path(self.output_path) / "cds.lib")), """\ ## cds.lib: Defines the locations of compiled libraries. softinclude cds_root_irun/tools/inca/files/cds.lib @@ -98,9 +99,9 @@ def test_compile_project_vhdl_2002( "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2002") ) simif.compile_project(project) - args_file = join(self.output_path, "irun_compile_vhdl_file_lib.args") + args_file = str(Path(self.output_path) / "irun_compile_vhdl_file_lib.args") check_output.assert_called_once_with( - [join("prefix", "irun"), "-f", args_file], env=simif.get_env() + [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env() ) self.assertEqual( read_file(args_file).splitlines(), @@ -112,10 +113,11 @@ def test_compile_project_vhdl_2002( "-nowarn DLCVAR", "-v200x -extv200x", "-work work", - '-cdslib "%s"' % join(self.output_path, "cds.lib"), - '-log "%s"' % join(self.output_path, "irun_compile_vhdl_file_lib.log"), + '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), + '-log "%s"' + % str(Path(self.output_path) / "irun_compile_vhdl_file_lib.log"), "-quiet", - '-nclibdirname ""', + '-nclibdirname "."', "-makelib lib_path", '"file.vhd"', "-endlib", @@ -138,9 +140,9 @@ def test_compile_project_vhdl_93( "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("93") ) simif.compile_project(project) - args_file = join(self.output_path, "irun_compile_vhdl_file_lib.args") + args_file = str(Path(self.output_path) / "irun_compile_vhdl_file_lib.args") check_output.assert_called_once_with( - [join("prefix", "irun"), "-f", args_file], env=simif.get_env() + [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env() ) self.assertEqual( read_file(args_file).splitlines(), @@ -152,10 +154,11 @@ def test_compile_project_vhdl_93( "-nowarn DLCVAR", "-v93", "-work work", - '-cdslib "%s"' % join(self.output_path, "cds.lib"), - '-log "%s"' % join(self.output_path, "irun_compile_vhdl_file_lib.log"), + '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), + '-log "%s"' + % str(Path(self.output_path) / "irun_compile_vhdl_file_lib.log"), "-quiet", - '-nclibdirname ""', + '-nclibdirname "."', "-makelib lib_path", '"file.vhd"', "-endlib", @@ -177,9 +180,9 @@ def test_compile_project_vhdl_extra_flags( source_file = project.add_source_file("file.vhd", "lib", file_type="vhdl") source_file.set_compile_option("incisive.irun_vhdl_flags", ["custom", "flags"]) simif.compile_project(project) - args_file = join(self.output_path, "irun_compile_vhdl_file_lib.args") + args_file = 
str(Path(self.output_path) / "irun_compile_vhdl_file_lib.args") check_output.assert_called_once_with( - [join("prefix", "irun"), "-f", args_file], env=simif.get_env() + [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env() ) self.assertEqual( read_file(args_file).splitlines(), @@ -191,12 +194,13 @@ def test_compile_project_vhdl_extra_flags( "-nowarn DLCVAR", "-v200x -extv200x", "-work work", - '-cdslib "%s"' % join(self.output_path, "cds.lib"), - '-log "%s"' % join(self.output_path, "irun_compile_vhdl_file_lib.log"), + '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), + '-log "%s"' + % str(Path(self.output_path) / "irun_compile_vhdl_file_lib.log"), "-quiet", "custom", "flags", - '-nclibdirname ""', + '-nclibdirname "."', "-makelib lib_path", '"file.vhd"', "-endlib", @@ -219,9 +223,9 @@ def test_compile_project_vhdl_hdlvar( write_file("file.vhd", "") project.add_source_file("file.vhd", "lib", file_type="vhdl") simif.compile_project(project) - args_file = join(self.output_path, "irun_compile_vhdl_file_lib.args") + args_file = str(Path(self.output_path) / "irun_compile_vhdl_file_lib.args") check_output.assert_called_once_with( - [join("prefix", "irun"), "-f", args_file], env=simif.get_env() + [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env() ) self.assertEqual( read_file(args_file).splitlines(), @@ -233,11 +237,12 @@ def test_compile_project_vhdl_hdlvar( "-nowarn DLCVAR", "-v200x -extv200x", "-work work", - '-cdslib "%s"' % join(self.output_path, "cds.lib"), + '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), '-hdlvar "custom_hdlvar"', - '-log "%s"' % join(self.output_path, "irun_compile_vhdl_file_lib.log"), + '-log "%s"' + % str(Path(self.output_path) / "irun_compile_vhdl_file_lib.log"), "-quiet", - '-nclibdirname ""', + '-nclibdirname "."', "-makelib lib_path", '"file.vhd"', "-endlib", @@ -258,9 +263,9 @@ def test_compile_project_verilog( write_file("file.v", "") project.add_source_file("file.v", "lib", file_type="verilog") simif.compile_project(project) - args_file = join(self.output_path, "irun_compile_verilog_file_lib.args") + args_file = str(Path(self.output_path) / "irun_compile_verilog_file_lib.args") check_output.assert_called_once_with( - [join("prefix", "irun"), "-f", args_file], env=simif.get_env() + [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env() ) self.assertEqual( read_file(args_file).splitlines(), @@ -272,12 +277,12 @@ def test_compile_project_verilog( "-nowarn DLCPTH", "-nowarn DLCVAR", "-work work", - '-cdslib "%s"' % join(self.output_path, "cds.lib"), + '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), '-log "%s"' - % join(self.output_path, "irun_compile_verilog_file_lib.log"), + % str(Path(self.output_path) / "irun_compile_verilog_file_lib.log"), "-quiet", '-incdir "cds_root_irun/tools/spectre/etc/ahdl/"', - '-nclibdirname ""', + '-nclibdirname "."', "-makelib lib", '"file.v"', "-endlib", @@ -298,9 +303,9 @@ def test_compile_project_system_verilog( write_file("file.sv", "") project.add_source_file("file.sv", "lib", file_type="systemverilog") simif.compile_project(project) - args_file = join(self.output_path, "irun_compile_verilog_file_lib.args") + args_file = str(Path(self.output_path) / "irun_compile_verilog_file_lib.args") check_output.assert_called_once_with( - [join("prefix", "irun"), "-f", args_file], env=simif.get_env() + [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env() ) self.assertEqual( read_file(args_file).splitlines(), @@ -312,12 +317,12 @@ def 
test_compile_project_system_verilog( "-nowarn DLCPTH", "-nowarn DLCVAR", "-work work", - '-cdslib "%s"' % join(self.output_path, "cds.lib"), + '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), '-log "%s"' - % join(self.output_path, "irun_compile_verilog_file_lib.log"), + % str(Path(self.output_path) / "irun_compile_verilog_file_lib.log"), "-quiet", '-incdir "cds_root_irun/tools/spectre/etc/ahdl/"', - '-nclibdirname ""', + '-nclibdirname "."', "-makelib lib", '"file.sv"', "-endlib", @@ -325,7 +330,7 @@ def test_compile_project_system_verilog( ) self.assertEqual( - read_file(join(self.output_path, "cds.lib")), + read_file(str(Path(self.output_path) / "cds.lib")), """\ ## cds.lib: Defines the locations of compiled libraries. softinclude cds_root_irun/tools/inca/files/cds.lib @@ -355,9 +360,9 @@ def test_compile_project_verilog_extra_flags( "incisive.irun_verilog_flags", ["custom", "flags"] ) simif.compile_project(project) - args_file = join(self.output_path, "irun_compile_verilog_file_lib.args") + args_file = str(Path(self.output_path) / "irun_compile_verilog_file_lib.args") check_output.assert_called_once_with( - [join("prefix", "irun"), "-f", args_file], env=simif.get_env() + [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env() ) self.assertEqual( read_file(args_file).splitlines(), @@ -371,12 +376,12 @@ def test_compile_project_verilog_extra_flags( "-work work", "custom", "flags", - '-cdslib "%s"' % join(self.output_path, "cds.lib"), + '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), '-log "%s"' - % join(self.output_path, "irun_compile_verilog_file_lib.log"), + % str(Path(self.output_path) / "irun_compile_verilog_file_lib.log"), "-quiet", '-incdir "cds_root_irun/tools/spectre/etc/ahdl/"', - '-nclibdirname ""', + '-nclibdirname "."', "-makelib lib", '"file.v"', "-endlib", @@ -399,9 +404,9 @@ def test_compile_project_verilog_include( "file.v", "lib", file_type="verilog", include_dirs=["include"] ) simif.compile_project(project) - args_file = join(self.output_path, "irun_compile_verilog_file_lib.args") + args_file = str(Path(self.output_path) / "irun_compile_verilog_file_lib.args") check_output.assert_called_once_with( - [join("prefix", "irun"), "-f", args_file], env=simif.get_env() + [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env() ) self.assertEqual( read_file(args_file).splitlines(), @@ -413,13 +418,13 @@ def test_compile_project_verilog_include( "-nowarn DLCPTH", "-nowarn DLCVAR", "-work work", - '-cdslib "%s"' % join(self.output_path, "cds.lib"), + '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), '-log "%s"' - % join(self.output_path, "irun_compile_verilog_file_lib.log"), + % str(Path(self.output_path) / "irun_compile_verilog_file_lib.log"), "-quiet", '-incdir "include"', '-incdir "cds_root_irun/tools/spectre/etc/ahdl/"', - '-nclibdirname ""', + '-nclibdirname "."', "-makelib lib", '"file.v"', "-endlib", @@ -442,9 +447,9 @@ def test_compile_project_verilog_define( "file.v", "lib", file_type="verilog", defines=dict(defname="defval") ) simif.compile_project(project) - args_file = join(self.output_path, "irun_compile_verilog_file_lib.args") + args_file = str(Path(self.output_path) / "irun_compile_verilog_file_lib.args") check_output.assert_called_once_with( - [join("prefix", "irun"), "-f", args_file], env=simif.get_env() + [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env() ) self.assertEqual( read_file(args_file).splitlines(), @@ -456,13 +461,13 @@ def test_compile_project_verilog_define( "-nowarn DLCPTH", 
"-nowarn DLCVAR", "-work work", - '-cdslib "%s"' % join(self.output_path, "cds.lib"), + '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), '-log "%s"' - % join(self.output_path, "irun_compile_verilog_file_lib.log"), + % str(Path(self.output_path) / "irun_compile_verilog_file_lib.log"), "-quiet", '-incdir "cds_root_irun/tools/spectre/etc/ahdl/"', "-define defname=defval", - '-nclibdirname ""', + '-nclibdirname "."', "-makelib lib", '"file.v"', "-endlib", @@ -487,9 +492,9 @@ def test_compile_project_verilog_hdlvar( "file.v", "lib", file_type="verilog", defines=dict(defname="defval") ) simif.compile_project(project) - args_file = join(self.output_path, "irun_compile_verilog_file_lib.args") + args_file = str(Path(self.output_path) / "irun_compile_verilog_file_lib.args") check_output.assert_called_once_with( - [join("prefix", "irun"), "-f", args_file], env=simif.get_env() + [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env() ) self.assertEqual( read_file(args_file).splitlines(), @@ -501,14 +506,14 @@ def test_compile_project_verilog_hdlvar( "-nowarn DLCPTH", "-nowarn DLCVAR", "-work work", - '-cdslib "%s"' % join(self.output_path, "cds.lib"), + '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), '-hdlvar "custom_hdlvar"', '-log "%s"' - % join(self.output_path, "irun_compile_verilog_file_lib.log"), + % str(Path(self.output_path) / "irun_compile_verilog_file_lib.log"), "-quiet", '-incdir "cds_root_irun/tools/spectre/etc/ahdl/"', "-define defname=defval", - '-nclibdirname ""', + '-nclibdirname "."', "-makelib lib", '"file.v"', "-endlib", @@ -522,7 +527,7 @@ def test_create_cds_lib(self, find_cds_root_irun, find_cds_root_virtuoso): find_cds_root_virtuoso.return_value = None IncisiveInterface(prefix="prefix", output_path=self.output_path) self.assertEqual( - read_file(join(self.output_path, "cds.lib")), + read_file(str(Path(self.output_path) / "cds.lib")), """\ ## cds.lib: Defines the locations of compiled libraries. softinclude cds_root_irun/tools/inca/files/cds.lib @@ -541,7 +546,7 @@ def test_create_cds_lib_virtuoso(self, find_cds_root_irun, find_cds_root_virtuos find_cds_root_virtuoso.return_value = "cds_root_virtuoso" IncisiveInterface(prefix="prefix", output_path=self.output_path) self.assertEqual( - read_file(join(self.output_path, "cds.lib")), + read_file(str(Path(self.output_path) / "cds.lib")), """\ ## cds.lib: Defines the locations of compiled libraries. 
softinclude cds_root_irun/tools/inca/files/cds.lib @@ -574,20 +579,26 @@ def test_simulate_vhdl( config = make_config() self.assertTrue(simif.simulate("suite_output_path", "test_suite_name", config)) - elaborate_args_file = join( - "suite_output_path", simif.name, "irun_elaborate.args" + elaborate_args_file = str( + Path("suite_output_path") / simif.name / "irun_elaborate.args" + ) + simulate_args_file = str( + Path("suite_output_path") / simif.name / "irun_simulate.args" ) - simulate_args_file = join("suite_output_path", simif.name, "irun_simulate.args") run_command.assert_has_calls( [ mock.call( - [join("prefix", "irun"), "-f", basename(elaborate_args_file)], - cwd=dirname(elaborate_args_file), + [ + str(Path("prefix") / "irun"), + "-f", + Path(elaborate_args_file).name, + ], + cwd=str(Path(elaborate_args_file).parent), env=simif.get_env(), ), mock.call( - [join("prefix", "irun"), "-f", basename(simulate_args_file)], - cwd=dirname(simulate_args_file), + [str(Path("prefix") / "irun"), "-f", Path(simulate_args_file).name], + cwd=str(Path(simulate_args_file).parent), env=simif.get_env(), ), ] @@ -607,10 +618,10 @@ def test_simulate_vhdl( "-ncerror EVBSTR", "-ncerror EVBNAT", "-work work", - '-nclibdirname "%s"' % join(self.output_path, "libraries"), - '-cdslib "%s"' % join(self.output_path, "cds.lib"), + '-nclibdirname "%s"' % str(Path(self.output_path) / "libraries"), + '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), '-log "%s"' - % join("suite_output_path", simif.name, "irun_elaborate.log"), + % str(Path("suite_output_path") / simif.name / "irun_elaborate.log"), "-quiet", '-reflib "lib_path"', "-access +r", @@ -632,10 +643,10 @@ def test_simulate_vhdl( "-ncerror EVBSTR", "-ncerror EVBNAT", "-work work", - '-nclibdirname "%s"' % join(self.output_path, "libraries"), - '-cdslib "%s"' % join(self.output_path, "cds.lib"), + '-nclibdirname "%s"' % str(Path(self.output_path) / "libraries"), + '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), '-log "%s"' - % join("suite_output_path", simif.name, "irun_simulate.log"), + % str(Path("suite_output_path") / simif.name / "irun_simulate.log"), "-quiet", '-reflib "lib_path"', "-access +r", @@ -666,20 +677,26 @@ def test_simulate_verilog( config = make_config(verilog=True) self.assertTrue(simif.simulate("suite_output_path", "test_suite_name", config)) - elaborate_args_file = join( - "suite_output_path", simif.name, "irun_elaborate.args" + elaborate_args_file = str( + Path("suite_output_path") / simif.name / "irun_elaborate.args" + ) + simulate_args_file = str( + Path("suite_output_path") / simif.name / "irun_simulate.args" ) - simulate_args_file = join("suite_output_path", simif.name, "irun_simulate.args") run_command.assert_has_calls( [ mock.call( - [join("prefix", "irun"), "-f", basename(elaborate_args_file)], - cwd=dirname(elaborate_args_file), + [ + str(Path("prefix") / "irun"), + "-f", + Path(elaborate_args_file).name, + ], + cwd=str(Path(elaborate_args_file).parent), env=simif.get_env(), ), mock.call( - [join("prefix", "irun"), "-f", basename(simulate_args_file)], - cwd=dirname(simulate_args_file), + [str(Path("prefix") / "irun"), "-f", Path(simulate_args_file).name], + cwd=str(Path(simulate_args_file).parent), env=simif.get_env(), ), ] @@ -699,10 +716,10 @@ def test_simulate_verilog( "-ncerror EVBSTR", "-ncerror EVBNAT", "-work work", - '-nclibdirname "%s"' % join(self.output_path, "libraries"), - '-cdslib "%s"' % join(self.output_path, "cds.lib"), + '-nclibdirname "%s"' % str(Path(self.output_path) / "libraries"), + '-cdslib 
"%s"' % str(Path(self.output_path) / "cds.lib"), '-log "%s"' - % join("suite_output_path", simif.name, "irun_elaborate.log"), + % str(Path("suite_output_path") / simif.name / "irun_elaborate.log"), "-quiet", '-reflib "lib_path"', "-access +r", @@ -724,10 +741,10 @@ def test_simulate_verilog( "-ncerror EVBSTR", "-ncerror EVBNAT", "-work work", - '-nclibdirname "%s"' % join(self.output_path, "libraries"), - '-cdslib "%s"' % join(self.output_path, "cds.lib"), + '-nclibdirname "%s"' % str(Path(self.output_path) / "libraries"), + '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), '-log "%s"' - % join("suite_output_path", simif.name, "irun_simulate.log"), + % str(Path("suite_output_path") / simif.name / "irun_simulate.log"), "-quiet", '-reflib "lib_path"', "-access +r", @@ -749,20 +766,26 @@ def test_simulate_extra_flags( sim_options={"incisive.irun_sim_flags": ["custom", "flags"]} ) self.assertTrue(simif.simulate("suite_output_path", "test_suite_name", config)) - elaborate_args_file = join( - "suite_output_path", simif.name, "irun_elaborate.args" + elaborate_args_file = str( + Path("suite_output_path") / simif.name / "irun_elaborate.args" + ) + simulate_args_file = str( + Path("suite_output_path") / simif.name / "irun_simulate.args" ) - simulate_args_file = join("suite_output_path", simif.name, "irun_simulate.args") run_command.assert_has_calls( [ mock.call( - [join("prefix", "irun"), "-f", basename(elaborate_args_file)], - cwd=dirname(elaborate_args_file), + [ + str(Path("prefix") / "irun"), + "-f", + Path(elaborate_args_file).name, + ], + cwd=str(Path(elaborate_args_file).parent), env=simif.get_env(), ), mock.call( - [join("prefix", "irun"), "-f", basename(simulate_args_file)], - cwd=dirname(simulate_args_file), + [str(Path("prefix") / "irun"), "-f", Path(simulate_args_file).name], + cwd=str(Path(simulate_args_file).parent), env=simif.get_env(), ), ] @@ -789,20 +812,26 @@ def test_simulate_generics_and_parameters( verilog=True, generics={"genstr": "genval", "genint": 1, "genbool": True} ) self.assertTrue(simif.simulate("suite_output_path", "test_suite_name", config)) - elaborate_args_file = join( - "suite_output_path", simif.name, "irun_elaborate.args" + elaborate_args_file = str( + Path("suite_output_path") / simif.name / "irun_elaborate.args" + ) + simulate_args_file = str( + Path("suite_output_path") / simif.name / "irun_simulate.args" ) - simulate_args_file = join("suite_output_path", simif.name, "irun_simulate.args") run_command.assert_has_calls( [ mock.call( - [join("prefix", "irun"), "-f", basename(elaborate_args_file)], - cwd=dirname(elaborate_args_file), + [ + str(Path("prefix") / "irun"), + "-f", + Path(elaborate_args_file).name, + ], + cwd=str(Path(elaborate_args_file).parent), env=simif.get_env(), ), mock.call( - [join("prefix", "irun"), "-f", basename(simulate_args_file)], - cwd=dirname(simulate_args_file), + [str(Path("prefix") / "irun"), "-f", Path(simulate_args_file).name], + cwd=str(Path(simulate_args_file).parent), env=simif.get_env(), ), ] @@ -827,20 +856,26 @@ def test_simulate_hdlvar( ) config = make_config() self.assertTrue(simif.simulate("suite_output_path", "test_suite_name", config)) - elaborate_args_file = join( - "suite_output_path", simif.name, "irun_elaborate.args" + elaborate_args_file = str( + Path("suite_output_path") / simif.name / "irun_elaborate.args" + ) + simulate_args_file = str( + Path("suite_output_path") / simif.name / "irun_simulate.args" ) - simulate_args_file = join("suite_output_path", simif.name, "irun_simulate.args") 
run_command.assert_has_calls( [ mock.call( - [join("prefix", "irun"), "-f", basename(elaborate_args_file)], - cwd=dirname(elaborate_args_file), + [ + str(Path("prefix") / "irun"), + "-f", + Path(elaborate_args_file).name, + ], + cwd=str(Path(elaborate_args_file).parent), env=simif.get_env(), ), mock.call( - [join("prefix", "irun"), "-f", basename(simulate_args_file)], - cwd=dirname(simulate_args_file), + [str(Path("prefix") / "irun"), "-f", Path(simulate_args_file).name], + cwd=str(Path(simulate_args_file).parent), env=simif.get_env(), ), ] @@ -863,14 +898,18 @@ def test_elaborate(self, run_command, find_cds_root_irun, find_cds_root_virtuoso "suite_output_path", "test_suite_name", config, elaborate_only=True ) ) - elaborate_args_file = join( - "suite_output_path", simif.name, "irun_elaborate.args" + elaborate_args_file = str( + Path("suite_output_path") / simif.name / "irun_elaborate.args" ) run_command.assert_has_calls( [ mock.call( - [join("prefix", "irun"), "-f", basename(elaborate_args_file)], - cwd=dirname(elaborate_args_file), + [ + str(Path("prefix") / "irun"), + "-f", + Path(elaborate_args_file).name, + ], + cwd=str(Path(elaborate_args_file).parent), env=simif.get_env(), ) ] @@ -890,10 +929,10 @@ def test_elaborate(self, run_command, find_cds_root_irun, find_cds_root_virtuoso "-ncerror EVBSTR", "-ncerror EVBNAT", "-work work", - '-nclibdirname "%s"' % join(self.output_path, "libraries"), - '-cdslib "%s"' % join(self.output_path, "cds.lib"), + '-nclibdirname "%s"' % str(Path(self.output_path) / "libraries"), + '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), '-log "%s"' - % join("suite_output_path", simif.name, "irun_elaborate.log"), + % str(Path("suite_output_path") / simif.name / "irun_elaborate.log"), "-quiet", "-access +r", '-input "@run"', @@ -912,14 +951,18 @@ def test_elaborate_fail( simif = IncisiveInterface(prefix="prefix", output_path=self.output_path) config = make_config() self.assertFalse(simif.simulate("suite_output_path", "test_suite_name", config)) - elaborate_args_file = join( - "suite_output_path", simif.name, "irun_elaborate.args" + elaborate_args_file = str( + Path("suite_output_path") / simif.name / "irun_elaborate.args" ) run_command.assert_has_calls( [ mock.call( - [join("prefix", "irun"), "-f", basename(elaborate_args_file)], - cwd=dirname(elaborate_args_file), + [ + str(Path("prefix") / "irun"), + "-f", + Path(elaborate_args_file).name, + ], + cwd=str(Path(elaborate_args_file).parent), env=simif.get_env(), ) ] @@ -938,20 +981,26 @@ def test_simulate_fail( simif = IncisiveInterface(prefix="prefix", output_path=self.output_path) config = make_config() self.assertFalse(simif.simulate("suite_output_path", "test_suite_name", config)) - elaborate_args_file = join( - "suite_output_path", simif.name, "irun_elaborate.args" + elaborate_args_file = str( + Path("suite_output_path") / simif.name / "irun_elaborate.args" + ) + simulate_args_file = str( + Path("suite_output_path") / simif.name / "irun_simulate.args" ) - simulate_args_file = join("suite_output_path", simif.name, "irun_simulate.args") run_command.assert_has_calls( [ mock.call( - [join("prefix", "irun"), "-f", basename(elaborate_args_file)], - cwd=dirname(elaborate_args_file), + [ + str(Path("prefix") / "irun"), + "-f", + Path(elaborate_args_file).name, + ], + cwd=str(Path(elaborate_args_file).parent), env=simif.get_env(), ), mock.call( - [join("prefix", "irun"), "-f", basename(simulate_args_file)], - cwd=dirname(simulate_args_file), + [str(Path("prefix") / "irun"), "-f", 
Path(simulate_args_file).name], + cwd=str(Path(simulate_args_file).parent), env=simif.get_env(), ), ] @@ -980,20 +1029,26 @@ def test_simulate_gui( simif.compile_project(project) config = make_config() self.assertTrue(simif.simulate("suite_output_path", "test_suite_name", config)) - elaborate_args_file = join( - "suite_output_path", simif.name, "irun_elaborate.args" + elaborate_args_file = str( + Path("suite_output_path") / simif.name / "irun_elaborate.args" + ) + simulate_args_file = str( + Path("suite_output_path") / simif.name / "irun_simulate.args" ) - simulate_args_file = join("suite_output_path", simif.name, "irun_simulate.args") run_command.assert_has_calls( [ mock.call( - [join("prefix", "irun"), "-f", basename(elaborate_args_file)], - cwd=dirname(elaborate_args_file), + [ + str(Path("prefix") / "irun"), + "-f", + Path(elaborate_args_file).name, + ], + cwd=str(Path(elaborate_args_file).parent), env=simif.get_env(), ), mock.call( - [join("prefix", "irun"), "-f", basename(simulate_args_file)], - cwd=dirname(simulate_args_file), + [str(Path("prefix") / "irun"), "-f", Path(simulate_args_file).name], + cwd=str(Path(simulate_args_file).parent), env=simif.get_env(), ), ] @@ -1012,10 +1067,10 @@ def test_simulate_gui( "-ncerror EVBSTR", "-ncerror EVBNAT", "-work work", - '-nclibdirname "%s"' % join(self.output_path, "libraries"), - '-cdslib "%s"' % join(self.output_path, "cds.lib"), + '-nclibdirname "%s"' % str(Path(self.output_path) / "libraries"), + '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), '-log "%s"' - % join("suite_output_path", simif.name, "irun_elaborate.log"), + % str(Path("suite_output_path") / simif.name / "irun_elaborate.log"), "-quiet", '-reflib "lib_path"', "-access +rwc", @@ -1037,10 +1092,10 @@ def test_simulate_gui( "-ncerror EVBSTR", "-ncerror EVBNAT", "-work work", - '-nclibdirname "%s"' % join(self.output_path, "libraries"), - '-cdslib "%s"' % join(self.output_path, "cds.lib"), + '-nclibdirname "%s"' % str(Path(self.output_path) / "libraries"), + '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), '-log "%s"' - % join("suite_output_path", simif.name, "irun_simulate.log"), + % str(Path("suite_output_path") / simif.name / "irun_simulate.log"), "-quiet", '-reflib "lib_path"', "-access +rwc", @@ -1050,7 +1105,7 @@ def test_simulate_gui( ) def setUp(self): - self.output_path = join(dirname(__file__), "test_incisive_out") + self.output_path = str(Path(__file__).parent / "test_incisive_out") renew_path(self.output_path) self.project = Project() self.cwd = os.getcwd() @@ -1058,7 +1113,7 @@ def setUp(self): def tearDown(self): os.chdir(self.cwd) - if exists(self.output_path): + if Path(self.output_path).exists(): rmtree(self.output_path) diff --git a/tests/unit/test_modelsim_interface.py b/tests/unit/test_modelsim_interface.py index f01e3ef7f..b30c005ab 100644 --- a/tests/unit/test_modelsim_interface.py +++ b/tests/unit/test_modelsim_interface.py @@ -10,7 +10,7 @@ import unittest -from os.path import join, dirname, exists +from pathlib import Path import os from shutil import rmtree from unittest import mock @@ -39,13 +39,13 @@ def test_compile_project_vhdl_2008(self, process, check_output): "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2008") ) simif.compile_project(project) - process_args = [join(self.prefix_path, "vlib"), "-unix", "lib_path"] + process_args = [str(Path(self.prefix_path) / "vlib"), "-unix", "lib_path"] process.assert_called_once_with(process_args, env=simif.get_env()) check_args = [ - join(self.prefix_path, "vcom"), + 
str(Path(self.prefix_path) / "vcom"), "-quiet", "-modelsimini", - join(self.output_path, "modelsim.ini"), + str(Path(self.output_path) / "modelsim.ini"), "-2008", "-work", "lib", @@ -66,13 +66,13 @@ def test_compile_project_vhdl_2002(self, process, check_output): "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2002") ) simif.compile_project(project) - process_args = [join(self.prefix_path, "vlib"), "-unix", "lib_path"] + process_args = [str(Path(self.prefix_path) / "vlib"), "-unix", "lib_path"] process.assert_called_once_with(process_args, env=simif.get_env()) check_args = [ - join(self.prefix_path, "vcom"), + str(Path(self.prefix_path) / "vcom"), "-quiet", "-modelsimini", - join(self.output_path, "modelsim.ini"), + str(Path(self.output_path) / "modelsim.ini"), "-2002", "-work", "lib", @@ -93,13 +93,13 @@ def test_compile_project_vhdl_93(self, process, check_output): "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("93") ) simif.compile_project(project) - process_args = [join(self.prefix_path, "vlib"), "-unix", "lib_path"] + process_args = [str(Path(self.prefix_path) / "vlib"), "-unix", "lib_path"] process.assert_called_once_with(process_args, env=simif.get_env()) check_args = [ - join(self.prefix_path, "vcom"), + str(Path(self.prefix_path) / "vcom"), "-quiet", "-modelsimini", - join(self.output_path, "modelsim.ini"), + str(Path(self.output_path) / "modelsim.ini"), "-93", "-work", "lib", @@ -119,13 +119,13 @@ def test_compile_project_vhdl_extra_flags(self, process, check_output): source_file = project.add_source_file("file.vhd", "lib", file_type="vhdl") source_file.set_compile_option("modelsim.vcom_flags", ["custom", "flags"]) simif.compile_project(project) - process_args = [join(self.prefix_path, "vlib"), "-unix", "lib_path"] + process_args = [str(Path(self.prefix_path) / "vlib"), "-unix", "lib_path"] process.assert_called_once_with(process_args, env=simif.get_env()) check_args = [ - join(self.prefix_path, "vcom"), + str(Path(self.prefix_path) / "vcom"), "-quiet", "-modelsimini", - join(self.output_path, "modelsim.ini"), + str(Path(self.output_path) / "modelsim.ini"), "custom", "flags", "-2008", @@ -146,13 +146,13 @@ def test_compile_project_verilog(self, process, check_output): write_file("file.v", "") project.add_source_file("file.v", "lib", file_type="verilog") simif.compile_project(project) - process_args = [join(self.prefix_path, "vlib"), "-unix", "lib_path"] + process_args = [str(Path(self.prefix_path) / "vlib"), "-unix", "lib_path"] process.assert_called_once_with(process_args, env=simif.get_env()) check_args = [ - join(self.prefix_path, "vlog"), + str(Path(self.prefix_path) / "vlog"), "-quiet", "-modelsimini", - join(self.output_path, "modelsim.ini"), + str(Path(self.output_path) / "modelsim.ini"), "-work", "lib", "file.v", @@ -172,13 +172,13 @@ def test_compile_project_system_verilog(self, process, check_output): write_file("file.sv", "") project.add_source_file("file.sv", "lib", file_type="systemverilog") simif.compile_project(project) - process_args = [join(self.prefix_path, "vlib"), "-unix", "lib_path"] + process_args = [str(Path(self.prefix_path) / "vlib"), "-unix", "lib_path"] process.assert_called_once_with(process_args, env=simif.get_env()) check_args = [ - join(self.prefix_path, "vlog"), + str(Path(self.prefix_path) / "vlog"), "-quiet", "-modelsimini", - join(self.output_path, "modelsim.ini"), + str(Path(self.output_path) / "modelsim.ini"), "-sv", "-work", "lib", @@ -200,13 +200,13 @@ def test_compile_project_verilog_extra_flags(self, 
process, check_output): source_file = project.add_source_file("file.v", "lib", file_type="verilog") source_file.set_compile_option("modelsim.vlog_flags", ["custom", "flags"]) simif.compile_project(project) - process_args = [join(self.prefix_path, "vlib"), "-unix", "lib_path"] + process_args = [str(Path(self.prefix_path) / "vlib"), "-unix", "lib_path"] process.assert_called_once_with(process_args, env=simif.get_env()) check_args = [ - join(self.prefix_path, "vlog"), + str(Path(self.prefix_path) / "vlog"), "-quiet", "-modelsimini", - join(self.output_path, "modelsim.ini"), + str(Path(self.output_path) / "modelsim.ini"), "custom", "flags", "-work", @@ -230,13 +230,13 @@ def test_compile_project_verilog_include(self, process, check_output): "file.v", "lib", file_type="verilog", include_dirs=["include"] ) simif.compile_project(project) - process_args = [join(self.prefix_path, "vlib"), "-unix", "lib_path"] + process_args = [str(Path(self.prefix_path) / "vlib"), "-unix", "lib_path"] process.assert_called_once_with(process_args, env=simif.get_env()) check_args = [ - join(self.prefix_path, "vlog"), + str(Path(self.prefix_path) / "vlog"), "-quiet", "-modelsimini", - join(self.output_path, "modelsim.ini"), + str(Path(self.output_path) / "modelsim.ini"), "-work", "lib", "file.v", @@ -259,13 +259,13 @@ def test_compile_project_verilog_define(self, process, check_output): "file.v", "lib", file_type="verilog", defines={"defname": "defval"} ) simif.compile_project(project) - process_args = [join(self.prefix_path, "vlib"), "-unix", "lib_path"] + process_args = [str(Path(self.prefix_path) / "vlib"), "-unix", "lib_path"] process.assert_called_once_with(process_args, env=simif.get_env()) process_args = [ - join(self.prefix_path, "vlog"), + str(Path(self.prefix_path) / "vlog"), "-quiet", "-modelsimini", - join(self.output_path, "modelsim.ini"), + str(Path(self.output_path) / "modelsim.ini"), "-work", "lib", "file.v", @@ -275,10 +275,15 @@ def test_compile_project_verilog_define(self, process, check_output): ] check_output.assert_called_once_with(process_args, env=simif.get_env()) + def _get_inis(self): + return ( + str(Path(self.output_path) / "modelsim.ini"), + str(Path(self.prefix_path) / ".." 
/ "modelsim.ini"), + str(Path(self.test_path) / "my_modelsim.ini"), + ) + def test_copies_modelsim_ini_file_from_install(self): - modelsim_ini = join(self.output_path, "modelsim.ini") - installed_modelsim_ini = join(self.prefix_path, "..", "modelsim.ini") - user_modelsim_ini = join(self.test_path, "my_modelsim.ini") + (modelsim_ini, installed_modelsim_ini, user_modelsim_ini) = self._get_inis() with open(installed_modelsim_ini, "w") as fptr: fptr.write("installed") @@ -293,9 +298,7 @@ def test_copies_modelsim_ini_file_from_install(self): self.assertEqual(fptr.read(), "installed") def test_copies_modelsim_ini_file_from_user(self): - modelsim_ini = join(self.output_path, "modelsim.ini") - installed_modelsim_ini = join(self.prefix_path, "..", "modelsim.ini") - user_modelsim_ini = join(self.test_path, "my_modelsim.ini") + (modelsim_ini, installed_modelsim_ini, user_modelsim_ini) = self._get_inis() with open(installed_modelsim_ini, "w") as fptr: fptr.write("installed") @@ -312,9 +315,7 @@ def test_copies_modelsim_ini_file_from_user(self): self.assertEqual(fptr.read(), "user") def test_overwrites_modelsim_ini_file_from_install(self): - modelsim_ini = join(self.output_path, "modelsim.ini") - installed_modelsim_ini = join(self.prefix_path, "..", "modelsim.ini") - user_modelsim_ini = join(self.test_path, "my_modelsim.ini") + (modelsim_ini, installed_modelsim_ini, user_modelsim_ini) = self._get_inis() with open(modelsim_ini, "w") as fptr: fptr.write("existing") @@ -332,9 +333,7 @@ def test_overwrites_modelsim_ini_file_from_install(self): self.assertEqual(fptr.read(), "installed") def test_overwrites_modelsim_ini_file_from_user(self): - modelsim_ini = join(self.output_path, "modelsim.ini") - installed_modelsim_ini = join(self.prefix_path, "..", "modelsim.ini") - user_modelsim_ini = join(self.test_path, "my_modelsim.ini") + (modelsim_ini, installed_modelsim_ini, user_modelsim_ini) = self._get_inis() with open(modelsim_ini, "w") as fptr: fptr.write("existing") @@ -354,13 +353,14 @@ def test_overwrites_modelsim_ini_file_from_user(self): self.assertEqual(fptr.read(), "user") def setUp(self): - self.test_path = join(dirname(__file__), "test_modelsim_out") - self.output_path = join(self.test_path, "modelsim") - self.prefix_path = join(self.test_path, "prefix", "bin") + self.test_path = str(Path(__file__).parent / "test_modelsim_out") + + self.output_path = str(Path(self.test_path) / "modelsim") + self.prefix_path = str(Path(self.test_path) / "prefix" / "bin") renew_path(self.test_path) renew_path(self.output_path) renew_path(self.prefix_path) - installed_modelsim_ini = join(self.prefix_path, "..", "modelsim.ini") + installed_modelsim_ini = str(Path(self.prefix_path) / ".." 
/ "modelsim.ini") write_file(installed_modelsim_ini, "[Library]") self.project = Project() self.cwd = os.getcwd() @@ -368,5 +368,5 @@ def setUp(self): def tearDown(self): os.chdir(self.cwd) - if exists(self.test_path): + if Path(self.test_path).exists(): rmtree(self.test_path) diff --git a/tests/unit/test_ostools.py b/tests/unit/test_ostools.py index 6b6fb76da..834a4085a 100644 --- a/tests/unit/test_ostools.py +++ b/tests/unit/test_ostools.py @@ -10,8 +10,8 @@ from unittest import TestCase +from pathlib import Path from shutil import rmtree -from os.path import exists, dirname, join, abspath import sys from vunit.ostools import Process, renew_path @@ -22,11 +22,11 @@ class TestOSTools(TestCase): """ def setUp(self): - self.tmp_dir = join(dirname(__file__), "test_ostools_tmp") + self.tmp_dir = str(Path(__file__).parent / "test_ostools_tmp") renew_path(self.tmp_dir) def tearDown(self): - if exists(self.tmp_dir): + if Path(self.tmp_dir).exists(): rmtree(self.tmp_dir) def make_file(self, file_name, contents): @@ -34,7 +34,7 @@ def make_file(self, file_name, contents): Create a file in the temporary directory with contents Returns the absolute path to the file. """ - full_file_name = abspath(join(self.tmp_dir, file_name)) + full_file_name = str((Path(self.tmp_dir) / file_name).resolve()) with open(full_file_name, "w") as outfile: outfile.write(contents) return full_file_name @@ -101,7 +101,7 @@ def test_output_is_parallel(self): self.assertEqual(message, "message") def test_non_utf8_in_output(self): - python_script = join(dirname(__file__), "non_utf8_printer.py") + python_script = str(Path(__file__).parent / "non_utf8_printer.py") output = [] process = Process([sys.executable, python_script]) process.consume_output(output.append) diff --git a/tests/unit/test_project.py b/tests/unit/test_project.py index 7fcfc30c8..58fdf878a 100644 --- a/tests/unit/test_project.py +++ b/tests/unit/test_project.py @@ -12,9 +12,9 @@ import unittest -from shutil import rmtree -from os.path import join, exists, dirname +from pathlib import Path import os +from shutil import rmtree from time import sleep import itertools from unittest import mock @@ -30,7 +30,7 @@ class TestProject(unittest.TestCase): # pylint: disable=too-many-public-methods """ def setUp(self): - self.output_path = join(dirname(__file__), "test_project_out") + self.output_path = str(Path(__file__).parent / "test_project_out") renew_path(self.output_path) self.project = Project() self.cwd = os.getcwd() @@ -38,7 +38,7 @@ def setUp(self): def tearDown(self): os.chdir(self.cwd) - if exists(self.output_path): + if Path(self.output_path).exists(): rmtree(self.output_path) def test_parses_entity_architecture(self): @@ -196,7 +196,7 @@ def test_multiple_identical_file_names_with_different_path_in_same_library(self) self.project.add_library("lib", "lib_path") a_foo = self.add_source_file( "lib", - join("a", "foo.vhd"), + str(Path("a") / "foo.vhd"), """ entity a_foo is end entity; @@ -205,7 +205,7 @@ def test_multiple_identical_file_names_with_different_path_in_same_library(self) b_foo = self.add_source_file( "lib", - join("b", "foo.vhd"), + str(Path("b") / "foo.vhd"), """ entity b_foo is end entity; @@ -891,7 +891,7 @@ def test_updating_creates_hash_files(self): for source_file in files: self.update(source_file) - self.assertTrue(exists(self.hash_file_name_of(source_file))) + self.assertTrue(Path(self.hash_file_name_of(source_file)).exists()) def test_should_not_recompile_updated_files(self): file1, file2, file3 = self.create_dummy_three_file_project() @@ 
-1732,8 +1732,8 @@ def test_dependencies_on_separated_architecture(self): def test_dependencies_on_verilog_component(self): """ - Create a projected containing an verilog file separated. - Dependency should involve it. + Create a projected containing an verilog file separated. + Dependency should involve it. """ self.project = Project() self.project.add_library("lib", "work_path") diff --git a/tests/unit/test_rivierapro_interface.py b/tests/unit/test_rivierapro_interface.py index 2e8cd846d..03574e9e4 100644 --- a/tests/unit/test_rivierapro_interface.py +++ b/tests/unit/test_rivierapro_interface.py @@ -10,7 +10,7 @@ import unittest -from os.path import join, dirname, exists +from pathlib import Path import os from shutil import rmtree from unittest import mock @@ -27,7 +27,10 @@ class TestRivieraProInterface(unittest.TestCase): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - def test_compile_project_vhdl_2019(self, process, check_output): + @mock.patch( + "vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" + ) + def test_compile_project_vhdl_2019(self, _find_prefix, process, check_output): simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") @@ -37,22 +40,22 @@ def test_compile_project_vhdl_2019(self, process, check_output): ) simif.compile_project(project) process.assert_any_call( - [join("prefix", "vlib"), "lib", "lib_path"], + [str(Path("prefix") / "vlib"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) process.assert_called_with( - [join("prefix", "vmap"), "lib", "lib_path"], + [str(Path("prefix") / "vmap"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) check_output.assert_called_once_with( [ - join("prefix", "vcom"), + str(Path("prefix") / "vcom"), "-quiet", "-j", self.output_path, - "-2018", + "-2019", "-work", "lib", "file.vhd", @@ -62,7 +65,10 @@ def test_compile_project_vhdl_2019(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - def test_compile_project_vhdl_2008(self, process, check_output): + @mock.patch( + "vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" + ) + def test_compile_project_vhdl_2008(self, _find_prefix, process, check_output): simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") @@ -72,18 +78,18 @@ def test_compile_project_vhdl_2008(self, process, check_output): ) simif.compile_project(project) process.assert_any_call( - [join("prefix", "vlib"), "lib", "lib_path"], + [str(Path("prefix") / "vlib"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) process.assert_called_with( - [join("prefix", "vmap"), "lib", "lib_path"], + [str(Path("prefix") / "vmap"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) check_output.assert_called_once_with( [ - join("prefix", "vcom"), + str(Path("prefix") / "vcom"), "-quiet", "-j", self.output_path, @@ -97,7 +103,10 @@ def test_compile_project_vhdl_2008(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - def test_compile_project_vhdl_2002(self, process, check_output): + @mock.patch( + 
"vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" + ) + def test_compile_project_vhdl_2002(self, _find_prefix, process, check_output): simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") @@ -107,18 +116,18 @@ def test_compile_project_vhdl_2002(self, process, check_output): ) simif.compile_project(project) process.assert_any_call( - [join("prefix", "vlib"), "lib", "lib_path"], + [str(Path("prefix") / "vlib"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) process.assert_called_with( - [join("prefix", "vmap"), "lib", "lib_path"], + [str(Path("prefix") / "vmap"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) check_output.assert_called_once_with( [ - join("prefix", "vcom"), + str(Path("prefix") / "vcom"), "-quiet", "-j", self.output_path, @@ -132,7 +141,10 @@ def test_compile_project_vhdl_2002(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - def test_compile_project_vhdl_93(self, process, check_output): + @mock.patch( + "vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" + ) + def test_compile_project_vhdl_93(self, _find_prefix, process, check_output): simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") @@ -142,18 +154,18 @@ def test_compile_project_vhdl_93(self, process, check_output): ) simif.compile_project(project) process.assert_any_call( - [join("prefix", "vlib"), "lib", "lib_path"], + [str(Path("prefix") / "vlib"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) process.assert_called_with( - [join("prefix", "vmap"), "lib", "lib_path"], + [str(Path("prefix") / "vmap"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) check_output.assert_called_once_with( [ - join("prefix", "vcom"), + str(Path("prefix") / "vcom"), "-quiet", "-j", self.output_path, @@ -167,7 +179,12 @@ def test_compile_project_vhdl_93(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - def test_compile_project_vhdl_extra_flags(self, process, check_output): + @mock.patch( + "vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" + ) + def test_compile_project_vhdl_extra_flags( + self, _find_prefix, process, check_output + ): simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") @@ -176,18 +193,18 @@ def test_compile_project_vhdl_extra_flags(self, process, check_output): source_file.set_compile_option("rivierapro.vcom_flags", ["custom", "flags"]) simif.compile_project(project) process.assert_any_call( - [join("prefix", "vlib"), "lib", "lib_path"], + [str(Path("prefix") / "vlib"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) process.assert_called_with( - [join("prefix", "vmap"), "lib", "lib_path"], + [str(Path("prefix") / "vmap"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) check_output.assert_called_once_with( [ - join("prefix", "vcom"), + str(Path("prefix") / "vcom"), "-quiet", "-j", self.output_path, @@ -203,8 +220,11 @@ def test_compile_project_vhdl_extra_flags(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") 
@mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - def test_compile_project_verilog(self, process, check_output): - library_cfg = join(self.output_path, "library.cfg") + @mock.patch( + "vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" + ) + def test_compile_project_verilog(self, _find_prefix, process, check_output): + library_cfg = str(Path(self.output_path) / "library.cfg") simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") @@ -212,18 +232,18 @@ def test_compile_project_verilog(self, process, check_output): project.add_source_file("file.v", "lib", file_type="verilog") simif.compile_project(project) process.assert_any_call( - [join("prefix", "vlib"), "lib", "lib_path"], + [str(Path("prefix") / "vlib"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) process.assert_called_with( - [join("prefix", "vmap"), "lib", "lib_path"], + [str(Path("prefix") / "vmap"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) check_output.assert_called_once_with( [ - join("prefix", "vlog"), + str(Path("prefix") / "vlog"), "-quiet", "-lc", library_cfg, @@ -238,8 +258,11 @@ def test_compile_project_verilog(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - def test_compile_project_system_verilog(self, process, check_output): - library_cfg = join(self.output_path, "library.cfg") + @mock.patch( + "vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" + ) + def test_compile_project_system_verilog(self, _find_prefix, process, check_output): + library_cfg = str(Path(self.output_path) / "library.cfg") simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") @@ -247,18 +270,18 @@ def test_compile_project_system_verilog(self, process, check_output): project.add_source_file("file.sv", "lib", file_type="systemverilog") simif.compile_project(project) process.assert_any_call( - [join("prefix", "vlib"), "lib", "lib_path"], + [str(Path("prefix") / "vlib"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) process.assert_called_with( - [join("prefix", "vmap"), "lib", "lib_path"], + [str(Path("prefix") / "vmap"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) check_output.assert_called_once_with( [ - join("prefix", "vlog"), + str(Path("prefix") / "vlog"), "-quiet", "-lc", library_cfg, @@ -274,8 +297,13 @@ def test_compile_project_system_verilog(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - def test_compile_project_verilog_extra_flags(self, process, check_output): - library_cfg = join(self.output_path, "library.cfg") + @mock.patch( + "vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" + ) + def test_compile_project_verilog_extra_flags( + self, _find_prefix, process, check_output + ): + library_cfg = str(Path(self.output_path) / "library.cfg") simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") @@ -284,18 +312,18 @@ def test_compile_project_verilog_extra_flags(self, process, check_output): source_file.set_compile_option("rivierapro.vlog_flags", ["custom", "flags"]) simif.compile_project(project) 
process.assert_any_call( - [join("prefix", "vlib"), "lib", "lib_path"], + [str(Path("prefix") / "vlib"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) process.assert_called_with( - [join("prefix", "vmap"), "lib", "lib_path"], + [str(Path("prefix") / "vmap"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) check_output.assert_called_once_with( [ - join("prefix", "vlog"), + str(Path("prefix") / "vlog"), "-quiet", "-lc", library_cfg, @@ -312,8 +340,11 @@ def test_compile_project_verilog_extra_flags(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - def test_compile_project_verilog_include(self, process, check_output): - library_cfg = join(self.output_path, "library.cfg") + @mock.patch( + "vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" + ) + def test_compile_project_verilog_include(self, _find_prefix, process, check_output): + library_cfg = str(Path(self.output_path) / "library.cfg") simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") @@ -323,16 +354,18 @@ def test_compile_project_verilog_include(self, process, check_output): ) simif.compile_project(project) process.assert_any_call( - [join("prefix", "vlib"), "lib", "lib_path"], cwd=self.output_path, env=None + [str(Path("prefix") / "vlib"), "lib", "lib_path"], + cwd=self.output_path, + env=None, ) process.assert_called_with( - [join("prefix", "vmap"), "lib", "lib_path"], + [str(Path("prefix") / "vmap"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) check_output.assert_called_once_with( [ - join("prefix", "vlog"), + str(Path("prefix") / "vlog"), "-quiet", "-lc", library_cfg, @@ -348,8 +381,11 @@ def test_compile_project_verilog_include(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - def test_compile_project_verilog_define(self, process, check_output): - library_cfg = join(self.output_path, "library.cfg") + @mock.patch( + "vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" + ) + def test_compile_project_verilog_define(self, _find_prefix, process, check_output): + library_cfg = str(Path(self.output_path) / "library.cfg") simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") @@ -359,18 +395,18 @@ def test_compile_project_verilog_define(self, process, check_output): ) simif.compile_project(project) process.assert_any_call( - [join("prefix", "vlib"), "lib", "lib_path"], + [str(Path("prefix") / "vlib"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) process.assert_called_with( - [join("prefix", "vmap"), "lib", "lib_path"], + [str(Path("prefix") / "vmap"), "lib", "lib_path"], cwd=self.output_path, env=simif.get_env(), ) check_output.assert_called_once_with( [ - join("prefix", "vlog"), + str(Path("prefix") / "vlog"), "-quiet", "-lc", library_cfg, @@ -385,7 +421,7 @@ def test_compile_project_verilog_define(self, process, check_output): ) def setUp(self): - self.output_path = join(dirname(__file__), "test_rivierapro_out") + self.output_path = str(Path(__file__).parent / "test_rivierapro_out") renew_path(self.output_path) self.project = Project() self.cwd = os.getcwd() @@ -393,5 +429,5 @@ def setUp(self): def tearDown(self): 
os.chdir(self.cwd) - if exists(self.output_path): + if Path(self.output_path).exists(): rmtree(self.output_path) diff --git a/tests/unit/test_simulator_interface.py b/tests/unit/test_simulator_interface.py index 936f7a1b6..7e6e76b66 100644 --- a/tests/unit/test_simulator_interface.py +++ b/tests/unit/test_simulator_interface.py @@ -9,8 +9,8 @@ """ import unittest -from os.path import join, dirname, exists -import os +from pathlib import Path +from os import chdir, getcwd import subprocess from shutil import rmtree from unittest import mock @@ -248,15 +248,15 @@ def find_prefix_from_path(cls): environ.get.assert_called_once_with("VUNIT_SIMNAME_PATH", None) def setUp(self): - self.output_path = join(dirname(__file__), "test_simulator_interface__out") + self.output_path = str(Path(__file__).parent / "test_simulator_interface__out") renew_path(self.output_path) self.project = Project() - self.cwd = os.getcwd() - os.chdir(self.output_path) + self.cwd = getcwd() + chdir(self.output_path) def tearDown(self): - os.chdir(self.cwd) - if exists(self.output_path): + chdir(self.cwd) + if Path(self.output_path).exists(): rmtree(self.output_path) diff --git a/tests/unit/test_test_bench.py b/tests/unit/test_test_bench.py index 8ca79360e..a6cfa6112 100644 --- a/tests/unit/test_test_bench.py +++ b/tests/unit/test_test_bench.py @@ -12,8 +12,7 @@ import unittest -from os.path import join - +from pathlib import Path from unittest import mock from tests.common import with_tempdir, get_vhdl_test_bench from vunit.test.bench import ( @@ -40,7 +39,7 @@ class TestTestBench(unittest.TestCase): @with_tempdir def test_that_single_vhdl_test_is_created(self, tempdir): - design_unit = Entity("tb_entity", file_name=join(tempdir, "file.vhd")) + design_unit = Entity("tb_entity", file_name=str(Path(tempdir) / "file.vhd")) test_bench = TestBench(design_unit) tests = self.create_tests(test_bench) self.assert_has_tests(tests, ["lib.tb_entity.all"]) @@ -49,14 +48,14 @@ def test_that_single_vhdl_test_is_created(self, tempdir): @with_tempdir def test_no_architecture_at_creation(tempdir): design_unit = Entity( - "tb_entity", file_name=join(tempdir, "file.vhd"), no_arch=True + "tb_entity", file_name=str(Path(tempdir) / "file.vhd"), no_arch=True ) TestBench(design_unit) @with_tempdir def test_no_architecture_gives_runtime_error(self, tempdir): design_unit = Entity( - "tb_entity", file_name=join(tempdir, "file.vhd"), no_arch=True + "tb_entity", file_name=str(Path(tempdir) / "file.vhd"), no_arch=True ) test_bench = TestBench(design_unit) try: @@ -68,14 +67,14 @@ def test_no_architecture_gives_runtime_error(self, tempdir): @with_tempdir def test_that_single_verilog_test_is_created(self, tempdir): - design_unit = Module("tb_module", file_name=join(tempdir, "file.v")) + design_unit = Module("tb_module", file_name=str(Path(tempdir) / "file.v")) test_bench = TestBench(design_unit) tests = self.create_tests(test_bench) self.assert_has_tests(tests, ["lib.tb_module.all"]) @with_tempdir def test_create_default_test(self, tempdir): - design_unit = Entity("tb_entity", file_name=join(tempdir, "file.vhd")) + design_unit = Entity("tb_entity", file_name=str(Path(tempdir) / "file.vhd")) design_unit.generic_names = ["runner_cfg"] test_bench = TestBench(design_unit) tests = self.create_tests(test_bench) @@ -83,9 +82,11 @@ def test_create_default_test(self, tempdir): @with_tempdir def test_multiple_architectures_are_not_allowed_for_test_bench(self, tempdir): - design_unit = Entity("tb_entity", file_name=join(tempdir, "file.vhd")) + design_unit = 
Entity("tb_entity", file_name=str(Path(tempdir) / "file.vhd")) design_unit.generic_names = ["runner_cfg"] - design_unit.add_architecture("arch2", file_name=join(tempdir, "arch2.vhd")) + design_unit.add_architecture( + "arch2", file_name=str(Path(tempdir) / "arch2.vhd") + ) try: TestBench(design_unit) except RuntimeError as exc: @@ -101,7 +102,7 @@ def test_multiple_architectures_are_not_allowed_for_test_bench(self, tempdir): def test_creates_tests_vhdl(self, tempdir): design_unit = Entity( "tb_entity", - file_name=join(tempdir, "file.vhd"), + file_name=str(Path(tempdir) / "file.vhd"), contents="""\ if run("Test 1") --if run("Test 2") @@ -135,7 +136,7 @@ def test_creates_tests_vhdl(self, tempdir): def test_creates_tests_verilog(self, tempdir): design_unit = Module( "tb_module", - file_name=join(tempdir, "file.v"), + file_name=str(Path(tempdir) / "file.v"), contents="""\ `TEST_CASE("Test 1") `TEST_CASE ("Test 2") @@ -168,7 +169,9 @@ def test_creates_tests_verilog(self, tempdir): @with_tempdir def test_keyerror_on_non_existent_test(self, tempdir): design_unit = Entity( - "tb_entity", file_name=join(tempdir, "file.vhd"), contents='if run("Test")' + "tb_entity", + file_name=str(Path(tempdir) / "file.vhd"), + contents='if run("Test")', ) design_unit.generic_names = ["runner_cfg", "name"] test_bench = TestBench(design_unit) @@ -177,14 +180,14 @@ def test_keyerror_on_non_existent_test(self, tempdir): @with_tempdir def test_creates_tests_when_adding_architecture_late(self, tempdir): design_unit = Entity( - "tb_entity", file_name=join(tempdir, "file.vhd"), no_arch=True + "tb_entity", file_name=str(Path(tempdir) / "file.vhd"), no_arch=True ) design_unit.generic_names = ["runner_cfg"] test_bench = TestBench(design_unit) design_unit.add_architecture( "arch", - file_name=join(tempdir, "arch.vhd"), + file_name=str(Path(tempdir) / "file.vhd"), contents="""\ if run("Test_1") --if run("Test_2") @@ -196,11 +199,11 @@ def test_creates_tests_when_adding_architecture_late(self, tempdir): @with_tempdir def test_scan_tests_from_file(self, tempdir): - design_unit = Entity("tb_entity", file_name=join(tempdir, "file.vhd")) + design_unit = Entity("tb_entity", file_name=str(Path(tempdir) / "file.vhd")) design_unit.generic_names = ["runner_cfg"] test_bench = TestBench(design_unit) - file_name = join(tempdir, "file.vhd") + file_name = str(Path(tempdir) / "file.vhd") write_file( file_name, """\ @@ -212,16 +215,15 @@ def test_scan_tests_from_file(self, tempdir): tests = self.create_tests(test_bench) self.assert_has_tests(tests, ["lib.tb_entity.Test_1", "lib.tb_entity.Test_2"]) - @with_tempdir - def test_scan_tests_from_file_location_unix(self, tempdir): - design_unit = Entity("tb_entity", file_name=join(tempdir, "file.vhd")) + def _test_scan_tests_from_file_location(self, tempdir, code): + fstr = str(Path(tempdir) / "file.vhd") + + design_unit = Entity("tb_entity", file_name=fstr) design_unit.generic_names = ["runner_cfg"] test_bench = TestBench(design_unit) - file_name = join(tempdir, "file.vhd") - code = 'foo \n bar \n if run("Test_1")' - write_file(file_name, code) - test_bench.scan_tests_from_file(file_name) + write_file(fstr, code) + test_bench.scan_tests_from_file(fstr) tests = self.create_tests(test_bench) test_info = tests[0].test_information location = test_info["lib.tb_entity.Test_1"].location @@ -229,24 +231,20 @@ def test_scan_tests_from_file_location_unix(self, tempdir): assert location.length == len("Test_1") @with_tempdir - def test_scan_tests_from_file_location_dos(self, tempdir): - design_unit = 
Entity("tb_entity", file_name=join(tempdir, "file.vhd")) - design_unit.generic_names = ["runner_cfg"] - test_bench = TestBench(design_unit) + def test_scan_tests_from_file_location_unix(self, tempdir): + self._test_scan_tests_from_file_location( + tempdir, 'foo \n bar \n if run("Test_1")' + ) - file_name = join(tempdir, "file.vhd") - code = 'foo \r\n bar \r\n if run("Test_1")' - write_file(file_name, code) - test_bench.scan_tests_from_file(file_name) - tests = self.create_tests(test_bench) - test_info = tests[0].test_information - location = test_info["lib.tb_entity.Test_1"].location - assert location.offset == code.find("Test_1") - assert location.length == len("Test_1") + @with_tempdir + def test_scan_tests_from_file_location_dos(self, tempdir): + self._test_scan_tests_from_file_location( + tempdir, 'foo \r\n bar \r\n if run("Test_1")' + ) @with_tempdir def test_scan_tests_from_missing_file(self, tempdir): - design_unit = Entity("tb_entity", file_name=join(tempdir, "file.vhd")) + design_unit = Entity("tb_entity", file_name=str(Path(tempdir) / "file.vhd")) design_unit.generic_names = ["runner_cfg"] test_bench = TestBench(design_unit) @@ -259,7 +257,7 @@ def test_scan_tests_from_missing_file(self, tempdir): @with_tempdir def test_does_not_add_all_suffix_with_named_configurations(self, tempdir): - design_unit = Entity("tb_entity", file_name=join(tempdir, "file.vhd")) + design_unit = Entity("tb_entity", file_name=str(Path(tempdir) / "file.vhd")) design_unit.generic_names = ["runner_cfg"] test_bench = TestBench(design_unit) @@ -274,7 +272,7 @@ def test_does_not_add_all_suffix_with_named_configurations(self, tempdir): def test_that_run_in_same_simulation_attribute_works(self, tempdir): design_unit = Entity( "tb_entity", - file_name=join(tempdir, "file.vhd"), + file_name=str(Path(tempdir) / "file.vhd"), contents="""\ -- vunit: run_all_in_same_sim if run("Test_1") @@ -291,7 +289,7 @@ def test_that_run_in_same_simulation_attribute_works(self, tempdir): @with_tempdir def test_add_config(self, tempdir): - design_unit = Entity("tb_entity", file_name=join(tempdir, "file.vhd")) + design_unit = Entity("tb_entity", file_name=str(Path(tempdir) / "file.vhd")) design_unit.generic_names = ["runner_cfg", "value", "global_value"] test_bench = TestBench(design_unit) @@ -331,7 +329,7 @@ def test_add_config(self, tempdir): def test_test_case_add_config(self, tempdir): design_unit = Entity( "tb_entity", - file_name=join(tempdir, "file.vhd"), + file_name=str(Path(tempdir) / "file.vhd"), contents=""" if run("test 1") if run("test 2") @@ -389,7 +387,7 @@ def test_runtime_error_on_configuration_of_individual_test_with_same_sim( ): design_unit = Entity( "tb_entity", - file_name=join(tempdir, "file.vhd"), + file_name=str(Path(tempdir) / "file.vhd"), contents="""\ -- vunit: run_all_in_same_sim if run("Test 1") @@ -407,7 +405,7 @@ def test_runtime_error_on_configuration_of_individual_test_with_same_sim( def test_run_all_in_same_sim_can_be_configured(self, tempdir): design_unit = Entity( "tb_entity", - file_name=join(tempdir, "file.vhd"), + file_name=str(Path(tempdir) / "file.vhd"), contents="""\ -- vunit: run_all_in_same_sim if run("Test 1") @@ -436,7 +434,7 @@ def test_run_all_in_same_sim_can_be_configured(self, tempdir): def test_global_user_attributes_not_supported_yet(self, tempdir): design_unit = Entity( "tb_entity", - file_name=join(tempdir, "file.vhd"), + file_name=str(Path(tempdir) / "file.vhd"), contents="""\ -- vunit: .attr0 if run("Test 1") @@ -451,7 +449,7 @@ def 
test_global_user_attributes_not_supported_yet(self, tempdir): self.assertEqual( str(exc), "File global attributes are not yet supported: .attr0 in %s line 1" - % join(tempdir, "file.vhd"), + % str(Path(tempdir) / "file.vhd"), ) else: assert False, "RuntimeError not raised" @@ -460,7 +458,7 @@ def test_global_user_attributes_not_supported_yet(self, tempdir): def test_error_on_global_attributes_on_tests(self, tempdir): design_unit = Entity( "tb_entity", - file_name=join(tempdir, "file.vhd"), + file_name=str(Path(tempdir) / "file.vhd"), contents="""\ if run("Test 1") -- vunit: run_all_in_same_sim @@ -475,14 +473,14 @@ def test_error_on_global_attributes_on_tests(self, tempdir): self.assertEqual( str(exc), "Attribute run_all_in_same_sim is global and cannot be associated with test Test 1: %s line 2" - % join(tempdir, "file.vhd"), + % str(Path(tempdir) / "file.vhd"), ) else: assert False, "RuntimeError not raised" @with_tempdir def test_test_information(self, tempdir): - file_name = join(tempdir, "file.vhd") + file_name = str(Path(tempdir) / "file.vhd") for same_sim in [True, False]: contents = get_vhdl_test_bench( @@ -521,7 +519,7 @@ def test_test_information(self, tempdir): @with_tempdir def test_fail_on_unknown_sim_option(self, tempdir): - design_unit = Entity("tb_entity", file_name=join(tempdir, "file.vhd")) + design_unit = Entity("tb_entity", file_name=str(Path(tempdir) / "file.vhd")) design_unit.generic_names = ["runner_cfg"] test_bench = TestBench(design_unit) self.assertRaises(ValueError, test_bench.set_sim_option, "unknown", "value") diff --git a/tests/unit/test_test_bench_list.py b/tests/unit/test_test_bench_list.py index d926ab36e..7cb920eae 100644 --- a/tests/unit/test_test_bench_list.py +++ b/tests/unit/test_test_bench_list.py @@ -11,7 +11,7 @@ """ import unittest -from os.path import join +from pathlib import Path from unittest import mock from tests.unit.test_test_bench import Entity, Module from tests.common import with_tempdir @@ -29,19 +29,21 @@ def test_get_test_benches_in_empty_library(self): @with_tempdir def test_tb_filter_requires_runner_cfg(self, tempdir): - design_unit = Entity("tb_entity", file_name=join(tempdir, "file.vhd")) + fname = str(Path(tempdir) / "file.vhd") + + design_unit = Entity("tb_entity", file_name=fname) design_unit.generic_names = ["runner_cfg"] self.assertTrue(tb_filter(design_unit)) - design_unit = Entity("tb_entity", file_name=join(tempdir, "file.vhd")) + design_unit = Entity("tb_entity", file_name=fname) design_unit.generic_names = [] self.assertFalse(tb_filter(design_unit)) - design_unit = Module("tb_module", file_name=join(tempdir, "file.vhd")) + design_unit = Module("tb_module", file_name=fname) design_unit.generic_names = ["runner_cfg"] self.assertTrue(tb_filter(design_unit)) - design_unit = Module("tb_module", file_name=join(tempdir, "file.vhd")) + design_unit = Module("tb_module", file_name=fname) design_unit.generic_names = [] self.assertFalse(tb_filter(design_unit)) @@ -51,7 +53,9 @@ def test_tb_filter_match_prefix_and_suffix_only(self, tempdir): Issue #263 """ with mock.patch("vunit.test.bench_list.LOGGER", autospec=True) as logger: - design_unit = Entity("mul_tbl_scale", file_name=join(tempdir, "file.vhd")) + design_unit = Entity( + "mul_tbl_scale", file_name=str(Path(tempdir) / "file.vhd") + ) self.assertFalse(tb_filter(design_unit)) self.assertFalse(logger.warning.called) @@ -59,7 +63,9 @@ def test_tb_filter_match_prefix_and_suffix_only(self, tempdir): def test_tb_filter_warning_on_missing_runner_cfg_when_matching_tb_pattern( self, 
tempdir ): - design_unit = Module("tb_module_not_ok", file_name=join(tempdir, "file.vhd")) + design_unit = Module( + "tb_module_not_ok", file_name=str(Path(tempdir) / "file.vhd") + ) design_unit.generic_names = [] with mock.patch("vunit.test.bench_list.LOGGER", autospec=True) as logger: @@ -82,7 +88,7 @@ def test_tb_filter_warning_on_missing_runner_cfg_when_matching_tb_pattern( @with_tempdir def test_tb_filter_warning_on_runner_cfg_but_not_matching_tb_pattern(self, tempdir): design_unit = Entity( - "entity_ok_but_warning", file_name=join(tempdir, "file.vhd") + "entity_ok_but_warning", file_name=str(Path(tempdir) / "file.vhd") ) design_unit.generic_names = ["runner_cfg"] diff --git a/tests/unit/test_test_report.py b/tests/unit/test_test_report.py index 71225e6e6..1e51f2f9b 100644 --- a/tests/unit/test_test_report.py +++ b/tests/unit/test_test_report.py @@ -10,7 +10,7 @@ from unittest import TestCase import os -from os.path import basename, dirname, join +from pathlib import Path from xml.etree import ElementTree from vunit.test.report import TestReport, PASSED, SKIPPED, FAILED from vunit.ui.common import TEST_OUTPUT_PATH @@ -26,7 +26,7 @@ def setUp(self): self.printer = StubPrinter() self.output_file_contents = 'Output file contents\n&13!--"<\\xml>' - self.output_file_name = join(dirname(__file__), "test_report_output.txt") + self.output_file_name = str(Path(__file__).parent / "test_report_output.txt") with open(self.output_file_name, "w") as fwrite: fwrite.write(self.output_file_contents) @@ -277,15 +277,15 @@ def test_junit_report_with_testcase_classname(self): ) def test_dict_report_with_all_passed_tests(self): - opath = dirname(dirname(self.output_file_name)) - test_path = join(opath, TEST_OUTPUT_PATH, "unit") - output_file_name = join(test_path, basename(self.output_file_name)) + opath = Path(self.output_file_name).parent.parent + test_path = opath / TEST_OUTPUT_PATH / "unit" + output_file_name = test_path / Path(self.output_file_name).name results = Results( opath, None, self._report_with_all_passed_tests(output_file_name) ) report = results.get_report() for _, test in report.tests.items(): - self.assertEqual(basename(test.path), test.relpath) + self.assertEqual(test.path.name, test.relpath) test0 = report.tests["passed_test0"] test1 = report.tests["passed_test1"] self.assertEqual( diff --git a/tests/unit/test_test_runner.py b/tests/unit/test_test_runner.py index 3f498d818..0b4c84a28 100644 --- a/tests/unit/test_test_runner.py +++ b/tests/unit/test_test_runner.py @@ -8,7 +8,7 @@ Test the test runner """ -from os.path import join, abspath +from pathlib import Path import unittest from unittest import mock from tests.common import with_tempdir @@ -143,8 +143,9 @@ def test_create_output_path_on_linux(self): test_output = create_output_path(output_path, test_name) self.assertEqual( test_output, - join( - abspath(output_path), test_name + "_" + hash_string(test_name) + str( + Path(output_path).resolve() + / (test_name + "_" + hash_string(test_name)) ), ) @@ -153,8 +154,9 @@ def test_create_output_path_on_linux(self): test_output = create_output_path(output_path, test_name) self.assertEqual( test_output, - join( - abspath(output_path), test_name + "_" + hash_string(test_name) + str( + Path(output_path).resolve() + / (test_name + "_" + hash_string(test_name)) ), ) @@ -164,8 +166,9 @@ def test_create_output_path_on_linux(self): test_output = create_output_path(output_path, test_name) self.assertEqual( test_output, - join( - abspath(output_path), safe_name + "_" + hash_string(test_name) + str( 
+ Path(output_path).resolve() + / (safe_name + "_" + hash_string(test_name)) ), ) @@ -185,8 +188,9 @@ def test_create_output_path_on_windows(self): test_output = create_output_path(output_path, test_name) self.assertEqual( test_output, - join( - abspath(output_path), test_name + "_" + hash_string(test_name) + str( + Path(output_path).resolve() + / (test_name + "_" + hash_string(test_name)) ), ) @@ -195,7 +199,8 @@ def test_create_output_path_on_windows(self): test_name = "_" * 400 test_output = create_output_path(output_path, test_name) self.assertEqual( - test_output, join(abspath(output_path), hash_string(test_name)) + test_output, + str(Path(output_path).resolve() / hash_string(test_name)), ) @staticmethod diff --git a/tests/unit/test_test_suites.py b/tests/unit/test_test_suites.py index 40d4b386c..abee45e8e 100644 --- a/tests/unit/test_test_suites.py +++ b/tests/unit/test_test_suites.py @@ -8,7 +8,7 @@ Test the test suites """ -from os.path import join +from pathlib import Path from unittest import TestCase from tests.common import create_tempdir from vunit.test.suites import TestRun @@ -94,9 +94,9 @@ def _read_test_results(self, expected, contents): Helper method to test the read_test_results function """ with create_tempdir() as path: - file_name = join(path, "vunit_results") + file_name = Path(path) / "vunit_results" if contents is not None: - with open(file_name, "w") as fptr: + with file_name.open("w") as fptr: fptr.write(contents) run = TestRun( @@ -162,9 +162,9 @@ def _test_exit_code( Helper method to test the check_results function """ with create_tempdir() as path: - file_name = join(path, "vunit_results") + file_name = Path(path) / "vunit_results" if contents is not None: - with open(file_name, "w") as fptr: + with file_name.open("w") as fptr: fptr.write(contents) sim_if = SimulatorInterface diff --git a/tests/unit/test_ui.py b/tests/unit/test_ui.py index 1338a5868..f7644a934 100644 --- a/tests/unit/test_ui.py +++ b/tests/unit/test_ui.py @@ -13,8 +13,7 @@ import unittest from string import Template from pathlib import Path -import os -from os.path import join, dirname, basename, exists, abspath +from os import chdir, getcwd import json import re from re import MULTILINE @@ -35,17 +34,17 @@ class TestUi(unittest.TestCase): """ def setUp(self): - self.tmp_path = join(dirname(__file__), "test_ui_tmp") + self.tmp_path = str(Path(__file__).parent / "test_ui_tmp") renew_path(self.tmp_path) - self.cwd = os.getcwd() - os.chdir(self.tmp_path) + self.cwd = getcwd() + chdir(self.tmp_path) - self._output_path = join(self.tmp_path, "output") - self._preprocessed_path = join(self._output_path, "preprocessed") + self._output_path = str(Path(self.tmp_path) / "output") + self._preprocessed_path = str(Path(self._output_path) / "preprocessed") def tearDown(self): - os.chdir(self.cwd) - if exists(self.tmp_path): + chdir(self.cwd) + if Path(self.tmp_path).exists(): rmtree(self.tmp_path) def test_global_custom_preprocessors_should_be_applied_in_the_order_they_are_added( @@ -75,10 +74,11 @@ def test_global_custom_preprocessors_should_be_applied_in_the_order_they_are_add end architecture; """ ) - with open(join(self._preprocessed_path, "lib", basename(file_name))) as fread: + fname = Path(file_name).name + with (Path(self._preprocessed_path) / "lib" / fname).open() as fread: self.assertEqual( fread.read(), - pp_source.substitute(entity="ent0", file=basename(file_name)), + pp_source.substitute(entity="ent0", file=fname), ) def 
test_global_check_and_location_preprocessors_should_be_applied_after_global_custom_preprocessors( @@ -90,8 +90,8 @@ def test_global_check_and_location_preprocessors_should_be_applied_after_global_ ui.enable_check_preprocessing() ui.add_preprocessor(TestPreprocessor()) - file_name = self.create_entity_file() - ui.add_source_files(file_name, "lib") + entity_file = Path(self.create_entity_file()) + ui.add_source_files(str(entity_file), "lib") pp_source = Template( """\ @@ -113,10 +113,10 @@ def test_global_check_and_location_preprocessors_should_be_applied_after_global_ end architecture; """ ) - with open(join(self._preprocessed_path, "lib", basename(file_name))) as fread: + with (Path(self._preprocessed_path) / "lib" / entity_file.name).open() as fread: self.assertEqual( fread.read(), - pp_source.substitute(entity="ent0", file=basename(file_name)), + pp_source.substitute(entity="ent0", file=entity_file.name), ) def test_locally_specified_preprocessors_should_be_used_instead_of_any_globally_defined_preprocessors( @@ -151,9 +151,11 @@ def test_locally_specified_preprocessors_should_be_used_instead_of_any_globally_ """ ) self.assertFalse( - exists(join(self._preprocessed_path, "lib", basename(file_name1))) + (Path(self._preprocessed_path) / "lib" / Path(file_name1).name).exists() ) - with open(join(self._preprocessed_path, "lib", basename(file_name2))) as fread: + with ( + Path(self._preprocessed_path) / "lib" / Path(file_name2).name + ).open() as fread: expectd = pp_source.substitute( entity="ent2", report='log("Here I am!"); -- VUnitfier preprocessor: Report turned off, keeping original code.', @@ -183,16 +185,17 @@ def test_recovers_from_preprocessing_error(self, logger): end architecture; """ ) - file_name = join(self.tmp_path, "ent1.vhd") + file_name = Path(self.tmp_path) / "ent1.vhd" contents = source_with_error.substitute(entity="ent1") - self.create_file(file_name, contents) + self.create_file(str(file_name), contents) ui.add_source_file(file_name, "lib") logger.assert_called_once_with( - "Failed to preprocess %s", Path(file_name).resolve() + "Failed to preprocess %s", str(Path(file_name).resolve()) + ) + self.assertFalse( + (Path(self._preprocessed_path) / "lib" / file_name.name).exists() ) - pp_file = join(self._preprocessed_path, "lib", basename(file_name)) - self.assertFalse(exists(pp_file)) def test_supported_source_file_suffixes(self): """Test adding a supported filetype, of any case, is accepted.""" @@ -221,23 +224,26 @@ def test_unsupported_source_file_suffixes(self): def test_exception_on_adding_zero_files(self): ui = self._create_ui() lib = ui.add_library("lib") + dname = Path(__file__).parent self.assertRaisesRegex( ValueError, "Pattern.*missing1.vhd.*", lib.add_source_files, - join(dirname(__file__), "missing1.vhd"), + str(dname / "missing1.vhd"), ) self.assertRaisesRegex( ValueError, "File.*missing2.vhd.*", lib.add_source_file, - join(dirname(__file__), "missing2.vhd"), + str(dname / "missing2.vhd"), ) def test_no_exception_on_adding_zero_files_when_allowed(self): ui = self._create_ui() lib = ui.add_library("lib") - lib.add_source_files(join(dirname(__file__), "missing.vhd"), allow_empty=True) + lib.add_source_files( + str(Path(__file__).parent / "missing.vhd"), allow_empty=True + ) def test_get_test_benchs_and_test(self): ui = self._create_ui() @@ -475,7 +481,7 @@ def test_filtering_tests(self, tempdir): def setup(ui): " Setup the project " lib = ui.add_library("lib") - file_name = join(tempdir, "tb_filter.vhd") + file_name = str(Path(tempdir) / "tb_filter.vhd") 
create_vhdl_test_bench_file( "tb_filter", file_name, @@ -554,17 +560,18 @@ def check_stdout(ui, expected): @with_tempdir def test_export_json(self, tempdir): - json_file = join(tempdir, "export.json") + tdir = Path(tempdir) + json_file = str(tdir / "export.json") ui = self._create_ui("--export-json", json_file) lib1 = ui.add_library("lib1") lib2 = ui.add_library("lib2") - file_name1 = join(tempdir, "tb_foo.vhd") + file_name1 = str(tdir / "tb_foo.vhd") create_vhdl_test_bench_file("tb_foo", file_name1) lib1.add_source_file(file_name1) - file_name2 = join(tempdir, "tb_bar.vhd") + file_name2 = str(tdir / "tb_bar.vhd") create_vhdl_test_bench_file( "tb_bar", file_name2, @@ -595,7 +602,12 @@ def test_export_json(self, tempdir): # Check the contents of the files section self.assertEqual( set((item["library_name"], item["file_name"]) for item in data["files"]), - set([("lib1", abspath(file_name1)), ("lib2", abspath(file_name2))]), + set( + [ + ("lib1", str(Path(file_name1).resolve())), + ("lib2", str(Path(file_name2).resolve())), + ] + ), ) # Check the contents of the tests section @@ -752,7 +764,7 @@ def check(action): lib = ui.add_library("lib") action(ui, lib) add_source_file.assert_called_once_with( - Path("verilog.v").resolve(), + str(Path("verilog.v").resolve()), "lib", file_type="verilog", include_dirs=all_include_dirs, @@ -788,7 +800,7 @@ def check(action): lib = ui.add_library("lib") action(ui, lib) add_source_file.assert_called_once_with( - Path("verilog.v").resolve(), + str(Path("verilog.v").resolve()), "lib", file_type="verilog", include_dirs=all_include_dirs, @@ -825,7 +837,7 @@ def test_add_source_files_has_no_parse(self): lib.add_source_file(file_name, no_parse=no_parse) add_source_file.assert_called_once_with( - Path("verilog.v").resolve(), + str(Path("verilog.v").resolve()), "lib", file_type="verilog", include_dirs=all_include_dirs, diff --git a/tests/unit/test_verilog_parser.py b/tests/unit/test_verilog_parser.py index c963bb6aa..de368c9da 100644 --- a/tests/unit/test_verilog_parser.py +++ b/tests/unit/test_verilog_parser.py @@ -10,7 +10,7 @@ from unittest import TestCase, mock import os -from os.path import join, dirname, exists +from pathlib import Path import time import shutil from vunit.ostools import renew_path @@ -23,7 +23,7 @@ class TestVerilogParser(TestCase): # pylint: disable=too-many-public-methods """ def setUp(self): - self.output_path = join(dirname(__file__), "test_verilog_parser_out") + self.output_path = str(Path(__file__).parent / "test_verilog_parser_out") renew_path(self.output_path) self.cwd = os.getcwd() os.chdir(self.output_path) @@ -327,10 +327,10 @@ def test_cached_parsing_updated_by_includes(self): def test_cached_parsing_updated_by_higher_priority_file(self): cache = {} - include_paths = [self.output_path, join(self.output_path, "lower_prio")] + include_paths = [self.output_path, str(Path(self.output_path) / "lower_prio")] self.write_file( - join("lower_prio", "include.svh"), + str(Path("lower_prio") / "include.svh"), """ module mod_lower_prio; endmodule; @@ -381,11 +381,11 @@ def write_file(self, file_name, contents): """ Write file with contents into output path """ - full_name = join(self.output_path, file_name) - full_path = dirname(full_name) - if not exists(full_path): - os.makedirs(full_path) - with open(full_name, "w") as fptr: + full_name = Path(self.output_path) / file_name + full_path = full_name.parent + if not full_path.exists(): + os.makedirs(str(full_path)) + with full_name.open("w") as fptr: fptr.write(contents) def parse(self, code, 
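A note on the pattern running through the test changes above: every os.path call is swapped for the equivalent pathlib.Path expression, wrapped in str() wherever the surrounding API still expects a plain string. A minimal illustrative sketch of that mapping (not part of the patch; the file names used here are made up):

    from pathlib import Path

    # os.path.join(a, b, c)  ->  Path(a) / b / c
    args_file = str(Path("suite_output_path") / "sim" / "irun_elaborate.args")

    name = Path(args_file).name              # os.path.basename(args_file)
    parent = str(Path(args_file).parent)     # os.path.dirname(args_file)
    absolute = str(Path(args_file).resolve())  # os.path.abspath(args_file)
    present = Path(args_file).exists()       # os.path.exists(args_file)

The str() wrappers keep the assertions byte-for-byte compatible with code that still builds plain-string paths.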
include_paths=None, cache=None, defines=None): diff --git a/tests/unit/test_verilog_preprocessor.py b/tests/unit/test_verilog_preprocessor.py index 81ee6cad4..0ba3caa16 100644 --- a/tests/unit/test_verilog_preprocessor.py +++ b/tests/unit/test_verilog_preprocessor.py @@ -12,7 +12,7 @@ Test of the Verilog preprocessor """ -from os.path import join, dirname, exists +from pathlib import Path import os from unittest import TestCase, mock import shutil @@ -28,7 +28,7 @@ class TestVerilogPreprocessor(TestCase): """ def setUp(self): - self.output_path = join(dirname(__file__), "test_verilog_preprocessor_out") + self.output_path = str(Path(__file__).parent / "test_verilog_preprocessor_out") renew_path(self.output_path) self.cwd = os.getcwd() os.chdir(self.output_path) @@ -150,7 +150,7 @@ def test_preprocess_include_directive(self): '`include "include.svh"', include_paths=[self.output_path] ) result.assert_has_tokens("hello hey") - result.assert_included_files([join(self.output_path, "include.svh")]) + result.assert_included_files([str(Path(self.output_path) / "include.svh")]) def test_detects_circular_includes(self): self.write_file("include1.svh", '`include "include2.svh"') @@ -267,7 +267,7 @@ def test_preprocess_include_directive_from_define(self): include_paths=[self.output_path], ) result.assert_has_tokens("hello hey") - result.assert_included_files([join(self.output_path, "include.svh")]) + result.assert_included_files([str(Path(self.output_path) / "include.svh")]) def test_preprocess_include_directive_from_define_with_args(self): self.write_file("include.svh", "hello hey") @@ -278,7 +278,7 @@ def test_preprocess_include_directive_from_define_with_args(self): include_paths=[self.output_path], ) result.assert_has_tokens("hello hey") - result.assert_included_files([join(self.output_path, "include.svh")]) + result.assert_included_files([str(Path(self.output_path) / "include.svh")]) def test_preprocess_macros_are_recursively_expanded(self): result = self.preprocess( @@ -674,7 +674,7 @@ def test_preprocess_error_in_include_file(self): '\n\n`include "include.svh"', include_paths=[self.output_path] ) result.assert_has_tokens("\n\n") - result.assert_included_files([join(self.output_path, "include.svh")]) + result.assert_included_files([str(Path(self.output_path) / "include.svh")]) result.logger.warning.assert_called_once_with( "Verilog `include bad argument\n%s", "from fn.v line 3:\n" @@ -863,11 +863,11 @@ def write_file(self, file_name, contents): """ Write file with contents into output path """ - full_name = join(self.output_path, file_name) - full_path = dirname(full_name) - if not exists(full_path): - os.makedirs(full_path) - with open(full_name, "w") as fptr: + full_name = Path(self.output_path) / file_name + full_path = full_name.parent + if not full_path.exists(): + os.makedirs(str(full_path)) + with full_name.open("w") as fptr: fptr.write(contents) diff --git a/tools/build_docs.py b/tools/build_docs.py index d0c5c87d2..d56d565ff 100644 --- a/tools/build_docs.py +++ b/tools/build_docs.py @@ -9,11 +9,15 @@ """ from subprocess import check_call -from os.path import join, dirname +from pathlib import Path import sys from sys import argv +from shutil import copyfile from create_release_notes import create_release_notes -from docs_utils import examples +from docs_utils import examples, get_theme + + +DROOT = Path(__file__).parent.parent / 'docs' def main(): @@ -22,6 +26,11 @@ def main(): """ create_release_notes() examples() + copyfile(str(DROOT / '..' 
/ 'LICENSE.rst'), str(DROOT / 'license.rst')) + get_theme( + DROOT, + "https://codeload.github.com/buildthedocs/sphinx.theme/tar.gz/v0" + ) check_call( [ sys.executable, @@ -30,7 +39,7 @@ def main(): ] + ([] if len(argv) < 2 else argv[2:]) + [ "-TEWanb", "html", - join(dirname(__file__), "..", "docs"), + Path(__file__).parent.parent / "docs", argv[1], ] ) diff --git a/tools/create_release_notes.py b/tools/create_release_notes.py index 6404d0899..ed796ed1b 100644 --- a/tools/create_release_notes.py +++ b/tools/create_release_notes.py @@ -8,21 +8,22 @@ Create monolithic release notes file from several input files """ -from os.path import join, dirname, basename, splitext, relpath +from pathlib import Path +from os.path import relpath from glob import glob from subprocess import check_output, CalledProcessError from shutil import which import datetime -def get_releases(source_path): +def get_releases(source_path: Path): """ Get all releases defined by release note files """ - release_notes = join(source_path, "release_notes") + release_notes = source_path / "release_notes" releases = [] for idx, file_name in enumerate( - sorted(glob(join(release_notes, "*.rst")), reverse=True) + sorted(glob(str(release_notes / "*.rst")), reverse=True) ): releases.append(Release(file_name, is_latest=idx == 0)) return releases @@ -32,15 +33,12 @@ def create_release_notes(): """ Create monolithic release notes file from several input files """ - source_path = join(dirname(__file__), "..", "docs") + source_path = Path(__file__).parent.parent / "docs" releases = get_releases(source_path) latest_release = releases[0] - def banner(fptr): - fptr.write("\n" + ("-" * 80) + "\n\n") - - with open(join(source_path, "release_notes.rst"), "w") as fptr: + with (source_path / "release_notes.rst").open("w") as fptr: fptr.write( """ .. _release_notes: @@ -48,7 +46,7 @@ def banner(fptr): Release notes ============= -For installation instructions read :ref:`this `. +.. NOTE:: For installation instructions read :ref:`this `. `Commits since last release `__ @@ -56,7 +54,7 @@ def banner(fptr): % latest_release.tag ) - banner(fptr) + fptr.write("\n\n") for idx, release in enumerate(releases): is_last = idx == len(releases) - 1 @@ -74,19 +72,20 @@ def banner(fptr): fptr.write(title + "\n") fptr.write("-" * len(title) + "\n\n") - fptr.write(".. include:: %s\n" % relpath(release.file_name, source_path)) - fptr.write( - "\n`Download from PyPI `__\n" + "\n`Download from PyPI `__" % release.name ) if not is_last: fptr.write( - "\n`Commits since previous release `__\n" + " | `Commits since previous release `__" % (releases[idx + 1].tag, release.tag) ) - banner(fptr) + + fptr.write("\n\n") + + fptr.write(".. 
include:: %s\n" % relpath(release.file_name, source_path)) class Release(object): @@ -96,7 +95,7 @@ class Release(object): def __init__(self, file_name, is_latest): self.file_name = file_name - self.name = splitext(basename(file_name))[0] + self.name = str(Path(file_name).with_suffix("").name) self.tag = "v" + self.name self.is_latest = is_latest @@ -119,10 +118,9 @@ def _get_date(commit): """ Get date """ - date_str = ( - check_output([which("git"), "log", "-1", "--format=%ci", commit]) - .decode() - .strip() - ) + git = which("git") + if git is None: + raise BaseException("'git' is required!") + date_str = check_output([git, "log", "-1", "--format=%ci", commit]).decode().strip() date_str = " ".join(date_str.split(" ")[0:2]) return datetime.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S") diff --git a/tools/docs_utils.py b/tools/docs_utils.py index 621719d42..ff2db2c20 100644 --- a/tools/docs_utils.py +++ b/tools/docs_utils.py @@ -10,25 +10,27 @@ import sys import inspect -from os.path import basename, dirname, isdir, isfile, join + from os import listdir, remove +from pathlib import Path +from subprocess import check_call -ROOT = join(dirname(__file__), "..", "docs") +ROOT = Path(__file__).parent.parent / "docs" def examples(): """ Traverses the examples directory and generates examples.rst with the docstrings """ - eg_path = join(ROOT, "..", "examples") - egs_fptr = open(join(ROOT, "examples.rst"), "w+") + eg_path = ROOT.parent / "examples" + egs_fptr = (ROOT / "examples.rst").open("w+") egs_fptr.write("\n".join([".. _examples:\n", "Examples", "========", "\n"])) for language, subdir in {"VHDL": "vhdl", "SystemVerilog": "verilog"}.items(): egs_fptr.write("\n".join([language, "~~~~~~~~~~~~~~~~~~~~~~~", "\n"])) - for item in listdir(join(eg_path, subdir)): - loc = join(eg_path, subdir, item) - if isdir(loc): + for item in listdir(str(eg_path / subdir)): + loc = eg_path / subdir / item + if loc.is_dir(): _data = _get_eg_doc( loc, "https://github.com/VUnit/vunit/tree/master/examples/%s/%s" @@ -38,40 +40,57 @@ def examples(): egs_fptr.write(_data) -def _get_eg_doc(location, ref): +def _get_eg_doc(location: Path, ref): """ Reads the docstring from a run.py file and rewrites the title to make it a ref """ - if not isfile(join(location, "run.py")): + nstr = str(location.name) + + if not (location / "run.py").is_file(): print( "WARNING: Example subdir '" - + basename(location) + + nstr + "' does not contain a 'run.py' file. Skipping..." ) return None - print("Generating '_main.py' from 'run.py' in '" + basename(location) + "'...") - with open(join(location, "run.py"), "r") as ifile: - with open(join(location, "_main.py"), "w") as ofile: + print("Generating '_main.py' from 'run.py' in '" + nstr + "'...") + with (location / "run.py").open("r") as ifile: + with (location / "_main.py").open("w") as ofile: ofile.writelines(["def _main():\n"]) ofile.writelines(["".join([" ", x]) for x in ifile]) - print("Extracting docs from '" + basename(location) + "'...") - sys.path.append(location) + print("Extracting docs from '" + nstr + "'...") + sys.path.append(str(location)) from _main import _main # pylint: disable=import-error,import-outside-toplevel eg_doc = inspect.getdoc(_main) del sys.modules["_main"] - sys.path.remove(location) - remove(join(location, "_main.py")) + sys.path.remove(str(location)) + remove(str(location / "_main.py")) if not eg_doc: print( "WARNING: 'run.py' file in example subdir '" - + basename(location) + + nstr + "' does not contain a docstring. Skipping..." 
) return "" title = "`%s <%s/>`_" % (eg_doc.split("---", 1)[0][0:-1], ref) return "\n".join([title, "-" * len(title), eg_doc.split("---\n", 1)[1], "\n"]) + + +def get_theme(path: Path, url: str): + """ + Check if the theme is available locally, retrieve it with curl and tar otherwise + """ + tpath = path / "_theme" + if not tpath.is_dir() or not (tpath / "theme.conf").is_file(): + if not tpath.is_dir(): + tpath.mkdir() + zpath = path / "theme.tgz" + if not zpath.is_file(): + check_call(["curl", "-fsSL", url, "-o", str(zpath)]) + tar_cmd = ["tar", "--strip-components=1", "-C", str(tpath), "-xvzf", str(zpath)] + check_call(tar_cmd) diff --git a/tools/release.py b/tools/release.py old mode 100644 new mode 100755 index cdf4d389a..c3cbc8f48 --- a/tools/release.py +++ b/tools/release.py @@ -15,7 +15,7 @@ import json from urllib.request import urlopen # pylint: disable=no-name-in-module, import-error import sys -from os.path import dirname, join, exists +from pathlib import Path import subprocess from shutil import which @@ -67,8 +67,8 @@ def make_release_commit(version): """ Add release notes and make the release commit """ - run(["git", "add", release_note_file_name(version)]) - run(["git", "add", ABOUT_PY]) + run(["git", "add", str(release_note_file_name(version))]) + run(["git", "add", str(ABOUT_PY)]) run(["git", "commit", "-m", "Release %s" % version]) run(["git", "tag", "v%s" % version, "-a", "-m", "release %s" % version]) @@ -77,7 +77,7 @@ def make_next_pre_release_commit(version): """ Add release notes and make the release commit """ - run(["git", "add", ABOUT_PY]) + run(["git", "add", str(ABOUT_PY)]) run(["git", "commit", "-m", "Start of next release candidate %s" % version]) @@ -87,18 +87,18 @@ def validate_new_release(version, pre_tag): """ release_note = release_note_file_name(version) - if not exists(release_note): + if not release_note.exists(): print( "Not releasing version %s since release note %s does not exist" - % (version, release_note) + % (version, str(release_note)) ) sys.exit(1) - with open(release_note, "r") as fptr: + with release_note.open("r") as fptr: if not fptr.read(): print( "Not releasing version %s since release note %s is empty" - % (version, release_note) + % (version, str(release_note)) ) sys.exit(1) @@ -135,7 +135,7 @@ def set_version(version): Update vunit/about.py with correct version """ - with open(ABOUT_PY, "r") as fptr: + with ABOUT_PY.open("r") as fptr: content = fptr.read() print("Set local version to %s" % version) @@ -143,14 +143,14 @@ def set_version(version): 'VERSION = "%s"' % get_local_version(), 'VERSION = "%s"' % version ) - with open(ABOUT_PY, "w") as fptr: + with ABOUT_PY.open("w") as fptr: fptr.write(content) assert get_local_version() == version -def release_note_file_name(version): - return join(REPO_ROOT, "docs", "release_notes", version + ".rst") +def release_note_file_name(version) -> Path: + return REPO_ROOT / "docs" / "release_notes" / (version + ".rst") def get_local_version(): @@ -160,7 +160,7 @@ def get_local_version(): """ version = ( subprocess.check_output( - [sys.executable, join(REPO_ROOT, "setup.py"), "--version"] + [sys.executable, str(REPO_ROOT / "setup.py"), "--version"] ) .decode() .strip() @@ -180,8 +180,8 @@ def run(cmd): subprocess.check_call(cmd) -REPO_ROOT = join(dirname(__file__), "..") -ABOUT_PY = join(REPO_ROOT, "vunit", "about.py") +REPO_ROOT = Path(__file__).parent.parent +ABOUT_PY = REPO_ROOT / "vunit" / "about.py" if __name__ == "__main__": diff --git a/vunit/__init__.py b/vunit/__init__.py index 
af2cedcf1..77d52478a 100644 --- a/vunit/__init__.py +++ b/vunit/__init__.py @@ -8,15 +8,14 @@ Public VUnit interface """ -from os.path import dirname, join, abspath +from pathlib import Path import vunit.version_check from vunit.ui import VUnit from vunit.vunit_cli import VUnitCLI from vunit.about import version, doc -from vunit.json4vhdl import read_json, encode_json # Repository root -ROOT = abspath(join(dirname(__file__), "..")) +ROOT = str(Path(__file__).parent.parent.resolve()) __version__ = version() __doc__ = doc() # pylint: disable=redefined-builtin diff --git a/vunit/about.py b/vunit/about.py index 2748dae50..f8eef38c2 100644 --- a/vunit/about.py +++ b/vunit/about.py @@ -13,21 +13,21 @@ def license_text(): """ Returns licence text """ - return """VUnit ------ + return """**VUnit**, except for the projects below, is released under the terms of +`Mozilla Public License, v. 2.0`_. |copy| 2014-2020 Lars Asplund, lars.anders.asplund@gmail.com. -VUnit except for OSVVM (see below) is released under the terms of -Mozilla Public License, v. 2.0. +The following libraries are `redistributed`_ with VUnit for your convenience: -Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com +* **OSVVM** (``vunit/vhdl/osvvm``): these files are licensed under the terms of `Apache License, v 2.0`_, |copy| 2010 - 2020 by `SynthWorks Design Inc`_. All rights reserved. -OSVVM ------ +* **JSON-for-VHDL** (``vunit/vhdl/JSON-for-VHDL``): these files are licensed under the terms of `Apache License, v 2.0`_, |copy| 2015 - 2020 Patrick Lehmann. -OSVVM is redistributed as a submodule to VUnit for your convenience. OSVVM and derivative work -located under examples/vhdl/osvvm_integration/src are licensed under the terms of Artistic License 2.0. - -Copyright (c) 2006-2016, SynthWorks Design Inc http://www.synthworks.com +.. |copy| unicode:: U+000A9 .. COPYRIGHT SIGN +.. _redistributed: https://github.com/VUnit/vunit/blob/master/.gitmodules +.. _Mozilla Public License, v. 2.0: http://mozilla.org/MPL/2.0/ +.. _ARTISTIC License: http://www.perlfoundation.org/artistic_license_2_0 +.. _Apache License, v 2.0: http://www.apache.org/licenses/LICENSE-2.0 +.. _SynthWorks Design Inc: http://www.synthworks.com """ @@ -36,26 +36,18 @@ def doc(): Returns short introduction to VUnit """ return ( - r"""What is VUnit? -============== - -VUnit is an open source unit testing framework for VHDL/SystemVerilog + r"""VUnit is an open source unit testing framework for VHDL/SystemVerilog released under the terms of Mozilla Public License, v. 2.0. It features the functionality needed to realize continuous and automated testing of your HDL code. VUnit doesn't replace but rather complements traditional testing methodologies by supporting a "test early and -often" approach through automation. - -**Read more on our** `Website `__ +often" approach through automation. **Read more on our** +`Website `__ -Contributing -============ Contributing in the form of code, feedback, ideas or bug reports are welcome. Read our `contribution guide `__ to get started. 
-License -======= """ + license_text() ) @@ -68,4 +60,4 @@ def version(): return VERSION -VERSION = "4.3.1rc0" +VERSION = "4.4.1rc0" diff --git a/vunit/builtins.py b/vunit/builtins.py index eb1eadc42..d0fc2b579 100644 --- a/vunit/builtins.py +++ b/vunit/builtins.py @@ -12,7 +12,7 @@ from glob import glob from warnings import warn from vunit.vhdl_standard import VHDL, VHDLStandard -from vunit.sim_if.common import simulator_check +from vunit.ui.common import get_checked_file_names_from_globs VHDL_PATH = (Path(__file__).parent / "vhdl").resolve() VERILOG_PATH = (Path(__file__).parent / "verilog").resolve() @@ -43,7 +43,7 @@ def add(name, deps=tuple()): def add(self, name, args=None): self._builtins_adder.add(name, args) - def _add_files(self, pattern): + def _add_files(self, pattern=None, allow_empty=True): """ Add files with naming convention to indicate which standard is supported """ @@ -52,7 +52,7 @@ def _add_files(self, pattern): and self._vhdl_standard.supports_context ) - for file_name in glob(str(pattern)): + for file_name in get_checked_file_names_from_globs(pattern, allow_empty): base_file_name = Path(file_name).name standards = set() @@ -75,58 +75,30 @@ def _add_data_types(self, external=None): """ Add data types packages (sources corresponding to VHPIDIRECT arrays, or their placeholders) - :param external: struct to select whether to enable external models for 'string' and/or 'integer' vectors. - {'string': , 'integer': }. Allowed values are: None, False/True or - ['path/to/custom/file']. + :param external: struct to provide bridges for the external VHDL API. + { + 'string': ['path/to/custom/file'], + 'integer': ['path/to/custom/file'] + }. """ self._add_files(VHDL_PATH / "data_types" / "src" / "*.vhd") - use_ext = {"string": False, "integer": False} - files = {"string": None, "integer": None} - - if external: - for ind, val in external.items(): - if isinstance(val, bool): - use_ext[ind] = val - else: - use_ext[ind] = True - files[ind] = val - - for _, val in use_ext.items(): - if val and simulator_check(lambda simclass: not simclass.supports_vhpi()): - raise RuntimeError( - "the selected simulator does not support VHPI; must use non-VHPI packages..." + for key in ["string", "integer_vector"]: + self._add_files( + pattern=str( + VHDL_PATH + / "data_types" + / "src" + / "api" + / ("external_%s_pkg.vhd" % key) ) - - ext_path = VHDL_PATH / "data_types" / "src" / "external" - - def default_files(cond, type_str): - """ - Return name of VHDL file with default VHPIDIRECT foreign declarations. 
- """ - return [ - str( - ext_path - / ( - "external_" - + type_str - + "-" - + ("" if cond else "no") - + "vhpi.vhd" - ) - ), - str(ext_path / ("external_" + type_str + "-body.vhd")), - ] - - if not files["string"]: - files["string"] = default_files(use_ext["string"], "string") - - if not files["integer"]: - files["integer"] = default_files(use_ext["integer"], "integer_vector") - - for _, val in files.items(): - for name in val: - self._add_files(name) + if external is None + or key not in external + or not external[key] + or external[key] is True + else external[key], + allow_empty=False, + ) def _add_array_util(self): """ @@ -237,7 +209,7 @@ def _add_json4vhdl(self): except KeyError: library = self._vunit_obj.add_library(library_name) - library.add_source_files(VHDL_PATH / "JSON-for-VHDL" / "vhdl" / "*.vhdl") + library.add_source_files(VHDL_PATH / "JSON-for-VHDL" / "src" / "*.vhdl") def add_verilog_builtins(self): """ @@ -249,9 +221,11 @@ def add_vhdl_builtins(self, external=None): """ Add vunit VHDL builtin libraries - :param external: struct to select whether to enable external models for 'string' and/or 'integer' vectors. - {'string': , 'integer': }. Allowed values are: None, False/True or - ['path/to/custom/file']. + :param external: struct to provide bridges for the external VHDL API. + { + 'string': ['path/to/custom/file'], + 'integer': ['path/to/custom/file'] + }. """ self._add_data_types(external=external) self._add_files(VHDL_PATH / "*.vhd") diff --git a/vunit/check_preprocessor.py b/vunit/check_preprocessor.py index dec146e66..108e2ce31 100644 --- a/vunit/check_preprocessor.py +++ b/vunit/check_preprocessor.py @@ -249,13 +249,10 @@ def __init__(self, left, operand, right): self._right = right def make_context_msg(self): - return ( - '"Expected %s %s %s. Left is " & to_string(%s) & ". Right is " & to_string(%s) & "."' - % ( - self._left.replace('"', '""'), - self._operand, - self._right.replace('"', '""'), - self._left, - self._right, - ) + return '"Expected %s %s %s. Left is " & to_string(%s) & ". 
Right is " & to_string(%s) & "."' % ( + self._left.replace('"', '""'), + self._operand, + self._right.replace('"', '""'), + self._left, + self._right, ) diff --git a/vunit/com/codec_vhdl_array_type.py b/vunit/com/codec_vhdl_array_type.py index 95714046f..5bdc13b40 100644 --- a/vunit/com/codec_vhdl_array_type.py +++ b/vunit/com/codec_vhdl_array_type.py @@ -39,15 +39,19 @@ def generate_codecs_and_support_functions(self): definitions += template.constrained_1d_array_definition.substitute( type=self.identifier ) - definitions += template.constrained_1d_array_to_string_definition.substitute( - type=self.identifier + definitions += ( + template.constrained_1d_array_to_string_definition.substitute( + type=self.identifier + ) ) else: definitions += template.constrained_2d_array_definition.substitute( type=self.identifier ) - definitions += template.constrained_2d_array_to_string_definition.substitute( - type=self.identifier + definitions += ( + template.constrained_2d_array_to_string_definition.substitute( + type=self.identifier + ) ) else: if has_one_dimension: @@ -57,8 +61,10 @@ def generate_codecs_and_support_functions(self): init_value=init_value, range_type=self.range1.range_type, ) - definitions += template.unconstrained_1d_array_to_string_definition.substitute( - array_type=self.identifier, range_type=self.range1.range_type + definitions += ( + template.unconstrained_1d_array_to_string_definition.substitute( + array_type=self.identifier, range_type=self.range1.range_type + ) ) else: definitions += template.unconstrained_2d_array_definition.substitute( @@ -66,10 +72,12 @@ def generate_codecs_and_support_functions(self): range_type1=self.range1.range_type, range_type2=self.range2.range_type, ) - definitions += template.unconstrained_2d_array_to_string_definition.substitute( - array_type=self.identifier, - range_type1=self.range1.range_type, - range_type2=self.range2.range_type, + definitions += ( + template.unconstrained_2d_array_to_string_definition.substitute( + array_type=self.identifier, + range_type1=self.range1.range_type, + range_type2=self.range2.range_type, + ) ) return declarations, definitions @@ -172,7 +180,7 @@ class ArrayCodecTemplate(DatatypeCodecTemplate): function encode ( constant data : $type) return string is - constant length : positive := encode(data(data'left))'length; + constant length : positive := get_encoded_length(encode(data(data'left))); variable index : positive := 1; variable ret_val : string(1 to data'length * length); begin @@ -233,7 +241,7 @@ class ArrayCodecTemplate(DatatypeCodecTemplate): function encode ( constant data : $type) return string is - constant length : positive := encode(data(data'left(1), data'left(2)))'length; + constant length : positive := get_encoded_length(encode(data(data'left(1), data'left(2)))); variable index : positive := 1; variable ret_val : string(1 to data'length(1) * data'length(2) * length); begin @@ -305,11 +313,11 @@ class ArrayCodecTemplate(DatatypeCodecTemplate): if data'length = 0 then return 0; else - return encode(data(data'left))'length; + return get_encoded_length(encode(data(data'left))); end if; end; constant length : natural := element_length(data); - constant range_length : positive := encode(data'left)'length; + constant range_length : positive := get_encoded_length(encode(data'left)); variable index : positive := 2 + 2 * range_length; variable ret_val : string(1 to 1 + 2 * range_length + data'length * length); begin @@ -328,7 +336,7 @@ class ArrayCodecTemplate(DatatypeCodecTemplate): constant code : string; 
variable index : inout positive; variable result : out $array_type) is - constant range_length : positive := encode($range_type'left)'length; + constant range_length : positive := get_encoded_length(encode($range_type'left)); begin index := index + 1 + 2 * range_length; for i in result'range loop @@ -339,7 +347,7 @@ class ArrayCodecTemplate(DatatypeCodecTemplate): function decode ( constant code : string) return $array_type is - constant range_length : positive := encode($range_type'left)'length; + constant range_length : positive := get_encoded_length(encode($range_type'left)); function ret_val_range ( constant code : string) return $array_type is @@ -401,12 +409,12 @@ class ArrayCodecTemplate(DatatypeCodecTemplate): if data'length(1) * data'length(2) = 0 then return 0; else - return encode(data(data'left(1), data'left(2)))'length; + return get_encoded_length(encode(data(data'left(1), data'left(2)))); end if; end; constant length : natural := element_length(data); - constant range1_length : positive := encode(data'left(1))'length; - constant range2_length : positive := encode(data'left(2))'length; + constant range1_length : positive := get_encoded_length(encode(data'left(1))); + constant range2_length : positive := get_encoded_length(encode(data'left(2))); variable index : positive := 3 + 2 * range1_length + 2 * range2_length; variable ret_val : string(1 to 2 + 2 * range1_length + 2 * range2_length + data'length(1) * data'length(2) * length); @@ -428,8 +436,8 @@ class ArrayCodecTemplate(DatatypeCodecTemplate): constant code : string; variable index : inout positive; variable result : out $array_type) is - constant range1_length : positive := encode($range_type1'left)'length; - constant range2_length : positive := encode($range_type2'left)'length; + constant range1_length : positive := get_encoded_length(encode($range_type1'left)); + constant range2_length : positive := get_encoded_length(encode($range_type2'left)); begin index := index + 2 + 2 * range1_length + 2 * range2_length; for i in result'range(1) loop @@ -442,8 +450,8 @@ class ArrayCodecTemplate(DatatypeCodecTemplate): function decode ( constant code : string) return $array_type is - constant range1_length : positive := encode($range_type1'left)'length; - constant range2_length : positive := encode($range_type2'left)'length; + constant range1_length : positive := get_encoded_length(encode($range_type1'left)); + constant range2_length : positive := get_encoded_length(encode($range_type2'left)); function ret_val_range ( constant code : string) return $array_type is diff --git a/vunit/com/codec_vhdl_package.py b/vunit/com/codec_vhdl_package.py index 876e2b0d8..4304b2b62 100644 --- a/vunit/com/codec_vhdl_package.py +++ b/vunit/com/codec_vhdl_package.py @@ -20,9 +20,7 @@ class CodecVHDLPackage(VHDLPackage): in the package.""" def __init__(self, identifier, enumeration_types, record_types, array_types): - super(CodecVHDLPackage, self).__init__( - identifier, enumeration_types, record_types, array_types - ) + super().__init__(identifier, enumeration_types, record_types, array_types) self._template = None @classmethod @@ -78,9 +76,11 @@ def generate_codecs_and_support_functions(self): msg_type_enumeration_types, ) = self._create_enumeration_of_all_msg_types() if all_msg_types_enumeration_type is not None: - declarations += self._template.all_msg_types_enumeration_type_declaration.substitute( - identifier=all_msg_types_enumeration_type.identifier, - literals=", ".join(all_msg_types_enumeration_type.literals), + declarations += ( + 
self._template.all_msg_types_enumeration_type_declaration.substitute( + identifier=all_msg_types_enumeration_type.identifier, + literals=", ".join(all_msg_types_enumeration_type.literals), + ) ) if all_msg_types_enumeration_type is not None: @@ -128,7 +128,12 @@ def _generate_array_codec_and_to_string_functions(self): """Generate codecs and to_string functions for all array data types.""" declarations = "" - definitions = "" + definitions = """ + -- Helper function to make tests pass GHDL v0.37 + function get_encoded_length ( constant vec: string ) return integer is + begin return vec'length; end; + +""" for array in self.array_types: ( new_declarations, @@ -258,17 +263,21 @@ def _generate_msg_type_encoders(self): # pylint: disable=too-many-locals encodings = " & ".join(encoding_list) - declarations += self._template.msg_type_record_codec_declaration.substitute( - name=value, - parameter_part=parameter_part, - alias_signature=alias_signature, - alias_name=value + "_msg", + declarations += ( + self._template.msg_type_record_codec_declaration.substitute( + name=value, + parameter_part=parameter_part, + alias_signature=alias_signature, + alias_name=value + "_msg", + ) ) - definitions += self._template.msg_type_record_codec_definition.substitute( - name=value, - parameter_part=parameter_part, - num_of_encodings=len(encoding_list), - encodings=encodings, + definitions += ( + self._template.msg_type_record_codec_definition.substitute( + name=value, + parameter_part=parameter_part, + num_of_encodings=len(encoding_list), + encodings=encodings, + ) ) return declarations, definitions @@ -285,11 +294,15 @@ def _generate_get_functions(self): msg_type_type = record.elements[0].subtype_indication.code if msg_type_type not in msg_type_types: msg_type_types.append(msg_type_type) - declarations += self._template.get_specific_msg_type_declaration.substitute( - type=msg_type_type + declarations += ( + self._template.get_specific_msg_type_declaration.substitute( + type=msg_type_type + ) ) - definitions += self._template.get_specific_msg_type_definition.substitute( - type=msg_type_type + definitions += ( + self._template.get_specific_msg_type_definition.substitute( + type=msg_type_type + ) ) return declarations, definitions diff --git a/vunit/configuration.py b/vunit/configuration.py index d45a3474e..1d38cd240 100644 --- a/vunit/configuration.py +++ b/vunit/configuration.py @@ -10,7 +10,7 @@ import logging import inspect -from os.path import dirname +from pathlib import Path from copy import copy from vunit.sim_if.factory import SIMULATOR_FACTORY @@ -46,7 +46,7 @@ def __init__( # pylint: disable=too-many-arguments self.sim_options = {} if sim_options is None else sim_options self.attributes = {} if attributes is None else attributes - self.tb_path = dirname(design_unit.original_file_name) + self.tb_path = str(Path(design_unit.original_file_name).parent) # Fill in tb_path generic with location of test bench if "tb_path" in design_unit.generic_names: diff --git a/vunit/csv_logs.py b/vunit/csv_logs.py index 7e2ce3550..a22e14574 100644 --- a/vunit/csv_logs.py +++ b/vunit/csv_logs.py @@ -10,7 +10,7 @@ from csv import Sniffer, DictReader, DictWriter from glob import glob -from os.path import abspath +from pathlib import Path class CsvLogs(object): @@ -35,7 +35,7 @@ def __iter__(self): def add(self, pattern): # pylint: disable=missing-docstring - for csv_file in [abspath(p) for p in glob(pattern)]: + for csv_file in [str(Path(p).resolve()) for p in glob(pattern)]: with open(csv_file, "r") as fread: sample = 
fread.readline() fread.seek(0) diff --git a/vunit/database.py index 2381ea28a..7e35cc7a5 100644 --- a/vunit/database.py +++ b/vunit/database.py @@ -8,7 +8,7 @@ A simple file based database """ -from os.path import join, exists +from pathlib import Path import os import pickle import io @@ -39,7 +39,7 @@ def __init__(self, path, new=False): if new: renew_path(path) - elif not exists(path): + elif not Path(path).exists(): os.makedirs(path) # Map keys to nodes indexes @@ -55,7 +55,7 @@ def _discover_nodes(self): """ keys_to_nodes = {} for file_base_name in os.listdir(self._path): - key = self._read_key(join(self._path, file_base_name)) + key = self._read_key(str(Path(self._path) / file_base_name)) assert key not in keys_to_nodes # Two nodes contains the same key keys_to_nodes[key] = int(file_base_name) return keys_to_nodes @@ -100,7 +100,7 @@ def _to_file_name(self, key): """ Convert key to file name """ - return join(self._path, str(self._keys_to_nodes[key])) + return str(Path(self._path) / str(self._keys_to_nodes[key])) def _allocate_node_for_key(self, key): """ diff --git a/vunit/json4vhdl.py index 61b4e5f31..a29152bea 100644 --- a/vunit/json4vhdl.py +++ b/vunit/json4vhdl.py @@ -8,10 +8,12 @@ json4vhdl helper functions """ +from typing import Union import json +from base64 import b16encode as b16enc -def encode_json(obj): +def encode_json(obj: object): """ Convert object to stringified JSON @@ -26,7 +28,7 @@ def encode_json(obj): return json.dumps(obj, separators=(",", ":")) -def read_json(filename): +def read_json(filename: str): """ Read a JSON file and return an object @@ -39,3 +41,12 @@ def read_json(filename): generics = read_json(join(root, "src/test/data/data.json")) """ return json.loads(open(filename, "r").read()) + + +def b16encode(data: Union[str, bytes]): + """ + Encode a str|bytes using Base16 and return a str|bytes + """ + if isinstance(data, str): + return b16enc(bytes(data, "utf-8")).decode("utf-8") + return b16enc(data) diff --git a/vunit/ostools.py index 04f3e70e7..dbc284ddb 100644 --- a/vunit/ostools.py +++ b/vunit/ostools.py @@ -15,8 +15,10 @@ import threading import shutil from queue import Queue, Empty -from os.path import exists, getmtime, dirname, relpath, splitdrive +from pathlib import Path +from os.path import getmtime, relpath, splitdrive import os +from os import getcwd, makedirs import io import logging @@ -296,12 +298,12 @@ def read_file(file_name, encoding="utf-8", newline=None): def write_file(file_name, contents, encoding="utf-8"): """ To stub during testing """ - path = dirname(file_name) + path = str(Path(file_name).parent) if path == "": path = "."
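Note on the new b16encode helper above: together with encode_json it lets a run script pass a whole JSON record through a single VHDL string generic without quoting issues. A minimal sketch, assuming a hypothetical data file and generic name (not part of the patch):

    from vunit.json4vhdl import read_json, encode_json, b16encode

    tb_cfg = read_json("src/test/data/data.json")   # dict parsed from a JSON file
    encoded = b16encode(encode_json(tb_cfg))        # compact JSON string, then Base16
    # e.g. testbench.set_generic("encoded_tb_cfg", encoded)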
if not file_exists(path): - os.makedirs(path) + makedirs(path) with io.open(file_name, "wb") as file_to_write: file_to_write.write(contents.encode(encoding=encoding)) @@ -309,7 +311,7 @@ def write_file(file_name, contents, encoding="utf-8"): def file_exists(file_name): """ To stub during testing """ - return exists(file_name) + return Path(file_name).exists() def get_modification_time(file_name): @@ -334,14 +336,14 @@ def renew_path(path): """ if IS_WINDOWS_SYSTEM: retries = 10 - while retries > 0 and exists(path): + while retries > 0 and Path(path).exists(): shutil.rmtree(path, ignore_errors=retries > 1) time.sleep(0.01) retries -= 1 else: - if exists(path): + if Path(path).exists(): shutil.rmtree(path) - os.makedirs(path) + makedirs(path) def simplify_path(path): @@ -349,7 +351,7 @@ def simplify_path(path): Return relative path towards current working directory unless it is a separate Windows drive """ - cwd = os.getcwd() + cwd = getcwd() drive_cwd = splitdrive(cwd)[0] drive_path = splitdrive(path)[0] if drive_path == drive_cwd: diff --git a/vunit/parsing/verilog/parser.py index 3e152dc05..a2ec6629a 100644 --- a/vunit/parsing/verilog/parser.py +++ b/vunit/parsing/verilog/parser.py @@ -12,7 +12,7 @@ """ import logging -from os.path import dirname, exists, abspath +from pathlib import Path from vunit.ostools import read_file from vunit.parsing.encodings import HDL_FILE_ENCODING from vunit.parsing.tokenizer import TokenStream, EOFException, LocationException @@ -63,7 +63,7 @@ def parse(self, file_name, include_paths=None, defines=None): defines = {} if defines is None else defines include_paths = [] if include_paths is None else include_paths - include_paths = [dirname(file_name)] + include_paths + include_paths = [str(Path(file_name).parent)] + include_paths cached = self._lookup_parse_cache(file_name, include_paths, defines) if cached is not None: @@ -99,7 +99,7 @@ def _key(file_name): """ Returns the database key for parse results of file_name """ - return ("CachedVerilogParser.parse(%s)" % abspath(file_name)).encode() + return ("CachedVerilogParser.parse(%s)" % str(Path(file_name).resolve())).encode() def _store_result(self, file_name, result, included_files, defines): """ @@ -124,7 +124,7 @@ def _content_hash(self, file_name): """ Hash the contents of the file """ - if file_name is None or not exists(file_name): + if file_name is None or not Path(file_name).exists(): return None if file_name not in self._content_cache: self._content_cache[file_name] = file_content_hash( diff --git a/vunit/parsing/verilog/preprocess.py index 4a0116e95..9fff6bbee 100644 --- a/vunit/parsing/verilog/preprocess.py +++ b/vunit/parsing/verilog/preprocess.py @@ -10,7 +10,7 @@ """ Verilog parsing functionality """ -from os.path import join, exists, abspath +from pathlib import Path import logging from vunit.parsing.tokenizer import ( TokenStream, @@ -114,10 +114,10 @@ def preprocessor( # pylint: disable=too-many-arguments,too-many-branches include_paths=include_paths, included_files=included_files, ) - except EOFException: + except EOFException as exe: raise LocationException.warning( "EOF reached when parsing `%s" % token.value, token.location - ) + ) from exe elif token.value in ("celldefine", "endcelldefine", "nounconnected_drive"): # Ignored @@ -150,10 +150,10 @@ def preprocessor( # pylint: disable=too-many-arguments,too-many-branches @staticmethod def _skip_protected_region(stream): """ - Skip a protected region -`pragma protect
begin_protected -Skipped -`pragma protect end_protected + Skip a protected region + `pragma protect begin_protected + Skipped + `pragma protect end_protected """ while not stream.eof: stream.skip_while(WHITESPACE) @@ -275,10 +275,10 @@ def include( # pylint: disable=too-many-arguments stream.skip_while(WHITESPACE) try: tok = stream.pop() - except EOFException: + except EOFException as exe: raise LocationException.warning( "EOF reached when parsing `include argument", token.location - ) + ) from exe if tok.kind == PREPROCESSOR: if tok.value in defines: @@ -350,8 +350,8 @@ def find_included_file(include_paths, file_name): Find the file to include given include_paths """ for include_path in include_paths: - full_name = abspath(join(include_path, file_name)) - if exists(full_name): + full_name = str((Path(include_path) / file_name).resolve()) + if Path(full_name).exists(): return full_name return None @@ -363,10 +363,10 @@ def undef(undef_token, stream, defines): stream.skip_while(WHITESPACE, NEWLINE) try: name_token = stream.pop() - except EOFException: + except EOFException as exe: raise LocationException.warning( "EOF reached when parsing `undef", undef_token.location - ) + ) from exe if name_token.kind != IDENTIFIER: raise LocationException.warning("Bad argument to `undef", name_token.location) @@ -386,10 +386,10 @@ def define(define_token, stream): stream.skip_while(WHITESPACE, NEWLINE) try: name_token = stream.pop() - except EOFException: + except EOFException as exe: raise LocationException.warning( "Verilog `define without argument", define_token.location - ) + ) from exe if name_token.kind != IDENTIFIER: raise LocationException.warning( @@ -429,10 +429,10 @@ def define(define_token, stream): token = stream.pop() else: token = stream.pop() - except EOFException: + except EOFException as exe: raise LocationException.warning( "EOF reached when parsing `define argument list", lpar_token.location - ) + ) from exe stream.skip_while(WHITESPACE) start = stream.idx @@ -500,10 +500,10 @@ def expand_from_stream(self, token, stream, previous=None): else: try: values = self._parse_macro_actuals(token, stream) - except EOFException: + except EOFException as exe: raise LocationException.warning( "EOF reached when parsing `define actuals", location=token.location - ) + ) from exe # Bind defaults if len(values) < len(self.args): diff --git a/vunit/project.py b/vunit/project.py index b66c32744..e4ad6ac9c 100644 --- a/vunit/project.py +++ b/vunit/project.py @@ -9,10 +9,9 @@ """ Functionality to represent and operate on a HDL code project """ -from os.path import join, basename, dirname, isdir, exists +from typing import Optional, Union from pathlib import Path import logging -from typing import Optional from collections import OrderedDict from vunit.hashing import hash_string from vunit.dependency_graph import DependencyGraph, CircularDependencyException @@ -84,7 +83,7 @@ def add_builtin_library(self, logical_name): def add_library( self, logical_name, - directory, + directory: Union[str, Path], vhdl_standard: VHDLStandard = VHDL.STD_2008, is_external=False, ): @@ -94,19 +93,18 @@ def add_library( """ self._validate_new_library_name(logical_name) + dpath = Path(directory) + dstr = str(directory) + if is_external: - if not exists(directory): - raise ValueError("External library %r does not exist" % directory) + if not dpath.exists(): + raise ValueError("External library %r does not exist" % dstr) - if not isdir(directory): - raise ValueError( - "External library must be a directory. 
Got %r" % directory - ) + if not dpath.is_dir(): + raise ValueError("External library must be a directory. Got %r" % dstr) - library = Library( - logical_name, directory, vhdl_standard, is_external=is_external - ) - LOGGER.debug("Adding library %s with path %s", logical_name, directory) + library = Library(logical_name, dstr, vhdl_standard, is_external=is_external) + LOGGER.debug("Adding library %s with path %s", logical_name, dstr) self._libraries[logical_name] = library self._lower_library_names_dict[logical_name.lower()] = library.name @@ -528,7 +526,7 @@ def _get_affected_files(self, target_files, get_depend_func): return get_depend_func(target_files) except CircularDependencyException as exc: self._handle_circular_dependency(exc) - raise CompileError + raise CompileError from exc def _get_compile_order(self, files, dependency_graph): """ @@ -539,7 +537,7 @@ def _get_compile_order(self, files, dependency_graph): compile_order = dependency_graph.toposort() except CircularDependencyException as exc: self._handle_circular_dependency(exc) - raise CompileError + raise CompileError from exc def comparison_key(source_file): return compile_order.index(source_file) @@ -610,9 +608,12 @@ def _hash_file_name_of(self, source_file): Returns the name of the hash file associated with the source_file """ library = self.get_library(source_file.library.name) - prefix = hash_string(dirname(source_file.name)) - return join( - library.directory, prefix, basename(source_file.name) + ".vunit_hash" + prefix = hash_string(str(Path(source_file.name).parent)) + return str( + Path(library.directory) + / prefix + / Path(source_file.name).name + / ".vunit_hash" ) def update(self, source_file): diff --git a/vunit/sim_if/__init__.py b/vunit/sim_if/__init__.py index 3077962c4..7104721f8 100644 --- a/vunit/sim_if/__init__.py +++ b/vunit/sim_if/__init__.py @@ -10,7 +10,9 @@ import sys import os +from os import environ, listdir, pathsep import subprocess +from pathlib import Path from typing import List from ..ostools import Process, simplify_path from ..exceptions import CompileError @@ -77,12 +79,12 @@ def find_executable(executable): """ Return a list of all executables found in PATH """ - path = os.environ.get("PATH", None) + path = environ.get("PATH", None) if path is None: return [] - paths = path.split(os.pathsep) - _, ext = os.path.splitext(executable) + paths = path.split(pathsep) + ext = Path(executable).suffix if (sys.platform == "win32" or os.name == "os2") and (ext != ".exe"): executable = executable + ".exe" @@ -92,7 +94,7 @@ def find_executable(executable): result.append(executable) for prefix in paths: - file_name = os.path.join(prefix, executable) + file_name = str(Path(prefix) / executable) if isfile(file_name): # the file exists, we have a shot at spawn working result.append(file_name) @@ -133,7 +135,7 @@ def find_toolchain(cls, executables, constraints=None): all_paths = [ [ - os.path.abspath(os.path.dirname(executables)) + str(Path(executables).parent.resolve()) for executables in cls.find_executable(name) ] for name in executables @@ -170,7 +172,14 @@ def has_valid_exit_code(): @staticmethod def supports_vhpi(): """ - Return if the simulator supports VHPI + Returns True when the simulator supports VHPI + """ + return False + + @staticmethod + def supports_coverage(): + """ + Returns True when the simulator supports coverage """ return False @@ -214,7 +223,7 @@ def setup_library_mapping(self, project): Implemented by specific simulators """ - def __compile_source_file(self, source_file, printer): + 
def _compile_source_file(self, source_file, printer): """ Compiles a single source file and prints status information """ @@ -295,7 +304,7 @@ def compile_source_files( printer.write("\n") continue - if self.__compile_source_file(source_file, printer): + if self._compile_source_file(source_file, printer): project.update(source_file) else: source_files_to_skip.update( @@ -329,12 +338,13 @@ def get_env(): def isfile(file_name): """ - Case insensitive os.path.isfile + Case insensitive Path.is_file() """ - if not os.path.isfile(file_name): + fpath = Path(file_name) + if not fpath.is_file(): return False - return os.path.basename(file_name) in os.listdir(os.path.dirname(file_name)) + return str(fpath.name) in listdir(str(fpath.parent)) def run_command(command, cwd=None, env=None): diff --git a/vunit/sim_if/activehdl.py b/vunit/sim_if/activehdl.py index e3dc0d2c6..c3c790461 100644 --- a/vunit/sim_if/activehdl.py +++ b/vunit/sim_if/activehdl.py @@ -9,7 +9,7 @@ """ from functools import total_ordering -from os.path import join, dirname, abspath +from pathlib import Path import os import re import logging @@ -58,7 +58,9 @@ def supports_vhdl_package_generics(cls): """ Returns True when this simulator supports VHDL package generics """ - proc = Process([join(cls.find_prefix(), "vcom"), "-version"], env=cls.get_env()) + proc = Process( + [str(Path(cls.find_prefix()) / "vcom"), "-version"], env=cls.get_env() + ) consumer = VersionConsumer() proc.consume_output(consumer) if consumer.version is not None: @@ -66,9 +68,16 @@ def supports_vhdl_package_generics(cls): return False + @staticmethod + def supports_coverage(): + """ + Returns True when the simulator supports coverage + """ + return True + def __init__(self, prefix, output_path, gui=False): SimulatorInterface.__init__(self, output_path, gui) - self._library_cfg = join(output_path, "library.cfg") + self._library_cfg = str(Path(output_path) / "library.cfg") self._prefix = prefix self._create_library_cfg() self._libraries = [] @@ -112,7 +121,12 @@ def compile_vhdl_file_command(self, source_file): Returns the command to compile a VHDL file """ return ( - [join(self._prefix, "vcom"), "-quiet", "-j", dirname(self._library_cfg)] + [ + str(Path(self._prefix) / "vcom"), + "-quiet", + "-j", + str(Path(self._library_cfg).parent), + ] + source_file.compile_options.get("activehdl.vcom_flags", []) + [ self._std_str(source_file.get_vhdl_standard()), @@ -126,7 +140,7 @@ def compile_verilog_file_command(self, source_file): """ Returns the command to compile a Verilog file """ - args = [join(self._prefix, "vlog"), "-quiet", "-lc", self._library_cfg] + args = [str(Path(self._prefix) / "vlog"), "-quiet", "-lc", self._library_cfg] args += source_file.compile_options.get("activehdl.vlog_flags", []) args += ["-work", source_file.library.name, source_file.name] for library in self._libraries: @@ -143,13 +157,15 @@ def create_library(self, library_name, path, mapped_libraries=None): """ mapped_libraries = mapped_libraries if mapped_libraries is not None else {} - if not file_exists(dirname(abspath(path))): - os.makedirs(dirname(abspath(path))) + apath = str(Path(path).parent.resolve()) + + if not file_exists(apath): + os.makedirs(apath) if not file_exists(path): proc = Process( - [join(self._prefix, "vlib"), library_name, path], - cwd=dirname(self._library_cfg), + [str(Path(self._prefix) / "vlib"), library_name, path], + cwd=str(Path(self._library_cfg).parent), env=self.get_env(), ) proc.consume_output(callback=None) @@ -158,8 +174,8 @@ def create_library(self, 
library_name, path, mapped_libraries=None): return proc = Process( - [join(self._prefix, "vmap"), library_name, path], - cwd=dirname(self._library_cfg), + [str(Path(self._prefix) / "vmap"), library_name, path], + cwd=str(Path(self._library_cfg).parent), env=self.get_env(), ) proc.consume_output(callback=None) @@ -173,7 +189,8 @@ def _create_library_cfg(self): with open(self._library_cfg, "w") as ofile: ofile.write( - '$INCLUDE = "%s"\n' % join(self._prefix, "..", "vlib", "library.cfg") + '$INCLUDE = "%s"\n' + % str(Path(self._prefix).parent / "vlib" / "library.cfg") ) _library_re = re.compile(r'([a-zA-Z_]+)\s=\s"(.*)"') @@ -192,7 +209,9 @@ def _get_mapped_libraries(self): continue key = match.group(1) value = match.group(2) - libraries[key] = abspath(join(dirname(self._library_cfg), dirname(value))) + libraries[key] = str( + (Path(self._library_cfg).parent / Path(value).parent).resolve() + ) return libraries def _vsim_extra_args(self, config): @@ -243,7 +262,7 @@ def _create_load_function(self, config, output_path): vsim_flags.append(config.architecture_name) if config.sim_options.get("enable_coverage", False): - coverage_file_path = join(output_path, "coverage.acdb") + coverage_file_path = str(Path(output_path) / "coverage.acdb") self._coverage_files.add(coverage_file_path) vsim_flags += ["-acdb_file {%s}" % fix_path(coverage_file_path)] @@ -323,12 +342,12 @@ def merge_coverage(self, file_name, args=None): merge_command += " -o {%s}" % fix_path(file_name) + "\n" - merge_script_name = join(self._output_path, "acdb_merge.tcl") + merge_script_name = str(Path(self._output_path) / "acdb_merge.tcl") with open(merge_script_name, "w") as fptr: fptr.write(merge_command + "\n") vcover_cmd = [ - join(self._prefix, "vsimsa"), + str(Path(self._prefix) / "vsimsa"), "-tcl", "%s" % fix_path(merge_script_name), ] @@ -380,7 +399,7 @@ def _create_gui_script(self, common_file_name, config): init_file = config.sim_options.get(self.name + ".init_file.gui", None) if init_file is not None: - tcl += 'source "%s"\n' % fix_path(abspath(init_file)) + tcl += 'source "%s"\n' % fix_path(str(Path(init_file).resolve())) tcl += ( 'puts "VUnit help: Design already loaded. 
Use run -all to run the test."\n' @@ -399,10 +418,10 @@ def _run_batch_file(self, batch_file_name, gui, cwd): try: args = [ - join(self._prefix, "vsim"), + str(Path(self._prefix) / "vsim"), "-gui" if gui else "-c", "-l", - join(dirname(batch_file_name), "transcript"), + str(Path(batch_file_name).parent / "transcript"), "-do", todo, ] @@ -417,24 +436,27 @@ def simulate(self, output_path, test_suite_name, config, elaborate_only): """ Run a test bench """ - script_path = join(output_path, self.name) - common_file_name = join(script_path, "common.tcl") - batch_file_name = join(script_path, "batch.tcl") - gui_file_name = join(script_path, "gui.tcl") + script_path = Path(output_path) / self.name + common_file_name = script_path / "common.tcl" + batch_file_name = script_path / "batch.tcl" + gui_file_name = script_path / "gui.tcl" write_file(common_file_name, self._create_common_script(config, output_path)) - write_file(gui_file_name, self._create_gui_script(common_file_name, config)) write_file( - batch_file_name, self._create_batch_script(common_file_name, elaborate_only) + gui_file_name, self._create_gui_script(str(common_file_name), config) + ) + write_file( + str(batch_file_name), + self._create_batch_script(str(common_file_name), elaborate_only), ) if self._gui: - gui_path = join(script_path, "gui") + gui_path = str(script_path / "gui") renew_path(gui_path) - return self._run_batch_file(gui_file_name, gui=True, cwd=gui_path) + return self._run_batch_file(str(gui_file_name), gui=True, cwd=gui_path) return self._run_batch_file( - batch_file_name, gui=False, cwd=dirname(self._library_cfg) + str(batch_file_name), gui=False, cwd=str(Path(self._library_cfg).parent) ) diff --git a/vunit/sim_if/factory.py b/vunit/sim_if/factory.py index 16cd0a636..5291f2fda 100644 --- a/vunit/sim_if/factory.py +++ b/vunit/sim_if/factory.py @@ -39,7 +39,7 @@ def _extract_compile_options(self): """ Return all supported compile options """ - result = dict() + result = dict((opt.name, opt) for opt in [BooleanOption("enable_coverage")]) for sim_class in self.supported_simulators(): for opt in sim_class.compile_options: assert hasattr(opt, "name") diff --git a/vunit/sim_if/ghdl.py b/vunit/sim_if/ghdl.py index 92c091304..e459266e5 100644 --- a/vunit/sim_if/ghdl.py +++ b/vunit/sim_if/ghdl.py @@ -8,12 +8,16 @@ Interface for GHDL simulator """ -from os.path import exists, join, abspath -import os +from pathlib import Path +from os import environ, makedirs, remove import logging import subprocess import shlex +import re +import shutil +from json import dump from sys import stdout # To avoid output catched in non-verbose mode +from warnings import warn from ..exceptions import CompileError from ..ostools import Process from . 
import SimulatorInterface, ListOfStringOption, StringOption, BooleanOption @@ -22,17 +26,20 @@ LOGGER = logging.getLogger(__name__) -class GHDLInterface(SimulatorInterface): +class GHDLInterface(SimulatorInterface): # pylint: disable=too-many-instance-attributes """ Interface for GHDL simulator """ name = "ghdl" - executable = os.environ.get("GHDL", "ghdl") + executable = environ.get("GHDL", "ghdl") supports_gui_flag = True supports_colors_in_gui = True - compile_options = [ListOfStringOption("ghdl.flags")] + compile_options = [ + ListOfStringOption("ghdl.a_flags"), + ListOfStringOption("ghdl.flags"), + ] sim_options = [ ListOfStringOption("ghdl.sim_flags"), @@ -102,6 +109,7 @@ def __init__( # pylint: disable=too-many-arguments self._gtkwave_args = gtkwave_args self._backend = backend self._vhdl_standard = None + self._coverage_test_dirs = set() def has_valid_exit_code(self): """ @@ -109,6 +117,15 @@ def has_valid_exit_code(self): """ return self._vhdl_standard >= VHDL.STD_2008 + @classmethod + def _get_version_output(cls, prefix): + """ + Get the output of 'ghdl --version' + """ + return subprocess.check_output( + [str(Path(prefix) / cls.executable), "--version"] + ).decode() + @classmethod def determine_backend(cls, prefix): """ @@ -119,9 +136,7 @@ def determine_backend(cls, prefix): "llvm code generator": "llvm", "GCC back-end code generator": "gcc", } - output = subprocess.check_output( - [join(prefix, cls.executable), "--version"] - ).decode() + output = cls._get_version_output(prefix) for name, backend in mapping.items(): if name in output: LOGGER.debug("Detected GHDL %s", name) @@ -136,12 +151,33 @@ def determine_backend(cls, prefix): "No known GHDL back-end could be detected from running 'ghdl --version'" ) + @classmethod + def determine_version(cls, prefix): + """ + Determine the GHDL version + """ + return float( + re.match( + r"GHDL ([0-9]*\.[0-9]*).*\(.*\) \[Dunoon edition\]", + cls._get_version_output(prefix), + ).group(1) + ) + @classmethod def supports_vhpi(cls): """ - Return if the simulator supports VHPI + Returns True when the simulator supports VHPI + """ + return (cls.determine_backend(cls.find_prefix_from_path()) != "mcode") or ( + cls.determine_version(cls.find_prefix_from_path()) > 0.36 + ) + + @classmethod + def supports_coverage(cls): + """ + Returns True when the simulator supports coverage """ - return cls.determine_backend(cls.find_prefix_from_path()) != "mcode" + return cls.determine_backend(cls.find_prefix_from_path()) == "gcc" def _has_output_flag(self): """ @@ -155,8 +191,8 @@ def setup_library_mapping(self, project): """ self._project = project for library in project.get_libraries(): - if not exists(library.directory): - os.makedirs(library.directory) + if not Path(library.directory).exists(): + makedirs(library.directory) vhdl_standards = set( source_file.get_vhdl_standard() @@ -205,7 +241,7 @@ def compile_vhdl_file_command(self, source_file): Returns the command to compile a vhdl file """ cmd = [ - join(self._prefix, self.executable), + str(Path(self._prefix) / self.executable), "-a", "--workdir=%s" % source_file.library.directory, "--work=%s" % source_file.library.name, @@ -213,15 +249,36 @@ def compile_vhdl_file_command(self, source_file): ] for library in self._project.get_libraries(): cmd += ["-P%s" % library.directory] - cmd += source_file.compile_options.get("ghdl.flags", []) + + a_flags = source_file.compile_options.get("ghdl.a_flags", []) + flags = source_file.compile_options.get("ghdl.flags", []) + if flags != []: + warn( + ( + "'ghdl.flags' is 
deprecated and it will be removed in future releases; " + "use 'ghdl.a_flags' instead" + ), + Warning, + ) + a_flags += flags + + cmd += a_flags + + if source_file.compile_options.get("enable_coverage", False): + # Add gcc compilation flags for coverage + # -ftest-coverages creates .gcno notes files needed by gcov + # -fprofile-arcs creates branch profiling in .gcda database files + cmd += ["-fprofile-arcs", "-ftest-coverage"] cmd += [source_file.name] return cmd - def _get_command(self, config, output_path, ghdl_e): + def _get_command( # pylint: disable=too-many-branches + self, config, output_path, elaborate_only, ghdl_e, wave_file + ): """ Return GHDL simulation command """ - cmd = [join(self._prefix, self.executable)] + cmd = [str(Path(self._prefix) / self.executable)] if ghdl_e: cmd += ["-e"] @@ -234,24 +291,53 @@ def _get_command(self, config, output_path, ghdl_e): "--workdir=%s" % self._project.get_library(config.library_name).directory ] cmd += ["-P%s" % lib.directory for lib in self._project.get_libraries()] + + bin_path = str( + Path(output_path) + / ("%s-%s" % (config.entity_name, config.architecture_name)) + ) if self._has_output_flag(): - cmd += [ - "-o", - join( - output_path, - "%s-%s" % (config.entity_name, config.architecture_name), - ), - ] + cmd += ["-o", bin_path] cmd += config.sim_options.get("ghdl.elab_flags", []) + if config.sim_options.get("enable_coverage", False): + # Enable coverage in linker + cmd += ["-Wl,-lgcov"] cmd += [config.entity_name, config.architecture_name] + sim = config.sim_options.get("ghdl.sim_flags", []) + for name, value in config.generics.items(): + sim += ["-g%s=%s" % (name, value)] + sim += ["--assert-level=%s" % config.vhdl_assert_stop_level] + if config.sim_options.get("disable_ieee_warnings", False): + sim += ["--ieee-asserts=disable"] + + if wave_file: + if self._gtkwave_fmt == "ghw": + sim += ["--wave=%s" % wave_file] + elif self._gtkwave_fmt == "vcd": + sim += ["--vcd=%s" % wave_file] + if not ghdl_e: - cmd += config.sim_options.get("ghdl.sim_flags", []) - for name, value in config.generics.items(): - cmd += ["-g%s=%s" % (name, value)] - cmd += ["--assert-level=%s" % config.vhdl_assert_stop_level] - if config.sim_options.get("disable_ieee_warnings", False): - cmd += ["--ieee-asserts=disable"] + cmd += sim + if elaborate_only: + cmd += ["--no-run"] + else: + try: + makedirs(output_path, mode=0o777) + except OSError: + pass + with (Path(output_path) / "args.json").open("w") as fname: + dump( + { + "bin": str( + Path(output_path) + / ("%s-%s" % (config.entity_name, config.architecture_name)) + ), + "build": cmd[1:], + "sim": sim, + }, + fname, + ) return cmd @@ -262,35 +348,35 @@ def simulate( # pylint: disable=too-many-locals Simulate with entity as top level using generics """ - script_path = join(output_path, self.name) + script_path = str(Path(output_path) / self.name) - if not exists(script_path): - os.makedirs(script_path) + if not Path(script_path).exists(): + makedirs(script_path) ghdl_e = elaborate_only and config.sim_options.get("ghdl.elab_e", False) - cmd = self._get_command(config, script_path, ghdl_e) - - if elaborate_only and not ghdl_e: - cmd += ["--no-run"] - - if self._gtkwave_fmt is not None and not ghdl_e: - data_file_name = join(script_path, "wave.%s" % self._gtkwave_fmt) - - if exists(data_file_name): - os.remove(data_file_name) - - if self._gtkwave_fmt == "ghw": - cmd += ["--wave=%s" % data_file_name] - elif self._gtkwave_fmt == "vcd": - cmd += ["--vcd=%s" % data_file_name] - + if self._gtkwave_fmt is not None: + 
data_file_name = str(Path(script_path) / ("wave.%s" % self._gtkwave_fmt)) + if Path(data_file_name).exists(): + remove(data_file_name) else: data_file_name = None + cmd = self._get_command( + config, script_path, elaborate_only, ghdl_e, data_file_name + ) + status = True + + gcov_env = environ.copy() + if config.sim_options.get("enable_coverage", False): + # Set environment variable to put the coverage output in the test_output folder + coverage_dir = str(Path(output_path) / "coverage") + gcov_env["GCOV_PREFIX"] = coverage_dir + self._coverage_test_dirs.add(coverage_dir) + try: - proc = Process(cmd) + proc = Process(cmd, env=gcov_env) proc.consume_output() except Process.NonZeroExitCode: status = False @@ -300,9 +386,60 @@ def simulate( # pylint: disable=too-many-locals init_file = config.sim_options.get(self.name + ".gtkwave_script.gui", None) if init_file is not None: - cmd += ["--script", "{}".format(abspath(init_file))] + cmd += ["--script", "{}".format(str(Path(init_file).resolve()))] stdout.write("%s\n" % " ".join(cmd)) subprocess.call(cmd) return status + + def _compile_source_file(self, source_file, printer): + """ + Runs parent command for compilation, and moves any .gcno files to the compilation output + """ + compilation_ok = super()._compile_source_file(source_file, printer) + + if source_file.compile_options.get("enable_coverage", False): + # GCOV gcno files are output to where the command is run, + # move it back to the compilation folder + source_path = Path(source_file.name) + gcno_file = Path(source_path.stem + ".gcno") + if Path(gcno_file).exists(): + new_path = Path(source_file.library.directory) / gcno_file + gcno_file.rename(new_path) + + return compilation_ok + + def merge_coverage(self, file_name, args=None): + """ + Merge coverage from all test cases + """ + output_dir = file_name + + # Loop over each .gcda output folder and merge them two at a time + first_input = True + for coverage_dir in self._coverage_test_dirs: + if Path(coverage_dir).exists(): + merge_command = [ + "gcov-tool", + "merge", + "-o", + output_dir, + coverage_dir if first_input else output_dir, + coverage_dir, + ] + subprocess.call(merge_command) + first_input = False + else: + LOGGER.warning("Missing coverage directory: %s", coverage_dir) + + # Find actual output path of the .gcda files (they are deep in hierarchy) + dir_path = Path(output_dir) + gcda_dirs = {x.parent for x in dir_path.glob("**/*.gcda")} + assert len(gcda_dirs) == 1, "Expected exactly one folder with gcda files" + gcda_dir = gcda_dirs.pop() + + # Add compile-time .gcno files as well, they are needed for the report + for library in self._project.get_libraries(): + for gcno_file in Path(library.directory).glob("*.gcno"): + shutil.copy(gcno_file, gcda_dir) diff --git a/vunit/sim_if/incisive.py b/vunit/sim_if/incisive.py index c0ec4bad4..5dc683b00 100644 --- a/vunit/sim_if/incisive.py +++ b/vunit/sim_if/incisive.py @@ -8,7 +8,8 @@ Interface for the Cadence Incisive simulator """ -from os.path import join, dirname, abspath, relpath +from pathlib import Path +from os.path import relpath import os import subprocess import logging @@ -94,9 +95,9 @@ def __init__( # pylint: disable=too-many-arguments self._libraries = [] self._log_level = log_level if cdslib is None: - self._cdslib = abspath(join(output_path, "cds.lib")) + self._cdslib = str((Path(output_path) / "cds.lib").resolve()) else: - self._cdslib = abspath(cdslib) + self._cdslib = str(Path(cdslib).resolve()) self._hdlvar = hdlvar self._cds_root_irun = self.find_cds_root_irun() 
self._create_cdslib() @@ -106,7 +107,7 @@ def find_cds_root_irun(self): Finds irun cds root """ return subprocess.check_output( - [join(self._prefix, "cds_root"), "irun"] + [str(Path(self._prefix) / "cds_root"), "irun"] ).splitlines()[0] def find_cds_root_virtuoso(self): @@ -115,7 +116,7 @@ def find_cds_root_virtuoso(self): """ try: return subprocess.check_output( - [join(self._prefix, "cds_root"), "virtuoso"] + [str(Path(self._prefix) / "cds_root"), "virtuoso"] ).splitlines()[0] except subprocess.CalledProcessError: return None @@ -192,7 +193,7 @@ def compile_vhdl_file_command(self, source_file): """ Returns command to compile a VHDL file """ - cmd = join(self._prefix, "irun") + cmd = str(Path(self._prefix) / "irun") args = [] args += ["-compile"] args += ["-nocopyright"] @@ -205,9 +206,9 @@ def compile_vhdl_file_command(self, source_file): args += self._hdlvar_args() args += [ '-log "%s"' - % join( - self._output_path, - "irun_compile_vhdl_file_%s.log" % source_file.library.name, + % str( + Path(self._output_path) + / ("irun_compile_vhdl_file_%s.log" % source_file.library.name) ) ] if not self._log_level == "debug": @@ -216,13 +217,13 @@ def compile_vhdl_file_command(self, source_file): args += ["-messages"] args += ["-libverbose"] args += source_file.compile_options.get("incisive.irun_vhdl_flags", []) - args += ['-nclibdirname "%s"' % dirname(source_file.library.directory)] + args += ['-nclibdirname "%s"' % str(Path(source_file.library.directory).parent)] args += ["-makelib %s" % source_file.library.directory] args += ['"%s"' % source_file.name] args += ["-endlib"] - argsfile = join( - self._output_path, - "irun_compile_vhdl_file_%s.args" % source_file.library.name, + argsfile = str( + Path(self._output_path) + / ("irun_compile_vhdl_file_%s.args" % source_file.library.name) ) write_file(argsfile, "\n".join(args)) return [cmd, "-f", argsfile] @@ -231,7 +232,7 @@ def compile_verilog_file_command(self, source_file): """ Returns commands to compile a Verilog file """ - cmd = join(self._prefix, "irun") + cmd = str(Path(self._prefix) / "irun") args = [] args += ["-compile"] args += ["-nocopyright"] @@ -248,9 +249,9 @@ def compile_verilog_file_command(self, source_file): args += self._hdlvar_args() args += [ '-log "%s"' - % join( - self._output_path, - "irun_compile_verilog_file_%s.log" % source_file.library.name, + % str( + Path(self._output_path) + / ("irun_compile_verilog_file_%s.log" % source_file.library.name) ) ] if not self._log_level == "debug": @@ -266,13 +267,13 @@ def compile_verilog_file_command(self, source_file): for key, value in source_file.defines.items(): args += ["-define %s=%s" % (key, value.replace('"', '\\"'))] - args += ['-nclibdirname "%s"' % dirname(source_file.library.directory)] + args += ['-nclibdirname "%s"' % str(Path(source_file.library.directory).parent)] args += ["-makelib %s" % source_file.library.name] args += ['"%s"' % source_file.name] args += ["-endlib"] - argsfile = join( - self._output_path, - "irun_compile_verilog_file_%s.args" % source_file.library.name, + argsfile = str( + Path(self._output_path) + / ("irun_compile_verilog_file_%s.args" % source_file.library.name) ) write_file(argsfile, "\n".join(args)) return [cmd, "-f", argsfile] @@ -283,8 +284,10 @@ def create_library(self, library_name, library_path, mapped_libraries=None): """ mapped_libraries = mapped_libraries if mapped_libraries is not None else {} - if not file_exists(dirname(abspath(library_path))): - os.makedirs(dirname(abspath(library_path))) + lpath = 
str(Path(library_path).resolve().parent) + + if not file_exists(lpath): + os.makedirs(lpath) if ( library_name in mapped_libraries @@ -310,7 +313,7 @@ def simulate( # pylint: disable=too-many-locals Elaborates and Simulates with entity as top level using generics """ - script_path = join(output_path, self.name) + script_path = str(Path(output_path) / self.name) launch_gui = self._gui is not False and not elaborate_only if elaborate_only: @@ -319,7 +322,7 @@ def simulate( # pylint: disable=too-many-locals steps = ["elaborate", "simulate"] for step in steps: - cmd = join(self._prefix, "irun") + cmd = str(Path(self._prefix) / "irun") args = [] if step == "elaborate": args += ["-elaborate"] @@ -345,12 +348,12 @@ def simulate( # pylint: disable=too-many-locals ] # promote to error: "bad natural literal in generic association" args += ["-work work"] args += [ - '-nclibdirname "%s"' % (join(self._output_path, "libraries")) + '-nclibdirname "%s"' % (str(Path(self._output_path) / "libraries")) ] # @TODO: ugly args += config.sim_options.get("incisive.irun_sim_flags", []) args += ['-cdslib "%s"' % self._cdslib] args += self._hdlvar_args() - args += ['-log "%s"' % join(script_path, "irun_%s.log" % step)] + args += ['-log "%s"' % str(Path(script_path) / ("irun_%s.log" % step))] if not self._log_level == "debug": args += ["-quiet"] else: @@ -369,21 +372,15 @@ def simulate( # pylint: disable=too-many-locals if config.architecture_name is None: # we have a SystemVerilog toplevel: - args += [ - "-top %s" - % join("%s.%s:sv" % (config.library_name, config.entity_name)) - ] + args += ["-top %s.%s:sv" % (config.library_name, config.entity_name)] else: # we have a VHDL toplevel: args += [ - "-top %s" - % join( - "%s.%s:%s" - % ( - config.library_name, - config.entity_name, - config.architecture_name, - ) + "-top %s.%s:%s" + % ( + config.library_name, + config.entity_name, + config.architecture_name, ) ] argsfile = "%s/irun_%s.args" % (script_path, step) diff --git a/vunit/sim_if/modelsim.py b/vunit/sim_if/modelsim.py index 1a5af9352..96bdbfdd4 100644 --- a/vunit/sim_if/modelsim.py +++ b/vunit/sim_if/modelsim.py @@ -8,7 +8,7 @@ Interface towards Mentor Graphics ModelSim """ -from os.path import join, dirname, abspath +from pathlib import Path import os import logging import io @@ -70,7 +70,7 @@ def find_prefix_from_path(cls): """ def has_modelsim_ini(path): - return os.path.isfile(join(path, "..", "modelsim.ini")) + return os.path.isfile(str(Path(path).parent / "modelsim.ini")) return cls.find_toolchain(["vsim"], constraints=[has_modelsim_ini]) @@ -81,13 +81,20 @@ def supports_vhdl_package_generics(cls): """ return True + @staticmethod + def supports_coverage(): + """ + Returns True when the simulator supports coverage + """ + return True + def __init__(self, prefix, output_path, persistent=False, gui=False): SimulatorInterface.__init__(self, output_path, gui) VsimSimulatorMixin.__init__( self, prefix, persistent, - sim_cfg_file_name=join(output_path, "modelsim.ini"), + sim_cfg_file_name=str(Path(output_path) / "modelsim.ini"), ) self._libraries = [] self._coverage_files = set() @@ -98,12 +105,12 @@ def _create_modelsim_ini(self): """ Create the modelsim.ini file """ - parent = dirname(self._sim_cfg_file_name) + parent = str(Path(self._sim_cfg_file_name).parent) if not file_exists(parent): os.makedirs(parent) original_modelsim_ini = os.environ.get( - "VUNIT_MODELSIM_INI", join(self._prefix, "..", "modelsim.ini") + "VUNIT_MODELSIM_INI", str(Path(self._prefix).parent / "modelsim.ini") ) with 
open(original_modelsim_ini, "rb") as fread: with open(self._sim_cfg_file_name, "wb") as fwrite: @@ -157,7 +164,7 @@ def compile_vhdl_file_command(self, source_file): """ return ( [ - join(self._prefix, "vcom"), + str(Path(self._prefix) / "vcom"), "-quiet", "-modelsimini", self._sim_cfg_file_name, @@ -176,7 +183,7 @@ def compile_verilog_file_command(self, source_file): Returns the command to compile a verilog file """ args = [ - join(self._prefix, "vlog"), + str(Path(self._prefix) / "vlog"), "-quiet", "-modelsimini", self._sim_cfg_file_name, @@ -200,12 +207,14 @@ def create_library(self, library_name, path, mapped_libraries=None): """ mapped_libraries = mapped_libraries if mapped_libraries is not None else {} - if not file_exists(dirname(abspath(path))): - os.makedirs(dirname(abspath(path))) + apath = str(Path(path).parent.resolve()) + + if not file_exists(apath): + os.makedirs(apath) if not file_exists(path): proc = Process( - [join(self._prefix, "vlib"), "-unix", path], env=self.get_env() + [str(Path(self._prefix) / "vlib"), "-unix", path], env=self.get_env() ) proc.consume_output(callback=None) @@ -247,7 +256,7 @@ def _create_load_function(self, test_suite_name, config, output_path): architecture_suffix = "(%s)" % config.architecture_name if config.sim_options.get("enable_coverage", False): - coverage_file = join(output_path, "coverage.ucdb") + coverage_file = str(Path(output_path) / "coverage.ucdb") self._coverage_files.add(coverage_file) coverage_save_cmd = ( "coverage save -onexit -testname {%s} -assert -directive -cvg -codeAll {%s}" @@ -259,7 +268,7 @@ def _create_load_function(self, test_suite_name, config, output_path): coverage_args = "" vsim_flags = [ - "-wlf {%s}" % fix_path(join(output_path, "vsim.wlf")), + "-wlf {%s}" % fix_path(str(Path(output_path) / "vsim.wlf")), "-quiet", "-t ps", # for correct handling of verilog fatal/finish @@ -383,9 +392,9 @@ def merge_coverage(self, file_name, args=None): if args is None: args = [] - coverage_files = join(self._output_path, "coverage_files.txt") + coverage_files = str(Path(self._output_path) / "coverage_files.txt") vcover_cmd = ( - [join(self._prefix, "vcover"), "merge", "-inputs"] + [str(Path(self._prefix) / "vcover"), "merge", "-inputs"] + [coverage_files] + args + [file_name] diff --git a/vunit/sim_if/rivierapro.py b/vunit/sim_if/rivierapro.py index 0cb9b1766..db5561905 100644 --- a/vunit/sim_if/rivierapro.py +++ b/vunit/sim_if/rivierapro.py @@ -8,7 +8,7 @@ Interface towards Aldec Riviera Pro """ -from os.path import join, dirname, abspath +from pathlib import Path import os import re import logging @@ -66,22 +66,31 @@ def find_prefix_from_path(cls): """ def no_avhdl(path): - return not file_exists(join(path, "avhdl.exe")) + return not file_exists(str(Path(path) / "avhdl.exe")) return cls.find_toolchain(["vsim", "vsimsa"], constraints=[no_avhdl]) @classmethod - def get_osvvm_coverage_api(cls): + def _get_version(cls): """ - Returns simulator name when OSVVM coverage API is supported, None otherwise. + Return a VersionConsumer object containing the simulator version. 
""" - proc = Process([join(cls.find_prefix(), "vcom"), "-version"], env=cls.get_env()) + proc = Process( + [str(Path(cls.find_prefix()) / "vcom"), "-version"], env=cls.get_env() + ) consumer = VersionConsumer() proc.consume_output(consumer) - if consumer.year is not None: - if (consumer.year == 2016 and consumer.month >= 10) or ( - consumer.year > 2016 - ): + + return consumer + + @classmethod + def get_osvvm_coverage_api(cls): + """ + Returns simulator name when OSVVM coverage API is supported, None otherwise. + """ + version = cls._get_version() + if version.year is not None: + if (version.year == 2016 and version.month >= 10) or (version.year > 2016): return cls.name return None @@ -93,14 +102,25 @@ def supports_vhdl_package_generics(cls): """ return True + @staticmethod + def supports_coverage(): + """ + Returns True when the simulator supports coverage + """ + return True + def __init__(self, prefix, output_path, persistent=False, gui=False): SimulatorInterface.__init__(self, output_path, gui) VsimSimulatorMixin.__init__( - self, prefix, persistent, sim_cfg_file_name=join(output_path, "library.cfg") + self, + prefix, + persistent, + sim_cfg_file_name=str(Path(output_path) / "library.cfg"), ) self._create_library_cfg() self._libraries = [] self._coverage_files = set() + self._version = self._get_version() def add_simulator_specific(self, project): """ @@ -135,13 +155,19 @@ def compile_source_file_command(self, source_file): LOGGER.error("Unknown file type: %s", source_file.file_type) raise CompileError - @staticmethod - def _std_str(vhdl_standard): + def _std_str(self, vhdl_standard): """ Convert standard to format of Riviera-PRO command line flag """ if vhdl_standard == VHDL.STD_2019: - return "-2018" + if self._version.year is not None: + if (self._version.year == 2020 and self._version.month < 4) or ( + self._version.year < 2020 + ): + return "-2018" + + return "-2019" + return "-%s" % vhdl_standard def compile_vhdl_file_command(self, source_file): @@ -151,10 +177,10 @@ def compile_vhdl_file_command(self, source_file): return ( [ - join(self._prefix, "vcom"), + str(Path(self._prefix) / "vcom"), "-quiet", "-j", - dirname(self._sim_cfg_file_name), + str(Path(self._sim_cfg_file_name).parent), ] + source_file.compile_options.get("rivierapro.vcom_flags", []) + [ @@ -169,7 +195,12 @@ def compile_verilog_file_command(self, source_file): """ Returns the command to compile a Verilog file """ - args = [join(self._prefix, "vlog"), "-quiet", "-lc", self._sim_cfg_file_name] + args = [ + str(Path(self._prefix) / "vlog"), + "-quiet", + "-lc", + self._sim_cfg_file_name, + ] if source_file.is_system_verilog: args += ["-sv2k12"] args += source_file.compile_options.get("rivierapro.vlog_flags", []) @@ -188,13 +219,15 @@ def create_library(self, library_name, path, mapped_libraries=None): """ mapped_libraries = mapped_libraries if mapped_libraries is not None else {} - if not file_exists(dirname(abspath(path))): - os.makedirs(dirname(abspath(path))) + apath = str(Path(path).parent.resolve()) + + if not file_exists(apath): + os.makedirs(apath) if not file_exists(path): proc = Process( - [join(self._prefix, "vlib"), library_name, path], - cwd=dirname(self._sim_cfg_file_name), + [str(Path(self._prefix) / "vlib"), library_name, path], + cwd=str(Path(self._sim_cfg_file_name).parent), env=self.get_env(), ) proc.consume_output(callback=None) @@ -203,8 +236,8 @@ def create_library(self, library_name, path, mapped_libraries=None): return proc = Process( - [join(self._prefix, "vmap"), library_name, path], - 
cwd=dirname(self._sim_cfg_file_name), + [str(Path(self._prefix) / "vmap"), library_name, path], + cwd=str(Path(self._sim_cfg_file_name).parent), env=self.get_env(), ) proc.consume_output(callback=None) @@ -221,7 +254,7 @@ def _create_library_cfg(self): @property def _builtin_library_cfg(self): - return join(self._prefix, "..", "vlib", "library.cfg") + return str(Path(self._prefix).parent / "vlib" / "library.cfg") _library_re = re.compile(r"([a-zA-Z_0-9]+)\s=\s(.*)") @@ -230,7 +263,9 @@ def _get_mapped_libraries(self, library_cfg_file): Get mapped libraries by running vlist on the working directory """ lines = [] - proc = Process([join(self._prefix, "vlist")], cwd=dirname(library_cfg_file)) + proc = Process( + [str(Path(self._prefix) / "vlist")], cwd=str(Path(library_cfg_file).parent) + ) proc.consume_output(callback=lines.append) libraries = {} @@ -240,7 +275,9 @@ def _get_mapped_libraries(self, library_cfg_file): continue key = match.group(1) value = match.group(2) - libraries[key] = abspath(join(dirname(library_cfg_file), dirname(value))) + libraries[key] = str( + (Path(library_cfg_file).parent / (Path(value).parent)).resolve() + ) return libraries def _create_load_function( @@ -260,13 +297,13 @@ def _create_load_function( ) vsim_flags = [ - "-dataset {%s}" % fix_path(join(output_path, "dataset.asdb")), + "-dataset {%s}" % fix_path(str(Path(output_path) / "dataset.asdb")), pli_str, set_generic_str, ] if config.sim_options.get("enable_coverage", False): - coverage_file_path = join(output_path, "coverage.acdb") + coverage_file_path = str(Path(output_path) / "coverage.acdb") self._coverage_files.add(coverage_file_path) vsim_flags += ["-acdb_file {%s}" % coverage_file_path] @@ -284,15 +321,14 @@ def _create_load_function( tcl = """ proc vunit_load {{}} {{ - # Make the variable 'aldec' visible; otherwise, the Matlab interface - # is broken because vsim does not find the library aldec_matlab_cosim. - global aldec - # Make the variable 'LICENSE_QUEUE' visible (if set); otherwise vsim - # will not wait for simulation licenses. - global LICENSE_QUEUE - + # Run the 'vsim' command in the global variable context. This will make + # variables such as 'aldec' and 'LICENSE_QUEUE' visible, if set. + # Otherwise: + # - The Matlab interface is broken because vsim does not find the + # library aldec_matlab_cosim + # - vsim will not wait for simulation licenses set vsim_failed [catch {{ - eval vsim {{{vsim_flags}}} + uplevel #0 vsim {{{vsim_flags}}} }}] if {{${{vsim_failed}}}} {{ @@ -384,15 +420,15 @@ def merge_coverage(self, file_name, args=None): merge_command += " -o {%s}" % file_name.replace("\\", "/") - merge_script_name = join(self._output_path, "acdb_merge.tcl") + merge_script_name = str(Path(self._output_path) / "acdb_merge.tcl") with open(merge_script_name, "w") as fptr: fptr.write(merge_command + "\n") vcover_cmd = [ - join(self._prefix, "vsim"), + str(Path(self._prefix) / "vsim"), "-c", "-do", - "source %s; quit;" % merge_script_name.replace("\\", "/"), + "source {%s}; quit;" % merge_script_name.replace("\\", "/"), ] print("Merging coverage files into %s..." 
% file_name) diff --git a/vunit/sim_if/vsim_simulator_mixin.py b/vunit/sim_if/vsim_simulator_mixin.py index f5eee9a07..1bbb42337 100644 --- a/vunit/sim_if/vsim_simulator_mixin.py +++ b/vunit/sim_if/vsim_simulator_mixin.py @@ -11,7 +11,7 @@ import sys import os -from os.path import join, dirname, abspath +from pathlib import Path from ..ostools import write_file, Process from ..test.suites import get_result_file_name from ..persistent_tcl_shell import PersistentTclShell @@ -25,7 +25,7 @@ class VsimSimulatorMixin(object): def __init__(self, prefix, persistent, sim_cfg_file_name): self._prefix = prefix - sim_cfg_file_name = abspath(sim_cfg_file_name) + sim_cfg_file_name = str(Path(sim_cfg_file_name).resolve()) self._sim_cfg_file_name = sim_cfg_file_name prefix = ( @@ -36,14 +36,14 @@ def __init__(self, prefix, persistent, sim_cfg_file_name): def create_process(ident): return Process( [ - join(prefix, "vsim"), + str(Path(prefix) / "vsim"), "-c", "-l", - join(dirname(sim_cfg_file_name), "transcript%i" % ident), + str(Path(sim_cfg_file_name).parent / ("transcript%i" % ident)), "-do", - abspath(join(dirname(__file__), "tcl_read_eval_loop.tcl")), + str((Path(__file__).parent / "tcl_read_eval_loop.tcl").resolve()), ], - cwd=dirname(sim_cfg_file_name), + cwd=str(Path(sim_cfg_file_name).parent), env=env, ) @@ -54,7 +54,7 @@ def create_process(ident): @staticmethod def _create_restart_function(): - """" + """ " Create the vunit_restart function to recompile and restart the simulation This function is quite complicated to work around limitations @@ -91,7 +91,7 @@ def _create_restart_function(): "stdout=sys.stdout, " "stderr=sys.stdout))" ) - % (recompile_command, abspath(os.getcwd())), + % (recompile_command, str(Path(os.getcwd()).resolve())), ] recompile_command_eval_tcl = " ".join( ["{%s}" % part for part in recompile_command_eval] @@ -254,8 +254,8 @@ def _source_tcl_file(file_name, config, message): } """ tcl = template % ( - fix_path(abspath(config.tb_path)), - fix_path(abspath(file_name)), + fix_path(str(Path(config.tb_path).resolve())), + fix_path(str(Path(file_name).resolve())), message, ) return tcl @@ -279,15 +279,15 @@ def _run_batch_file(self, batch_file_name, gui=False): try: args = [ - join(self._prefix, "vsim"), + str(Path(self._prefix) / "vsim"), "-gui" if gui else "-c", "-l", - join(dirname(batch_file_name), "transcript"), + str(Path(batch_file_name).parent / "transcript"), "-do", 'source "%s"' % fix_path(batch_file_name), ] - proc = Process(args, cwd=dirname(self._sim_cfg_file_name)) + proc = Process(args, cwd=str(Path(self._sim_cfg_file_name).parent)) proc.consume_output() except Process.NonZeroExitCode: return False @@ -316,30 +316,33 @@ def simulate(self, output_path, test_suite_name, config, elaborate_only): """ Run a test bench """ - script_path = join(output_path, self.name) + script_path = Path(output_path) / self.name - common_file_name = join(script_path, "common.do") - gui_file_name = join(script_path, "gui.do") - batch_file_name = join(script_path, "batch.do") + common_file_name = script_path / "common.do" + gui_file_name = script_path / "gui.do" + batch_file_name = script_path / "batch.do" write_file( - common_file_name, + str(common_file_name), self._create_common_script( test_suite_name, config, script_path, output_path ), ) - write_file(gui_file_name, self._create_gui_script(common_file_name, config)) write_file( - batch_file_name, self._create_batch_script(common_file_name, elaborate_only) + str(gui_file_name), self._create_gui_script(str(common_file_name), config) + ) 
+ write_file( + str(batch_file_name), + self._create_batch_script(str(common_file_name), elaborate_only), ) if self._gui: - return self._run_batch_file(gui_file_name, gui=True) + return self._run_batch_file(str(gui_file_name), gui=True) if self._persistent_shell is not None: - return self._run_persistent(common_file_name, load_only=elaborate_only) + return self._run_persistent(str(common_file_name), load_only=elaborate_only) - return self._run_batch_file(batch_file_name) + return self._run_batch_file(str(batch_file_name)) def fix_path(path): diff --git a/vunit/source_file.py b/vunit/source_file.py index 4dc763ba8..df5bac350 100644 --- a/vunit/source_file.py +++ b/vunit/source_file.py @@ -9,7 +9,6 @@ """ from pathlib import Path from typing import Union -from os.path import splitext import logging from copy import copy import traceback @@ -186,8 +185,8 @@ def parse(self, parser, database, include_dirs): for instance_name in design_file.instances: self.module_dependencies.append(instance_name) - except KeyboardInterrupt: - raise KeyboardInterrupt + except KeyboardInterrupt as exk: + raise KeyboardInterrupt from exk except: # pylint: disable=bare-except traceback.print_exc() LOGGER.error("Failed to parse %s", self.name) @@ -223,8 +222,8 @@ def __init__( # pylint: disable=too-many-arguments try: design_file = vhdl_parser.parse(self.name) - except KeyboardInterrupt: - raise KeyboardInterrupt + except KeyboardInterrupt as exk: + raise KeyboardInterrupt from exk except: # pylint: disable=bare-except traceback.print_exc() LOGGER.error("Failed to parse %s", self.name) @@ -369,7 +368,7 @@ def file_type_of(file_name): """ Return the file type of file_name based on the file ending """ - _, ext = splitext(file_name) + ext = str(Path(file_name).suffix) if ext.lower() in VHDL_EXTENSIONS: return "vhdl" diff --git a/vunit/test/bench.py b/vunit/test/bench.py index f3035ef92..d411c60ea 100644 --- a/vunit/test/bench.py +++ b/vunit/test/bench.py @@ -9,7 +9,7 @@ """ import logging -from os.path import basename +from pathlib import Path import re import bisect import collections @@ -98,7 +98,7 @@ def _check_architectures(design_unit): % ( design_unit.name, ", ".join( - "%s:%s" % (name, basename(fname)) + "%s:%s" % (name, str(Path(fname).name)) for name, fname in sorted( design_unit.architecture_names.items() ) diff --git a/vunit/test/report.py b/vunit/test/report.py index 9efb026e0..19900965e 100644 --- a/vunit/test/report.py +++ b/vunit/test/report.py @@ -13,7 +13,7 @@ import os import socket import re -from os.path import dirname +from pathlib import Path from vunit.color_printer import COLOR_PRINTER from vunit.ostools import read_file @@ -325,5 +325,5 @@ def to_dict(self): return { "status": self._status.name, "time": self.time, - "path": dirname(self._output_file_name), + "path": str(Path(self._output_file_name).parent), } diff --git a/vunit/test/runner.py b/vunit/test/runner.py index 5ab718e0a..1fb87267a 100644 --- a/vunit/test/runner.py +++ b/vunit/test/runner.py @@ -9,7 +9,7 @@ """ import os -from os.path import join, exists, abspath, basename, relpath +from pathlib import Path import traceback import threading import sys @@ -77,7 +77,7 @@ def run(self, test_suites): Run a list of test suites """ - if not exists(self._output_path): + if not Path(self._output_path).exists(): os.makedirs(self._output_path) self._create_test_mapping_file(test_suites) @@ -145,12 +145,12 @@ def _run_thread(self, write_stdout, scheduler, num_tests, is_main): test_suite = scheduler.next() output_path = 
create_output_path(self._output_path, test_suite.name) - output_file_name = join(output_path, "output.txt") + output_file_name = str(Path(output_path) / "output.txt") with self._stdout_lock(): for test_name in test_suite.test_names: print("Starting %s" % test_name) - print("Output file: %s" % relpath(output_file_name)) + print("Output file: %s" % output_file_name) self._run_test_suite( test_suite, write_stdout, num_tests, output_path, output_file_name @@ -187,7 +187,7 @@ def _run_test_suite( """ Run the actual test suite """ - color_output_file_name = join(output_path, "output_with_color.txt") + color_output_file_name = str(Path(output_path) / "output_with_color.txt") output_file = None color_output_file = None @@ -219,11 +219,11 @@ def read_output(): return contents results = test_suite.run(output_path=output_path, read_output=read_output) - except KeyboardInterrupt: + except KeyboardInterrupt as exk: self._add_skipped_tests( test_suite, results, start_time, num_tests, output_file_name ) - raise KeyboardInterrupt + raise KeyboardInterrupt from exk except: # pylint: disable=bare-except if self._dont_catch_exceptions: raise @@ -263,11 +263,13 @@ def _create_test_mapping_file(self, test_suites): Create a file mapping test name to test output folder. This is to allow the user to find the test output folder when it is hashed """ - mapping_file_name = join(self._output_path, "test_name_to_path_mapping.txt") + mapping_file_name = str( + Path(self._output_path) / "test_name_to_path_mapping.txt" + ) # Load old mapping to remember non-deleted test folders as well # even when re-running only a single test case - if exists(mapping_file_name): + if Path(mapping_file_name).exists(): with open(mapping_file_name, "r") as fptr: mapping = set(fptr.read().splitlines()) else: @@ -275,7 +277,7 @@ def _create_test_mapping_file(self, test_suites): for test_suite in test_suites: test_output = create_output_path(self._output_path, test_suite.name) - mapping.add("%s %s" % (basename(test_output), test_suite.name)) + mapping.add("%s %s" % (Path(test_output).name, test_suite.name)) # Sort by everything except hash mapping = sorted(mapping, key=lambda value: value[value.index(" ") :]) @@ -288,7 +290,7 @@ def _print_output(self, output_file_name): """ Print contents of output file if it exists """ - with open(output_file_name, "r") as fread: + with Path(output_file_name).open("r") as fread: for line in fread.readlines(): self._stdout_ansi.write(line) @@ -433,7 +435,7 @@ def create_output_path(output_path, test_suite_name): Create the full output path of a test case. Ensure no bad characters and no long path names. """ - output_path = abspath(output_path) + output_path = str(Path(output_path).resolve()) safe_name = ( "".join(char if _is_legal(char) else "_" for char in test_suite_name) + "_" ) @@ -454,7 +456,7 @@ def create_output_path(output_path, test_suite_name): else: full_name = safe_name + hash_name - return join(output_path, full_name) + return str(Path(output_path) / full_name) def wrap(file_obj, use_color=True): diff --git a/vunit/test/suites.py b/vunit/test/suites.py index 846ff06af..e7d069937 100644 --- a/vunit/test/suites.py +++ b/vunit/test/suites.py @@ -8,7 +8,7 @@ Contains different kinds of test suites """ -from os.path import join +from pathlib import Path from .. 
import ostools from .report import PASSED, SKIPPED, FAILED @@ -345,4 +345,4 @@ def _full_name(test_suite_name, test_case_name): def get_result_file_name(output_path): - return join(output_path, "vunit_results") + return str(Path(output_path) / "vunit_results") diff --git a/vunit/ui/__init__.py b/vunit/ui/__init__.py index 6e958ebcb..61f2d1ac9 100644 --- a/vunit/ui/__init__.py +++ b/vunit/ui/__init__.py @@ -16,8 +16,8 @@ import logging import json import os -from typing import Optional, Set -from os.path import exists, abspath, join, basename, normpath, dirname +from typing import Optional, Set, Union +from pathlib import Path from fnmatch import fnmatch from ..database import PickledDataBase, DataBase from .. import ostools @@ -58,7 +58,10 @@ class VUnit( # pylint: disable=too-many-instance-attributes, too-many-public-me @classmethod def from_argv( - cls, argv=None, compile_builtins=True, vhdl_standard: Optional[str] = None + cls, + argv=None, + compile_builtins: Optional[bool] = True, + vhdl_standard: Optional[str] = None, ): """ Create VUnit instance from command line arguments. @@ -84,7 +87,10 @@ def from_argv( @classmethod def from_args( - cls, args, compile_builtins=True, vhdl_standard: Optional[str] = None + cls, + args, + compile_builtins: Optional[bool] = True, + vhdl_standard: Optional[str] = None, ): """ Create VUnit instance from args namespace. @@ -102,11 +108,14 @@ def from_args( return cls(args, compile_builtins=compile_builtins, vhdl_standard=vhdl_standard) def __init__( - self, args, compile_builtins=True, vhdl_standard: Optional[str] = None + self, + args, + compile_builtins: Optional[bool] = True, + vhdl_standard: Optional[str] = None, ): self._args = args self._configure_logging(args.log_level) - self._output_path = abspath(args.output_path) + self._output_path = str(Path(args.output_path).resolve()) if args.no_color: self._printer = NO_COLOR_PRINTER @@ -135,10 +144,12 @@ def test_filter(name, attribute_names): # Use default simulator options if no simulator was present if self._simulator_class is None: simulator_class = SimulatorInterface - self._simulator_output_path = join(self._output_path, "none") + self._simulator_output_path = str(Path(self._output_path) / "none") else: simulator_class = self._simulator_class - self._simulator_output_path = join(self._output_path, simulator_class.name) + self._simulator_output_path = str( + Path(self._output_path) / simulator_class.name + ) self._create_output_path(args.clean) @@ -161,7 +172,7 @@ def _create_database(self): Check for Python version used to create the database is the same as the running python instance or re-create """ - project_database_file_name = join(self._output_path, "project_database") + project_database_file_name = str(Path(self._output_path) / "project_database") create_new = False key = b"version" version = str((9, sys.version)).encode() @@ -169,8 +180,8 @@ def _create_database(self): try: database = DataBase(project_database_file_name) create_new = (key not in database) or (database[key] != version) - except KeyboardInterrupt: - raise KeyboardInterrupt + except KeyboardInterrupt as exk: + raise KeyboardInterrupt from exk except: # pylint: disable=bare-except traceback.print_exc() create_new = True @@ -202,7 +213,7 @@ def _which_vhdl_standard(self, vhdl_standard: Optional[str]) -> VHDLStandard: return VHDL.standard(vhdl_standard) def add_external_library( - self, library_name, path, vhdl_standard: Optional[str] = None + self, library_name, path: Union[str, Path], vhdl_standard: Optional[str] = None ): """ 
Add an externally compiled library as a black-box @@ -223,14 +234,14 @@ def add_external_library( self._project.add_library( library_name, - abspath(path), + Path(path).resolve(), self._which_vhdl_standard(vhdl_standard), is_external=True, ) return self.library(library_name) def add_source_files_from_csv( - self, project_csv_path, vhdl_standard: Optional[str] = None + self, project_csv_path: Union[str, Path], vhdl_standard: Optional[str] = None ): """ Add a project configuration, mapping all the libraries and files @@ -247,14 +258,14 @@ def add_source_files_from_csv( libs: Set[str] = set() files = SourceFileList(list()) - with open(project_csv_path) as csv_path_file: + ppath = Path(project_csv_path) + + with ppath.open() as csv_path_file: for row in csv.reader(csv_path_file): if len(row) == 2: lib_name = row[0].strip() no_normalized_file = row[1].strip() - file_name_ = normpath( - join(dirname(project_csv_path), no_normalized_file) - ) + file_name_ = str((ppath.parent / no_normalized_file).resolve()) lib = ( self.library(lib_name) if lib_name in libs @@ -270,7 +281,10 @@ def add_source_files_from_csv( return files def add_library( - self, library_name, vhdl_standard: Optional[str] = None, allow_duplicate=False + self, + library_name: str, + vhdl_standard: Optional[str] = None, + allow_duplicate: Optional[bool] = False, ): """ Add a library managed by VUnit. @@ -291,10 +305,9 @@ def add_library( """ standard = self._which_vhdl_standard(vhdl_standard) - - path = join(self._simulator_output_path, "libraries", library_name) + path = Path(self._simulator_output_path) / "libraries" / library_name if not self._project.has_library(library_name): - self._project.add_library(library_name, abspath(path), standard) + self._project.add_library(library_name, str(path.resolve()), standard) elif not allow_duplicate: raise ValueError( "Library %s already added. Use allow_duplicate to ignore this error." @@ -302,7 +315,7 @@ def add_library( ) return self.library(library_name) - def library(self, library_name): + def library(self, library_name: str): """ Get a library @@ -313,7 +326,7 @@ def library(self, library_name): raise KeyError(library_name) return Library(library_name, self, self._project, self._test_bench_list) - def set_attribute(self, name, value, allow_empty=False): + def set_attribute(self, name: str, value: str, allow_empty: Optional[bool] = False): """ Set a value of attribute in all |configurations| @@ -336,7 +349,7 @@ def set_attribute(self, name, value, allow_empty=False): ): test_bench.set_attribute(name, value) - def set_generic(self, name, value, allow_empty=False): + def set_generic(self, name: str, value: str, allow_empty: Optional[bool] = False): """ Set a value of generic in all |configurations| @@ -359,7 +372,7 @@ def set_generic(self, name, value, allow_empty=False): ): test_bench.set_generic(name.lower(), value) - def set_parameter(self, name, value, allow_empty=False): + def set_parameter(self, name: str, value: str, allow_empty: Optional[bool] = False): """ Set value of parameter in all |configurations| @@ -382,7 +395,13 @@ def set_parameter(self, name, value, allow_empty=False): ): test_bench.set_generic(name, value) - def set_sim_option(self, name, value, allow_empty=False, overwrite=True): + def set_sim_option( + self, + name: str, + value: str, + allow_empty: Optional[bool] = False, + overwrite: Optional[bool] = True, + ): """ Set simulation option in all |configurations| @@ -395,7 +414,7 @@ def set_sim_option(self, name, value, allow_empty=False, overwrite=True): .. 
code-block:: python - prj.set_sim_option("ghdl.flags", ["--no-vital-checks"]) + prj.set_sim_option("ghdl.a_flags", ["--no-vital-checks"]) .. note:: Only affects test benches added *before* the option is set. @@ -406,7 +425,9 @@ def set_sim_option(self, name, value, allow_empty=False, overwrite=True): ): test_bench.set_sim_option(name, value, overwrite) - def set_compile_option(self, name, value, allow_empty=False): + def set_compile_option( + self, name: str, value: str, allow_empty: Optional[bool] = False + ): """ Set compile option of all files @@ -418,7 +439,7 @@ def set_compile_option(self, name, value, allow_empty=False): .. code-block:: python - prj.set_compile_option("ghdl.flags", ["--no-vital-checks"]) + prj.set_compile_option("ghdl.a_flags", ["--no-vital-checks"]) .. note:: @@ -430,7 +451,9 @@ def set_compile_option(self, name, value, allow_empty=False): ): source_file.set_compile_option(name, value) - def add_compile_option(self, name, value, allow_empty=False): + def add_compile_option( + self, name: str, value: str, allow_empty: Optional[bool] = False + ): """ Add compile option to all files @@ -447,7 +470,9 @@ def add_compile_option(self, name, value, allow_empty=False): ): source_file.add_compile_option(name, value) - def get_source_file(self, file_name, library_name=None): + def get_source_file( + self, file_name: Union[str, Path], library_name: Optional[str] = None + ): """ Get a source file @@ -456,22 +481,29 @@ def get_source_file(self, file_name, library_name=None): :returns: A :class:`.SourceFile` object """ - files = self.get_source_files(file_name, library_name, allow_empty=True) + fstr = str(file_name) + + files = self.get_source_files(fstr, library_name, allow_empty=True) if len(files) > 1: raise ValueError( "Found file named '%s' in multiple-libraries, " - "add explicit library_name." % file_name + "add explicit library_name." 
% fstr ) if not files: if library_name is None: - raise ValueError("Found no file named '%s'" % file_name) + raise ValueError("Found no file named '%s'" % fstr) raise ValueError( - "Found no file named '%s' in library '%s'" % (file_name, library_name) + "Found no file named '%s' in library '%s'" % (fstr, library_name) ) return files[0] - def get_source_files(self, pattern="*", library_name=None, allow_empty=False): + def get_source_files( + self, + pattern="*", + library_name: Optional[str] = None, + allow_empty: Optional[bool] = False, + ): """ Get a list of source files @@ -487,7 +519,7 @@ def get_source_files(self, pattern="*", library_name=None, allow_empty=False): continue if not ( - fnmatch(abspath(source_file.name), pattern) + fnmatch(str(Path(source_file.name).resolve()), pattern) or fnmatch(ostools.simplify_path(source_file.name), pattern) ): continue @@ -508,13 +540,13 @@ def get_source_files(self, pattern="*", library_name=None, allow_empty=False): def add_source_files( # pylint: disable=too-many-arguments self, pattern, - library_name, + library_name: str, preprocessors=None, include_dirs=None, defines=None, - allow_empty=False, + allow_empty: Optional[bool] = False, vhdl_standard: Optional[str] = None, - no_parse=False, + no_parse: Optional[bool] = False, file_type=None, ): """ @@ -552,13 +584,13 @@ def add_source_files( # pylint: disable=too-many-arguments def add_source_file( # pylint: disable=too-many-arguments self, - file_name, - library_name, + file_name: Union[str, Path], + library_name: str, preprocessors=None, include_dirs=None, defines=None, vhdl_standard: Optional[str] = None, - no_parse=False, + no_parse: Optional[bool] = False, file_type=None, ): """ @@ -583,7 +615,7 @@ def add_source_file( # pylint: disable=too-many-arguments """ return self.library(library_name).add_source_file( - file_name=file_name, + file_name=str(file_name), preprocessors=preprocessors, include_dirs=include_dirs, defines=defines, @@ -592,7 +624,9 @@ def add_source_file( # pylint: disable=too-many-arguments file_type=file_type, ) - def _preprocess(self, library_name, file_name, preprocessors): + def _preprocess( + self, library_name: str, file_name: Union[str, Path], preprocessors + ): """ Preprocess file_name within library_name using explicit preprocessors if preprocessors is None then use implicit globally defined processors @@ -604,33 +638,35 @@ def _preprocess(self, library_name, file_name, preprocessors): preprocessors = [p for p in preprocessors if p is not None] preprocessors = self._external_preprocessors + preprocessors + fstr = str(file_name) + if not preprocessors: - return file_name + return fstr + + fname = str(Path(file_name).name) try: code = ostools.read_file(file_name, encoding=HDL_FILE_ENCODING) for preprocessor in preprocessors: - code = preprocessor.run(code, basename(file_name)) - except KeyboardInterrupt: - raise KeyboardInterrupt + code = preprocessor.run(code, fname) + except KeyboardInterrupt as exk: + raise KeyboardInterrupt from exk except: # pylint: disable=bare-except traceback.print_exc() - LOGGER.error("Failed to preprocess %s", file_name) - return file_name + LOGGER.error("Failed to preprocess %s", fstr) + return fstr else: - pp_file_name = join( - self._preprocessed_path, library_name, basename(file_name) - ) + pp_file_name = str(Path(self._preprocessed_path) / library_name / fname) idx = 1 while ostools.file_exists(pp_file_name): LOGGER.debug( "Preprocessed file exists '%s', adding prefix", pp_file_name ) - pp_file_name = join( - self._preprocessed_path, - 
library_name, - "%i_%s" % (idx, basename(file_name)), + pp_file_name = str( + Path(self._preprocessed_path) + / library_name + / ("%i_%s" % (idx, fname)), ) idx += 1 @@ -705,7 +741,7 @@ def main(self, post_run=None): sys.exit(0) - def _create_tests(self, simulator_if): + def _create_tests(self, simulator_if: Union[None, SimulatorInterface]): """ Create the test cases """ @@ -749,7 +785,7 @@ def _create_simulator_if(self): ) sys.exit(1) - if not exists(self._simulator_output_path): + if not Path(self._simulator_output_path).exists(): os.makedirs(self._simulator_output_path) return self._simulator_class.from_args( @@ -800,7 +836,9 @@ def _main_list_only(self): print("Listed %i tests" % test_list.num_tests) return True - def _main_export_json(self, json_file_name): # pylint: disable=too-many-locals + def _main_export_json( + self, json_file_name: Union[str, Path] + ): # pylint: disable=too-many-locals """ Main function when exporting to JSON """ @@ -810,7 +848,7 @@ def _main_export_json(self, json_file_name): # pylint: disable=too-many-locals for source_file in file_objects: files.append( dict( - file_name=abspath(source_file.name), + file_name=str(Path(source_file.name).resolve()), library_name=source_file.library.name, ) ) @@ -833,7 +871,7 @@ def _main_export_json(self, json_file_name): # pylint: disable=too-many-locals dict( name=name, location=dict( - file_name=info.location.file_name, + file_name=str(info.location.file_name), offset=info.location.offset, length=info.location.length, ), @@ -850,7 +888,7 @@ def _main_export_json(self, json_file_name): # pylint: disable=too-many-locals tests=tests, ) - with open(json_file_name, "w") as fptr: + with Path(json_file_name).open("w") as fptr: json.dump(json_data, fptr, sort_keys=True, indent=4, separators=(",", ": ")) return True @@ -873,13 +911,13 @@ def _main_compile_only(self): self._compile(simulator_if) return True - def _create_output_path(self, clean): + def _create_output_path(self, clean: bool): """ Create or re-create the output path if necessary """ if clean: ostools.renew_path(self._output_path) - elif not exists(self._output_path): + elif not Path(self._output_path).exists(): os.makedirs(self._output_path) ostools.renew_path(self._preprocessed_path) @@ -890,13 +928,13 @@ def vhdl_standard(self) -> str: @property def _preprocessed_path(self): - return join(self._output_path, "preprocessed") + return str(Path(self._output_path) / "preprocessed") @property def codecs_path(self): - return join(self._output_path, "codecs") + return str(Path(self._output_path) / "codecs") - def _compile(self, simulator_if): + def _compile(self, simulator_if: SimulatorInterface): """ Compile entire project """ @@ -913,7 +951,7 @@ def _compile(self, simulator_if): target_files=target_files, ) - def _get_testbench_files(self, simulator_if): + def _get_testbench_files(self, simulator_if: Union[None, SimulatorInterface]): """ Return the list of all test bench files for the currently selected tests to run """ @@ -940,7 +978,7 @@ def _run_test(self, test_cases, report): runner = TestRunner( report, - join(self._output_path, TEST_OUTPUT_PATH), + str(Path(self._output_path) / TEST_OUTPUT_PATH), verbosity=verbosity, num_threads=self._args.num_threads, fail_fast=self._args.fail_fast, @@ -953,9 +991,11 @@ def add_builtins(self, external=None): """ Add vunit VHDL builtin libraries - :param external: struct to select whether to enable external models for 'string' and/or 'integer' vectors. - {'string': , 'integer': }. 
Allowed values are: None, False/True or - ['path/to/custom/file']. + :param external: struct to provide bridges for the external VHDL API. + { + 'string': ['path/to/custom/file'], + 'integer': ['path/to/custom/file'] + }. """ self._builtins.add_vhdl_builtins(external=external) @@ -1059,3 +1099,13 @@ def get_simulator_name(self): if self._simulator_class is None: return None return self._simulator_class.name + + def simulator_supports_coverage(self): + """ + Returns True when the simulator supports coverage + + Will return None if no simulator was found. + """ + if self._simulator_class is None: + return None + return self._simulator_class.supports_coverage() diff --git a/vunit/ui/common.py b/vunit/ui/common.py index becad3489..24238e2a4 100644 --- a/vunit/ui/common.py +++ b/vunit/ui/common.py @@ -8,12 +8,14 @@ UI common functions """ +from pathlib import Path +from glob import glob from os import environ from logging import getLogger -from typing import Optional +from typing import Optional, List +from ..sim_if import is_string_not_iterable from ..vhdl_standard import VHDL, VHDLStandard - LOGGER = getLogger(__name__) TEST_OUTPUT_PATH = "test_output" @@ -49,3 +51,27 @@ def check_not_empty(lst, allow_empty, error_msg): if (not allow_empty) and (not lst): raise ValueError(error_msg + ". Use allow_empty=True to avoid exception.") return lst + + +def get_checked_file_names_from_globs(pattern, allow_empty): + """ + Get file names from globs and check that exist + """ + if is_string_not_iterable(pattern): + patterns = [pattern] + elif isinstance(pattern, Path): + patterns = [str(pattern)] + else: + patterns = pattern + + file_names: List[str] = [] + for pattern_instance in patterns: + new_file_names = glob(str(pattern_instance)) + check_not_empty( + new_file_names, + allow_empty, + "Pattern %r did not match any file" % pattern_instance, + ) + file_names += new_file_names + + return file_names diff --git a/vunit/ui/library.py b/vunit/ui/library.py index 360bc1875..e90ac9274 100644 --- a/vunit/ui/library.py +++ b/vunit/ui/library.py @@ -9,15 +9,13 @@ """ from pathlib import Path -from glob import glob from fnmatch import fnmatch -from typing import Optional, List +from typing import Optional from ..vhdl_standard import VHDL, VHDLStandard from ..project import Project -from ..sim_if import is_string_not_iterable from ..source_file import file_type_of, FILE_TYPES, VERILOG_FILE_TYPES from ..builtins import add_verilog_include_dir -from .common import check_not_empty +from .common import check_not_empty, get_checked_file_names_from_globs from .source import SourceFile, SourceFileList from .testbench import TestBench from .packagefacade import PackageFacade @@ -94,7 +92,7 @@ def set_sim_option(self, name, value, allow_empty=False, overwrite=True): .. code-block:: python - lib.set_sim_option("ghdl.flags", ["--no-vital-checks"]) + lib.set_sim_option("ghdl.a_flags", ["--no-vital-checks"]) .. note:: Only affects test benches added *before* the option is set. @@ -114,7 +112,7 @@ def set_compile_option(self, name, value, allow_empty=False): .. code-block:: python - lib.set_compile_option("ghdl.flags", ["--no-vital-checks"]) + lib.set_compile_option("ghdl.a_flags", ["--no-vital-checks"]) .. 
note:: @@ -188,23 +186,6 @@ def add_source_files( # pylint: disable=too-many-arguments library.add_source_files("*.vhd") """ - if is_string_not_iterable(pattern): - patterns = [pattern] - elif isinstance(pattern, Path): - patterns = [str(pattern)] - else: - patterns = pattern - - file_names: List[str] = [] - for pattern_instance in patterns: - new_file_names = glob(str(pattern_instance)) - check_not_empty( - new_file_names, - allow_empty, - "Pattern %r did not match any file" % pattern_instance, - ) - file_names += new_file_names - return SourceFileList( source_files=[ self.add_source_file( @@ -216,7 +197,7 @@ def add_source_files( # pylint: disable=too-many-arguments no_parse=no_parse, file_type=file_type, ) - for file_name in file_names + for file_name in get_checked_file_names_from_globs(pattern, allow_empty) ] ) diff --git a/vunit/ui/packagefacade.py b/vunit/ui/packagefacade.py index 379e777b1..9637beb65 100644 --- a/vunit/ui/packagefacade.py +++ b/vunit/ui/packagefacade.py @@ -8,7 +8,7 @@ UI class PackageFacade """ -from os.path import join, splitext +from pathlib import Path from ..com import codec_generator @@ -33,9 +33,9 @@ def generate_codecs( codec_package_name = self._package_name + "_codecs" if output_file_name is None: - codecs_path = join(self._parent.codecs_path, self._library_name) - file_extension = splitext(self._design_unit.source_file.name)[1] - output_file_name = join(codecs_path, codec_package_name + file_extension) + codecs_path = Path(self._parent.codecs_path) / self._library_name + file_extension = Path(self._design_unit.source_file.name).suffix + output_file_name = codecs_path / (codec_package_name + file_extension) codec_generator.generate_codecs( self._design_unit, codec_package_name, used_packages, output_file_name diff --git a/vunit/ui/results.py b/vunit/ui/results.py index eb3057ca4..9e63322ce 100644 --- a/vunit/ui/results.py +++ b/vunit/ui/results.py @@ -8,7 +8,8 @@ UI class Results """ -from os.path import join, basename, normpath +from pathlib import Path +from typing import Dict, Union from .common import TEST_OUTPUT_PATH @@ -45,7 +46,7 @@ def get_report(self): report.tests.update( { test.name: TestResult( - join(self._output_path, TEST_OUTPUT_PATH), + Path(self._output_path) / TEST_OUTPUT_PATH, obj["status"], obj["time"], obj["path"], @@ -63,9 +64,9 @@ class Report(object): :data tests: Dictionary of :class:`TestResult` objects """ - def __init__(self, output_path): - self.output_path = output_path - self.tests = {} + def __init__(self, output_path: Union[str, Path]): + self.output_path = Path(output_path) + self.tests: Dict[str, TestResult] = {} class TestResult(object): @@ -93,20 +94,22 @@ def post_func(results): vu.main(post_run=post_func) """ - def __init__(self, test_output_path, status, time, path): - self._test_output_path = test_output_path + def __init__( + self, test_output_path: Union[str, Path], status, time, path: Union[str, Path] + ): + self._test_output_path = Path(test_output_path) self.status = status self.time = time - self.path = path + self.path = Path(path) @property - def relpath(self): + def relpath(self) -> str: """ If the path is a subdir to the default TEST_OUTPUT_PATH, return the subdir only """ - base = basename(self.path) - return ( + base = self.path.name + return str( base - if normpath(join(self._test_output_path, base)) == normpath(self.path) + if (self._test_output_path / base).resolve() == self.path.resolve() else self.path ) diff --git a/vunit/ui/source.py b/vunit/ui/source.py index 874542fe7..d3c2c5353 100644 --- 
a/vunit/ui/source.py +++ b/vunit/ui/source.py @@ -30,7 +30,7 @@ def set_compile_option(self, name, value): .. code-block:: python - files.set_compile_option("ghdl.flags", ["--no-vital-checks"]) + files.set_compile_option("ghdl.a_flags", ["--no-vital-checks"]) """ for source_file in self: source_file.set_compile_option(name, value) @@ -108,7 +108,7 @@ def set_compile_option(self, name, value): .. code-block:: python - my_file.set_compile_option("ghdl.flags", ["--no-vital-checks"]) + my_file.set_compile_option("ghdl.a_flags", ["--no-vital-checks"]) """ self._source_file.set_compile_option(name, value) diff --git a/vunit/ui/test.py b/vunit/ui/test.py index 510bb916c..805b2030b 100644 --- a/vunit/ui/test.py +++ b/vunit/ui/test.py @@ -148,7 +148,7 @@ def set_sim_option(self, name, value, overwrite=True): .. code-block:: python - test.set_sim_option("ghdl.flags", ["--no-vital-checks"]) + test.set_sim_option("ghdl.a_flags", ["--no-vital-checks"]) """ self._test_case.set_sim_option(name, value, overwrite) diff --git a/vunit/ui/testbench.py b/vunit/ui/testbench.py index e804cdeb7..3eb069e11 100644 --- a/vunit/ui/testbench.py +++ b/vunit/ui/testbench.py @@ -98,7 +98,7 @@ def set_sim_option(self, name, value, overwrite=True): .. code-block:: python - test_bench.set_sim_option("ghdl.flags", ["--no-vital-checks"]) + test_bench.set_sim_option("ghdl.a_flags", ["--no-vital-checks"]) """ self._test_bench.set_sim_option(name, value, overwrite) diff --git a/vunit/verilog/include/vunit_defines.svh b/vunit/verilog/include/vunit_defines.svh index 3e807c0b9..1ea158eff 100644 --- a/vunit/verilog/include/vunit_defines.svh +++ b/vunit/verilog/include/vunit_defines.svh @@ -2,7 +2,7 @@ // License, v. 2.0. If a copy of the MPL was not distributed with this file, // You can obtain one at http://mozilla.org/MPL/2.0/. // -// Copyright (c) 2015-2016, Lars Asplund lars.anders.asplund@gmail.com +// Copyright (c) 2015-2020, Lars Asplund lars.anders.asplund@gmail.com `define WATCHDOG(runtime) \ initial begin \ @@ -34,7 +34,7 @@ arg_str = arg_str.substr(i, arg_str.len()-1); \ break; \ end \ - end + end `define CREATE_MSG(full_msg,func_name,got,expected,prefix,msg=__none__) \ string __none__; \ string got_str; \ @@ -45,7 +45,7 @@ expected_str ="";\ `CREATE_ARG_STRING(got, got_str); \ `CREATE_ARG_STRING(expected, expected_str); \ - full_msg = {func_name, " failed! Got ",`"got`", "=", got_str, " expected ", prefix, expected_str, ". ", msg}; + full_msg = {func_name, " failed! Got ",`"got`", "=", got_str, " expected ", prefix, expected_str, ". 
", msg}; `define CHECK_EQUAL(got,expected,msg=__none__) \ assert ((got) === (expected)) else \ begin \ diff --git a/vunit/vhdl/JSON-for-VHDL b/vunit/vhdl/JSON-for-VHDL index 80100fd6d..c8a6f517a 160000 --- a/vunit/vhdl/JSON-for-VHDL +++ b/vunit/vhdl/JSON-for-VHDL @@ -1 +1 @@ -Subproject commit 80100fd6dd8c0cf27c7356391f6cbfb5efcddd42 +Subproject commit c8a6f517aabf66ce3089669cc8e98852921e268b diff --git a/vunit/vhdl/check/src/check.vhd b/vunit/vhdl/check/src/check.vhd index de4987c25..8b99ce414 100644 --- a/vunit/vhdl/check/src/check.vhd +++ b/vunit/vhdl/check/src/check.vhd @@ -132,8 +132,8 @@ package body check_pkg is end if; end; - function to_ordinal_number (num : natural) return string is - constant num_str : string := natural'image(num); + function to_ordinal_number (num : unsigned) return string is + constant num_str : string := to_integer_string(num); variable ordinal_unit : string(1 to 2); begin case num_str(num_str'right) is @@ -758,7 +758,7 @@ package body check_pkg is variable state : inout check_stable_fsm_state_t; variable ref : inout std_logic_vector; - variable clock_edge_counter : inout natural; + variable clock_edge_counter : inout unsigned(63 downto 0); variable is_stable : inout boolean; variable exit_stability_check : out boolean) is @@ -779,7 +779,7 @@ package body check_pkg is procedure open_window (variable open_ok : out boolean) is begin - clock_edge_counter := 1; + clock_edge_counter := x"0000000000000001"; ref := to_x01(expr); open_ok := true; if is_x(start_event) then @@ -798,7 +798,7 @@ package body check_pkg is end if; end procedure; - procedure close_window(cycle : natural; is_ok : boolean) is + procedure close_window(cycle : unsigned; is_ok : boolean) is variable close_ok : boolean := is_ok; variable pass_msg_en : boolean; begin @@ -815,7 +815,7 @@ package body check_pkg is passing_check(checker, std_msg("Stability check passed", msg, "Got " & format(ref) & - " for " & positive'image(cycle) & + " for " & to_integer_string(cycle) & " active and enabled clock edges."), line_num, file_name); else @@ -891,7 +891,7 @@ package body check_pkg is variable state : check_stable_fsm_state_t := idle; variable ref : std_logic_vector(expr'range); - variable clock_edge_counter : natural; + variable clock_edge_counter : unsigned(63 downto 0); variable is_stable : boolean := true; variable exit_stability_check : boolean; begin @@ -942,7 +942,7 @@ package body check_pkg is variable state : check_stable_fsm_state_t := idle; variable ref : std_logic_vector(0 to 0); - variable clock_edge_counter : natural; + variable clock_edge_counter : unsigned(63 downto 0); variable is_stable : boolean := true; variable exit_stability_check : boolean; begin @@ -1608,7 +1608,7 @@ package body check_pkg is failing_check(checker, std_msg("Next check failed", msg, "Got " & std_logic'image(expr)(2) & - " at the " & to_ordinal_number(num_cks) & + " at the " & to_ordinal_number(to_unsigned(num_cks, 32)) & " active and enabled clock edge."), level, line_num, file_name); end if; @@ -1625,7 +1625,7 @@ package body check_pkg is failing_check(checker, std_msg("Next check failed", msg, "Got overlapping start event at the " & - to_ordinal_number(clock_cycles_after_start_event) & + to_ordinal_number(to_unsigned(clock_cycles_after_start_event, 32)) & " active and enabled clock edge."), level, line_num, file_name); else @@ -1760,7 +1760,7 @@ package body check_pkg is failing_check(checker, std_msg("Sequence check failed", msg, "Missing required event at " & - to_ordinal_number(i) & + 
to_ordinal_number(to_unsigned(i, 32)) & " active and enabled clock edge."), level, line_num, file_name); elsif i = seq'right then diff --git a/vunit/vhdl/check/tools/generate_check_equal.py b/vunit/vhdl/check/tools/generate_check_equal.py index aeb244305..e8957e385 100644 --- a/vunit/vhdl/check/tools/generate_check_equal.py +++ b/vunit/vhdl/check/tools/generate_check_equal.py @@ -4,7 +4,7 @@ # # Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com -from os.path import join, dirname +from pathlib import Path from string import Template api_template = """ procedure check_equal( @@ -767,14 +767,14 @@ def replace_region(region_name, file_name, new_contents): def main(): - check_api_file_name = join(dirname(__file__), "..", "src", "check_api.vhd") + check_api_file_name = str(Path(__file__).parent.parent / "src" / "check_api.vhd") replace_region("check_equal", check_api_file_name, generate_api()) - check_file_name = join(dirname(__file__), "..", "src", "check.vhd") + check_file_name = str(Path(__file__).parent.parent / "src" / "check.vhd") replace_region("check_equal", check_file_name, generate_impl()) - with open( - join(dirname(__file__), "..", "test", "tb_check_equal.vhd"), "wb" + with (Path(__file__).parent.parent / "test" / "tb_check_equal.vhd").open( + "wb" ) as fptr: fptr.write(generate_test().encode()) diff --git a/vunit/vhdl/check/tools/generate_check_match.py b/vunit/vhdl/check/tools/generate_check_match.py index 4229da772..129755b50 100644 --- a/vunit/vhdl/check/tools/generate_check_match.py +++ b/vunit/vhdl/check/tools/generate_check_match.py @@ -4,7 +4,7 @@ # # Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com -from os.path import join, dirname +from pathlib import Path from string import Template from generate_check_equal import replace_region @@ -448,14 +448,14 @@ def generate_test(): def main(): - check_api_file_name = join(dirname(__file__), "..", "src", "check_api.vhd") + check_api_file_name = str(Path(__file__).parent.parent / "src" / "check_api.vhd") replace_region("check_match", check_api_file_name, generate_api()) - check_file_name = join(dirname(__file__), "..", "src", "check.vhd") + check_file_name = str(Path(__file__).parent.parent / "src" / "check.vhd") replace_region("check_match", check_file_name, generate_impl()) - with open( - join(dirname(__file__), "..", "test", "tb_check_match.vhd"), "wb" + with (Path(__file__).parent.parent / "test" / "tb_check_match.vhd").open( + "wb" ) as fptr: fptr.write(generate_test().encode()) diff --git a/vunit/vhdl/com/run.py b/vunit/vhdl/com/run.py index c1e83b7ec..f55e7ab28 100644 --- a/vunit/vhdl/com/run.py +++ b/vunit/vhdl/com/run.py @@ -4,17 +4,16 @@ # # Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com -from os.path import join, dirname +from pathlib import Path from vunit import VUnit -root = dirname(__file__) +ROOT = Path(__file__).parent -prj = VUnit.from_argv() -prj.add_com() -tb_com_lib = prj.add_library("tb_com_lib") -tb_com_lib.add_source_files(join(root, "test", "*.vhd")) -pkg = tb_com_lib.package("custom_types_pkg") -pkg.generate_codecs( +UI = VUnit.from_argv() +UI.add_com() +TB_COM_LIB = UI.add_library("tb_com_lib") +TB_COM_LIB.add_source_files(ROOT / "test" / "*.vhd") +TB_COM_LIB.package("custom_types_pkg").generate_codecs( codec_package_name="custom_codec_pkg", used_packages=[ "ieee.std_logic_1164", @@ -22,4 +21,5 @@ "tb_com_lib.more_constants_pkg", ], ) -prj.main() + +UI.main() diff --git a/vunit/vhdl/com/src/com_debug_codec_builder.vhd 
b/vunit/vhdl/com/src/com_debug_codec_builder.vhd index dd592e855..c6f23c7bd 100644 --- a/vunit/vhdl/com/src/com_debug_codec_builder.vhd +++ b/vunit/vhdl/com/src/com_debug_codec_builder.vhd @@ -205,7 +205,7 @@ package body com_debug_codec_builder_pkg is return; end if; - elements := new line_vector(0 to max_num_of_elements - 1); + elements := new work.string_ops.line_vector(0 to max_num_of_elements - 1); element_start := grp'left + 1; for i in grp'left + 1 to grp'right loop if length = max_num_of_elements then diff --git a/vunit/vhdl/data_types/src/external/external_integer_vector-body.vhd b/vunit/vhdl/data_types/src/api/external_integer_vector_pkg.vhd similarity index 53% rename from vunit/vhdl/data_types/src/external/external_integer_vector-body.vhd rename to vunit/vhdl/data_types/src/api/external_integer_vector_pkg.vhd index 08f716c48..2675bb92b 100644 --- a/vunit/vhdl/data_types/src/external/external_integer_vector-body.vhd +++ b/vunit/vhdl/data_types/src/api/external_integer_vector_pkg.vhd @@ -4,25 +4,44 @@ -- -- Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com +use work.types_pkg.all; + +package external_integer_vector_pkg is + procedure write_integer ( + id : integer; + i : integer; + v : integer + ); + + impure function read_integer ( + id : integer; + i : integer + ) return integer; + + impure function get_ptr ( + id : integer + ) return extintvec_access_t; +end package; + package body external_integer_vector_pkg is procedure write_integer ( id : integer; i : integer; v : integer - )is begin - assert false report "VHPI write_integer" severity failure; + ) is begin + assert false report "EXTERNAL write_integer" severity failure; end; impure function read_integer ( id : integer; i : integer ) return integer is begin - assert false report "VHPI read_integer" severity failure; + assert false report "EXTERNAL read_integer" severity failure; end; impure function get_ptr ( id : integer ) return extintvec_access_t is begin - assert false report "VHPI get_intvec_ptr" severity failure; + assert false report "EXTERNAL get_intvec_ptr" severity failure; end; end package body; diff --git a/vunit/vhdl/data_types/src/external/external_string-body.vhd b/vunit/vhdl/data_types/src/api/external_string_pkg.vhd similarity index 53% rename from vunit/vhdl/data_types/src/external/external_string-body.vhd rename to vunit/vhdl/data_types/src/api/external_string_pkg.vhd index dff05329b..e0b93a6b1 100644 --- a/vunit/vhdl/data_types/src/external/external_string-body.vhd +++ b/vunit/vhdl/data_types/src/api/external_string_pkg.vhd @@ -4,25 +4,44 @@ -- -- Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com +use work.types_pkg.all; + +package external_string_pkg is + procedure write_char ( + id : integer; + i : integer; + v : character + ); + + impure function read_char ( + id : integer; + i : integer + ) return character; + + impure function get_ptr ( + id : integer + ) return extstring_access_t; +end package; + package body external_string_pkg is procedure write_char ( id : integer; i : integer; v : character - )is begin - assert false report "VHPI write_char" severity failure; + ) is begin + assert false report "EXTERNAL write_char" severity failure; end; impure function read_char ( id : integer; i : integer ) return character is begin - assert false report "VHPI read_char" severity failure; + assert false report "EXTERNAL read_char" severity failure; end; impure function get_ptr ( id : integer ) return extstring_access_t is begin - assert false report "VHPI get_string_ptr" 
severity failure; + assert false report "EXTERNAL get_string_ptr" severity failure; end; end package body; diff --git a/vunit/vhdl/data_types/src/codec-2008p.vhd b/vunit/vhdl/data_types/src/codec-2008p.vhd index 58dd63e95..405edae0f 100644 --- a/vunit/vhdl/data_types/src/codec-2008p.vhd +++ b/vunit/vhdl/data_types/src/codec-2008p.vhd @@ -110,7 +110,8 @@ package body codec_2008p_pkg is function decode ( constant code : string) return boolean_vector is - variable ret_val : boolean_vector(get_range(code)'range) := (others => false); + constant ret_range : range_t := get_range(code); + variable ret_val : boolean_vector(ret_range'range) := (others => false); variable index : positive := code'left; begin decode(code, index, ret_val); @@ -136,7 +137,8 @@ package body codec_2008p_pkg is function decode ( constant code : string) return integer_vector is - variable ret_val : integer_vector(get_range(code)'range) := (others => integer'left); + constant ret_range : range_t := get_range(code); + variable ret_val : integer_vector(ret_range'range) := (others => integer'left); variable index : positive := code'left; begin decode(code, index, ret_val); @@ -162,7 +164,8 @@ package body codec_2008p_pkg is function decode ( constant code : string) return real_vector is - variable ret_val : real_vector(get_range(code)'range) := (others => real'left); + constant ret_range : range_t := get_range(code); + variable ret_val : real_vector(ret_range'range) := (others => real'left); variable index : positive := code'left; begin decode(code, index, ret_val); @@ -188,7 +191,8 @@ package body codec_2008p_pkg is function decode ( constant code : string) return time_vector is - variable ret_val : time_vector(get_range(code)'range) := (others => time'left); + constant ret_range : range_t := get_range(code); + variable ret_val : time_vector(ret_range'range) := (others => time'left); variable index : positive := code'left; begin decode(code, index, ret_val); @@ -206,7 +210,8 @@ package body codec_2008p_pkg is function decode ( constant code : string) return ufixed is - variable ret_val : ufixed(get_range(code)'range); + constant ret_range : range_t := get_range(code); + variable ret_val : ufixed(ret_range'range); variable index : positive := code'left; begin decode(code, index, ret_val); @@ -224,7 +229,8 @@ package body codec_2008p_pkg is function decode ( constant code : string) return sfixed is - variable ret_val : sfixed(get_range(code)'range); + constant ret_range : range_t := get_range(code); + variable ret_val : sfixed(ret_range'range); variable index : positive := code'left; begin decode(code, index, ret_val); @@ -242,7 +248,8 @@ package body codec_2008p_pkg is function decode ( constant code : string) return float is - variable ret_val : float(get_range(code)'range); + constant ret_range : range_t := get_range(code); + variable ret_val : float(ret_range'range); variable index : positive := code'left; begin decode(code, index, ret_val); diff --git a/vunit/vhdl/data_types/src/codec.vhd b/vunit/vhdl/data_types/src/codec.vhd index 2b0e25877..f441b18dd 100644 --- a/vunit/vhdl/data_types/src/codec.vhd +++ b/vunit/vhdl/data_types/src/codec.vhd @@ -517,7 +517,8 @@ package body codec_pkg is function decode ( constant code : string) return string is - variable ret_val : string(get_range(code)'range) := (others => NUL); + constant ret_range : range_t := get_range(code); + variable ret_val : string(ret_range'range) := (others => NUL); variable index : positive := code'left; begin decode(code, index, ret_val); @@ -542,7 +543,8 @@ 
package body codec_pkg is function decode ( constant code : string) return bit_vector is - variable ret_val : bit_vector(get_range(code)'range) := (others => '0'); + constant ret_range : range_t := get_range(code); + variable ret_val : bit_vector(ret_range'range) := (others => '0'); variable index : positive := code'left; begin decode(code, index, ret_val); @@ -560,7 +562,8 @@ package body codec_pkg is function decode ( constant code : string) return std_ulogic_vector is - variable ret_val : std_ulogic_vector(get_range(code)'range) := (others => 'U'); + constant ret_range : range_t := get_range(code); + variable ret_val : std_ulogic_vector(ret_range'range) := (others => 'U'); variable index : positive := code'left; begin decode(code, index, ret_val); @@ -614,7 +617,8 @@ package body codec_pkg is function decode ( constant code : string) return ieee.numeric_bit.unsigned is - variable ret_val : ieee.numeric_bit.unsigned(get_range(code)'range) := (others => '0'); + constant ret_range : range_t := get_range(code); + variable ret_val : ieee.numeric_bit.unsigned(ret_range'range) := (others => '0'); variable index : positive := code'left; begin decode(code, index, ret_val); @@ -632,7 +636,8 @@ package body codec_pkg is function decode ( constant code : string) return ieee.numeric_bit.signed is - variable ret_val : ieee.numeric_bit.signed(get_range(code)'range) := (others => '0'); + constant ret_range : range_t := get_range(code); + variable ret_val : ieee.numeric_bit.signed(ret_range'range) := (others => '0'); variable index : positive := code'left; begin decode(code, index, ret_val); @@ -650,7 +655,8 @@ package body codec_pkg is function decode ( constant code : string) return ieee.numeric_std.unsigned is - variable ret_val : ieee.numeric_std.unsigned(get_range(code)'range) := (others => 'U'); + constant ret_range : range_t := get_range(code); + variable ret_val : ieee.numeric_std.unsigned(ret_range'range) := (others => 'U'); variable index : positive := code'left; begin decode(code, index, ret_val); @@ -668,7 +674,8 @@ package body codec_pkg is function decode ( constant code : string) return ieee.numeric_std.signed is - variable ret_val : ieee.numeric_std.signed(get_range(code)'range) := (others => 'U'); + constant ret_range : range_t := get_range(code); + variable ret_val : ieee.numeric_std.signed(ret_range'range) := (others => 'U'); variable index : positive := code'left; begin decode(code, index, ret_val); diff --git a/vunit/vhdl/data_types/src/external/external_integer_vector-novhpi.vhd b/vunit/vhdl/data_types/src/external/external_integer_vector-novhpi.vhd deleted file mode 100644 index 6ebf849d9..000000000 --- a/vunit/vhdl/data_types/src/external/external_integer_vector-novhpi.vhd +++ /dev/null @@ -1,24 +0,0 @@ --- This Source Code Form is subject to the terms of the Mozilla Public --- License, v. 2.0. If a copy of the MPL was not distributed with this file, --- You can obtain one at http://mozilla.org/MPL/2.0/. 
--- --- Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com - -use work.types_pkg.all; - -package external_integer_vector_pkg is - procedure write_integer ( - id : integer; - i : integer; - v : integer - ); - - impure function read_integer ( - id : integer; - i : integer - ) return integer; - - impure function get_ptr ( - id : integer - ) return extintvec_access_t; -end package; diff --git a/vunit/vhdl/data_types/src/external/external_integer_vector-vhpi.vhd b/vunit/vhdl/data_types/src/external/external_integer_vector-vhpi.vhd deleted file mode 100644 index 34e4ba518..000000000 --- a/vunit/vhdl/data_types/src/external/external_integer_vector-vhpi.vhd +++ /dev/null @@ -1,28 +0,0 @@ --- This Source Code Form is subject to the terms of the Mozilla Public --- License, v. 2.0. If a copy of the MPL was not distributed with this file, --- You can obtain one at http://mozilla.org/MPL/2.0/. --- --- Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com - -use work.types_pkg.all; - -package external_integer_vector_pkg is - procedure write_integer ( - id : integer; - i : integer; - v : integer - ); - - impure function read_integer ( - id : integer; - i : integer - ) return integer; - - impure function get_ptr ( - id : integer - ) return extintvec_access_t; - - attribute foreign of write_integer : procedure is "VHPIDIRECT write_integer"; - attribute foreign of read_integer : function is "VHPIDIRECT read_integer"; - attribute foreign of get_ptr : function is "VHPIDIRECT get_intvec_ptr"; -end package; diff --git a/vunit/vhdl/data_types/src/external/external_string-novhpi.vhd b/vunit/vhdl/data_types/src/external/external_string-novhpi.vhd deleted file mode 100644 index 21c813552..000000000 --- a/vunit/vhdl/data_types/src/external/external_string-novhpi.vhd +++ /dev/null @@ -1,24 +0,0 @@ --- This Source Code Form is subject to the terms of the Mozilla Public --- License, v. 2.0. If a copy of the MPL was not distributed with this file, --- You can obtain one at http://mozilla.org/MPL/2.0/. --- --- Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com - -use work.types_pkg.all; - -package external_string_pkg is - procedure write_char ( - id : integer; - i : integer; - v : character - ); - - impure function read_char ( - id : integer; - i : integer - ) return character; - - impure function get_ptr ( - id : integer - ) return extstring_access_t; -end package; diff --git a/vunit/vhdl/data_types/src/external/external_string-vhpi.vhd b/vunit/vhdl/data_types/src/external/external_string-vhpi.vhd deleted file mode 100644 index f54bd6217..000000000 --- a/vunit/vhdl/data_types/src/external/external_string-vhpi.vhd +++ /dev/null @@ -1,28 +0,0 @@ --- This Source Code Form is subject to the terms of the Mozilla Public --- License, v. 2.0. If a copy of the MPL was not distributed with this file, --- You can obtain one at http://mozilla.org/MPL/2.0/. 
--- --- Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com - -use work.types_pkg.all; - -package external_string_pkg is - procedure write_char ( - id : integer; - i : integer; - v : character - ); - - impure function read_char ( - id : integer; - i : integer - ) return character; - - impure function get_ptr ( - id : integer - ) return extstring_access_t; - - attribute foreign of write_char : procedure is "VHPIDIRECT write_char"; - attribute foreign of read_char : function is "VHPIDIRECT read_char"; - attribute foreign of get_ptr : function is "VHPIDIRECT get_string_ptr"; -end package; diff --git a/vunit/vhdl/data_types/src/external/ghdl/grt.ver b/vunit/vhdl/data_types/src/external/ghdl/grt.ver deleted file mode 100644 index cd09ff8c6..000000000 --- a/vunit/vhdl/data_types/src/external/ghdl/grt.ver +++ /dev/null @@ -1,16 +0,0 @@ -VHPIDIRECT { - global: -main; -oct_main; -ghdl_main; -read_char; -write_char; -read_integer; -write_integer; -set_string_ptr; -get_string_ptr; -set_intvec_ptr; -get_intvec_ptr; - local: - *; -}; diff --git a/vunit/vhdl/data_types/src/external/ghdl/stubs.c b/vunit/vhdl/data_types/src/external/ghdl/stubs.c deleted file mode 100644 index 9ffb4ef26..000000000 --- a/vunit/vhdl/data_types/src/external/ghdl/stubs.c +++ /dev/null @@ -1,53 +0,0 @@ -#include -#include -#include - -void set_string_ptr(uint8_t id, uint8_t *p) { - printf("ERROR set_string_ptr: THIS IS A STUB\n"); - exit(1); - return; -} - -uintptr_t get_string_ptr(uint8_t id) { - printf("ERROR get_string_ptr: THIS IS A STUB\n"); - exit(1); - return NULL; -} - -void write_char( uint8_t id, uint32_t i, uint8_t v ) { - printf("ERROR write_char: THIS IS A STUB\n"); - exit(1); - return; -} - -uint8_t read_char( uint8_t id, uint32_t i ) { - printf("ERROR read_char: THIS IS A STUB\n"); - exit(1); - return 0; -} - -//--- - -void set_intvec_ptr(uint8_t id, uint8_t *p) { - printf("ERROR set_intvec_ptr: THIS IS A STUB\n"); - exit(1); - return; -} - -uintptr_t get_intvec_ptr(uint8_t id) { - printf("ERROR get_intvec_ptr: THIS IS A STUB\n"); - exit(1); - return NULL; -} - -void write_integer(uint8_t id, uint32_t i, int32_t v) { - printf("ERROR write_integer: THIS IS A STUB\n"); - exit(1); - return; -} - -int32_t read_integer(uint8_t id, uint32_t i) { - printf("ERROR read_integer: THIS IS A STUB\n"); - exit(1); - return 0; -} diff --git a/vunit/vhdl/data_types/src/external/ghdl/vhpidirect_user.h b/vunit/vhdl/data_types/src/external/ghdl/vhpidirect_user.h deleted file mode 100644 index 2541c2b39..000000000 --- a/vunit/vhdl/data_types/src/external/ghdl/vhpidirect_user.h +++ /dev/null @@ -1,49 +0,0 @@ -#include - -extern int ghdl_main (int argc, char **argv); - -uint8_t *D[256]; - -//--- - -// External string/byte_vector through access (mode = extacc) - -void set_string_ptr(uint8_t id, uintptr_t p) { - D[id] = (uint8_t*)p; -} - -uintptr_t get_string_ptr(uint8_t id) { - return (uintptr_t)D[id]; -} - -// External string/byte_vector through functions (mode = extfnc) - -void write_char(uint8_t id, uint32_t i, uint8_t v) { - ((uint8_t*)D[id])[i] = v; -} - -uint8_t read_char(uint8_t id, uint32_t i) { - return ((uint8_t*)D[id])[i]; -} - -//--- - -// External integer_vector through access (mode = extacc) - -void set_intvec_ptr(uint8_t id, uintptr_t p) { - D[id] = (uint8_t*)p; -} - -uintptr_t get_intvec_ptr(uint8_t id) { - return (uintptr_t)D[id]; -} - -// External integer_vector through functions (mode = extfnc) - -void write_integer(uint8_t id, uint32_t i, int32_t v) { - ((int32_t*)D[id])[i] = v; -} - -int32_t 
read_integer(uint8_t id, uint32_t i) { - return ((int32_t*)D[id])[i]; -} diff --git a/vunit/vhdl/data_types/test/tb_codec-2008p.vhd b/vunit/vhdl/data_types/test/tb_codec-2008p.vhd index a0f152c9a..f6f1c5acc 100644 --- a/vunit/vhdl/data_types/test/tb_codec-2008p.vhd +++ b/vunit/vhdl/data_types/test/tb_codec-2008p.vhd @@ -53,8 +53,30 @@ begin variable real_vector_5_downto_3 : real_vector(5 downto 3); variable time_vector_5_downto_3 : time_vector(5 downto 3); - -- Temp variables to make test case pass Riviera-PRO 2016.10 - variable range_left, range_right : integer; + -- Helper functions to make tests pass GHDL v0.37 and Riviera-PRO 2016.10 + function get_decoded_range_left ( constant vec: boolean_vector ) return integer is + begin return vec'left; end; + + function get_decoded_range_right ( constant vec: boolean_vector ) return integer is + begin return vec'right; end; + + function get_decoded_range_left ( constant vec: integer_vector ) return integer is + begin return vec'left; end; + + function get_decoded_range_right ( constant vec: integer_vector ) return integer is + begin return vec'right; end; + + function get_decoded_range_left ( constant vec: real_vector ) return integer is + begin return vec'left; end; + + function get_decoded_range_right ( constant vec: real_vector ) return integer is + begin return vec'right; end; + + function get_decoded_range_left ( constant vec: time_vector ) return integer is + begin return vec'left; end; + + function get_decoded_range_right ( constant vec: time_vector ) return integer is + begin return vec'right; end; begin test_runner_setup(runner, runner_cfg); @@ -63,57 +85,49 @@ begin if run("Test that boolean_vector can be encoded and decoded") then boolean_vector_5_downto_3 := (true, false, true); check_relation(decode_boolean_vector(encode_boolean_vector((true, false, true))) = boolean_vector'(true, false, true)); - check_relation(decode_boolean_vector(encode_boolean_vector((0 => true))) = boolean_vector'(0 => true)); + check_relation(decode_boolean_vector(encode_boolean_vector((0 => true))) = boolean_vector'(0 => true)); check_relation(decode_boolean_vector(encode_boolean_vector(null_boolean_vector)) = null_boolean_vector); check_relation(decode_boolean_vector(encode_boolean_vector(boolean_vector_5_downto_3)) = boolean_vector'(true, false, true)); - range_left := decode_boolean_vector(encode_boolean_vector(boolean_vector_5_downto_3))'left; - range_right := decode_boolean_vector(encode_boolean_vector(boolean_vector_5_downto_3))'right; - check_relation(range_left = 5); - check_relation(range_right = 3); + check_relation(get_decoded_range_left(decode_boolean_vector(encode_boolean_vector(boolean_vector_5_downto_3))) = 5); + check_relation(get_decoded_range_right(decode_boolean_vector(encode_boolean_vector(boolean_vector_5_downto_3))) = 3); elsif run("Test that integer_vector can be encoded and decoded") then integer_vector_5_downto_3 := (-42, 0, 17); check_relation(decode_integer_vector(encode_integer_vector((-2147483648, -2147483648, -2147483648))) = integer_vector'(-2147483648, -2147483648, -2147483648)); check_relation(decode_integer_vector(encode_integer_vector((-42, 0, 17))) = integer_vector'(-42, 0, 17)); - check_relation(decode_integer_vector(encode_integer_vector((0 => -42))) = integer_vector'(0 => -42)); + check_relation(decode_integer_vector(encode_integer_vector((0 => -42))) = integer_vector'(0 => -42)); check_relation(decode_integer_vector(encode_integer_vector(null_integer_vector)) = null_integer_vector); 
check_relation(decode_integer_vector(encode_integer_vector(integer_vector_5_downto_3)) = integer_vector'(-42, 0, 17)); - range_left := decode_integer_vector(encode_integer_vector(integer_vector_5_downto_3))'left; - range_right := decode_integer_vector(encode_integer_vector(integer_vector_5_downto_3))'right; - check_relation(range_left = 5); - check_relation(range_right = 3); + check_relation(get_decoded_range_left(decode_integer_vector(encode_integer_vector(integer_vector_5_downto_3))) = 5); + check_relation(get_decoded_range_right(decode_integer_vector(encode_integer_vector(integer_vector_5_downto_3))) = 3); elsif run("Test that real_vector can be encoded and decoded") then real_vector_5_downto_3 := (-42.42, 0.001, 17.17); check_relation(decode_real_vector(encode_real_vector((-42.42, 0.001, 17.17))) = real_vector'(-42.42, 0.001, 17.17)); check_relation(decode_real_vector(encode_real_vector((0 => -42.42))) = real_vector'(0 => -42.42)); check_relation(decode_real_vector(encode_real_vector(null_real_vector)) = null_real_vector); check_relation(decode_real_vector(encode_real_vector(real_vector_5_downto_3)) = real_vector'(-42.42, 0.001, 17.17)); - range_left := decode_real_vector(encode_real_vector(real_vector_5_downto_3))'left; - range_right := decode_real_vector(encode_real_vector(real_vector_5_downto_3))'right; - check_relation(range_left = 5); - check_relation(range_right = 3); + check_relation(get_decoded_range_left(decode_real_vector(encode_real_vector(real_vector_5_downto_3))) = 5); + check_relation(get_decoded_range_right(decode_real_vector(encode_real_vector(real_vector_5_downto_3))) = 3); elsif run("Test that time_vector can be encoded and decoded") then time_vector_5_downto_3 := (-42 ms, 0 sec, 17 min); check_relation(decode_time_vector(encode_time_vector((-42 ms, 0 sec, 17 min))) = time_vector'(-42 ms, 0 sec, 17 min)); - check_relation(decode_time_vector(encode_time_vector((0 => -42 ms))) = time_vector'(0 => -42 ms)); + check_relation(decode_time_vector(encode_time_vector((0 => -42 ms))) = time_vector'(0 => -42 ms)); check_relation(decode_time_vector(encode_time_vector(null_time_vector)) = null_time_vector); check_relation(decode_time_vector(encode_time_vector(time_vector_5_downto_3)) = time_vector'(-42 ms, 0 sec, 17 min)); - range_left := decode_time_vector(encode_time_vector(time_vector_5_downto_3))'left; - range_right := decode_time_vector(encode_time_vector(time_vector_5_downto_3))'right; - check_relation(range_left = 5); - check_relation(range_right = 3); + check_relation(get_decoded_range_left(decode_time_vector(encode_time_vector(time_vector_5_downto_3))) = 5); + check_relation(get_decoded_range_right(decode_time_vector(encode_time_vector(time_vector_5_downto_3))) = 3); elsif run("Test that ufixed can be encoded and decoded") then - check_relation(decode_ufixed(encode_ufixed(to_ufixed(6.5, 3, -3))) = to_ufixed(6.5, 3, -3)); - check_relation(decode_ufixed(encode_ufixed(to_ufixed(8.0, 3, 1))) = to_ufixed(8.0, 3, 1)); + check_relation(decode_ufixed(encode_ufixed(to_ufixed( 6.5, 3, -3))) = to_ufixed(6.5, 3, -3)); + check_relation(decode_ufixed(encode_ufixed(to_ufixed( 8.0, 3, 1))) = to_ufixed(8.0, 3, 1)); check_relation(decode_ufixed(encode_ufixed(to_ufixed(0.25, -2, -4))) = to_ufixed(0.25, -2, -4)); elsif run("Test that sfixed can be encoded and decoded") then - check_relation(decode_sfixed(encode_sfixed(to_sfixed(6.5, 3, -3))) = to_sfixed(6.5, 3, -3)); - check_relation(decode_sfixed(encode_sfixed(to_sfixed(8.0, 4, 1))) = to_sfixed(8.0, 4, 1)); - 
check_relation(decode_sfixed(encode_sfixed(to_sfixed(0.25, -1, -4))) = to_sfixed(0.25, -1, -4)); - check_relation(decode_sfixed(encode_sfixed(to_sfixed(-6.5, 3, -3))) = to_sfixed(-6.5, 3, -3)); - check_relation(decode_sfixed(encode_sfixed(to_sfixed(-8.0, 4, 1))) = to_sfixed(-8.0, 4, 1)); + check_relation(decode_sfixed(encode_sfixed(to_sfixed( 6.5, 3, -3))) = to_sfixed(6.5, 3, -3)); + check_relation(decode_sfixed(encode_sfixed(to_sfixed( 8.0, 4, 1))) = to_sfixed(8.0, 4, 1)); + check_relation(decode_sfixed(encode_sfixed(to_sfixed(0.25, -1, -4))) = to_sfixed(0.25, -1, -4)); + check_relation(decode_sfixed(encode_sfixed(to_sfixed(-6.5, 3, -3))) = to_sfixed(-6.5, 3, -3)); + check_relation(decode_sfixed(encode_sfixed(to_sfixed(-8.0, 4, 1))) = to_sfixed(-8.0, 4, 1)); check_relation(decode_sfixed(encode_sfixed(to_sfixed(-0.25, -1, -4))) = to_sfixed(-0.25, -1, -4)); elsif run("Test that float can be encoded and decoded") then - check_relation(decode_float(encode_float(to_float(real'low, 11, 52))) = to_float(real'low, 11, 52)); + check_relation(decode_float(encode_float(to_float(real'low, 11, 52))) = to_float(real'low, 11, 52)); check_relation(decode_float(encode_float(to_float(real'high, 11, 52))) = to_float(real'high, 11, 52)); check_relation(to_string(decode_float(encode_float(positive_zero))) = to_string(positive_zero)); diff --git a/vunit/vhdl/data_types/test/tb_codec.vhd b/vunit/vhdl/data_types/test/tb_codec.vhd index a4ee84d41..9fc354023 100644 --- a/vunit/vhdl/data_types/test/tb_codec.vhd +++ b/vunit/vhdl/data_types/test/tb_codec.vhd @@ -101,8 +101,48 @@ begin variable numeric_std_unsigned_5_downto_3 : ieee.numeric_std.unsigned(5 downto 3); variable numeric_std_signed_5_downto_3 : ieee.numeric_std.signed(5 downto 3); - -- Temp variables to make test case pass Riviera-PRO 2016.10 - variable range_left, range_right : integer; + -- Helper functions to make tests pass GHDL v0.37 and Riviera-PRO 2016.10 + function get_decoded_range_left ( constant vec: string ) return integer is + begin return vec'left; end; + + function get_decoded_range_right ( constant vec: string ) return integer is + begin return vec'right; end; + + function get_decoded_range_left ( constant vec: bit_vector ) return integer is + begin return vec'left; end; + + function get_decoded_range_right ( constant vec: bit_vector ) return integer is + begin return vec'right; end; + + function get_decoded_range_left ( constant vec: std_ulogic_vector ) return integer is + begin return vec'left; end; + + function get_decoded_range_right ( constant vec: std_ulogic_vector ) return integer is + begin return vec'right; end; + + function get_decoded_range_left ( constant vec: ieee.numeric_bit.unsigned ) return integer is + begin return vec'left; end; + + function get_decoded_range_right ( constant vec: ieee.numeric_bit.unsigned ) return integer is + begin return vec'right; end; + + function get_decoded_range_left ( constant vec: ieee.numeric_bit.signed ) return integer is + begin return vec'left; end; + + function get_decoded_range_right ( constant vec: ieee.numeric_bit.signed ) return integer is + begin return vec'right; end; + + function get_decoded_range_left ( constant vec: ieee.numeric_std.unsigned ) return integer is + begin return vec'left; end; + + function get_decoded_range_right ( constant vec: ieee.numeric_std.unsigned ) return integer is + begin return vec'right; end; + + function get_decoded_range_left ( constant vec: ieee.numeric_std.signed ) return integer is + begin return vec'left; end; + + function get_decoded_range_right ( 
constant vec: ieee.numeric_std.signed ) return integer is + begin return vec'right; end; begin test_runner_setup(runner, runner_cfg); @@ -174,35 +214,27 @@ begin string_15_downto_4 := "Hello world!"; check_relation(decode_string(encode_string("The quick brown fox jumps over the lazy dog")) = string'("The quick brown fox jumps over the lazy dog")); check_relation(decode_string(encode_string(special_chars)) = string'(special_chars)); - range_left := decode_string(encode_string(null_string))'left; - range_right := decode_string(encode_string(null_string))'right; - check_relation(range_left = 10); - check_relation(range_right = 9); + check_relation(get_decoded_range_left(decode_string(encode_string(null_string))) = 10); + check_relation(get_decoded_range_right(decode_string(encode_string(null_string))) = 9); check_relation(decode_string(encode_string(string_15_downto_4)) = string'("Hello world!")); - range_left := decode_string(encode_string(string_15_downto_4))'left; - range_right := decode_string(encode_string(string_15_downto_4))'right; - check_relation(range_left = 15); - check_relation(range_right = 4); + check_relation(get_decoded_range_left(decode_string(encode_string(string_15_downto_4))) = 15); + check_relation(get_decoded_range_right(decode_string(encode_string(string_15_downto_4))) = 4); elsif run("Test that bit_vector can be encoded and decoded") then bit_vector_5_downto_3 := "101"; check_relation(decode_bit_vector(encode_bit_vector("101")) = bit_vector'("101")); check_relation(decode_bit_vector(encode_bit_vector("1")) = bit_vector'("1")); check_relation(decode_bit_vector(encode_bit_vector("")) = bit_vector'("")); check_relation(decode_bit_vector(encode_bit_vector(bit_vector_5_downto_3)) = bit_vector'("101")); - range_left := decode_bit_vector(encode_bit_vector(bit_vector_5_downto_3))'left; - range_right := decode_bit_vector(encode_bit_vector(bit_vector_5_downto_3))'right; - check_relation(range_left = 5); - check_relation(range_right = 3); + check_relation(get_decoded_range_left(decode_bit_vector(encode_bit_vector(bit_vector_5_downto_3))) = 5); + check_relation(get_decoded_range_right(decode_bit_vector(encode_bit_vector(bit_vector_5_downto_3))) = 3); elsif run("Test that std_ulogic_vector can be encoded and decoded") then std_ulogic_vector_5_downto_3 := "XU1"; check_relation(decode_std_ulogic_vector(encode_std_ulogic_vector("XU1")) = std_ulogic_vector'("XU1")); check_relation(decode_std_ulogic_vector(encode_std_ulogic_vector("X")) = std_ulogic_vector'("X")); check_relation(decode_std_ulogic_vector(encode_std_ulogic_vector("")) = std_ulogic_vector'("")); check_relation(decode_std_ulogic_vector(encode_std_ulogic_vector(std_ulogic_vector_5_downto_3)) = std_ulogic_vector'("XU1")); - range_left := decode_std_ulogic_vector(encode_std_ulogic_vector(std_ulogic_vector_5_downto_3))'left; - range_right := decode_std_ulogic_vector(encode_std_ulogic_vector(std_ulogic_vector_5_downto_3))'right; - check_relation(range_left = 5); - check_relation(range_right = 3); + check_relation(get_decoded_range_left(decode_std_ulogic_vector(encode_std_ulogic_vector(std_ulogic_vector_5_downto_3))) = 5); + check_relation(get_decoded_range_right(decode_std_ulogic_vector(encode_std_ulogic_vector(std_ulogic_vector_5_downto_3))) = 3); elsif run("Test that complex can be encoded and decoded") then check_relation(decode_complex(encode_complex((-17.17, 42.42))) = complex'(-17.17, 42.42)); elsif run("Test that complex_polar can be encoded and decoded") then @@ -213,40 +245,32 @@ begin 
check_relation(decode_numeric_bit_unsigned(encode_numeric_bit_unsigned("1")) = ieee.numeric_bit.unsigned'("1")); check_relation(decode_numeric_bit_unsigned(encode_numeric_bit_unsigned("")) = ieee.numeric_bit.unsigned'("")); check_relation(decode_numeric_bit_unsigned(encode_numeric_bit_unsigned(numeric_bit_unsigned_5_downto_3)) = ieee.numeric_bit.unsigned'("101")); - range_left := decode_numeric_bit_unsigned(encode_numeric_bit_unsigned(numeric_bit_unsigned_5_downto_3))'left; - range_right := decode_numeric_bit_unsigned(encode_numeric_bit_unsigned(numeric_bit_unsigned_5_downto_3))'right; - check_relation(range_left = 5); - check_relation(range_right = 3); + check_relation(get_decoded_range_left(decode_numeric_bit_unsigned(encode_numeric_bit_unsigned(numeric_bit_unsigned_5_downto_3))) = 5); + check_relation(get_decoded_range_right(decode_numeric_bit_unsigned(encode_numeric_bit_unsigned(numeric_bit_unsigned_5_downto_3))) = 3); elsif run("Test that signed from numeric_bit can be encoded and decoded") then numeric_bit_signed_5_downto_3 := "101"; check_relation(decode_numeric_bit_signed(encode_numeric_bit_signed("101")) = ieee.numeric_bit.signed'("101")); check_relation(decode_numeric_bit_signed(encode_numeric_bit_signed("1")) = ieee.numeric_bit.signed'("1")); check_relation(decode_numeric_bit_signed(encode_numeric_bit_signed("")) = ieee.numeric_bit.signed'("")); check_relation(decode_numeric_bit_signed(encode_numeric_bit_signed(numeric_bit_signed_5_downto_3)) = ieee.numeric_bit.signed'("101")); - range_left := decode_numeric_bit_signed(encode_numeric_bit_signed(numeric_bit_signed_5_downto_3))'left; - range_right := decode_numeric_bit_signed(encode_numeric_bit_signed(numeric_bit_signed_5_downto_3))'right; - check_relation(range_left = 5); - check_relation(range_right = 3); + check_relation(get_decoded_range_left(decode_numeric_bit_signed(encode_numeric_bit_signed(numeric_bit_signed_5_downto_3))) = 5); + check_relation(get_decoded_range_right(decode_numeric_bit_signed(encode_numeric_bit_signed(numeric_bit_signed_5_downto_3))) = 3); elsif run("Test that unsigned from numeric_std can be encoded and decoded") then numeric_std_unsigned_5_downto_3 := "101"; check_relation(decode_numeric_std_unsigned(encode_numeric_std_unsigned("101")) = ieee.numeric_std.unsigned'("101")); check_relation(decode_numeric_std_unsigned(encode_numeric_std_unsigned("1")) = ieee.numeric_std.unsigned'("1")); check_relation(decode_numeric_std_unsigned(encode_numeric_std_unsigned("")) = ieee.numeric_std.unsigned'("")); check_relation(decode_numeric_std_unsigned(encode_numeric_std_unsigned(numeric_std_unsigned_5_downto_3)) = ieee.numeric_std.unsigned'("101")); - range_left := decode_numeric_std_unsigned(encode_numeric_std_unsigned(numeric_std_unsigned_5_downto_3))'left; - range_right := decode_numeric_std_unsigned(encode_numeric_std_unsigned(numeric_std_unsigned_5_downto_3))'right; - check_relation(range_left = 5); - check_relation(range_right = 3); + check_relation(get_decoded_range_left(decode_numeric_std_unsigned(encode_numeric_std_unsigned(numeric_std_unsigned_5_downto_3))) = 5); + check_relation(get_decoded_range_right(decode_numeric_std_unsigned(encode_numeric_std_unsigned(numeric_std_unsigned_5_downto_3))) = 3); elsif run("Test that signed from numeric_std can be encoded and decoded") then numeric_std_signed_5_downto_3 := "101"; check_relation(decode_numeric_std_signed(encode_numeric_std_signed("101")) = ieee.numeric_std.signed'("101")); check_relation(decode_numeric_std_signed(encode_numeric_std_signed("1")) = 
ieee.numeric_std.signed'("1")); check_relation(decode_numeric_std_signed(encode_numeric_std_signed("")) = ieee.numeric_std.signed'("")); check_relation(decode_numeric_std_signed(encode_numeric_std_signed(numeric_std_signed_5_downto_3)) = ieee.numeric_std.signed'("101")); - range_left := decode_numeric_std_signed(encode_numeric_std_signed(numeric_std_signed_5_downto_3))'left; - range_right := decode_numeric_std_signed(encode_numeric_std_signed(numeric_std_signed_5_downto_3))'right; - check_relation(range_left = 5); - check_relation(range_right = 3); + check_relation(get_decoded_range_left(decode_numeric_std_signed(encode_numeric_std_signed(numeric_std_signed_5_downto_3))) = 5); + check_relation(get_decoded_range_right(decode_numeric_std_signed(encode_numeric_std_signed(numeric_std_signed_5_downto_3))) = 3); end if; end loop; diff --git a/vunit/vhdl/data_types/test/tb_dict.vhd b/vunit/vhdl/data_types/test/tb_dict.vhd index cef91d6c2..a038c4885 100644 --- a/vunit/vhdl/data_types/test/tb_dict.vhd +++ b/vunit/vhdl/data_types/test/tb_dict.vhd @@ -20,7 +20,7 @@ begin main : process variable dict : dict_t; - constant many_keys : natural := 2**16; + constant many_keys : natural := 2**13; constant long_key : string := "long--------------------------------------------------------key"; begin test_runner_setup(runner, runner_cfg); @@ -94,6 +94,7 @@ begin remove(dict, long_key & integer'image(i)); check_equal(num_keys(dict), i-1); end loop; + end loop; deallocate(dict); diff --git a/vunit/vhdl/dictionary/run.py b/vunit/vhdl/dictionary/run.py index 8fbb6da42..a90fc1571 100644 --- a/vunit/vhdl/dictionary/run.py +++ b/vunit/vhdl/dictionary/run.py @@ -4,12 +4,12 @@ # # Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com -from os.path import join, dirname +from pathlib import Path from vunit import VUnit -root = dirname(__file__) +ROOT = Path(__file__).parent -ui = VUnit.from_argv() -lib = ui.add_library("lib") -lib.add_source_files(join(root, "test", "*.vhd")) -ui.main() +UI = VUnit.from_argv() +UI.add_library("lib").add_source_files(ROOT / "test" / "*.vhd") + +UI.main() diff --git a/vunit/vhdl/logging/run.py b/vunit/vhdl/logging/run.py index 5378e3cdd..caaeb8be3 100644 --- a/vunit/vhdl/logging/run.py +++ b/vunit/vhdl/logging/run.py @@ -4,11 +4,12 @@ # # Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com -from os.path import join, dirname +from pathlib import Path from vunit import VUnit -root = dirname(__file__) -ui = VUnit.from_argv() -lib = ui.library("vunit_lib") -lib.add_source_files(join(root, "test", "*.vhd")) -ui.main() +ROOT = Path(__file__).parent + +UI = VUnit.from_argv() +UI.library("vunit_lib").add_source_files(ROOT / "test" / "*.vhd") + +UI.main() diff --git a/vunit/vhdl/logging/src/logger_pkg-body.vhd b/vunit/vhdl/logging/src/logger_pkg-body.vhd index 9a51780ec..ca91aa00d 100644 --- a/vunit/vhdl/logging/src/logger_pkg-body.vhd +++ b/vunit/vhdl/logging/src/logger_pkg-body.vhd @@ -92,12 +92,13 @@ package body logger_pkg is procedure p_set_log_handlers(logger : logger_t; log_handlers : log_handler_vec_t) is constant handlers : integer_vector_ptr_t := to_integer_vector_ptr(get(logger.p_data, handlers_idx)); + constant full_logger_name : string := get_full_name(logger); begin resize(handlers, log_handlers'length); for i in log_handlers'range loop set(handlers, i, to_integer(log_handlers(i).p_data)); - update_max_logger_name_length(log_handlers(i), get_full_name(logger)'length); + update_max_logger_name_length(log_handlers(i), full_logger_name'length); end loop; 
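
The run.py scripts touched above all follow the same os.path-to-pathlib conversion. Reduced to a standalone sketch (library name and directory layout are placeholders), the shared pattern is:

from pathlib import Path

from vunit import VUnit

ROOT = Path(__file__).parent  # directory containing this run.py

UI = VUnit.from_argv()
# add_source_files accepts the Path object directly; no str()/join() needed.
UI.add_library("my_lib").add_source_files(ROOT / "test" / "*.vhd")

UI.main()
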
end; @@ -262,11 +263,12 @@ package body logger_pkg is end; impure function get_max_name_length(logger : logger_t) return natural is + constant full_name : string := get_full_name(logger); variable result : natural := 0; variable child_result : natural; begin if num_children(logger) = 0 then - return get_full_name(logger)'length; + return full_name'length; end if; for i in 0 to num_children(logger)-1 loop diff --git a/vunit/vhdl/path/run.py b/vunit/vhdl/path/run.py index 913f518c3..a90fc1571 100644 --- a/vunit/vhdl/path/run.py +++ b/vunit/vhdl/path/run.py @@ -4,11 +4,12 @@ # # Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com -from os.path import join, dirname +from pathlib import Path from vunit import VUnit -root = dirname(__file__) -ui = VUnit.from_argv() -lib = ui.add_library("lib") -lib.add_source_files(join(root, "test", "*.vhd")) -ui.main() +ROOT = Path(__file__).parent + +UI = VUnit.from_argv() +UI.add_library("lib").add_source_files(ROOT / "test" / "*.vhd") + +UI.main() diff --git a/vunit/vhdl/path/src/path.vhd b/vunit/vhdl/path/src/path.vhd index 86b04f608..fd422d430 100644 --- a/vunit/vhdl/path/src/path.vhd +++ b/vunit/vhdl/path/src/path.vhd @@ -37,7 +37,7 @@ package body path is constant p9 : string := ""; constant p10 : string := "") return string is - variable inputs : line_vector(1 to 10); + variable inputs : work.string_ops.line_vector(1 to 10); variable result : line; begin write(inputs(1), p1); diff --git a/vunit/vhdl/random/run.py b/vunit/vhdl/random/run.py index 041600ff1..74c1b3f47 100644 --- a/vunit/vhdl/random/run.py +++ b/vunit/vhdl/random/run.py @@ -4,13 +4,13 @@ # # Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com -from os.path import join, dirname +from pathlib import Path from vunit import VUnit -root = dirname(__file__) +ROOT = Path(__file__).parent -ui = VUnit.from_argv() -ui.add_random() -lib = ui.library("vunit_lib") -lib.add_source_files(join(root, "test", "*.vhd")) -ui.main() +UI = VUnit.from_argv() +UI.add_random() +UI.library("vunit_lib").add_source_files(ROOT / "test" / "*.vhd") + +UI.main() diff --git a/vunit/vhdl/run/run.py b/vunit/vhdl/run/run.py index 7135a5b94..e7ee5c0de 100644 --- a/vunit/vhdl/run/run.py +++ b/vunit/vhdl/run/run.py @@ -4,12 +4,12 @@ # # Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com -from os.path import join, dirname +from pathlib import Path from vunit import VUnit -root = dirname(__file__) -ui = VUnit.from_argv() +ROOT = Path(__file__).parent -lib = ui.add_library("tb_run_lib") -lib.add_source_files(join(root, "test", "*.vhd")) -ui.main() +UI = VUnit.from_argv() +UI.add_library("tb_run_lib").add_source_files(ROOT / "test" / "*.vhd") + +UI.main() diff --git a/vunit/vhdl/string_ops/run.py b/vunit/vhdl/string_ops/run.py index cb9bb49c1..a90fc1571 100644 --- a/vunit/vhdl/string_ops/run.py +++ b/vunit/vhdl/string_ops/run.py @@ -4,14 +4,12 @@ # # Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com -from os.path import join, dirname +from pathlib import Path from vunit import VUnit -root = dirname(__file__) -common_path = join(root, "..", "common", "test") +ROOT = Path(__file__).parent -ui = VUnit.from_argv() -lib = ui.add_library("lib") -lib.add_source_files(join(root, "test", "*.vhd")) +UI = VUnit.from_argv() +UI.add_library("lib").add_source_files(ROOT / "test" / "*.vhd") -ui.main() +UI.main() diff --git a/vunit/vhdl/verification_components/run.py b/vunit/vhdl/verification_components/run.py index 1b5ab9261..cc8005c0e 100644 --- 
a/vunit/vhdl/verification_components/run.py +++ b/vunit/vhdl/verification_components/run.py @@ -4,17 +4,17 @@ # # Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com -from os.path import join, dirname +from pathlib import Path from itertools import product from vunit import VUnit -root = dirname(__file__) +ROOT = Path(__file__).parent -ui = VUnit.from_argv() -ui.add_random() -ui.add_verification_components() -lib = ui.library("vunit_lib") -lib.add_source_files(join(root, "test", "*.vhd")) +UI = VUnit.from_argv() +UI.add_random() +UI.add_verification_components() +LIB = UI.library("vunit_lib") +LIB.add_source_files(ROOT / "test" / "*.vhd") def encode(tb_cfg): @@ -67,12 +67,12 @@ def gen_avalon_master_tests(obj, *args): obj.add_config(name=config_name, generics=dict(encoded_tb_cfg=encode(tb_cfg))) -tb_avalon_slave = lib.test_bench("tb_avalon_slave") +tb_avalon_slave = LIB.test_bench("tb_avalon_slave") for test in tb_avalon_slave.get_tests(): gen_avalon_tests(test, [32], [1, 2, 64], [1.0, 0.3], [0.0, 0.4]) -tb_avalon_master = lib.test_bench("tb_avalon_master") +tb_avalon_master = LIB.test_bench("tb_avalon_master") for test in tb_avalon_master.get_tests(): if test.name == "wr single rd single": @@ -82,19 +82,19 @@ def gen_avalon_master_tests(obj, *args): test, [64], [1.0, 0.3], [0.0, 0.7], [1.0, 0.3], [1.0, 0.3] ) -TB_WISHBONE_SLAVE = lib.test_bench("tb_wishbone_slave") +TB_WISHBONE_SLAVE = LIB.test_bench("tb_wishbone_slave") for test in TB_WISHBONE_SLAVE.get_tests(): # TODO strobe_prob not implemented in slave tb gen_wb_tests(test, [8, 32], [1, 64], [1.0], [0.3, 1.0], [0.4, 0.0]) -TB_WISHBONE_MASTER = lib.test_bench("tb_wishbone_master") +TB_WISHBONE_MASTER = LIB.test_bench("tb_wishbone_master") for test in TB_WISHBONE_MASTER.get_tests(): gen_wb_tests(test, [8, 32], [1, 64], [0.3, 1.0], [0.3, 1.0], [0.4, 0.0]) -TB_AXI_STREAM = lib.test_bench("tb_axi_stream") +TB_AXI_STREAM = LIB.test_bench("tb_axi_stream") for id_length in [0, 8]: for dest_length in [0, 8]: @@ -110,7 +110,7 @@ def gen_avalon_master_tests(obj, *args): ), ) -TB_AXI_STREAM_PROTOCOL_CHECKER = lib.test_bench("tb_axi_stream_protocol_checker") +TB_AXI_STREAM_PROTOCOL_CHECKER = LIB.test_bench("tb_axi_stream_protocol_checker") for data_length in [0, 8]: for test in TB_AXI_STREAM_PROTOCOL_CHECKER.get_tests("*passing*tdata*"): @@ -132,5 +132,12 @@ def gen_avalon_master_tests(obj, *args): name="max_waits=%d" % max_waits, generics=dict(max_waits=max_waits) ) +TB_AXI_STREAM.test("test random stall on master").add_config( + name="stall_master", generics=dict(g_stall_percentage_master=30) +) + +TB_AXI_STREAM.test("test random stall on slave").add_config( + name="stall_slave", generics=dict(g_stall_percentage_slave=30) +) -ui.main() +UI.main() diff --git a/vunit/vhdl/verification_components/src/axi_stream_master.vhd b/vunit/vhdl/verification_components/src/axi_stream_master.vhd index 2673edf4d..5288b24d6 100644 --- a/vunit/vhdl/verification_components/src/axi_stream_master.vhd +++ b/vunit/vhdl/verification_components/src/axi_stream_master.vhd @@ -11,9 +11,13 @@ context work.vunit_context; context work.com_context; use work.stream_master_pkg.all; use work.axi_stream_pkg.all; +use work.axi_stream_private_pkg.all; use work.queue_pkg.all; use work.sync_pkg.all; +library osvvm; +use osvvm.RandomPkg.RandomPType; + entity axi_stream_master is generic ( master : axi_stream_master_t; @@ -42,6 +46,22 @@ architecture a of axi_stream_master is constant message_queue : queue_t := new_queue; signal notify_bus_process_done : std_logic := 
'0'; + procedure drive_invalid_output(signal l_tdata : out std_logic_vector(data_length(master)-1 downto 0); + signal l_tkeep : out std_logic_vector(data_length(master)/8-1 downto 0); + signal l_tstrb : out std_logic_vector(data_length(master)/8-1 downto 0); + signal l_tid : out std_logic_vector(id_length(master)-1 downto 0); + signal l_tdest : out std_logic_vector(dest_length(master)-1 downto 0); + signal l_tuser : out std_logic_vector(user_length(master)-1 downto 0)) + is + begin + l_tdata <= (others => drive_invalid_val); + l_tkeep <= (others => drive_invalid_val); + l_tstrb <= (others => drive_invalid_val); + l_tid <= (others => drive_invalid_val); + l_tdest <= (others => drive_invalid_val); + l_tuser <= (others => drive_invalid_val_user); + end procedure; + begin main : process @@ -69,68 +89,71 @@ begin bus_process : process variable msg : msg_t; variable msg_type : msg_type_t; + variable rnd : RandomPType; begin - if drive_invalid then - tdata <= (others => drive_invalid_val); - tkeep <= (others => drive_invalid_val); - tstrb <= (others => drive_invalid_val); - tid <= (others => drive_invalid_val); - tdest <= (others => drive_invalid_val); - tuser <= (others => drive_invalid_val_user); - end if; - - -- Wait for messages to arrive on the queue, posted by the process above - wait until rising_edge(aclk) and (not is_empty(message_queue) or areset_n = '0'); - - if (areset_n = '0') then - tvalid <= '0'; - else - while not is_empty(message_queue) loop - msg := pop(message_queue); - msg_type := message_type(msg); - - if msg_type = wait_for_time_msg then - handle_sync_message(net, msg_type, msg); - -- Re-align with the clock when a wait for time message was handled, because this breaks edge alignment. - wait until rising_edge(aclk); - elsif msg_type = notify_request_msg then - -- Ignore this message, but expect it - elsif msg_type = stream_push_msg or msg_type = push_axi_stream_msg then - tvalid <= '1'; - tdata <= pop_std_ulogic_vector(msg); - if msg_type = push_axi_stream_msg then - tlast <= pop_std_ulogic(msg); - tkeep <= pop_std_ulogic_vector(msg); - tstrb <= pop_std_ulogic_vector(msg); - tid <= pop_std_ulogic_vector(msg); - tdest <= pop_std_ulogic_vector(msg); - tuser <= pop_std_ulogic_vector(msg); - else - if pop_boolean(msg) then - tlast <= '1'; + rnd.InitSeed(rnd'instance_name); + loop + if drive_invalid then + drive_invalid_output(tdata, tkeep, tstrb, tid, tdest, tuser); + end if; + + -- Wait for messages to arrive on the queue, posted by the process above + wait until rising_edge(aclk) and (not is_empty(message_queue) or areset_n = '0'); + + if (areset_n = '0') then + tvalid <= '0'; + else + while not is_empty(message_queue) loop + msg := pop(message_queue); + msg_type := message_type(msg); + + if msg_type = wait_for_time_msg then + handle_sync_message(net, msg_type, msg); + -- Re-align with the clock when a wait for time message was handled, because this breaks edge alignment. 
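
On the scripting side, the stall behaviour introduced here is exercised through configurations: the stall settings reach tb_axi_stream as top-level generics, as in the verification_components/run.py hunk above. A condensed sketch of that pattern follows; the generic names (g_stall_percentage_master/slave) belong to this particular testbench.

from pathlib import Path

from vunit import VUnit

UI = VUnit.from_argv()
UI.add_random()
UI.add_verification_components()
LIB = UI.library("vunit_lib")
LIB.add_source_files(Path(__file__).parent / "test" / "*.vhd")

TB_AXI_STREAM = LIB.test_bench("tb_axi_stream")

# One extra configuration per direction with a 30 % stall percentage.
TB_AXI_STREAM.test("test random stall on master").add_config(
    name="stall_master", generics=dict(g_stall_percentage_master=30)
)
TB_AXI_STREAM.test("test random stall on slave").add_config(
    name="stall_slave", generics=dict(g_stall_percentage_slave=30)
)

UI.main()
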
+ wait until rising_edge(aclk); + elsif msg_type = notify_request_msg then + -- Ignore this message, but expect it + elsif msg_type = stream_push_msg or msg_type = push_axi_stream_msg then + drive_invalid_output(tdata, tkeep, tstrb, tid, tdest, tuser); + -- stall according to probability configuration + probability_stall_axi_stream(aclk, master, rnd); + + tvalid <= '1'; + tdata <= pop_std_ulogic_vector(msg); + if msg_type = push_axi_stream_msg then + tlast <= pop_std_ulogic(msg); + tkeep <= pop_std_ulogic_vector(msg); + tstrb <= pop_std_ulogic_vector(msg); + tid <= pop_std_ulogic_vector(msg); + tdest <= pop_std_ulogic_vector(msg); + tuser <= pop_std_ulogic_vector(msg); else - tlast <= '0'; + if pop_boolean(msg) then + tlast <= '1'; + else + tlast <= '0'; + end if; + tkeep <= (others => '1'); + tstrb <= (others => '1'); + tid <= (others => '0'); + tdest <= (others => '0'); + tuser <= (others => '0'); end if; - tkeep <= (others => '1'); - tstrb <= (others => '1'); - tid <= (others => '0'); - tdest <= (others => '0'); - tuser <= (others => '0'); + wait until ((tvalid and tready) = '1' or areset_n = '0') and rising_edge(aclk); + tvalid <= '0'; + tlast <= '0'; + else + unexpected_msg_type(msg_type); end if; - wait until ((tvalid and tready) = '1' or areset_n = '0') and rising_edge(aclk); - tvalid <= '0'; - tlast <= '0'; - else - unexpected_msg_type(msg_type); - end if; - - delete(msg); - end loop; - - notify_bus_process_done <= '1'; - wait until notify_bus_process_done = '1'; - notify_bus_process_done <= '0'; - end if; + + delete(msg); + end loop; + + notify_bus_process_done <= '1'; + wait until notify_bus_process_done = '1'; + notify_bus_process_done <= '0'; + end if; + end loop; end process; axi_stream_monitor_generate : if master.p_monitor /= null_axi_stream_monitor generate @@ -171,4 +194,4 @@ begin ); end generate axi_stream_protocol_checker_generate; -end architecture; \ No newline at end of file +end architecture; diff --git a/vunit/vhdl/verification_components/src/axi_stream_pkg.vhd b/vunit/vhdl/verification_components/src/axi_stream_pkg.vhd index 49a6fdab3..dfd8d30e2 100644 --- a/vunit/vhdl/verification_components/src/axi_stream_pkg.vhd +++ b/vunit/vhdl/verification_components/src/axi_stream_pkg.vhd @@ -19,6 +19,18 @@ context work.data_types_context; package axi_stream_pkg is + type stall_config_t is record + stall_probability : real range 0.0 to 1.0; + min_stall_cycles : natural; + max_stall_cycles : natural; + end record; + + constant null_stall_config : stall_config_t := ( + stall_probability => 0.0, + min_stall_cycles => 0, + max_stall_cycles => 0 + ); + type axi_stream_component_type_t is (null_component, default_component, custom_component); type axi_stream_protocol_checker_t is record @@ -99,22 +111,48 @@ package axi_stream_pkg is p_id_length : natural; p_dest_length : natural; p_user_length : natural; + p_stall_config : stall_config_t; p_logger : logger_t; p_monitor : axi_stream_monitor_t; p_protocol_checker : axi_stream_protocol_checker_t; end record; + constant null_axi_stream_master : axi_stream_master_t := ( + p_actor => null_actor, + p_data_length => 0, + p_id_length => 0, + p_dest_length => 0, + p_user_length => 0, + p_stall_config => null_stall_config, + p_logger => null_logger, + p_monitor => null_axi_stream_monitor, + p_protocol_checker => null_axi_stream_protocol_checker + ); + type axi_stream_slave_t is record p_actor : actor_t; p_data_length : natural; p_id_length : natural; p_dest_length : natural; p_user_length : natural; + p_stall_config : stall_config_t; 
p_logger : logger_t; p_monitor : axi_stream_monitor_t; p_protocol_checker : axi_stream_protocol_checker_t; end record; + constant null_axi_stream_slave : axi_stream_slave_t := ( + p_actor => null_actor, + p_data_length => 0, + p_id_length => 0, + p_dest_length => 0, + p_user_length => 0, + p_stall_config => null_stall_config, + p_logger => null_logger, + p_monitor => null_axi_stream_monitor, + p_protocol_checker => null_axi_stream_protocol_checker + ); + constant axi_stream_logger : logger_t := get_logger("vunit_lib:axi_stream_pkg"); constant axi_stream_checker : checker_t := new_checker(axi_stream_logger); @@ -123,6 +161,7 @@ package axi_stream_pkg is id_length : natural := 0; dest_length : natural := 0; user_length : natural := 0; + stall_config : stall_config_t := null_stall_config; logger : logger_t := axi_stream_logger; actor : actor_t := null_actor; monitor : axi_stream_monitor_t := null_axi_stream_monitor; @@ -134,6 +173,7 @@ package axi_stream_pkg is id_length : natural := 0; dest_length : natural := 0; user_length : natural := 0; + stall_config : stall_config_t := null_stall_config; logger : logger_t := axi_stream_logger; actor : actor_t := null_actor; monitor : axi_stream_monitor_t := null_axi_stream_monitor; @@ -285,6 +325,12 @@ package axi_stream_pkg is variable msg : inout msg_t; variable axi_transaction : out axi_stream_transaction_t); + function new_stall_config( + stall_probability : real range 0.0 to 1.0; + min_stall_cycles : natural; + max_stall_cycles : natural + ) return stall_config_t; + end package; package body axi_stream_pkg is @@ -343,6 +389,7 @@ package body axi_stream_pkg is id_length : natural := 0; dest_length : natural := 0; user_length : natural := 0; + stall_config : stall_config_t := null_stall_config; logger : logger_t := axi_stream_logger; actor : actor_t := null_actor; monitor : axi_stream_monitor_t := null_axi_stream_monitor; @@ -361,6 +408,7 @@ package body axi_stream_pkg is p_id_length => id_length, p_dest_length => dest_length, p_user_length => user_length, + p_stall_config => stall_config, p_logger => logger, p_monitor => p_monitor, p_protocol_checker => p_protocol_checker); @@ -371,6 +419,7 @@ package body axi_stream_pkg is id_length : natural := 0; dest_length : natural := 0; user_length : natural := 0; + stall_config : stall_config_t := null_stall_config; logger : logger_t := axi_stream_logger; actor : actor_t := null_actor; monitor : axi_stream_monitor_t := null_axi_stream_monitor; @@ -384,11 +433,12 @@ package body axi_stream_pkg is p_actor := actor when actor /= null_actor else new_actor; p_protocol_checker := get_valid_protocol_checker(data_length, id_length, dest_length, user_length, logger, actor, protocol_checker, "slave"); - return (p_actor => new_actor, + return (p_actor => p_actor, p_data_length => data_length, p_id_length => id_length, p_dest_length => dest_length, p_user_length => user_length, + p_stall_config => stall_config, p_logger => logger, p_monitor => p_monitor, p_protocol_checker => p_protocol_checker); @@ -780,4 +830,17 @@ package body axi_stream_pkg is end if; end; + function new_stall_config( + stall_probability : real range 0.0 to 1.0; + min_stall_cycles : natural; + max_stall_cycles : natural) return stall_config_t is + variable stall_config : stall_config_t; + begin + stall_config := ( + stall_probability => stall_probability, + min_stall_cycles => min_stall_cycles, + max_stall_cycles => max_stall_cycles); + return stall_config; + end; + end package body; diff --git 
a/vunit/vhdl/verification_components/src/axi_stream_private_pkg.vhd b/vunit/vhdl/verification_components/src/axi_stream_private_pkg.vhd new file mode 100644 index 000000000..6abfb700a --- /dev/null +++ b/vunit/vhdl/verification_components/src/axi_stream_private_pkg.vhd @@ -0,0 +1,68 @@ +-- This Source Code Form is subject to the terms of the Mozilla Public +-- License, v. 2.0. If a copy of the MPL was not distributed with this file, +-- You can obtain one at http://mozilla.org/MPL/2.0/. +-- +-- Copyright (c) 2014-2020, Lars Asplund lars.anders.asplund@gmail.com + +library ieee; +use ieee.std_logic_1164.all; + +library osvvm; +use osvvm.RandomPkg.RandomPType; + +use work.axi_stream_pkg.all; + +package axi_stream_private_pkg is + procedure probability_stall_axi_stream( + signal aclk : in std_logic; + axi_stream : in axi_stream_slave_t; + rnd : inout RandomPType + ); + + procedure probability_stall_axi_stream( + signal aclk : in std_logic; + axi_stream : in axi_stream_master_t; + rnd : inout RandomPType + ); + + procedure probability_stall_axi_stream( + signal aclk : in std_logic; + stall_config : in stall_config_t; + rnd : inout RandomPType + ); + +end package; + +package body axi_stream_private_pkg is + + procedure probability_stall_axi_stream( + signal aclk : in std_logic; + axi_stream : in axi_stream_master_t; + rnd : inout RandomPType) is + begin + probability_stall_axi_stream(aclk, axi_stream.p_stall_config, rnd); + end procedure; + + procedure probability_stall_axi_stream( + signal aclk : in std_logic; + axi_stream : in axi_stream_slave_t; + rnd : inout RandomPType) is + begin + probability_stall_axi_stream(aclk, axi_stream.p_stall_config, rnd); + end procedure; + + procedure probability_stall_axi_stream( + signal aclk : in std_logic; + stall_config : in stall_config_t; + rnd : inout RandomPType) is + variable num_stall_cycles : natural := 0; + begin + if rnd.Uniform(0.0, 1.0) < stall_config.stall_probability then + num_stall_cycles := rnd.FavorSmall(stall_config.min_stall_cycles, stall_config.max_stall_cycles); + end if; + for stall in 0 to num_stall_cycles-1 loop + wait until rising_edge(aclk); + end loop; + end procedure; + +end package body; diff --git a/vunit/vhdl/verification_components/src/axi_stream_slave.vhd b/vunit/vhdl/verification_components/src/axi_stream_slave.vhd index 08306b900..0889c172b 100644 --- a/vunit/vhdl/verification_components/src/axi_stream_slave.vhd +++ b/vunit/vhdl/verification_components/src/axi_stream_slave.vhd @@ -11,9 +11,13 @@ context work.vunit_context; context work.com_context; use work.stream_slave_pkg.all; use work.axi_stream_pkg.all; +use work.axi_stream_private_pkg.all; use work.sync_pkg.all; use work.string_ptr_pkg.all; +library osvvm; +use osvvm.RandomPkg.RandomPType; + entity axi_stream_slave is generic ( slave : axi_stream_slave_t); @@ -76,66 +80,73 @@ begin tdest(tdest'range), tuser(tuser'range) ); + variable rnd : RandomPType; begin - -- Wait for messages to arrive on the queue, posted by the process above - wait until rising_edge(aclk) and (not is_empty(message_queue)); - - while not is_empty(message_queue) loop - msg := pop(message_queue); - msg_type := message_type(msg); - - if msg_type = wait_for_time_msg then - handle_sync_message(net, msg_type, msg); - wait until rising_edge(aclk); - elsif msg_type = notify_request_msg then - -- Ignore this message, but expect it - elsif msg_type = stream_pop_msg or msg_type = pop_axi_stream_msg then - tready <= '1'; - wait until (tvalid and tready) = '1' and rising_edge(aclk); - tready <= '0'; - - 
axi_stream_transaction := ( - tdata => tdata, - tlast => tlast = '1', - tkeep => tkeep, - tstrb => tstrb, - tid => tid, - tdest => tdest, - tuser => tuser - ); - - reply_msg := new_axi_stream_transaction_msg(axi_stream_transaction); - reply(net, msg, reply_msg); - elsif msg_type = check_axi_stream_msg then - tready <= '1'; - wait until (tvalid and tready) = '1' and rising_edge(aclk); - tready <= '0'; - - report_msg := new_string_ptr(pop_string(msg)); - if tdata'length > 0 then - check_equal(tdata, pop_std_ulogic_vector(msg), "TDATA mismatch, " & to_string(report_msg)); - check_equal(tkeep, pop_std_ulogic_vector(msg), "TKEEP mismatch, " & to_string(report_msg)); - check_equal(tstrb, pop_std_ulogic_vector(msg), "TSTRB mismatch, " & to_string(report_msg)); - end if; - check_equal(tlast, pop_std_ulogic(msg), "TLAST mismatch, " & to_string(report_msg)); - if tid'length > 0 then - check_equal(tid, pop_std_ulogic_vector(msg), "TID mismatch, " & to_string(report_msg)); - end if; - if tdest'length > 0 then - check_equal(tdest, pop_std_ulogic_vector(msg), "TDEST mismatch, " & to_string(report_msg)); + rnd.InitSeed(rnd'instance_name); + loop + -- Wait for messages to arrive on the queue, posted by the process above + wait until rising_edge(aclk) and (not is_empty(message_queue)); + + while not is_empty(message_queue) loop + msg := pop(message_queue); + msg_type := message_type(msg); + + if msg_type = wait_for_time_msg then + handle_sync_message(net, msg_type, msg); + wait until rising_edge(aclk); + elsif msg_type = notify_request_msg then + -- Ignore this message, but expect it + elsif msg_type = stream_pop_msg or msg_type = pop_axi_stream_msg then + + -- stall according to probability configuration + probability_stall_axi_stream(aclk, slave, rnd); + + tready <= '1'; + wait until (tvalid and tready) = '1' and rising_edge(aclk); + tready <= '0'; + + axi_stream_transaction := ( + tdata => tdata, + tlast => tlast = '1', + tkeep => tkeep, + tstrb => tstrb, + tid => tid, + tdest => tdest, + tuser => tuser + ); + + reply_msg := new_axi_stream_transaction_msg(axi_stream_transaction); + reply(net, msg, reply_msg); + elsif msg_type = check_axi_stream_msg then + tready <= '1'; + wait until (tvalid and tready) = '1' and rising_edge(aclk); + tready <= '0'; + + report_msg := new_string_ptr(pop_string(msg)); + if tdata'length > 0 then + check_equal(tdata, pop_std_ulogic_vector(msg), "TDATA mismatch, " & to_string(report_msg)); + check_equal(tkeep, pop_std_ulogic_vector(msg), "TKEEP mismatch, " & to_string(report_msg)); + check_equal(tstrb, pop_std_ulogic_vector(msg), "TSTRB mismatch, " & to_string(report_msg)); + end if; + check_equal(tlast, pop_std_ulogic(msg), "TLAST mismatch, " & to_string(report_msg)); + if tid'length > 0 then + check_equal(tid, pop_std_ulogic_vector(msg), "TID mismatch, " & to_string(report_msg)); + end if; + if tdest'length > 0 then + check_equal(tdest, pop_std_ulogic_vector(msg), "TDEST mismatch, " & to_string(report_msg)); + end if; + if tuser'length > 0 then + check_equal(tuser, pop_std_ulogic_vector(msg), "TUSER mismatch, " & to_string(report_msg)); + end if; + else + unexpected_msg_type(msg_type); end if; - if tuser'length > 0 then - check_equal(tuser, pop_std_ulogic_vector(msg), "TUSER mismatch, " & to_string(report_msg)); - end if; - else - unexpected_msg_type(msg_type); - end if; - end loop; - - notify_bus_process_done <= '1'; - wait until notify_bus_process_done = '1'; - notify_bus_process_done <= '0'; + end loop; + notify_bus_process_done <= '1'; + wait until 
notify_bus_process_done = '1'; + notify_bus_process_done <= '0'; + end loop; end process; axi_stream_monitor_generate : if slave.p_monitor /= null_axi_stream_monitor generate diff --git a/vunit/vhdl/verification_components/test/tb_axi_stream.vhd b/vunit/vhdl/verification_components/test/tb_axi_stream.vhd index 3ab98fed2..eb506afea 100644 --- a/vunit/vhdl/verification_components/test/tb_axi_stream.vhd +++ b/vunit/vhdl/verification_components/test/tb_axi_stream.vhd @@ -21,23 +21,31 @@ entity tb_axi_stream is runner_cfg : string; g_id_length : natural := 8; g_dest_length : natural := 8; - g_user_length : natural := 8 + g_user_length : natural := 8; + g_stall_percentage_master : natural range 0 to 100 := 0; + g_stall_percentage_slave : natural range 0 to 100 := 0 ); end entity; architecture a of tb_axi_stream is + + constant min_stall_cycles : natural := 5; + constant max_stall_cycles : natural := 15; + constant master_stall_config : stall_config_t := new_stall_config(stall_probability => real(g_stall_percentage_master)/100.0, min_stall_cycles => min_stall_cycles, max_stall_cycles => max_stall_cycles); + constant slave_stall_config : stall_config_t := new_stall_config(stall_probability => real(g_stall_percentage_slave)/100.0 , min_stall_cycles => min_stall_cycles, max_stall_cycles => max_stall_cycles); + constant master_axi_stream : axi_stream_master_t := new_axi_stream_master( data_length => 8, id_length => g_id_length, dest_length => g_dest_length, user_length => g_user_length, - logger => get_logger("master"), actor => new_actor("master"), - monitor => default_axi_stream_monitor, protocol_checker => default_axi_stream_protocol_checker + stall_config => master_stall_config, logger => get_logger("master"), actor => new_actor("master"), + monitor => default_axi_stream_monitor, protocol_checker => default_axi_stream_protocol_checker ); constant master_stream : stream_master_t := as_stream(master_axi_stream); constant master_sync : sync_handle_t := as_sync(master_axi_stream); constant slave_axi_stream : axi_stream_slave_t := new_axi_stream_slave( data_length => 8, id_length => g_id_length, dest_length => g_dest_length, user_length => g_user_length, - logger => get_logger("slave"), actor => new_actor("slave"), - monitor => default_axi_stream_monitor, protocol_checker => default_axi_stream_protocol_checker + stall_config => slave_stall_config, logger => get_logger("slave"), actor => new_actor("slave"), + monitor => default_axi_stream_monitor, protocol_checker => default_axi_stream_protocol_checker ); constant slave_stream : stream_slave_t := as_stream(slave_axi_stream); constant slave_sync : sync_handle_t := as_sync(slave_axi_stream); @@ -75,6 +83,24 @@ architecture a of tb_axi_stream is signal not_valid_id : std_logic; signal not_valid_dest : std_logic; signal not_valid_user : std_logic; + + ----------------------------------------------------------------------------- + -- signals used for the statistics for stall evaluation + type axis_stall_stats_fields_t is record + length, min, max, events : natural; + prev, start : std_logic; + end record; + + type axis_stall_stats_t is record + valid : axis_stall_stats_fields_t; + ready : axis_stall_stats_fields_t; + end record; + + signal axis_stall_stats : axis_stall_stats_t := ( + valid => (0, 1000, 0, 0, '0', '0'), + ready => (0, 1000, 0, 0, '0', '0') + ); + begin main : process @@ -464,6 +490,37 @@ begin check_equal(now, timestamp + 20 ns, " transaction time incorrect"); + elsif run("test random stall on master") or run("test random stall on slave") 
then
+      wait until rising_edge(aclk);
+      for i in 0 to 100 loop
+        pop_stream(net, slave_stream, reference);
+        push(reference_queue, reference);
+      end loop;
+      for i in 0 to 100 loop
+        push_stream(net, master_stream, std_logic_vector(to_unsigned(i + 1, data'length)), true);
+      end loop;
+
+      wait_until_idle(net, master_sync); -- wait until all transfers are done before checking them
+      wait_until_idle(net, slave_sync);
+
+      for i in 0 to 100 loop
+        reference := pop(reference_queue);
+        await_pop_stream_reply(net, reference, data);
+        check_equal(data, to_unsigned(i + 1, data'length), result("for await pop stream data"));
+      end loop;
+      info("There have been " & to_string(axis_stall_stats.valid.events) & " tvalid stall events");
+      info("Min stall length was " & to_string(axis_stall_stats.valid.min));
+      info("Max stall length was " & to_string(axis_stall_stats.valid.max));
+      if running_test_case = "test random stall on master" then
+        check((axis_stall_stats.valid.events < (g_stall_percentage_master+10)) and (axis_stall_stats.valid.events > (g_stall_percentage_master-10)), "Checking that the tvalid stall probability lies within reasonable boundaries");
+        check((axis_stall_stats.valid.min >= min_stall_cycles) and (axis_stall_stats.valid.max <= max_stall_cycles), "Checking that the minimal and maximal stall lengths are in expected boundaries");
+        check_equal(axis_stall_stats.ready.events, 0, "Checking that there are zero tready stall events");
+      else
+        check((axis_stall_stats.ready.events < (g_stall_percentage_slave+10)) and (axis_stall_stats.ready.events > (g_stall_percentage_slave-10)), "Checking that the tready stall probability lies within reasonable boundaries");
+        check((axis_stall_stats.ready.min >= min_stall_cycles) and (axis_stall_stats.ready.max <= max_stall_cycles), "Checking that the minimal and maximal stall lengths are in expected boundaries");
+        check_equal(axis_stall_stats.valid.events, 0, "Checking that there are zero tvalid stall events");
+      end if;
+
     end if;

     test_runner_cleanup(runner);
   end process;
@@ -556,5 +613,51 @@ begin
       tuser => tuser
     );

+  statistics : process(aclk)
+  begin
+    if rising_edge(aclk) then
+      axis_stall_stats.valid.prev <= tvalid;
+      axis_stall_stats.ready.prev <= tready;
+      -------------------------------------------------------------------------
+      -- TVALID and TREADY stall events counters
+      if tvalid and (not tready) and axis_stall_stats.ready.prev then
+        axis_stall_stats.ready.events <= axis_stall_stats.ready.events + 1;
+      end if;
+      if (not tvalid) and tready and axis_stall_stats.valid.prev then
+        axis_stall_stats.valid.events <= axis_stall_stats.valid.events + 1;
+      end if;
+
+      -------------------------------------------------------------------------
+      -- TVALID Minimal and Maximal Stall lengths
+      if tvalid then
+        axis_stall_stats.valid.start <= '1';
+      end if;
+
+      if (not tvalid) and axis_stall_stats.valid.start then
+        axis_stall_stats.valid.length <= axis_stall_stats.valid.length + 1;
+      end if;
+      if tvalid and axis_stall_stats.valid.start and (not axis_stall_stats.valid.prev) then
+        axis_stall_stats.valid.length <= 0;
+        axis_stall_stats.valid.min <= minimum(axis_stall_stats.valid.length, axis_stall_stats.valid.min);
+        axis_stall_stats.valid.max <= maximum(axis_stall_stats.valid.length, axis_stall_stats.valid.max);
+      end if;
+      -------------------------------------------------------------------------
+      -- TREADY Minimal and Maximal Stall lengths
+      if tready then
+        axis_stall_stats.ready.start <= '1';
+      end if;
+
+      if (not tready) and axis_stall_stats.ready.start then
+
axis_stall_stats.ready.length <= axis_stall_stats.ready.length + 1; + end if; + if tready and axis_stall_stats.ready.start and (not axis_stall_stats.ready.prev) then + axis_stall_stats.ready.length <= 0; + axis_stall_stats.ready.min <= minimum(axis_stall_stats.ready.length, axis_stall_stats.ready.min); + axis_stall_stats.ready.max <= maximum(axis_stall_stats.ready.length, axis_stall_stats.ready.max); + end if; + + end if; + end process; + aclk <= not aclk after 5 ns; end architecture; diff --git a/vunit/vhdl_parser.py b/vunit/vhdl_parser.py index b4e530cbe..1fb05f994 100644 --- a/vunit/vhdl_parser.py +++ b/vunit/vhdl_parser.py @@ -11,7 +11,7 @@ """ import re -from os.path import abspath +from pathlib import Path import logging from vunit.cached import cached from vunit.parsing.encodings import HDL_FILE_ENCODING @@ -32,7 +32,7 @@ def parse(self, file_name): Parse the VHDL code and return a VHDLDesignFile parse result parse result is re-used if content hash found in database """ - file_name = abspath(file_name) + file_name = str(Path(file_name).resolve()) return cached( "CachedVHDLParser.parse", VHDLDesignFile.parse, diff --git a/vunit/vivado/vivado.py b/vunit/vivado/vivado.py index 530e0b8ee..0978c32b4 100644 --- a/vunit/vivado/vivado.py +++ b/vunit/vivado/vivado.py @@ -10,7 +10,7 @@ from subprocess import check_call from os import makedirs -from os.path import abspath, join, dirname, exists, basename +from pathlib import Path def add_from_compile_order_file( @@ -69,15 +69,16 @@ def create_compile_order_file(project_file, compile_order_file, vivado_path=None """ print( "Generating Vivado project compile order into %s ..." - % abspath(compile_order_file) + % str(Path(compile_order_file).resolve()) ) - if not exists(dirname(compile_order_file)): - makedirs(dirname(compile_order_file)) + fpath = Path(compile_order_file) + if not fpath.parent.exists(): + makedirs(str(fpath.parent)) print("Extracting compile order ...") run_vivado( - join(dirname(__file__), "tcl", "extract_compile_order.tcl"), + str(Path(__file__).parent / "tcl" / "extract_compile_order.tcl"), tcl_args=[project_file, compile_order_file], vivado_path=vivado_path, ) @@ -101,13 +102,13 @@ def _read_compile_order(file_name): # Vivado generates duplicate files for different IP:s # using the same underlying libraries. We remove duplicates here - key = (library_name, basename(file_name)) + key = (library_name, Path(file_name).name) if key in unique: continue unique.add(key) if file_type == "Verilog Header": - include_dirs.add(dirname(file_name)) + include_dirs.add(str(Path(file_name).parent)) else: compile_order.append((library_name, file_name)) @@ -121,10 +122,12 @@ def run_vivado(tcl_file_name, tcl_args=None, cwd=None, vivado_path=None): Note: the shell=True is important in windows where Vivado is just a bat file. 
""" vivado = ( - "vivado" if vivado_path is None else join(abspath(vivado_path), "bin", "vivado") + "vivado" + if vivado_path is None + else str(Path(vivado_path).resolve() / "bin" / "vivado") ) cmd = "{} -nojournal -nolog -notrace -mode batch -source {}".format( - vivado, abspath(tcl_file_name) + vivado, str(Path(tcl_file_name).resolve()) ) if tcl_args is not None: cmd += " -tclargs " + " ".join([str(val) for val in tcl_args]) diff --git a/vunit/vunit_cli.py b/vunit/vunit_cli.py index 1e9731eb7..6bb69bc09 100644 --- a/vunit/vunit_cli.py +++ b/vunit/vunit_cli.py @@ -10,13 +10,9 @@ Adding Custom Command Line Arguments ------------------------------------ It is possible to add custom command line arguments to your ``run.py`` -scripts using the :class:`.VUnitCLI` class. - -.. autoclass:: vunit.vunit_cli.VUnitCLI - :members: - -A :class:`.VUnitCLI` object has a ``parser`` field which is an -`ArgumentParser` object of the `argparse`_ library. +scripts using the ``VUnitCLI`` class. A ``VUnitCLI`` object +has a ``parser`` field which is an `ArgumentParser` object of the +`argparse`_ library. .. _argparse: https://docs.python.org/3/library/argparse.html @@ -39,7 +35,7 @@ """ import argparse -from os.path import join, abspath +from pathlib import Path import os from vunit.sim_if.factory import SIMULATOR_FACTORY from vunit.about import version @@ -80,7 +76,7 @@ def _create_argument_parser(description=None, for_documentation=False): if for_documentation: default_output_path = "./vunit_out" else: - default_output_path = join(abspath(os.getcwd()), "vunit_out") + default_output_path = str(Path(os.getcwd()).resolve() / "vunit_out") parser = argparse.ArgumentParser(description=description) @@ -267,8 +263,10 @@ def positive_int(val): ival = int(val) assert ival > 0 return ival - except (ValueError, AssertionError): - raise argparse.ArgumentTypeError("'%s' is not a valid positive int" % val) + except (ValueError, AssertionError) as exv: + raise argparse.ArgumentTypeError( + "'%s' is not a valid positive int" % val + ) from exv def _parser_for_documentation():