From 679f99e6d8976386d454484cc73725a5c2d5dbc0 Mon Sep 17 00:00:00 2001 From: gcattan Date: Sat, 25 Sep 2021 19:20:45 +0200 Subject: [PATCH 01/25] - clean everything related to pyRiemann core implementation - add quantum wrapper - update dependency and setup file --- .coveragerc | 8 + .github/workflows/deploy_ghpages.yml | 52 ++++ .github/workflows/testing.yml | 39 +++ .gitignore | 10 + .pre-commit-config.yaml | 13 + LICENSE | 24 ++ MANIFEST.in | 1 + doc/Makefile | 192 +++++++++++++ doc/_static/copybutton.js | 59 ++++ doc/_static/style.css | 117 ++++++++ doc/_templates/class.rst | 10 + doc/_templates/function.rst | 6 + doc/api.rst | 236 +++++++++++++++ doc/conf.py | 335 ++++++++++++++++++++++ doc/index.rst | 98 +++++++ doc/installing.rst | 49 ++++ doc/introduction.rst | 4 + doc/requirements.txt | 10 + doc/whatsnew.rst | 114 ++++++++ examples/ERP/README.txt | 4 + examples/ERP/plot_classify_EEG_quantum.py | 127 ++++++++ examples/README.txt | 6 + pyriemann_qiskit/__init__.py | 7 + pyriemann_qiskit/_version.py | 1 + pyriemann_qiskit/classification.py | 317 ++++++++++++++++++++ requirements.txt | 4 + setup.cfg | 13 + setup.py | 40 +++ tests/conftest.py | 200 +++++++++++++ tests/test_classification.py | 148 ++++++++++ 30 files changed, 2244 insertions(+) create mode 100644 .coveragerc create mode 100644 .github/workflows/deploy_ghpages.yml create mode 100644 .github/workflows/testing.yml create mode 100644 .gitignore create mode 100644 .pre-commit-config.yaml create mode 100644 LICENSE create mode 100644 MANIFEST.in create mode 100644 doc/Makefile create mode 100644 doc/_static/copybutton.js create mode 100644 doc/_static/style.css create mode 100644 doc/_templates/class.rst create mode 100644 doc/_templates/function.rst create mode 100644 doc/api.rst create mode 100644 doc/conf.py create mode 100644 doc/index.rst create mode 100644 doc/installing.rst create mode 100644 doc/introduction.rst create mode 100644 doc/requirements.txt create mode 100644 doc/whatsnew.rst create 
mode 100644 examples/ERP/README.txt create mode 100644 examples/ERP/plot_classify_EEG_quantum.py create mode 100644 examples/README.txt create mode 100644 pyriemann_qiskit/__init__.py create mode 100644 pyriemann_qiskit/_version.py create mode 100644 pyriemann_qiskit/classification.py create mode 100644 requirements.txt create mode 100644 setup.cfg create mode 100644 setup.py create mode 100644 tests/conftest.py create mode 100644 tests/test_classification.py diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 00000000..7ff1ba24 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,8 @@ +[report] +include = */pyriemann/* +omit = + */python?.?/* + */site-packages/pytest/* + */setup.py + */tests/* + */examples/* diff --git a/.github/workflows/deploy_ghpages.yml b/.github/workflows/deploy_ghpages.yml new file mode 100644 index 00000000..daa8ef63 --- /dev/null +++ b/.github/workflows/deploy_ghpages.yml @@ -0,0 +1,52 @@ +name: Deploy GitHub pages + +on: [push, pull_request] + + +jobs: + build_docs: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Generate HTML docs + uses: ammaraskar/sphinx-action@master + with: + docs-folder: "doc/" + pre-build-command: | + apt-get update + pip install -e . 
+ pip install -r doc/doc-requirements.txt + - name: Upload generated HTML as artifact + uses: actions/upload-artifact@v2 + with: + name: DocHTML + path: doc/build/html/ + + # deploy_docs: + # if: github.ref == 'refs/heads/master' + # needs: + # build_docs + # runs-on: ubuntu-latest + # steps: + # - uses: actions/checkout@v2 + # - name: Download artifacts + # uses: actions/download-artifact@v2 + # with: + # name: DocHTML + # path: doc/build/html/ + # - name: Commit to documentation branch + # run: | + # git clone --no-checkout --depth 1 https://github.com/${{ github.repository_owner }}/qndiag.git --branch gh-pages --single-branch gh-pages + # cp -r doc/build/html/* gh-pages/ + # cd gh-pages + # touch .nojekyll + # git config --local user.email "pyriemann@github.com" + # git config --local user.name "pyriemann GitHub Action" + # git add . + # git commit -m "Update documentation" -a || true + # - name: Push changes + # uses: ad-m/github-push-action@v0.6.0 + # with: + # branch: gh-pages + # directory: gh-pages + # github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml new file mode 100644 index 00000000..7350777f --- /dev/null +++ b/.github/workflows/testing.yml @@ -0,0 +1,39 @@ +# This workflow will install Python dependencies, run tests and lint with a variety of Python versions +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions + +name: testing + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + +jobs: + build: + + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + python-version: [3.6, 3.7, 3.8] + os: [ubuntu-latest, macOS-latest, windows-latest] + + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install 
--upgrade pip + - name: Install package + run: | + python -m pip install .[tests] + - name: Lint with flake8 + run: | + flake8 examples tests pyriemann + - name: Test with pytest + run: | + pytest diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..1e7be755 --- /dev/null +++ b/.gitignore @@ -0,0 +1,10 @@ +*.pyc +.DS_Store +.coverage +.coveralls.yml +/doc/build +/doc/generated +/doc/auto_examples +/dist +/pyriemann_qiskit.egg-info/* +*-e diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..9295ecab --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,13 @@ +default_language_version: + python: python3.7 +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.4.0 + hooks: + - id: check-yaml + - id: check-json + - id: check-added-large-files + - id: end-of-file-fixer + - id: trailing-whitespace + - id: check-case-conflict + - id: mixed-line-ending diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..d399506e --- /dev/null +++ b/LICENSE @@ -0,0 +1,24 @@ +Copyright © 2015, authors of pyRiemann +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the names of pyriemann authors nor the names of any + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 00000000..bb3ec5f0 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1 @@ +include README.md diff --git a/doc/Makefile b/doc/Makefile new file mode 100644 index 00000000..cac96cf5 --- /dev/null +++ b/doc/Makefile @@ -0,0 +1,192 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) +endif + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
+ +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " applehelp to make an Apple Help Book" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + @echo " coverage to run coverage check of the documentation (if enabled)" + +clean: + rm -rf $(BUILDDIR)/* + +html: + yes | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 
+ +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/pyRiemann.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pyRiemann.qhc" + +applehelp: + $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp + @echo + @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." + @echo "N.B. You won't be able to view it unless you put it in" \ + "~/Library/Documentation/Help or install it in your application" \ + "bundle." + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/pyRiemann" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/pyRiemann" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." 
+ +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +coverage: + $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage + @echo "Testing of coverage in the sources finished, look at the " \ + "results in $(BUILDDIR)/coverage/python.txt." + +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/doc/_static/copybutton.js b/doc/_static/copybutton.js new file mode 100644 index 00000000..0a7db6d6 --- /dev/null +++ b/doc/_static/copybutton.js @@ -0,0 +1,59 @@ +// originally taken from scikit-learn's Sphinx theme +$(document).ready(function() { + /* Add a [>>>] button on the top-right corner of code samples to hide + * the >>> and ... prompts and the output and thus make the code + * copyable. 
+ * Note: This JS snippet was taken from the official python.org + * documentation site.*/ + var div = $('.highlight-python .highlight,' + + '.highlight-python3 .highlight,' + + '.highlight-pycon .highlight') + var pre = div.find('pre'); + + // get the styles from the current theme + pre.parent().parent().css('position', 'relative'); + var hide_text = 'Hide the prompts and output'; + var show_text = 'Show the prompts and output'; + var border_width = pre.css('border-top-width'); + var border_style = pre.css('border-top-style'); + var border_color = pre.css('border-top-color'); + var button_styles = { + 'cursor':'pointer', 'position': 'absolute', 'top': '0', 'right': '0', + 'border-color': border_color, 'border-style': border_style, + 'border-width': border_width, 'color': border_color, 'text-size': '75%', + 'font-family': 'monospace', 'padding-left': '0.2em', 'padding-right': '0.2em' + } + + // create and add the button to all the code blocks that contain >>> + div.each(function(index) { + var jthis = $(this); + if (jthis.find('.gp').length > 0) { + var button = $('>>>'); + button.css(button_styles) + button.attr('title', hide_text); + jthis.prepend(button); + } + // tracebacks (.gt) contain bare text elements that need to be + // wrapped in a span to work with .nextUntil() (see later) + jthis.find('pre:has(.gt)').contents().filter(function() { + return ((this.nodeType == 3) && (this.data.trim().length > 0)); + }).wrap(''); + }); + + // define the behavior of the button when it's clicked + $('.copybutton').toggle( + function() { + var button = $(this); + button.parent().find('.go, .gp, .gt').hide(); + button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'hidden'); + button.css('text-decoration', 'line-through'); + button.attr('title', show_text); + }, + function() { + var button = $(this); + button.parent().find('.go, .gp, .gt').show(); + button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'visible'); + 
button.css('text-decoration', 'none'); + button.attr('title', hide_text); + }); +}); diff --git a/doc/_static/style.css b/doc/_static/style.css new file mode 100644 index 00000000..74adc0f5 --- /dev/null +++ b/doc/_static/style.css @@ -0,0 +1,117 @@ +body { color: #444444 !important; } + +h1 { font-size: 40px !important; } +h2 { font-size: 32px !important; } +h3 { font-size: 24px !important; } +h4 { font-size: 18px !important; } +h5 { font-size: 14px !important; } +h6 { font-size: 10px !important; } + +footer a{ + + color: #4c72b0 !important; +} +a.reference { + color: #4c72b0 !important; +} + +blockquote p { + font-size: 14px !important; +} + +blockquote { + padding-top: 4px !important; + padding-bottom: 4px !important; + margin: 0 0 0px !important; +} + +pre { + background-color: #f6f6f9 !important; +} + +code { + color: #49759c !important; + background-color: transparent; !important; +} + +code.descclassname { + padding-right: 0px !important; +} + +code.descname { + padding-left: 0px !important; +} + +dt:target, span.highlighted { + background-color: #ffffff !important; +} + +ul { + padding-left: 20px !important; +} + +ul.dropdown-menu { + padding-left: 0px !important; +} + +.alert-info { + background-color: #adb8cb !important; + border-color: #adb8cb !important; + color: #2c3e50 !important; +} + +/* From https://github.com/twbs/bootstrap/issues/1768 */ +*[id]:before { + /* display: block; */ + content: " "; + margin-top: -60px; + height: 60px; + visibility: hidden; +} + +.dataframe table { + /*Uncomment to center tables horizontally*/ + /* margin-left: auto; */ + /* margin-right: auto; */ + border: none; + border-collapse: collapse; + border-spacing: 0; + font-size: 12px; + table-layout: fixed; +} + +.dataframe thead { + border-bottom: 1px solid; + vertical-align: bottom; +} + +.dataframe tr, th, td { + text-align: left; + vertical-align: middle; + padding: 0.5em 0.5em; + line-height: normal; + white-space: normal; + max-width: none; + border: none; +} + 
+.dataframe th { + font-weight: bold; +} + +table { + margin-bottom: 20px; +} + +tbody tr:nth-child(odd) { + background: #f5f5f5; +} + +tbody tr:hover { + background: rgba(66, 165, 245, 0.2); +} + +div.body { + min-width: None; + max-width: None; +} diff --git a/doc/_templates/class.rst b/doc/_templates/class.rst new file mode 100644 index 00000000..f71c4d49 --- /dev/null +++ b/doc/_templates/class.rst @@ -0,0 +1,10 @@ +{{ fullname }} +{{ underline }} + +.. currentmodule:: {{ module }} + +.. autoclass:: {{ objname }} + + {% block methods %} + .. automethod:: __init__ + {% endblock %} diff --git a/doc/_templates/function.rst b/doc/_templates/function.rst new file mode 100644 index 00000000..4d7ea38a --- /dev/null +++ b/doc/_templates/function.rst @@ -0,0 +1,6 @@ +{{ fullname }} +{{ underline }} + +.. currentmodule:: {{ module }} + +.. autofunction:: {{ objname }} diff --git a/doc/api.rst b/doc/api.rst new file mode 100644 index 00000000..947d462e --- /dev/null +++ b/doc/api.rst @@ -0,0 +1,236 @@ +.. _api_ref: + +============= +API reference +============= + +Covariance Estimation +--------------------- +.. _estimation_api: +.. currentmodule:: pyriemann.estimation + +.. autosummary:: + :toctree: generated/ + :template: class.rst + + Covariances + ERPCovariances + XdawnCovariances + CospCovariances + Coherences + HankelCovariances + Shrinkage + +Embedding +--------- +.. _embedding_api: +.. currentmodule:: pyriemann.embedding + +.. autosummary:: + :toctree: generated/ + :template: class.rst + + Embedding + +Classification +-------------- +.. _classification_api: +.. currentmodule:: pyriemann.classification + +.. autosummary:: + :toctree: generated/ + :template: class.rst + + MDM + FgMDM + TSclassifier + KNearestNeighbor + QuanticSVM + QuanticVQC + +Clustering +------------------ +.. _clustering_api: +.. currentmodule:: pyriemann.clustering + +.. 
autosummary:: + :toctree: generated/ + :template: class.rst + + Kmeans + KmeansPerClassTransform + Potato + + +Tangent Space +------------------ +.. _tangentspace_api: +.. currentmodule:: pyriemann.tangentspace + +.. autosummary:: + :toctree: generated/ + :template: class.rst + + TangentSpace + FGDA + +Spatial Filtering +------------------ +.. _spatialfilter_api: +.. currentmodule:: pyriemann.spatialfilters + +.. autosummary:: + :toctree: generated/ + :template: class.rst + + Xdawn + CSP + SPoC + BilinearFilter + AJDC + +Preprocessing +------------- +.. _preprocessing_api: +.. currentmodule:: pyriemann.preprocessing + +.. autosummary:: + :toctree: generated/ + :template: class.rst + + Whitening + +Channel selection +------------------ +.. _channelselection_api: +.. currentmodule:: pyriemann.channelselection + +.. autosummary:: + :toctree: generated/ + :template: class.rst + + ElectrodeSelection + FlatChannelRemover + +Stats +------------------ +.. _stats_api: +.. currentmodule:: pyriemann.stats + +.. autosummary:: + :toctree: generated/ + :template: class.rst + + PermutationDistance + PermutationModel + + +Utils function +-------------- + +Utils functions are low level functions that implement most base components of Riemannian Geometry. + +Covariance preprocessing +~~~~~~~~~~~~~~~~~~~~~~~~ +.. _covariance_api: +.. currentmodule:: pyriemann.utils.covariance + +.. autosummary:: + :toctree: generated/ + + covariances + cross_spectrum + cospectrum + coherence + normalize + get_nondiag_weight + + +Distances +~~~~~~~~~~~~~~~~~~~~~~ +.. _distance_api: +.. currentmodule:: pyriemann.utils.distance + +.. autosummary:: + :toctree: generated/ + + distance + distance_euclid + distance_riemann + distance_logeuclid + distance_logdet + distance_kullback + distance_kullback_sym + distance_wasserstein + + +Mean +~~~~~~~~~~~~~~~~~~~~~~ +.. _mean_api: +.. currentmodule:: pyriemann.utils.mean + +.. 
autosummary:: + :toctree: generated/ + + mean_covariance + mean_euclid + mean_riemann + mean_logeuclid + mean_logdet + mean_wasserstein + mean_ale + mean_alm + mean_harmonic + mean_kullback_sym + + +Geodesic +~~~~~~~~~~~~~~~~~~~~~~ +.. _geodesic_api: +.. currentmodule:: pyriemann.utils.geodesic + +.. autosummary:: + :toctree: generated/ + + geodesic + geodesic_riemann + geodesic_euclid + geodesic_logeuclid + + +Tangent Space +~~~~~~~~~~~~~~~~~~~~~~ +.. _ts_base_api: +.. currentmodule:: pyriemann.utils.tangentspace + +.. autosummary:: + :toctree: generated/ + + tangent_space + untangent_space + +Base +~~~~~~~~~~~~~~~~~~~~~~ +.. _base_api: +.. currentmodule:: pyriemann.utils.base + +.. autosummary:: + :toctree: generated/ + + sqrtm + invsqrtm + expm + logm + powm + +Aproximate Joint Diagonalization +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +.. _ajd_api: +.. currentmodule:: pyriemann.utils.ajd + +.. autosummary:: + :toctree: generated/ + + rjd + ajd_pham + uwedge diff --git a/doc/conf.py b/doc/conf.py new file mode 100644 index 00000000..c91ebb21 --- /dev/null +++ b/doc/conf.py @@ -0,0 +1,335 @@ +# -*- coding: utf-8 -*- +# +# pyRiemann documentation build configuration file, created by +# sphinx-quickstart on Sun Apr 19 13:17:55 2015. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import matplotlib + +# mne update path +import mne +print(mne.datasets.sample.data_path(update_path=True)) +print(mne.datasets.eegbci.load_data(1, [6, 10, 14], update_path=True)) + +matplotlib.use('Agg') +import shlex +import sphinx_gallery +import sphinx_bootstrap_theme + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. 
If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +#sys.path.insert(0, os.path.abspath('.')) + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. + +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.coverage', + 'sphinx.ext.doctest', + 'sphinx.ext.intersphinx', + # 'sphinx.ext.linkcode', + # 'sphinx.ext.mathjax', + 'sphinx.ext.imgmath', + 'sphinx.ext.todo', + 'numpydoc', + # 'sphinx.ext.ifconfig', + # 'sphinx.ext.viewcode', + 'sphinx_gallery.gen_gallery', +] +plot_include_source = True +plot_formats = [("png", 90)] +plot_html_show_formats = False +plot_html_show_source_link = False + +sphinx_gallery_conf = { + 'examples_dirs': ['../examples', '../tutorials'], + 'gallery_dirs': ['auto_examples'] +} +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] +autodoc_default_flags = ['inherited-members'] + +autosummary_generate = True +numpydoc_show_class_members = False + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'pyRiemann' +copyright = u'2015-2021, PyRiemann Contributors' +author = u'Alexandre Barachant' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. 
+sys.path.insert(0, os.path.abspath(os.path.pardir)) +import pyriemann +version = pyriemann.__version__ +# The full version, including alpha/beta/rc tags. +release = pyriemann.__version__ + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. 
+html_theme = 'bootstrap' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + 'source_link_position': + "footer", + 'bootswatch_theme': + "flatly", + 'navbar_sidebarrel': + False, + 'bootstrap_version': + "3", + 'navbar_links': [("API", "api"), ("Gallery", "auto_examples/index")], +} + +# Add any paths that contain custom themes here, relative to this directory. +html_theme_path = sphinx_bootstrap_theme.get_html_theme_path() + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. 
+#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +#html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +#html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +#html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'pyRiemanndoc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). 
+ #'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + #'preamble': '', + + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'pyRiemann.tex', u'pyRiemann Documentation', + u'Alexandre Barachant', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [(master_doc, 'pyriemann', u'pyRiemann Documentation', [author], + 1)] + +# If true, show URL addresses after external links. +#man_show_urls = False + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'pyriemann', u'pyRiemann Documentation', author, 'pyriemann', + 'One line description of project.', 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. 
+#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = {'https://docs.python.org/': None} + + +def setup(app): + app.add_javascript('copybutton.js') + app.add_stylesheet('style.css') diff --git a/doc/index.rst b/doc/index.rst new file mode 100644 index 00000000..99547c57 --- /dev/null +++ b/doc/index.rst @@ -0,0 +1,98 @@ +.. raw:: html + + + +pyRiemann: Biosignals classification with Riemannian Geometry +============================================================= + +.. raw:: html + +
+ + +
+ +
+
+
+ +pyRiemann is a Python machine learning library based on scikit-learn API. It provides a high-level interface for classification and manipulation of multivariate signal through Riemannian Geometry of covariance matrices. + +pyRiemann aims at being a generic package for multivariate signal classification but has been designed around applications of biosignal (M/EEG, EMG, etc) classification. + +For a brief introduction to the ideas behind the package, you can read the +:ref:`introductory notes `. More practical information is on the +:ref:`installation page `. You may also want to browse the +`example gallery `_ to get a sense for what you can do with pyRiemann +and :ref:`API reference ` to find out how. + +To see the code or report a bug, please visit the `github repository +`_. + +.. raw:: html + +
+
+
+
+

Content

+
+
+ +.. toctree:: + :maxdepth: 1 + + Introduction + Release notes + Installing + Example gallery + API reference + +.. raw:: html + +
+
+
+ +
+
diff --git a/doc/installing.rst b/doc/installing.rst new file mode 100644 index 00000000..9b558cc5 --- /dev/null +++ b/doc/installing.rst @@ -0,0 +1,49 @@ +.. _installing: + +Installing pyRiemann +==================== + +The easiest way to install a stable version of pyRiemann is through pypi, the python package manager : + +``pip install pyriemann`` + +For a bleeding edge version, you can clone the source code on `github `__ and install directly the package from source. + +``pip install -e .`` + +The install script will install the required dependencies. If you want also to build the documentation and to run the test locally, you could install all development dependencies with + +``pip install -e .[docs,tests]`` + +If you use a zsh shell, you need to write `pip install -e .\[docs,tests\]`. If you do not know what zsh is, you could use the above command. + + +Dependencies +~~~~~~~~~~~~ + +- Python (>= 3.6) + +Mandatory dependencies +^^^^^^^^^^^^^^^^^^^^^^ + +- `numpy `__ + +- `scipy `__ + +- `scikit-learn >=0.17 `__ + +- `pandas `__ + +- `joblib `__ + +- `qiskit `__ + +Recommended dependencies +^^^^^^^^^^^^^^^^^^^^^^^^ +These dependencies are recommended to use the plotting functions of pyriemann or to run examples and tutorials, but they are not mandatory: + +- `mne-python `__ + +- `matplotlib `__ + +- `seaborn `__ diff --git a/doc/introduction.rst b/doc/introduction.rst new file mode 100644 index 00000000..d1d6a62c --- /dev/null +++ b/doc/introduction.rst @@ -0,0 +1,4 @@ +.. _introduction: + +Introduction to pyRiemann +========================= diff --git a/doc/requirements.txt b/doc/requirements.txt new file mode 100644 index 00000000..1645b2da --- /dev/null +++ b/doc/requirements.txt @@ -0,0 +1,10 @@ +sphinx-gallery +sphinx-bootstrap_theme +numpydoc +cython +mne +seaborn +scikit-learn +joblib +pandas +qiskit diff --git a/doc/whatsnew.rst b/doc/whatsnew.rst new file mode 100644 index 00000000..2c8ebc0b --- /dev/null +++ b/doc/whatsnew.rst @@ -0,0 +1,114 @@ +..
_whatsnew: + +.. currentmodule:: pyriemann + +What's new in the package +========================= + +A catalog of new features, improvements, and bug-fixes in each release. + +v0.2.8.dev +---------- + +- Correct spectral estimation in :func:`pyriemann.utils.covariance.cross_spectrum` to obtain equivalence with SciPy + +- Add instantaneous, lagged and imaginary coherences in :func:`pyriemann.utils.covariance.coherence` and :class:`pyriemann.estimation.Coherences` + +- Add ``partial_fit`` in :class:`pyriemann.clustering.Potato`, useful for an online update; and update example on artifact detection. + +- Deprecate :func:`pyriemann.utils.viz.plot_confusion_matrix` as sklearn integrate its own version. + +- Add Ando-Li-Mathias mean estimation in :func:`pyriemann.utils.mean.mean_covariance` + +- Add Schaefer-Strimmer covariance estimator in :func:`pyriemann.utils.covariance.covariances`, and an example to compare estimators + +- Refactor tests + fix refit of :class:`pyriemann.tangentspace.TangentSpace` + +v0.2.7 (June 2021) +------------------ + +- Add example on SSVEP classification + +- Fix compatibility with scikit-learn v0.24 + +- Correct probas of :class:`pyriemann.classification.MDM` + +- Add ``predict_proba`` for :class:`pyriemann.clustering.Potato`, and an example on artifact detection + +- Add weights to Pham's AJD algorithm :func:`pyriemann.utils.ajd.ajd_pham` + +- Add :func:`pyriemann.utils.covariance.cross_spectrum`, fix :func:`pyriemann.utils.covariance.cospectrum`; :func:`pyriemann.utils.covariance.coherence` output is kept unchanged + +- Add :class:`pyriemann.spatialfilters.AJDC` for BSS and gBSS, with an example on artifact correction + +- Add :class:`pyriemann.preprocessing.Whitening`, with optional dimension reduction + +v0.2.6 (March 2020) +------------------- + +- Updated for better Scikit-Learn v0.22 support + +v0.2.5 (January 2018) +--------------------- + +- Added BilinearFilter + +- Added a permutation test for generic scikit-learn estimator + +- 
Stats module refactoring, with distance based t-test and f-test + +- Removed two way permutation test + +- Added FlatChannelRemover + +- Support for python 3.5 in travis + +- Added Shrinkage transformer + +- Added Coherences transformer + +- Added Embedding class. + +v0.2.4 (June 2016) +------------------ + +- Improved documentation + +- Added TSclassifier for out-of the box tangent space classification. + +- Added Wasserstein distance and mean. + +- Added NearestNeighbor classifier. + +- Added Softmax probabilities for MDM. + +- Added CSP for covariance matrices. + +- Added Approximate Joint diagonalization algorithms (JADE, PHAM, UWEDGE). + +- Added ALE mean. + +- Added Multiclass CSP. + +- API: param name changes in `CospCovariances` to comply to Scikit-Learn. + +- API: attributes name changes in most modules to comply to the Scikit-Learn naming convention. + +- Added `HankelCovariances` estimation + +- Added `SPoC` spatial filtering + +- Added Harmonic mean + +- Added Kullback leibler mean + +v0.2.3 (November 2015) +---------------------- + +- Added multiprocessing for MDM with joblib. + +- Added kullback-leibler divergence. + +- Added Riemannian Potato. + +- Added sample_weight for mean estimation and MDM. diff --git a/examples/ERP/README.txt b/examples/ERP/README.txt new file mode 100644 index 00000000..2372c943 --- /dev/null +++ b/examples/ERP/README.txt @@ -0,0 +1,4 @@ +Classification of ERP +--------------------- + +Event related potential classification with RG. diff --git a/examples/ERP/plot_classify_EEG_quantum.py b/examples/ERP/plot_classify_EEG_quantum.py new file mode 100644 index 00000000..c3f65089 --- /dev/null +++ b/examples/ERP/plot_classify_EEG_quantum.py @@ -0,0 +1,127 @@ +""" +==================================================================== +ERP EEG decoding with Quantum Classifier. +==================================================================== + +Decoding applied to EEG data in sensor space decomposed using Xdawn. 
+After spatial filtering, covariances matrices are estimated, then projected in +the tangent space and classified with a quantum classifier + +""" +# Author: Gregoire Cattan +# Modified from plot_classify_EEG_tangentspace.py +# License: BSD (3-clause) + +import numpy as np + +from pyriemann.estimation import XdawnCovariances +from pyriemann.tangentspace import TangentSpace +from pyriemann_qiskit.classification import QuanticSVM + +import mne +from mne import io +from mne.datasets import sample + +from sklearn.pipeline import make_pipeline +from sklearn.model_selection import train_test_split +from sklearn.metrics import confusion_matrix, ConfusionMatrixDisplay + +from matplotlib import pyplot as plt + +# cvxpy is not correctly imported due to wheel not building +# in the doc pipeline +__cvxpy__ = True +try: + import cvxpy + del cvxpy +except Exception: + __cvxpy__ = False + +print(__doc__) + +data_path = sample.data_path() + +############################################################################### +# Set parameters and read data +raw_fname = data_path + "/MEG/sample/sample_audvis_filt-0-40_raw.fif" +event_fname = data_path + "/MEG/sample/sample_audvis_filt-0-40_raw-eve.fif" +tmin, tmax = -0.0, 1 +event_id = dict(aud_l=1, aud_r=2, vis_l=3, vis_r=4) + +# Setup for reading the raw data +raw = io.Raw(raw_fname, preload=True, verbose=False) +raw.filter(2, None, method="iir") # replace baselining with high-pass +events = mne.read_events(event_fname) + +raw.info["bads"] = ["MEG 2443"] # set bad channels +picks = mne.pick_types( + raw.info, meg=False, eeg=True, stim=False, eog=False, exclude="bads" +) + +# Read epochs +epochs = mne.Epochs( + raw, + events, + event_id, + tmin, + tmax, + proj=False, + picks=picks, + baseline=None, + preload=True, + verbose=False, +) + +X = epochs.get_data() +y = epochs.events[:, -1] + +# Reduce the number of classes as QuanticBase supports only 2 classes +y[y % 3 == 0] = 0 +y[y % 3 != 0] = 1 + +# Reduce trial number to dimish testing 
time +X = X[:100] +y = y[:100] + +# ...skipping the KFold validation parts (for the purpose of the test only) +X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3) + +############################################################################### +# Decoding in tangent space with a quantum classifier + +# Time complexity of quantum algorithm depends on the number of trials and +# the number of elements inside the correlation matrices +# Thus we reduce elements number by using restrictive spatial filtering +sf = XdawnCovariances(nfilter=1) + + +# ...and dividing the number of remaining elements by two +def ds(v): + return v[::2] + + +# Projecting correlation matrices into the tangent space +# as quantum algorithms take vectors as inputs +# (If not, then matrices will be inlined inside the quantum classifier) +tg = TangentSpace() + +# Results will be computed for QuanticSVM versus SKLearnSVM for comparison +for quantum in [True, False]: + # This is a hack for the documentation pipeline + if(not __cvxpy__): + continue + + qsvm = QuanticSVM(target=1, verbose=False, + quantum=quantum, processVector=ds) + clf = make_pipeline(sf, tg, qsvm) + clf.fit(X_train, y_train) + y_pred = clf.predict(X_test) + + # Printing the results + acc = np.mean(y_pred == y_test) + print("Classification accuracy: %f " % (acc)) + + names = ['0(quantum)', '1(quantum)'] if quantum else ['0', '1'] + cm = confusion_matrix(y_pred, y_test) + ConfusionMatrixDisplay(cm, display_labels=names).plot() + plt.show() diff --git a/examples/README.txt b/examples/README.txt new file mode 100644 index 00000000..0db9ef0e --- /dev/null +++ b/examples/README.txt @@ -0,0 +1,6 @@ +Examples Gallery +================ + +.. contents:: Contents + :local: + :depth: 2 diff --git a/pyriemann_qiskit/__init__.py b/pyriemann_qiskit/__init__.py new file mode 100644 index 00000000..06efeec5 --- /dev/null +++ b/pyriemann_qiskit/__init__.py @@ -0,0 +1,7 @@ +from ._version import __version__ +from . 
import classification + +__all__ = [ + '__version__', + 'classification', +] diff --git a/pyriemann_qiskit/_version.py b/pyriemann_qiskit/_version.py new file mode 100644 index 00000000..32bb603b --- /dev/null +++ b/pyriemann_qiskit/_version.py @@ -0,0 +1 @@ +__version__ = '0.0.0.dev' diff --git a/pyriemann_qiskit/classification.py b/pyriemann_qiskit/classification.py new file mode 100644 index 00000000..210d26e3 --- /dev/null +++ b/pyriemann_qiskit/classification.py @@ -0,0 +1,317 @@ +"""Module for classification function.""" +import numpy as np + +from sklearn.base import BaseEstimator, ClassifierMixin +from sklearn.model_selection import train_test_split + +from qiskit import BasicAer, IBMQ +from qiskit.circuit.library import ZZFeatureMap, TwoLocal +from qiskit.aqua import QuantumInstance, aqua_globals +from qiskit.aqua.quantum_instance import logger +from qiskit.aqua.algorithms import QSVM, SklearnSVM, VQC +from qiskit.aqua.utils import get_feature_dimension +from qiskit.providers.ibmq import least_busy +from qiskit.aqua.components.optimizers import SPSA +from datetime import datetime +import logging +logger.level = logging.INFO + + +class QuanticBase(BaseEstimator, ClassifierMixin): + + """Quantum classification. + + This class implements a SKLearn wrapper around Qiskit library. + It provides a mean to run classification tasks on a local and + simulated quantum computer or a remote and real quantum computer. + Difference between simulated and real quantum computer will be that: + - There is no noise on a simulated quantum computer (so results are better) + - Real quantum computer are quicker than simulator + - Real quantum computer tasks are assigned to a queue + before being executed on a back-end + + WARNING: At the moment this implementation only supports binary + classification (eg. 
Target vs Non-Target experiment) + + Parameters + ---------- + target : int + Label of the target symbol + qAccountToken : string (default:None) + If quantum==True and qAccountToken provided, + the classification task will be running on a IBM quantum backend + processVector : lambda vector: processedVector (default) + Additional processing on the input vectors. eg: downsampling + verbose : bool (default:True) + If true will output all intermediate results and logs + quantum : Bool (default:True) + - If true will run on local or remote backend + (depending on qAccountToken value). + - If false, will perform classical computing instead + **parameters : dictionnary + This is used by SKLearn with get_params and set_params method + in order to create a deepcopy of an instance + + Attributes + ---------- + classes_ : list + list of classes. + verbose : see above + processVector : see above + qAccountToken : see above + target : see above + quantum : see above + test_input : Dictionnary + Contains vectorized test set for target and non-target classes + training_input : Dictionnary + Contains vectorized training set for target and non-target classes + provider : IBMQ Provider + This service provide a remote quantum computer backend + backend : Quantum computer or simulator + feature_dim : int + Size of the vectorized matrix which is passed to quantum classifier + new_feature_dim : int + Feature dimension after proccessed by `processVector` lambda + prev_fit_params : Dictionnary of data and labels + Keep in memory data and labels passed to fit method. + This is used for self-calibration. + feature_map: ZZFeatureMap + Transform data into quantum space + quantum_instance: QuantumInstance (Object) + Backend with specific parameters (number of shots, etc.) 
+ + See Also + -------- + QuanticSVM + QuanticVQC + + """ + + def __init__(self, target, qAccountToken=None, quantum=True, + processVector=lambda v: v, verbose=True, **parameters): + self.verbose = verbose + self.log("Initializing Quantum Classifier") + self.test_input = {} + self.set_params(**parameters) + self.processVector = processVector + self.qAccountToken = qAccountToken + self.training_input = {} + self.target = target + self.quantum = quantum + if quantum: + aqua_globals.random_seed = datetime.now().microsecond + self.log("seed = ", aqua_globals.random_seed) + if qAccountToken: + self.log("Real quantum computation will be performed") + IBMQ.delete_account() + IBMQ.save_account(qAccountToken) + IBMQ.load_account() + self.log("Getting provider...") + self.provider = IBMQ.get_provider(hub='ibm-q') + else: + self.log("Quantum simulation will be performed") + self.backend = BasicAer.get_backend('qasm_simulator') + else: + self.log("Classical SVM will be performed") + + def log(self, *values): + if self.verbose: + print("[QClass] ", *values) + + def vectorize(self, X): + vector = X.reshape(len(X), self.feature_dim) + return [self.processVector(x) for x in vector] + + def splitTargetAndNonTarget(self, X, y): + self.log("""[Warning] Spitting target from non target. 
+ Only binary classification is supported.""") + nbSensor = len(X[0]) + try: + nbSamples = len(X[0][0]) + except Exception: + nbSamples = 1 + self.feature_dim = nbSensor * nbSamples + self.log("Feature dimension = ", self.feature_dim) + Xta = X[y == self.target] + Xnt = X[np.logical_not(y == self.target)] + VectorizedXta = self.vectorize(Xta) + VectorizedXnt = self.vectorize(Xnt) + self.new_feature_dim = len(VectorizedXta[0]) + self.log("Feature dimension after vector processing = ", + self.new_feature_dim) + return (VectorizedXta, VectorizedXnt) + + def additionnal_setup(self): + self.log("There is no additional setup.") + + def fit(self, X, y): + self.log("Fitting: ", X.shape) + self.prev_fit_params = {"X": X, "y": y} + self.classes_ = np.unique(y) + VectorizedXta, VectorizedXnt = self.splitTargetAndNonTarget(X, y) + + self.training_input["Target"] = VectorizedXta + self.training_input["NonTarget"] = VectorizedXnt + self.log(get_feature_dimension(self.training_input)) + feature_dim = get_feature_dimension(self.training_input) + self.feature_map = ZZFeatureMap(feature_dimension=feature_dim, reps=2, + entanglement='linear') + self.additionnal_setup() + if self.quantum: + if not hasattr(self, "backend"): + def filters(device): + return ( + device.configuration().n_qubits >= self.new_feature_dim + and not device.configuration().simulator + and device.status().operational) + devices = self.provider.backends(filters=filters) + try: + self.backend = least_busy(devices) + except Exception: + self.log("Devices are all busy. Getting the first one...") + self.backend = devices[0] + self.log("Quantum backend = ", self.backend) + seed_sim = aqua_globals.random_seed + seed_trans = aqua_globals.random_seed + self.quantum_instance = QuantumInstance(self.backend, shots=1024, + seed_simulator=seed_sim, + seed_transpiler=seed_trans) + return self + + def get_params(self, deep=True): + # Class is re-instanciated for each fold of a cv pipeline. 
+ # Deep copy of the original instance is insure trough this method + # and the pending one set_params + return { + "target": self.target, + "qAccountToken": self.qAccountToken, + "quantum": self.quantum, + "processVector": self.processVector, + "verbose": self.verbose, + "test_input": self.test_input, + } + + def set_params(self, **parameters): + for parameter, value in parameters.items(): + setattr(self, parameter, value) + return self + + def run(self, predict_set=None): + raise Exception("Run method was not implemented") + + def self_calibration(self): + X = self.prev_fit_params["X"] + y = self.prev_fit_params["y"] + test_per = 0.33 + self.log("Test size = ", test_per, " of previous fitting.") + X_train, X_test, y_train, y_test = train_test_split(X, y, + test_size=test_per) + self.fit(X_train, y_train) + self.score(X_test, y_test) + + def predict(self, X): + if(len(self.test_input) == 0): + self.log("There is no test inputs. Self-calibrating...") + self.self_calibration() + result = None + predict_set = self.vectorize(X) + self.log("Prediction: ", X.shape) + result = self.run(predict_set) + self.log("Prediction finished. Returning predicted labels") + return result["predicted_labels"] + + def predict_proba(self, X): + self.log("""[WARNING] SVM prediction probabilities are not available. + Results from predict will be used instead.""") + predicted_labels = self.predict(X) + ret = [np.array([c == 0, c == 1]) for c in predicted_labels] + return np.array(ret) + + def score(self, X, y): + self.log("Scoring: ", X.shape) + VectorizedXta, VectorizedXnt = self.splitTargetAndNonTarget(X, y) + self.test_input = {} + self.test_input["Target"] = VectorizedXta + self.test_input["NonTarget"] = VectorizedXnt + result = self.run() + balanced_accuracy = result["testing_accuracy"] + self.log("Balanced accuracy = ", balanced_accuracy) + return balanced_accuracy + + +class QuanticSVM(QuanticBase): + + """Quantum-enhanced SVM classification. 
+ + This class implements SVC on a quantum machine. + Note if `quantum` parameter is set to `False` + then a classical SVC will be perfomed instead. + + See Also + -------- + QuanticBase + + """ + + def run(self, predict_set=None): + self.log("SVM classification running...") + if self.quantum: + self.log("Quantum instance is ", self.quantum_instance) + qsvm = QSVM(self.feature_map, self.training_input, + self.test_input, predict_set) + result = qsvm.run(self.quantum_instance) + else: + result = SklearnSVM(self.training_input, + self.test_input, predict_set).run() + self.log(result) + return result + + +class QuanticVQC(QuanticBase): + + """Variational Quantum Classifier + + Note there is no classical version of this algorithm. + This will always run on a quantum computer (simulated or not) + + Parameters + ---------- + target : see QuanticBase + qAccountToken : see QuanticBase + processVector : see QuanticBase + verbose : see QuanticBase + parameters : see QuanticBase + + Attributes + ---------- + optimizer: SPSA + SPSA is a descent method capable of finding global minima + https://qiskit.org/documentation/stubs/qiskit.aqua.components.optimizers.SPSA.html + var_form: TwoLocal + In quantum mechanics, the variational method is one way of finding + approximations to the lowest energy eigenstate + https://qiskit.org/documentation/apidoc/qiskit.aqua.components.variational_forms.html + + See Also + -------- + QuanticBase + + """ + + def __init__(self, target, qAccountToken=None, + processVector=lambda v: v, verbose=True, **parameters): + QuanticBase.__init__(self, target=target, qAccountToken=qAccountToken, + processVector=processVector, verbose=verbose, + **parameters) + + def additionnal_setup(self): + self.optimizer = SPSA(maxiter=40, c0=4.0, skip_calibration=True) + self.var_form = TwoLocal(self.new_feature_dim, + ['ry', 'rz'], 'cz', reps=3) + + def run(self, predict_set=None): + self.log("VQC classification running...") + vqc = VQC(self.optimizer, self.feature_map, 
self.var_form, + self.training_input, self.test_input, predict_set) + result = vqc.run(self.quantum_instance) + return result diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..c6b42198 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,4 @@ +cython +pyriemann +qiskit==0.20.0 +cvxpy==1.1.12 \ No newline at end of file diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 00000000..3f7d987b --- /dev/null +++ b/setup.cfg @@ -0,0 +1,13 @@ +[bdist_wheel] +# This flag says that the code is written to work on both Python 2 and Python +# 3. If at all possible, it is good practice to do this. If you cannot, you +# will need to generate wheels for each Python version that you support. +universal=1 + +[build_sphinx] +source-dir = doc/ +build-dir = doc/build +all_files = 1 + +[upload_sphinx] +upload-dir = doc/build/html diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..45c21554 --- /dev/null +++ b/setup.py @@ -0,0 +1,40 @@ +import os.path as op + +from setuptools import setup, find_packages + + +# get the version (don't import mne here, so dependencies are not needed) +version = None +with open(op.join('pyriemann_qiskit', '_version.py'), 'r') as fid: + for line in (line.strip() for line in fid): + if line.startswith('__version__'): + version = line.split('=')[1].strip().strip('\'') + break +if version is None: + raise RuntimeError('Could not determine version') + +with open('README.md', 'r', encoding="utf8") as fid: + long_description = fid.read() + +setup(name='pyriemann-qiskit', + version=version, + description='Qiskit wrapper for pyRiemann', + url='https://pyriemann.readthedocs.io', + author='Alexandre Barachant', + author_email='alexandre.barachant@gmail.com', + license='BSD (3-clause)', + packages=find_packages(), + long_description=long_description, + long_description_content_type='text/markdown', + project_urls={ + 'Documentation': 'https://pyriemann.readthedocs.io', + 'Source': 
'https://github.com/pyRiemann/pyRiemann-qiskit', + 'Tracker': 'https://github.com/pyRiemann/pyRiemann-qiskit/issues/', + }, + platforms='any', + python_requires=">=3.6", + install_requires=['cython', 'pyriemann', 'qiskit==0.20.0', 'cvxpy==1.1.12'], + extras_require={'docs': ['sphinx-gallery', 'sphinx-bootstrap_theme', 'numpydoc', 'mne', 'seaborn'], + 'tests': ['pytest', 'seaborn', 'flake8']}, + zip_safe=False, +) diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..7ecb2226 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,200 @@ +import pytest +from pytest import approx +import numpy as np +from functools import partial + + +def requires_module(function, name, call=None): + """Skip a test if package is not available (decorator).""" + call = ("import %s" % name) if call is None else call + reason = "Test %s skipped, requires %s." % (function.__name__, name) + try: + exec(call) in globals(), locals() + except Exception as exc: + if len(str(exc)) > 0 and str(exc) != "No module named %s" % name: + reason += " Got exception (%s)" % (exc,) + skip = True + else: + skip = False + return pytest.mark.skipif(skip, reason=reason)(function) + + +requires_matplotlib = partial(requires_module, name="matplotlib") +requires_seaborn = partial(requires_module, name="seaborn") + + +def generate_cov(n_trials, n_channels, rs, return_params=False): + """Generate a set of covariances matrices for test purpose""" + diags = 2.0 + 0.1 * rs.randn(n_trials, n_channels) + A = 2 * rs.rand(n_channels, n_channels) - 1 + A /= np.linalg.norm(A, axis=1)[:, np.newaxis] + covmats = np.empty((n_trials, n_channels, n_channels)) + for i in range(n_trials): + covmats[i] = A @ np.diag(diags[i]) @ A.T + if return_params: + return covmats, diags, A + else: + return covmats + + +@pytest.fixture +def rndstate(): + return np.random.RandomState(1234) + + +@pytest.fixture +def get_covmats(rndstate): + def _gen_cov(n_trials, n_chan): + return generate_cov(n_trials, n_chan, 
rndstate, return_params=False) + + return _gen_cov + + +@pytest.fixture +def get_covmats_params(rndstate): + def _gen_cov_params(n_trials, n_chan): + return generate_cov(n_trials, n_chan, rndstate, return_params=True) + + return _gen_cov_params + + +@pytest.fixture +def get_labels(): + def _get_labels(n_trials, n_classes): + return np.arange(n_classes).repeat(n_trials // n_classes) + + return _get_labels + + +def is_positive_semi_definite(X): + """Check if all matrices are positive semi-definite. + + Parameters + ---------- + X : ndarray, shape (..., n, n) + The set of square matrices, at least 2D ndarray. + + Returns + ------- + ret : boolean + True if all matrices are positive semi-definite. + """ + cs = X.shape[-1] + return np.all(np.linalg.eigvals(X.reshape((-1, cs, cs))) >= 0.0) + + +def is_positive_definite(X): + """Check if all matrices are positive definite. + + Parameters + ---------- + X : ndarray, shape (..., n, n) + The set of square matrices, at least 2D ndarray. + + Returns + ------- + ret : boolean + True if all matrices are positive definite. + """ + cs = X.shape[-1] + return np.all(np.linalg.eigvals(X.reshape((-1, cs, cs))) > 0.0) + + +def is_symmetric(X): + """Check if all matrices are symmetric. + + Parameters + ---------- + X : ndarray, shape (..., n, n) + The set of square matrices, at least 2D ndarray. + + Returns + ------- + ret : boolean + True if all matrices are symmetric. + """ + return X == approx(np.swapaxes(X, -2, -1)) + + +@pytest.fixture +def is_spd(): + """Check if all matrices are symmetric positive-definite. + + Parameters + ---------- + X : ndarray, shape (..., n, n) + The set of square matrices, at least 2D ndarray. + + Returns + ------- + ret : boolean + True if all matrices are symmetric positive-definite. + """ + + def _is_spd(X): + return is_symmetric(X) and is_positive_definite(X) + + return _is_spd + + +@pytest.fixture +def is_spsd(): + """Check if all matrices are symmetric positive semi-definite. 
+ + Parameters + ---------- + X : ndarray, shape (..., n, n) + The set of square matrices, at least 2D ndarray. + + Returns + ------- + ret : boolean + True if all matrices are symmetric positive semi-definite. + """ + + def _is_spsd(X): + return is_symmetric(X) and is_positive_semi_definite(X) + + return _is_spsd + + +def get_distances(): + distances = [ + "riemann", + "logeuclid", + "euclid", + "logdet", + "kullback", + "kullback_right", + "kullback_sym", + ] + for dist in distances: + yield dist + + +def get_means(): + means = [ + "riemann", + "logeuclid", + "euclid", + "logdet", + "identity", + "wasserstein", + "ale", + "harmonic", + "kullback_sym", + ] + for mean in means: + yield mean + + +def get_metrics(): + metrics = [ + "riemann", + "logeuclid", + "euclid", + "logdet", + "kullback_sym", + ] + for met in metrics: + yield met diff --git a/tests/test_classification.py b/tests/test_classification.py new file mode 100644 index 00000000..29cb5c57 --- /dev/null +++ b/tests/test_classification.py @@ -0,0 +1,148 @@ +import numpy as np +from pyriemann_qiskit.classification import (QuanticSVM, QuanticVQC) + + +def test_Quantic_init(): + """Test init of quantum classifiers""" + # if "classical" computation enable, + # no provider and backend should be defined + q = QuanticSVM(target=1, quantum=False) + assert(not q.quantum) + assert(not hasattr(q, "backend")) + assert(not hasattr(q, "provider")) + # if "quantum" computation enabled, but no accountToken are provided, + # then "quantum" simulation will be enabled + # i.e., no remote quantum provider will be defined + q = QuanticSVM(target=1, quantum=True) + assert(q.quantum) + assert(hasattr(q, "backend")) + assert(not hasattr(q, "provider")) + # if "quantum" computation enabled, and accountToken is provided, + # then real quantum backend is used + # this should raise a error as uncorrect API Token is passed + try: + q = QuanticSVM(target=1, quantum=True, qAccountToken="Test") + assert(False) # Should never reach this 
line + except Exception: + pass + + +def test_Quantic_splitTargetAndNonTarget(get_covmats): + """Test splitTargetAndNonTarget method of quantum classifiers""" + covset = get_covmats(100, 3) + labels = np.array([0, 1]).repeat(50) + q = QuanticSVM(target=1, quantum=False) + xta, xnt = q.splitTargetAndNonTarget(covset, labels) + assert(len(xta) == 50) + # Covariance matrices should be vectorized + assert(len(xta[0]) == 3 * 3) + assert(len(xnt) == 50) + assert(len(xnt[0]) == 3 * 3) + + +def test_Quantic_SelfCalibration(get_covmats): + """Test self_calibration method of quantum classifiers""" + covset = get_covmats(100, 3) + labels = np.array([0, 1]).repeat(50) + q = QuanticSVM(target=1, quantum=False) + q.fit(covset, labels) + test_size = 0.33 # internal setting to self_calibration method + len_test = int(test_size * 100) + # Just using a little trick as fit and score method are + # called by self_calibration method + + def fit(X_train, y_train): + assert(len(X_train) == 100 - len_test) + assert(len(y_train) == 100 - len_test) + # Covariances matrices of fit and score method + # should always be non-vectorized + assert(len(X_train[0]) == 3) + assert(len(X_train[0][0]) == 3) + + def score(X_test, y_test): + assert(len(X_test) == len_test) + assert(len(y_test) == len_test) + assert(len(X_test[0]) == 3) + assert(len(X_test[0][0]) == 3) + q.fit = fit + q.score = score + q.self_calibration() + + +def test_Quantic_FVT_Classical(): + """ Perform standard SVC test + (canary test to assess pipeline correctness) + """ + # When quantum=False, it should use + # classical SVC implementation from SKlearn + q = QuanticSVM(target=1, quantum=False, verbose=False) + # We need to have different values for target and non-target in our covset + # or vector machine will not converge + iNt = 75 + iTa = 25 + nt = np.zeros((iNt, 3, 3)) + ta = np.ones((iTa, 3, 3)) + covset = np.concatenate((nt, ta), axis=0) + labels = np.concatenate((np.array([0]*75), np.array([1]*25)), axis=0) + q.fit(covset, 
labels) + # This will autodefine testing sets + prediction = q.predict(covset) + # In this case, using SVM, predicting accuracy should be 100% + assert(prediction[0:iNt].all() == 0) + assert(prediction[iNt:].all() == 1) + + +def test_QuanticSVM_FVT_SimulatedQuantum(): + """Perform SVC on a simulated quantum computer. + This test can also be run on a real computer by providing a qAccountToken + To do so, you need to use your own token, by registering on: + https://quantum-computing.ibm.com/ + Note that the "real quantum version" of this test may also take some time. + """ + # We will use a quantum simulator on the local machine + q = QuanticSVM(target=1, quantum=True, verbose=False) + # We need to have different values for target and non-target in our covset + # or vector machine will not converge + # To achieve testing in a reasonnable amount of time, + # we will lower the size of the feature and the number of trials + iNt = 10 + iTa = 5 + nt = np.zeros((iNt, 2, 2)) + ta = np.ones((iTa, 2, 2)) + covset = np.concatenate((nt, ta), axis=0) + labels = np.concatenate((np.array([0] * iNt), np.array([1] * iTa)), axis=0) + q.fit(covset, labels) + # We are dealing with a small number of trial, + # therefore we will skip self_calibration as it may happens + # that self_calibration select only target or non-target trials + q.test_input = {"Target": [[1, 1, 1, 1]], "NonTarget": [[0, 0, 0, 0]]} + prediction = q.predict(covset) + # In this case, using SVM, predicting accuracy should be 100% + assert(prediction[0:iNt].all() == 0) + assert(prediction[iNt:].all() == 1) + + +def test_QuanticVQC_FVT_SimulatedQuantum(): + """Perform VQC on a simulated quantum computer""" + # We will use a quantum simulator on the local machine + # quantum parameter for VQC is always true + q = QuanticVQC(target=1, verbose=False) + # We need to have different values for target and non-target in our covset + # or vector machine will not converge + # To achieve testing in a reasonnable amount of time, + 
# we will lower the size of the feature and the number of trials + iNt = 2 + iTa = 2 + nt = np.zeros((iNt, 2, 2)) + ta = np.ones((iTa, 2, 2)) + covset = np.concatenate((nt, ta), axis=0) + labels = np.concatenate((np.array([0] * iNt), np.array([1] * iTa)), axis=0) + q.fit(covset, labels) + # We are dealing with a small number of trial, + # therefore we will skip self_calibration as it may happens that + # self_calibration select only target or non-target trials + q.test_input = {"Target": [[1, 1, 1, 1]], "NonTarget": [[0, 0, 0, 0]]} + prediction = q.predict(covset) + # Considering the inputs, this probably make no sense to test accuracy. + # Instead, we could consider this test as a canary test + assert(len(prediction) == len(labels)) From 812be71c2e729e5f080c2c000b8859554202a6ef Mon Sep 17 00:00:00 2001 From: gcattan Date: Sat, 25 Sep 2021 19:27:13 +0200 Subject: [PATCH 02/25] separe quantum implementation from repo architecture --- examples/ERP/plot_classify_EEG_quantum.py | 127 --------- pyriemann_qiskit/classification.py | 317 ---------------------- 2 files changed, 444 deletions(-) delete mode 100644 examples/ERP/plot_classify_EEG_quantum.py delete mode 100644 pyriemann_qiskit/classification.py diff --git a/examples/ERP/plot_classify_EEG_quantum.py b/examples/ERP/plot_classify_EEG_quantum.py deleted file mode 100644 index c3f65089..00000000 --- a/examples/ERP/plot_classify_EEG_quantum.py +++ /dev/null @@ -1,127 +0,0 @@ -""" -==================================================================== -ERP EEG decoding with Quantum Classifier. -==================================================================== - -Decoding applied to EEG data in sensor space decomposed using Xdawn. 
-After spatial filtering, covariances matrices are estimated, then projected in -the tangent space and classified with a quantum classifier - -""" -# Author: Gregoire Cattan -# Modified from plot_classify_EEG_tangentspace.py -# License: BSD (3-clause) - -import numpy as np - -from pyriemann.estimation import XdawnCovariances -from pyriemann.tangentspace import TangentSpace -from pyriemann_qiskit.classification import QuanticSVM - -import mne -from mne import io -from mne.datasets import sample - -from sklearn.pipeline import make_pipeline -from sklearn.model_selection import train_test_split -from sklearn.metrics import confusion_matrix, ConfusionMatrixDisplay - -from matplotlib import pyplot as plt - -# cvxpy is not correctly imported due to wheel not building -# in the doc pipeline -__cvxpy__ = True -try: - import cvxpy - del cvxpy -except Exception: - __cvxpy__ = False - -print(__doc__) - -data_path = sample.data_path() - -############################################################################### -# Set parameters and read data -raw_fname = data_path + "/MEG/sample/sample_audvis_filt-0-40_raw.fif" -event_fname = data_path + "/MEG/sample/sample_audvis_filt-0-40_raw-eve.fif" -tmin, tmax = -0.0, 1 -event_id = dict(aud_l=1, aud_r=2, vis_l=3, vis_r=4) - -# Setup for reading the raw data -raw = io.Raw(raw_fname, preload=True, verbose=False) -raw.filter(2, None, method="iir") # replace baselining with high-pass -events = mne.read_events(event_fname) - -raw.info["bads"] = ["MEG 2443"] # set bad channels -picks = mne.pick_types( - raw.info, meg=False, eeg=True, stim=False, eog=False, exclude="bads" -) - -# Read epochs -epochs = mne.Epochs( - raw, - events, - event_id, - tmin, - tmax, - proj=False, - picks=picks, - baseline=None, - preload=True, - verbose=False, -) - -X = epochs.get_data() -y = epochs.events[:, -1] - -# Reduce the number of classes as QuanticBase supports only 2 classes -y[y % 3 == 0] = 0 -y[y % 3 != 0] = 1 - -# Reduce trial number to dimish testing 
time -X = X[:100] -y = y[:100] - -# ...skipping the KFold validation parts (for the purpose of the test only) -X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3) - -############################################################################### -# Decoding in tangent space with a quantum classifier - -# Time complexity of quantum algorithm depends on the number of trials and -# the number of elements inside the correlation matrices -# Thus we reduce elements number by using restrictive spatial filtering -sf = XdawnCovariances(nfilter=1) - - -# ...and dividing the number of remaining elements by two -def ds(v): - return v[::2] - - -# Projecting correlation matrices into the tangent space -# as quantum algorithms take vectors as inputs -# (If not, then matrices will be inlined inside the quantum classifier) -tg = TangentSpace() - -# Results will be computed for QuanticSVM versus SKLearnSVM for comparison -for quantum in [True, False]: - # This is a hack for the documentation pipeline - if(not __cvxpy__): - continue - - qsvm = QuanticSVM(target=1, verbose=False, - quantum=quantum, processVector=ds) - clf = make_pipeline(sf, tg, qsvm) - clf.fit(X_train, y_train) - y_pred = clf.predict(X_test) - - # Printing the results - acc = np.mean(y_pred == y_test) - print("Classification accuracy: %f " % (acc)) - - names = ['0(quantum)', '1(quantum)'] if quantum else ['0', '1'] - cm = confusion_matrix(y_pred, y_test) - ConfusionMatrixDisplay(cm, display_labels=names).plot() - plt.show() diff --git a/pyriemann_qiskit/classification.py b/pyriemann_qiskit/classification.py deleted file mode 100644 index 210d26e3..00000000 --- a/pyriemann_qiskit/classification.py +++ /dev/null @@ -1,317 +0,0 @@ -"""Module for classification function.""" -import numpy as np - -from sklearn.base import BaseEstimator, ClassifierMixin -from sklearn.model_selection import train_test_split - -from qiskit import BasicAer, IBMQ -from qiskit.circuit.library import ZZFeatureMap, TwoLocal 
-from qiskit.aqua import QuantumInstance, aqua_globals -from qiskit.aqua.quantum_instance import logger -from qiskit.aqua.algorithms import QSVM, SklearnSVM, VQC -from qiskit.aqua.utils import get_feature_dimension -from qiskit.providers.ibmq import least_busy -from qiskit.aqua.components.optimizers import SPSA -from datetime import datetime -import logging -logger.level = logging.INFO - - -class QuanticBase(BaseEstimator, ClassifierMixin): - - """Quantum classification. - - This class implements a SKLearn wrapper around Qiskit library. - It provides a mean to run classification tasks on a local and - simulated quantum computer or a remote and real quantum computer. - Difference between simulated and real quantum computer will be that: - - There is no noise on a simulated quantum computer (so results are better) - - Real quantum computer are quicker than simulator - - Real quantum computer tasks are assigned to a queue - before being executed on a back-end - - WARNING: At the moment this implementation only supports binary - classification (eg. Target vs Non-Target experiment) - - Parameters - ---------- - target : int - Label of the target symbol - qAccountToken : string (default:None) - If quantum==True and qAccountToken provided, - the classification task will be running on a IBM quantum backend - processVector : lambda vector: processedVector (default) - Additional processing on the input vectors. eg: downsampling - verbose : bool (default:True) - If true will output all intermediate results and logs - quantum : Bool (default:True) - - If true will run on local or remote backend - (depending on qAccountToken value). - - If false, will perform classical computing instead - **parameters : dictionnary - This is used by SKLearn with get_params and set_params method - in order to create a deepcopy of an instance - - Attributes - ---------- - classes_ : list - list of classes. 
- verbose : see above - processVector : see above - qAccountToken : see above - target : see above - quantum : see above - test_input : Dictionnary - Contains vectorized test set for target and non-target classes - training_input : Dictionnary - Contains vectorized training set for target and non-target classes - provider : IBMQ Provider - This service provide a remote quantum computer backend - backend : Quantum computer or simulator - feature_dim : int - Size of the vectorized matrix which is passed to quantum classifier - new_feature_dim : int - Feature dimension after proccessed by `processVector` lambda - prev_fit_params : Dictionnary of data and labels - Keep in memory data and labels passed to fit method. - This is used for self-calibration. - feature_map: ZZFeatureMap - Transform data into quantum space - quantum_instance: QuantumInstance (Object) - Backend with specific parameters (number of shots, etc.) - - See Also - -------- - QuanticSVM - QuanticVQC - - """ - - def __init__(self, target, qAccountToken=None, quantum=True, - processVector=lambda v: v, verbose=True, **parameters): - self.verbose = verbose - self.log("Initializing Quantum Classifier") - self.test_input = {} - self.set_params(**parameters) - self.processVector = processVector - self.qAccountToken = qAccountToken - self.training_input = {} - self.target = target - self.quantum = quantum - if quantum: - aqua_globals.random_seed = datetime.now().microsecond - self.log("seed = ", aqua_globals.random_seed) - if qAccountToken: - self.log("Real quantum computation will be performed") - IBMQ.delete_account() - IBMQ.save_account(qAccountToken) - IBMQ.load_account() - self.log("Getting provider...") - self.provider = IBMQ.get_provider(hub='ibm-q') - else: - self.log("Quantum simulation will be performed") - self.backend = BasicAer.get_backend('qasm_simulator') - else: - self.log("Classical SVM will be performed") - - def log(self, *values): - if self.verbose: - print("[QClass] ", *values) - - def 
vectorize(self, X): - vector = X.reshape(len(X), self.feature_dim) - return [self.processVector(x) for x in vector] - - def splitTargetAndNonTarget(self, X, y): - self.log("""[Warning] Spitting target from non target. - Only binary classification is supported.""") - nbSensor = len(X[0]) - try: - nbSamples = len(X[0][0]) - except Exception: - nbSamples = 1 - self.feature_dim = nbSensor * nbSamples - self.log("Feature dimension = ", self.feature_dim) - Xta = X[y == self.target] - Xnt = X[np.logical_not(y == self.target)] - VectorizedXta = self.vectorize(Xta) - VectorizedXnt = self.vectorize(Xnt) - self.new_feature_dim = len(VectorizedXta[0]) - self.log("Feature dimension after vector processing = ", - self.new_feature_dim) - return (VectorizedXta, VectorizedXnt) - - def additionnal_setup(self): - self.log("There is no additional setup.") - - def fit(self, X, y): - self.log("Fitting: ", X.shape) - self.prev_fit_params = {"X": X, "y": y} - self.classes_ = np.unique(y) - VectorizedXta, VectorizedXnt = self.splitTargetAndNonTarget(X, y) - - self.training_input["Target"] = VectorizedXta - self.training_input["NonTarget"] = VectorizedXnt - self.log(get_feature_dimension(self.training_input)) - feature_dim = get_feature_dimension(self.training_input) - self.feature_map = ZZFeatureMap(feature_dimension=feature_dim, reps=2, - entanglement='linear') - self.additionnal_setup() - if self.quantum: - if not hasattr(self, "backend"): - def filters(device): - return ( - device.configuration().n_qubits >= self.new_feature_dim - and not device.configuration().simulator - and device.status().operational) - devices = self.provider.backends(filters=filters) - try: - self.backend = least_busy(devices) - except Exception: - self.log("Devices are all busy. 
Getting the first one...") - self.backend = devices[0] - self.log("Quantum backend = ", self.backend) - seed_sim = aqua_globals.random_seed - seed_trans = aqua_globals.random_seed - self.quantum_instance = QuantumInstance(self.backend, shots=1024, - seed_simulator=seed_sim, - seed_transpiler=seed_trans) - return self - - def get_params(self, deep=True): - # Class is re-instanciated for each fold of a cv pipeline. - # Deep copy of the original instance is insure trough this method - # and the pending one set_params - return { - "target": self.target, - "qAccountToken": self.qAccountToken, - "quantum": self.quantum, - "processVector": self.processVector, - "verbose": self.verbose, - "test_input": self.test_input, - } - - def set_params(self, **parameters): - for parameter, value in parameters.items(): - setattr(self, parameter, value) - return self - - def run(self, predict_set=None): - raise Exception("Run method was not implemented") - - def self_calibration(self): - X = self.prev_fit_params["X"] - y = self.prev_fit_params["y"] - test_per = 0.33 - self.log("Test size = ", test_per, " of previous fitting.") - X_train, X_test, y_train, y_test = train_test_split(X, y, - test_size=test_per) - self.fit(X_train, y_train) - self.score(X_test, y_test) - - def predict(self, X): - if(len(self.test_input) == 0): - self.log("There is no test inputs. Self-calibrating...") - self.self_calibration() - result = None - predict_set = self.vectorize(X) - self.log("Prediction: ", X.shape) - result = self.run(predict_set) - self.log("Prediction finished. Returning predicted labels") - return result["predicted_labels"] - - def predict_proba(self, X): - self.log("""[WARNING] SVM prediction probabilities are not available. 
- Results from predict will be used instead.""") - predicted_labels = self.predict(X) - ret = [np.array([c == 0, c == 1]) for c in predicted_labels] - return np.array(ret) - - def score(self, X, y): - self.log("Scoring: ", X.shape) - VectorizedXta, VectorizedXnt = self.splitTargetAndNonTarget(X, y) - self.test_input = {} - self.test_input["Target"] = VectorizedXta - self.test_input["NonTarget"] = VectorizedXnt - result = self.run() - balanced_accuracy = result["testing_accuracy"] - self.log("Balanced accuracy = ", balanced_accuracy) - return balanced_accuracy - - -class QuanticSVM(QuanticBase): - - """Quantum-enhanced SVM classification. - - This class implements SVC on a quantum machine. - Note if `quantum` parameter is set to `False` - then a classical SVC will be perfomed instead. - - See Also - -------- - QuanticBase - - """ - - def run(self, predict_set=None): - self.log("SVM classification running...") - if self.quantum: - self.log("Quantum instance is ", self.quantum_instance) - qsvm = QSVM(self.feature_map, self.training_input, - self.test_input, predict_set) - result = qsvm.run(self.quantum_instance) - else: - result = SklearnSVM(self.training_input, - self.test_input, predict_set).run() - self.log(result) - return result - - -class QuanticVQC(QuanticBase): - - """Variational Quantum Classifier - - Note there is no classical version of this algorithm. 
- This will always run on a quantum computer (simulated or not) - - Parameters - ---------- - target : see QuanticBase - qAccountToken : see QuanticBase - processVector : see QuanticBase - verbose : see QuanticBase - parameters : see QuanticBase - - Attributes - ---------- - optimizer: SPSA - SPSA is a descent method capable of finding global minima - https://qiskit.org/documentation/stubs/qiskit.aqua.components.optimizers.SPSA.html - var_form: TwoLocal - In quantum mechanics, the variational method is one way of finding - approximations to the lowest energy eigenstate - https://qiskit.org/documentation/apidoc/qiskit.aqua.components.variational_forms.html - - See Also - -------- - QuanticBase - - """ - - def __init__(self, target, qAccountToken=None, - processVector=lambda v: v, verbose=True, **parameters): - QuanticBase.__init__(self, target=target, qAccountToken=qAccountToken, - processVector=processVector, verbose=verbose, - **parameters) - - def additionnal_setup(self): - self.optimizer = SPSA(maxiter=40, c0=4.0, skip_calibration=True) - self.var_form = TwoLocal(self.new_feature_dim, - ['ry', 'rz'], 'cz', reps=3) - - def run(self, predict_set=None): - self.log("VQC classification running...") - vqc = VQC(self.optimizer, self.feature_map, self.var_form, - self.training_input, self.test_input, predict_set) - result = vqc.run(self.quantum_instance) - return result From d4317c3d83214f6170e080e9f6823f5aacdf0e5a Mon Sep 17 00:00:00 2001 From: gcattan Date: Sat, 25 Sep 2021 19:29:03 +0200 Subject: [PATCH 03/25] remove tests --- tests/test_classification.py | 148 ----------------------------------- 1 file changed, 148 deletions(-) delete mode 100644 tests/test_classification.py diff --git a/tests/test_classification.py b/tests/test_classification.py deleted file mode 100644 index 29cb5c57..00000000 --- a/tests/test_classification.py +++ /dev/null @@ -1,148 +0,0 @@ -import numpy as np -from pyriemann_qiskit.classification import (QuanticSVM, QuanticVQC) - - -def 
test_Quantic_init(): - """Test init of quantum classifiers""" - # if "classical" computation enable, - # no provider and backend should be defined - q = QuanticSVM(target=1, quantum=False) - assert(not q.quantum) - assert(not hasattr(q, "backend")) - assert(not hasattr(q, "provider")) - # if "quantum" computation enabled, but no accountToken are provided, - # then "quantum" simulation will be enabled - # i.e., no remote quantum provider will be defined - q = QuanticSVM(target=1, quantum=True) - assert(q.quantum) - assert(hasattr(q, "backend")) - assert(not hasattr(q, "provider")) - # if "quantum" computation enabled, and accountToken is provided, - # then real quantum backend is used - # this should raise a error as uncorrect API Token is passed - try: - q = QuanticSVM(target=1, quantum=True, qAccountToken="Test") - assert(False) # Should never reach this line - except Exception: - pass - - -def test_Quantic_splitTargetAndNonTarget(get_covmats): - """Test splitTargetAndNonTarget method of quantum classifiers""" - covset = get_covmats(100, 3) - labels = np.array([0, 1]).repeat(50) - q = QuanticSVM(target=1, quantum=False) - xta, xnt = q.splitTargetAndNonTarget(covset, labels) - assert(len(xta) == 50) - # Covariance matrices should be vectorized - assert(len(xta[0]) == 3 * 3) - assert(len(xnt) == 50) - assert(len(xnt[0]) == 3 * 3) - - -def test_Quantic_SelfCalibration(get_covmats): - """Test self_calibration method of quantum classifiers""" - covset = get_covmats(100, 3) - labels = np.array([0, 1]).repeat(50) - q = QuanticSVM(target=1, quantum=False) - q.fit(covset, labels) - test_size = 0.33 # internal setting to self_calibration method - len_test = int(test_size * 100) - # Just using a little trick as fit and score method are - # called by self_calibration method - - def fit(X_train, y_train): - assert(len(X_train) == 100 - len_test) - assert(len(y_train) == 100 - len_test) - # Covariances matrices of fit and score method - # should always be non-vectorized - 
assert(len(X_train[0]) == 3) - assert(len(X_train[0][0]) == 3) - - def score(X_test, y_test): - assert(len(X_test) == len_test) - assert(len(y_test) == len_test) - assert(len(X_test[0]) == 3) - assert(len(X_test[0][0]) == 3) - q.fit = fit - q.score = score - q.self_calibration() - - -def test_Quantic_FVT_Classical(): - """ Perform standard SVC test - (canary test to assess pipeline correctness) - """ - # When quantum=False, it should use - # classical SVC implementation from SKlearn - q = QuanticSVM(target=1, quantum=False, verbose=False) - # We need to have different values for target and non-target in our covset - # or vector machine will not converge - iNt = 75 - iTa = 25 - nt = np.zeros((iNt, 3, 3)) - ta = np.ones((iTa, 3, 3)) - covset = np.concatenate((nt, ta), axis=0) - labels = np.concatenate((np.array([0]*75), np.array([1]*25)), axis=0) - q.fit(covset, labels) - # This will autodefine testing sets - prediction = q.predict(covset) - # In this case, using SVM, predicting accuracy should be 100% - assert(prediction[0:iNt].all() == 0) - assert(prediction[iNt:].all() == 1) - - -def test_QuanticSVM_FVT_SimulatedQuantum(): - """Perform SVC on a simulated quantum computer. - This test can also be run on a real computer by providing a qAccountToken - To do so, you need to use your own token, by registering on: - https://quantum-computing.ibm.com/ - Note that the "real quantum version" of this test may also take some time. 
- """ - # We will use a quantum simulator on the local machine - q = QuanticSVM(target=1, quantum=True, verbose=False) - # We need to have different values for target and non-target in our covset - # or vector machine will not converge - # To achieve testing in a reasonnable amount of time, - # we will lower the size of the feature and the number of trials - iNt = 10 - iTa = 5 - nt = np.zeros((iNt, 2, 2)) - ta = np.ones((iTa, 2, 2)) - covset = np.concatenate((nt, ta), axis=0) - labels = np.concatenate((np.array([0] * iNt), np.array([1] * iTa)), axis=0) - q.fit(covset, labels) - # We are dealing with a small number of trial, - # therefore we will skip self_calibration as it may happens - # that self_calibration select only target or non-target trials - q.test_input = {"Target": [[1, 1, 1, 1]], "NonTarget": [[0, 0, 0, 0]]} - prediction = q.predict(covset) - # In this case, using SVM, predicting accuracy should be 100% - assert(prediction[0:iNt].all() == 0) - assert(prediction[iNt:].all() == 1) - - -def test_QuanticVQC_FVT_SimulatedQuantum(): - """Perform VQC on a simulated quantum computer""" - # We will use a quantum simulator on the local machine - # quantum parameter for VQC is always true - q = QuanticVQC(target=1, verbose=False) - # We need to have different values for target and non-target in our covset - # or vector machine will not converge - # To achieve testing in a reasonnable amount of time, - # we will lower the size of the feature and the number of trials - iNt = 2 - iTa = 2 - nt = np.zeros((iNt, 2, 2)) - ta = np.ones((iTa, 2, 2)) - covset = np.concatenate((nt, ta), axis=0) - labels = np.concatenate((np.array([0] * iNt), np.array([1] * iTa)), axis=0) - q.fit(covset, labels) - # We are dealing with a small number of trial, - # therefore we will skip self_calibration as it may happens that - # self_calibration select only target or non-target trials - q.test_input = {"Target": [[1, 1, 1, 1]], "NonTarget": [[0, 0, 0, 0]]} - prediction = q.predict(covset) - 
# Considering the inputs, this probably make no sense to test accuracy. - # Instead, we could consider this test as a canary test - assert(len(prediction) == len(labels)) From 670aebc4edd41b22038f572c2b210814acc0a5c2 Mon Sep 17 00:00:00 2001 From: gcattan Date: Sat, 25 Sep 2021 20:42:40 +0200 Subject: [PATCH 04/25] add pyriemann to doc requirements --- doc/requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/requirements.txt b/doc/requirements.txt index 1645b2da..7b11425e 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -8,3 +8,4 @@ scikit-learn joblib pandas qiskit +pyriemann From 8b2058bc7b4ce156ac1fab8a78cfcd40a78e5793 Mon Sep 17 00:00:00 2001 From: gcattan Date: Sat, 25 Sep 2021 21:19:41 +0200 Subject: [PATCH 05/25] Thank you Microsoff >< --- .github/workflows/testing.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 7350777f..c700d96b 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -5,9 +5,9 @@ name: testing on: push: - branches: [ master ] + branches: [ main ] pull_request: - branches: [ master ] + branches: [ main ] jobs: build: From 8199032e5be6323d844c53e42aa956d697298252 Mon Sep 17 00:00:00 2001 From: gcattan Date: Sat, 25 Sep 2021 21:26:09 +0200 Subject: [PATCH 06/25] update doc --- doc/whatsnew.rst | 107 +---------------------------------------------- 1 file changed, 2 insertions(+), 105 deletions(-) diff --git a/doc/whatsnew.rst b/doc/whatsnew.rst index 2c8ebc0b..f48871be 100644 --- a/doc/whatsnew.rst +++ b/doc/whatsnew.rst @@ -1,114 +1,11 @@ .. _whatsnew: -.. currentmodule:: pyriemann +.. currentmodule:: pyriemann-qiskit What's new in the package ========================= A catalog of new features, improvements, and bug-fixes in each release. 
-v0.2.8.dev +v0.0.0.dev ---------- - -- Correct spectral estimation in :func:`pyriemann.utils.covariance.cross_spectrum` to obtain equivalence with SciPy - -- Add instantaneous, lagged and imaginary coherences in :func:`pyriemann.utils.covariance.coherence` and :class:`pyriemann.estimation.Coherences` - -- Add ``partial_fit`` in :class:`pyriemann.clustering.Potato`, useful for an online update; and update example on artifact detection. - -- Deprecate :func:`pyriemann.utils.viz.plot_confusion_matrix` as sklearn integrate its own version. - -- Add Ando-Li-Mathias mean estimation in :func:`pyriemann.utils.mean.mean_covariance` - -- Add Schaefer-Strimmer covariance estimator in :func:`pyriemann.utils.covariance.covariances`, and an example to compare estimators - -- Refactor tests + fix refit of :class:`pyriemann.tangentspace.TangentSpace` - -v0.2.7 (June 2021) ------------------- - -- Add example on SSVEP classification - -- Fix compatibility with scikit-learn v0.24 - -- Correct probas of :class:`pyriemann.classification.MDM` - -- Add ``predict_proba`` for :class:`pyriemann.clustering.Potato`, and an example on artifact detection - -- Add weights to Pham's AJD algorithm :func:`pyriemann.utils.ajd.ajd_pham` - -- Add :func:`pyriemann.utils.covariance.cross_spectrum`, fix :func:`pyriemann.utils.covariance.cospectrum`; :func:`pyriemann.utils.covariance.coherence` output is kept unchanged - -- Add :class:`pyriemann.spatialfilters.AJDC` for BSS and gBSS, with an example on artifact correction - -- Add :class:`pyriemann.preprocessing.Whitening`, with optional dimension reduction - -v0.2.6 (March 2020) -------------------- - -- Updated for better Scikit-Learn v0.22 support - -v0.2.5 (January 2018) ---------------------- - -- Added BilinearFilter - -- Added a permutation test for generic scikit-learn estimator - -- Stats module refactoring, with distance based t-test and f-test - -- Removed two way permutation test - -- Added FlatChannelRemover - -- Support for python 3.5 in 
travis - -- Added Shrinkage transformer - -- Added Coherences transformer - -- Added Embedding class. - -v0.2.4 (June 2016) ------------------- - -- Improved documentation - -- Added TSclassifier for out-of the box tangent space classification. - -- Added Wasserstein distance and mean. - -- Added NearestNeighbor classifier. - -- Added Softmax probabilities for MDM. - -- Added CSP for covariance matrices. - -- Added Approximate Joint diagonalization algorithms (JADE, PHAM, UWEDGE). - -- Added ALE mean. - -- Added Multiclass CSP. - -- API: param name changes in `CospCovariances` to comply to Scikit-Learn. - -- API: attributes name changes in most modules to comply to the Scikit-Learn naming convention. - -- Added `HankelCovariances` estimation - -- Added `SPoC` spatial filtering - -- Added Harmonic mean - -- Added Kullback leibler mean - -v0.2.3 (November 2015) ----------------------- - -- Added multiprocessing for MDM with joblib. - -- Added kullback-leibler divergence. - -- Added Riemannian Potato. - -- Added sample_weight for mean estimation and MDM. From 6ae5407774a49099d2fc6982ee8792a9896b9b01 Mon Sep 17 00:00:00 2001 From: gcattan Date: Mon, 27 Sep 2021 22:27:27 +0200 Subject: [PATCH 07/25] Update doc/conf.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Quentin Barthélemy --- doc/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/conf.py b/doc/conf.py index c91ebb21..6bb08e37 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -83,7 +83,7 @@ master_doc = 'index' # General information about the project. 
-project = u'pyRiemann' +project = u'pyRiemann-qiskit' copyright = u'2015-2021, PyRiemann Contributors' author = u'Alexandre Barachant' From 8bab3f79dd861a5aaad48ef73481731e9353c57e Mon Sep 17 00:00:00 2001 From: gcattan Date: Mon, 27 Sep 2021 22:27:41 +0200 Subject: [PATCH 08/25] Update doc/index.rst MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Quentin Barthélemy --- doc/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/index.rst b/doc/index.rst index 99547c57..e073672d 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -19,7 +19,7 @@ -pyRiemann: Biosignals classification with Riemannian Geometry +pyRiemann-qiskit: Qiskit wrapper for pyRiemann ============================================================= .. raw:: html From 5d1a8936e72aebdff2b4b0b3738b5e8c9f68a565 Mon Sep 17 00:00:00 2001 From: gcattan Date: Mon, 27 Sep 2021 22:28:00 +0200 Subject: [PATCH 09/25] Update doc/introduction.rst MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Quentin Barthélemy --- doc/introduction.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/introduction.rst b/doc/introduction.rst index d1d6a62c..f7d21aed 100644 --- a/doc/introduction.rst +++ b/doc/introduction.rst @@ -1,4 +1,4 @@ .. 
_introduction: -Introduction to pyRiemann +Introduction to pyRiemann-qiskit ========================= From 871a857a2d0a30197c214e8f5af6f1bf2bbc7183 Mon Sep 17 00:00:00 2001 From: gcattan Date: Mon, 27 Sep 2021 22:51:13 +0200 Subject: [PATCH 10/25] remove readme --- examples/ERP/README.txt | 4 ---- 1 file changed, 4 deletions(-) delete mode 100644 examples/ERP/README.txt diff --git a/examples/ERP/README.txt b/examples/ERP/README.txt deleted file mode 100644 index 2372c943..00000000 --- a/examples/ERP/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -Classification of ERP ---------------------- - -Event related potential classification with RG. From 60d6eff3c2b647e3026903f4e44ae094af6d1ce1 Mon Sep 17 00:00:00 2001 From: gcattan Date: Mon, 27 Sep 2021 22:55:34 +0200 Subject: [PATCH 11/25] update authorship --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 45c21554..baf9764a 100644 --- a/setup.py +++ b/setup.py @@ -20,8 +20,8 @@ version=version, description='Qiskit wrapper for pyRiemann', url='https://pyriemann.readthedocs.io', - author='Alexandre Barachant', - author_email='alexandre.barachant@gmail.com', + author='Gregoire Cattan', + author_email='gcattan@hotmail.com', license='BSD (3-clause)', packages=find_packages(), long_description=long_description, From be97574680a05d371a2bf077104db7a1420ec4bf Mon Sep 17 00:00:00 2001 From: gcattan Date: Mon, 27 Sep 2021 23:04:38 +0200 Subject: [PATCH 12/25] update url --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index baf9764a..72d37791 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ setup(name='pyriemann-qiskit', version=version, description='Qiskit wrapper for pyRiemann', - url='https://pyriemann.readthedocs.io', + url='https://pyriemann-qiskit.readthedocs.io', author='Gregoire Cattan', author_email='gcattan@hotmail.com', license='BSD (3-clause)', From e93b530e387bb46d91453ef17e6d5d004022abd9 Mon Sep 17 00:00:00 
2001 From: gcattan Date: Tue, 28 Sep 2021 10:20:08 +0200 Subject: [PATCH 13/25] update doc --- doc/index.rst | 25 ++++--------------------- doc/installing.rst | 27 ++++++++++----------------- 2 files changed, 14 insertions(+), 38 deletions(-) diff --git a/doc/index.rst b/doc/index.rst index e073672d..df3f8989 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -27,26 +27,11 @@ pyRiemann-qiskit: Qiskit wrapper for pyRiemann
@@ -56,18 +41,16 @@ pyRiemann-qiskit: Qiskit wrapper for pyRiemann
-pyRiemann is a Python machine learning library based on scikit-learn API. It provides a high-level interface for classification and manipulation of multivariate signal through Riemannian Geometry of covariance matrices. - -pyRiemann aims at being a generic package for multivariate signal classification but has been designed around applications of biosignal (M/EEG, EMG, etc) classification. +pyRiemann-qiskit is a Qiskit wrapper around pyRiemann. It allows the use of quantum classification with Riemannian geometry. For a brief introduction to the ideas behind the package, you can read the :ref:`introductory notes `. More practical information is on the :ref:`installation page `. You may also want to browse the -`example gallery `_ to get a sense for what you can do with pyRiemann +`example gallery `_ to get a sense for what you can do with pyRiemann-qiskit and :ref:`API reference ` to find out how. To see the code or report a bug, please visit the `github repository -`_. +`_. .. raw:: html diff --git a/doc/installing.rst b/doc/installing.rst index 9b558cc5..4281aef7 100644 --- a/doc/installing.rst +++ b/doc/installing.rst @@ -1,13 +1,11 @@ .. _installing: -Installing pyRiemann -==================== +Installing pyRiemann-qiskit +=========================== -The easiest way to install a stable version of pyRiemann is through pypi, the python packege manager : +There is not yet a stable version of pyRiemann-qiskit. -``pip install pyriemann`` - -For a bleeding edge version, you can clone the source code on `github `__ and install directly the package from source. +Therefore, it is recommended to clone the source code on `github `__ and install the package directly from source. ``pip install -e .`` @@ -17,7 +15,6 @@ The install script will install the required dependencies. If you want also to b If you use a zsh shell, you need to write `pip install -e .\[docs,tests\]`. If you do not know what zsh is, you could use the above command. 
- Dependencies ~~~~~~~~~~~~ @@ -26,24 +23,20 @@ Dependencies Mandatory dependencies ^^^^^^^^^^^^^^^^^^^^^^ -- `numpy `__ - -- `scipy `__ - -- `scikit-learn >=0.17 `__ +- `cython `__ -- `pandas `__ +- `pyriemann `__ -- `joblib `__ +- `qiskit==0.20.0 `__ -- `qiskit `__ +- `cvxpy=1.1.12 `__ Recommended dependencies ^^^^^^^^^^^^^^^^^^^^^^^^ These dependencies are recommanded to use the plotting functions of pyriemann or to run examples and tutorials, but they are not mandatory: -- `mne-python `__ +- `matplotlib>=2.2 `__ -- `matplotlib `__ +- `mne-python `__ - `seaborn `__ From 279a19b6e66c3131a38d2a8bd47ab3abc08303e1 Mon Sep 17 00:00:00 2001 From: gcattan Date: Tue, 28 Sep 2021 10:34:46 +0200 Subject: [PATCH 14/25] update api and install instruction add link to issue in the example gallery --- doc/api.rst | 224 +--------------------------------------------------- 1 file changed, 3 insertions(+), 221 deletions(-) diff --git a/doc/api.rst b/doc/api.rst index 947d462e..ef422114 100644 --- a/doc/api.rst +++ b/doc/api.rst @@ -4,233 +4,15 @@ API reference ============= -Covariance Estimation ---------------------- -.. _estimation_api: -.. currentmodule:: pyriemann.estimation - -.. autosummary:: - :toctree: generated/ - :template: class.rst - - Covariances - ERPCovariances - XdawnCovariances - CospCovariances - Coherences - HankelCovariances - Shrinkage - -Embedding ---------- -.. _embedding_api: -.. currentmodule:: pyriemann.embedding - -.. autosummary:: - :toctree: generated/ - :template: class.rst - - Embedding - Classification -------------- .. _classification_api: -.. currentmodule:: pyriemann.classification +.. currentmodule:: pyriemann_qiskit.classification .. autosummary:: :toctree: generated/ :template: class.rst - MDM - FgMDM - TSclassifier - KNearestNeighbor + QuanticBase QuanticSVM - QuanticVQC - -Clustering ------------------- -.. _clustering_api: -.. currentmodule:: pyriemann.clustering - -.. 
autosummary:: - :toctree: generated/ - :template: class.rst - - Kmeans - KmeansPerClassTransform - Potato - - -Tangent Space ------------------- -.. _tangentspace_api: -.. currentmodule:: pyriemann.tangentspace - -.. autosummary:: - :toctree: generated/ - :template: class.rst - - TangentSpace - FGDA - -Spatial Filtering ------------------- -.. _spatialfilter_api: -.. currentmodule:: pyriemann.spatialfilters - -.. autosummary:: - :toctree: generated/ - :template: class.rst - - Xdawn - CSP - SPoC - BilinearFilter - AJDC - -Preprocessing -------------- -.. _preprocessing_api: -.. currentmodule:: pyriemann.preprocessing - -.. autosummary:: - :toctree: generated/ - :template: class.rst - - Whitening - -Channel selection ------------------- -.. _channelselection_api: -.. currentmodule:: pyriemann.channelselection - -.. autosummary:: - :toctree: generated/ - :template: class.rst - - ElectrodeSelection - FlatChannelRemover - -Stats ------------------- -.. _stats_api: -.. currentmodule:: pyriemann.stats - -.. autosummary:: - :toctree: generated/ - :template: class.rst - - PermutationDistance - PermutationModel - - -Utils function --------------- - -Utils functions are low level functions that implement most base components of Riemannian Geometry. - -Covariance preprocessing -~~~~~~~~~~~~~~~~~~~~~~~~ -.. _covariance_api: -.. currentmodule:: pyriemann.utils.covariance - -.. autosummary:: - :toctree: generated/ - - covariances - cross_spectrum - cospectrum - coherence - normalize - get_nondiag_weight - - -Distances -~~~~~~~~~~~~~~~~~~~~~~ -.. _distance_api: -.. currentmodule:: pyriemann.utils.distance - -.. autosummary:: - :toctree: generated/ - - distance - distance_euclid - distance_riemann - distance_logeuclid - distance_logdet - distance_kullback - distance_kullback_sym - distance_wasserstein - - -Mean -~~~~~~~~~~~~~~~~~~~~~~ -.. _mean_api: -.. currentmodule:: pyriemann.utils.mean - -.. 
autosummary:: - :toctree: generated/ - - mean_covariance - mean_euclid - mean_riemann - mean_logeuclid - mean_logdet - mean_wasserstein - mean_ale - mean_alm - mean_harmonic - mean_kullback_sym - - -Geodesic -~~~~~~~~~~~~~~~~~~~~~~ -.. _geodesic_api: -.. currentmodule:: pyriemann.utils.geodesic - -.. autosummary:: - :toctree: generated/ - - geodesic - geodesic_riemann - geodesic_euclid - geodesic_logeuclid - - -Tangent Space -~~~~~~~~~~~~~~~~~~~~~~ -.. _ts_base_api: -.. currentmodule:: pyriemann.utils.tangentspace - -.. autosummary:: - :toctree: generated/ - - tangent_space - untangent_space - -Base -~~~~~~~~~~~~~~~~~~~~~~ -.. _base_api: -.. currentmodule:: pyriemann.utils.base - -.. autosummary:: - :toctree: generated/ - - sqrtm - invsqrtm - expm - logm - powm - -Aproximate Joint Diagonalization -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. _ajd_api: -.. currentmodule:: pyriemann.utils.ajd - -.. autosummary:: - :toctree: generated/ - - rjd - ajd_pham - uwedge + QuanticVQC \ No newline at end of file From ae5f100b574c8de29f4993517439daf3a433e4d0 Mon Sep 17 00:00:00 2001 From: gcattan Date: Tue, 28 Sep 2021 10:34:55 +0200 Subject: [PATCH 15/25] missing --- doc/index.rst | 2 +- doc/installing.rst | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/index.rst b/doc/index.rst index df3f8989..b896f882 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -27,7 +27,7 @@ pyRiemann-qiskit: Qiskit wrapper for pyRiemann