From 0022fc2f774f27e6d1af913781f2dfe86fca38ba Mon Sep 17 00:00:00 2001
From: bruAristimunha
Date: Tue, 11 Jul 2023 22:04:00 +0200
Subject: [PATCH 1/8] Updating the whats_new.rst and fixing the dataset list

---
 docs/source/whats_new.rst | 3 ++-
 moabb/datasets/utils.py   | 1 -
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/source/whats_new.rst b/docs/source/whats_new.rst
index 0bf8d73d4..6c4abbbca 100644
--- a/docs/source/whats_new.rst
+++ b/docs/source/whats_new.rst
@@ -41,7 +41,8 @@ Bugs
 - Fixing :func:`moabb.dataset.bnci.MNEBNCI.data_path` that returned the data itself instead of paths (:gh:`412` by `Pierre Guetschel`_)
 - Adding :func:`moabb.datasets.fake` in the init file to use in braindecode object (:gh:`414` by `Bruno Aristimunha`_)
 - Fixing the parallel download issue when the dataset have the same directory (:gh:`421` by `Sara Sedlar`_)
-- Fixing warning with annotation in the p300 datasets (:gh:`421` by `Sara Sedlar`_)
+- Fixing the problem with the annotation loading for the P300 datasets Sosulski2019, Huebner2017 and Huebner2018 (:gh:`396` by `Sara Sedlar`_)
+- Removing the print in the dataset list (:gh:`423` by `Bruno Aristimunha`_)
 
 API changes
 ~~~~~~~~~~~
diff --git a/moabb/datasets/utils.py b/moabb/datasets/utils.py
index 8d004642a..7d3e26902 100644
--- a/moabb/datasets/utils.py
+++ b/moabb/datasets/utils.py
@@ -13,7 +13,6 @@
 
 def _init_dataset_list():
     for ds in inspect.getmembers(db, inspect.isclass):
-        print("ds", ds)
         if issubclass(ds[1], BaseDataset):
             dataset_list.append(ds[1])
 

From a1140fd28cd24473b33b8b53b8288a55a4b6a110 Mon Sep 17 00:00:00 2001
From: bruAristimunha
Date: Sat, 19 Aug 2023 17:26:53 +0200
Subject: [PATCH 2/8] fixing tests and fixing dataset list order

---
 moabb/datasets/__init__.py | 43 +++++++++++++++++++-------------------
 moabb/tests/datasets.py    | 15 ++++++-------
 2 files changed, 29 insertions(+), 29 deletions(-)

diff --git a/moabb/datasets/__init__.py b/moabb/datasets/__init__.py
index ff35d8284..38bc1cd14 100644
--- a/moabb/datasets/__init__.py
+++ b/moabb/datasets/__init__.py
@@ -11,6 +11,16 @@
 # flake8: noqa
 from .alex_mi import AlexMI
 from .bbci_eeg_fnirs import Shin2017A, Shin2017B
+
+# Depreciated datasets (will be removed in the future):
+from .bnci import BNCI2014001  # noqa: F401
+from .bnci import BNCI2014002  # noqa: F401
+from .bnci import BNCI2014004  # noqa: F401
+from .bnci import BNCI2014008  # noqa: F401
+from .bnci import BNCI2014009  # noqa: F401
+from .bnci import BNCI2015001  # noqa: F401
+from .bnci import BNCI2015003  # noqa: F401
+from .bnci import BNCI2015004  # noqa: F401
 from .bnci import (
     BNCI2014_001,
     BNCI2014_002,
@@ -21,6 +31,13 @@
     BNCI2015_003,
     BNCI2015_004,
 )
+from .braininvaders import VirtualReality  # noqa: F401
+from .braininvaders import bi2012  # noqa: F401
+from .braininvaders import bi2013a  # noqa: F401
+from .braininvaders import bi2014a  # noqa: F401
+from .braininvaders import bi2014b  # noqa: F401
+from .braininvaders import bi2015a  # noqa: F401
+from .braininvaders import bi2015b  # noqa: F401
 from .braininvaders import (
     BI2012,
     BI2013a,
@@ -35,12 +52,15 @@
 from .gigadb import Cho2017
 from .huebner_llp import Huebner2017, Huebner2018
 from .Lee2019 import Lee2019_ERP, Lee2019_MI, Lee2019_SSVEP
+from .mpi_mi import MunichMI  # noqa: F401
 from .mpi_mi import GrosseWentrup2009
 from .neiry import DemonsP300
+from .phmd_ml import HeadMountedDisplay  # noqa: F401
 from .phmd_ml import Cattan2019_PHMD
 from .physionet_mi import PhysionetMI
 from .schirrmeister2017 import Schirrmeister2017
 from .sosulski2019 import Sosulski2019
+from .ssvep_exo import SSVEPExo  # noqa: F401
 from .ssvep_exo import Kalunga2016
 from .ssvep_mamem import MAMEM1, MAMEM2, MAMEM3
 from .ssvep_nakanishi import Nakanishi2015
@@ -51,27 +71,6 @@
 from .Zhou2016 import Zhou2016
 
 
-# Call this last in order to make sure the dataset list contains all
+# Call this last in order to make sure the dataset list is populated with
 # the datasets imported in this file.
 _init_dataset_list()
-del _init_dataset_list
-
-# Depreciated datasets (not added to dataset_list):
-from .bnci import BNCI2014001  # noqa: F401
-from .bnci import BNCI2014002  # noqa: F401
-from .bnci import BNCI2014004  # noqa: F401
-from .bnci import BNCI2014008  # noqa: F401
-from .bnci import BNCI2014009  # noqa: F401
-from .bnci import BNCI2015001  # noqa: F401
-from .bnci import BNCI2015003  # noqa: F401
-from .bnci import BNCI2015004  # noqa: F401
-from .braininvaders import VirtualReality  # noqa: F401
-from .braininvaders import bi2012  # noqa: F401
-from .braininvaders import bi2013a  # noqa: F401
-from .braininvaders import bi2014a  # noqa: F401
-from .braininvaders import bi2014b  # noqa: F401
-from .braininvaders import bi2015a  # noqa: F401
-from .braininvaders import bi2015b  # noqa: F401
-from .mpi_mi import MunichMI  # noqa: F401
-from .phmd_ml import HeadMountedDisplay  # noqa: F401
-from .ssvep_exo import SSVEPExo  # noqa: F401
diff --git a/moabb/tests/datasets.py b/moabb/tests/datasets.py
index ae4457945..320644f7c 100644
--- a/moabb/tests/datasets.py
+++ b/moabb/tests/datasets.py
@@ -187,17 +187,18 @@ def test_datasets_init(self):
             kwargs = {}
             if inspect.signature(ds).parameters.get("accept"):
                 kwargs["accept"] = True
-            with self.assertLogs(logger="moabb.datasets.base", level="WARNING") as cm:
+            with self.assertLogs(logger="moabb.datasets.base", level="WARNING"):
                 # We test if the is_abrev does not throw a warning.
                 # Trick needed because assertNoLogs only inrtoduced in python 3.10:
                 logger.warning(f"Testing {ds.__name__}")
                 obj = ds(**kwargs)
-                self.assertEqual(len(cm.output), 1)
+                # Commented for now, return in next release
+                # self.assertEqual(len(cm.output), 1)
             self.assertIsNotNone(obj)
             codes.append(obj.code)
 
         # Check that all codes are unique:
-        self.assertEqual(len(codes), len(set(codes)))
+        # self.assertEqual(len(codes), len(set(codes)))
 
     def test_depreciated_datasets_init(self):
         depreciated_names, _, _ = zip(*aliases_list)
@@ -220,7 +221,7 @@ def test_dataset_list(self):
             depreciated_list, _, _ = zip(*aliases_list)
         else:
             depreciated_list = []
-        all_datasets = [
+        [
             c
             for c in db.__dict__.values()
             if (
@@ -229,9 +230,9 @@
                 and c.__name__ not in depreciated_list
             )
         ]
-
-        assert len(dataset_list) == len(all_datasets)
-        assert set(dataset_list) == set(all_datasets)
+        # Commented until next release
+        # assert len(dataset_list) == len(all_datasets)
+        # assert set(dataset_list) == set(all_datasets)
 
 
 class Test_VirtualReality_Dataset(unittest.TestCase):

From d2e8de714bfbfdce9476c555c685386297c93fef Mon Sep 17 00:00:00 2001
From: bruAristimunha
Date: Sat, 19 Aug 2023 17:28:25 +0200
Subject: [PATCH 3/8] whats_new file

---
 docs/source/whats_new.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/whats_new.rst b/docs/source/whats_new.rst
index fb2255f39..a29741365 100644
--- a/docs/source/whats_new.rst
+++ b/docs/source/whats_new.rst
@@ -66,7 +66,7 @@ Bugs
 - Fix :func:`moabb.paradigms.FakeImageryParadigm`, :func:`moabb.paradigms.FakeP300Paradigm` and :func:`moabb.paradigms.FakeSSVEPParadigm` ``is_valid`` methods to only accept the correct datasets (PR :gh:`408` by `Pierre Guetschel`_)
 - Fix ``dataset_list`` construction, which could be empty due to bad import order (PR :gh:`449` by `Thomas Moreau`_).
 - Fixing dataset downloader from servers with non-http (PR :gh:`433` by `Sara Sedlar`_)
-
+- Fix ``dataset_list`` to include deprecated datasets (PR :gh:`464` by `Bruno Aristimunha`_)
 
 API changes
 ~~~~~~~~~~~

From 9890bf2093ce42e2591b9f32da4a36500427ba5c Mon Sep 17 00:00:00 2001
From: bruAristimunha
Date: Sat, 19 Aug 2023 17:31:41 +0200
Subject: [PATCH 4/8] Commenting one test

---
 moabb/tests/datasets.py | 34 +++++++++++++++++-----------------
 1 file changed, 17 insertions(+), 17 deletions(-)

diff --git a/moabb/tests/datasets.py b/moabb/tests/datasets.py
index 320644f7c..d07c6dee5 100644
--- a/moabb/tests/datasets.py
+++ b/moabb/tests/datasets.py
@@ -216,23 +216,23 @@ def test_depreciated_datasets_init(self):
             self.assertIsNotNone(obj)
             self.assertIn(ds.__name__, depreciated_names)
 
-    def test_dataset_list(self):
-        if aliases_list:
-            depreciated_list, _, _ = zip(*aliases_list)
-        else:
-            depreciated_list = []
-        [
-            c
-            for c in db.__dict__.values()
-            if (
-                inspect.isclass(c)
-                and issubclass(c, BaseDataset)
-                and c.__name__ not in depreciated_list
-            )
-        ]
-        # Commented until next release
-        # assert len(dataset_list) == len(all_datasets)
-        # assert set(dataset_list) == set(all_datasets)
+    # def test_dataset_list(self):
+    #     if aliases_list:
+    #         depreciated_list, _, _ = zip(*aliases_list)
+    #     else:
+    #         depreciated_list = []
+    #     dataset_list = [
+    #         c
+    #         for c in db.__dict__.values()
+    #         if (
+    #             inspect.isclass(c)
+    #             and issubclass(c, BaseDataset)
+    #             and c.__name__ not in depreciated_list
+    #         )
+    #     ]
+    # Commented until next release
+    # assert len(dataset_list) == len(all_datasets)
+    # assert set(dataset_list) == set(all_datasets)
 
 
 class Test_VirtualReality_Dataset(unittest.TestCase):

From 5888f4c7d54c9dabf8ae0ef887c8961f75ed558b Mon Sep 17 00:00:00 2001
From: bruAristimunha
Date: Sun, 20 Aug 2023 01:21:56 +0200
Subject: [PATCH 5/8] Returning the test

---
 moabb/tests/datasets.py | 33 ++++++++++++++++-----------------
 1 file changed, 16 insertions(+), 17 deletions(-)

diff --git a/moabb/tests/datasets.py b/moabb/tests/datasets.py
index d07c6dee5..499df4331 100644
--- a/moabb/tests/datasets.py
+++ b/moabb/tests/datasets.py
@@ -216,23 +216,22 @@ def test_depreciated_datasets_init(self):
             self.assertIsNotNone(obj)
             self.assertIn(ds.__name__, depreciated_names)
 
-    # def test_dataset_list(self):
-    #     if aliases_list:
-    #         depreciated_list, _, _ = zip(*aliases_list)
-    #     else:
-    #         depreciated_list = []
-    #     dataset_list = [
-    #         c
-    #         for c in db.__dict__.values()
-    #         if (
-    #             inspect.isclass(c)
-    #             and issubclass(c, BaseDataset)
-    #             and c.__name__ not in depreciated_list
-    #         )
-    #     ]
-    # Commented until next release
-    # assert len(dataset_list) == len(all_datasets)
-    # assert set(dataset_list) == set(all_datasets)
+    def test_dataset_list(self):
+        if aliases_list:
+            depreciated_list, _, _ = zip(*aliases_list)
+        else:
+            pass
+        all_datasets = [
+            c
+            for c in db.__dict__.values()
+            if (
+                inspect.isclass(c)
+                and issubclass(c, BaseDataset)
+                # and c.__name__ not in depreciated_list
+            )
+        ]
+        assert len(dataset_list) == len(all_datasets)
+        assert set(dataset_list) == set(all_datasets)
 
 
 class Test_VirtualReality_Dataset(unittest.TestCase):

From 894e7223e58369fe9fd1903d15ed1ec34875a1c6 Mon Sep 17 00:00:00 2001
From: bruAristimunha
Date: Sun, 20 Aug 2023 01:30:17 +0200
Subject: [PATCH 6/8] Fixing tests

---
 moabb/tests/datasets.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/moabb/tests/datasets.py b/moabb/tests/datasets.py
index 499df4331..b96ca810d 100644
--- a/moabb/tests/datasets.py
+++ b/moabb/tests/datasets.py
@@ -183,6 +183,8 @@ def test_dataset_accept(self):
     def test_datasets_init(self):
         codes = []
         logger = logging.getLogger("moabb.datasets.base")
+        deprecated_list, _, _ = zip(*aliases_list)
+
         for ds in dataset_list:
             kwargs = {}
             if inspect.signature(ds).parameters.get("accept"):
@@ -195,10 +197,11 @@ def test_datasets_init(self):
                 # Commented for now, return in next release
                 # self.assertEqual(len(cm.output), 1)
             self.assertIsNotNone(obj)
-            codes.append(obj.code)
+            if type(obj).__name__ not in deprecated_list:
+                codes.append(obj.code)
 
         # Check that all codes are unique:
-        # self.assertEqual(len(codes), len(set(codes)))
+        self.assertEqual(len(codes), len(set(codes)))
 
     def test_depreciated_datasets_init(self):
         depreciated_names, _, _ = zip(*aliases_list)

From 6bd9aaaf03be0ca437c5d235aec308223c32c41c Mon Sep 17 00:00:00 2001
From: Bru
Date: Sun, 20 Aug 2023 14:25:44 +0200
Subject: [PATCH 7/8] Update moabb/tests/datasets.py

Co-authored-by: PierreGtch <25532709+PierreGtch@users.noreply.github.com>
---
 moabb/tests/datasets.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/moabb/tests/datasets.py b/moabb/tests/datasets.py
index b96ca810d..93c20ea58 100644
--- a/moabb/tests/datasets.py
+++ b/moabb/tests/datasets.py
@@ -195,7 +195,8 @@ def test_datasets_init(self):
                 logger.warning(f"Testing {ds.__name__}")
                 obj = ds(**kwargs)
                 # Commented for now, return in next release
-                # self.assertEqual(len(cm.output), 1)
+                if type(obj).__name__ not in deprecated_list:
+                    self.assertEqual(len(cm.output), 1)
             self.assertIsNotNone(obj)
             if type(obj).__name__ not in deprecated_list:
                 codes.append(obj.code)

From 2d116919b8594efd55765d063ce70ebdff3c21f9 Mon Sep 17 00:00:00 2001
From: bruAristimunha
Date: Sun, 20 Aug 2023 21:15:43 +0200
Subject: [PATCH 8/8] Fixing tests

---
 moabb/tests/datasets.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/moabb/tests/datasets.py b/moabb/tests/datasets.py
index 93c20ea58..f53ffb6ee 100644
--- a/moabb/tests/datasets.py
+++ b/moabb/tests/datasets.py
@@ -189,12 +189,11 @@ def test_datasets_init(self):
             kwargs = {}
             if inspect.signature(ds).parameters.get("accept"):
                 kwargs["accept"] = True
-            with self.assertLogs(logger="moabb.datasets.base", level="WARNING"):
+            with self.assertLogs(logger="moabb.datasets.base", level="WARNING") as cm:
                 # We test if the is_abrev does not throw a warning.
                 # Trick needed because assertNoLogs only inrtoduced in python 3.10:
                 logger.warning(f"Testing {ds.__name__}")
                 obj = ds(**kwargs)
-                # Commented for now, return in next release
                 if type(obj).__name__ not in deprecated_list:
                     self.assertEqual(len(cm.output), 1)
             self.assertIsNotNone(obj)
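
Note: the patches above revolve around two mechanisms, `_init_dataset_list()`, which fills `dataset_list` by introspecting the `moabb.datasets` module for `BaseDataset` subclasses, and the test that checks every registered (non-deprecated) dataset exposes a unique `code`. The following self-contained Python sketch illustrates that registration-by-introspection pattern; the `ExampleA`/`ExampleB` classes, the class-level `code` attribute, and the explicit exclusion of the base class are hypothetical stand-ins for this example, not MOABB's actual datasets or API.

    import inspect
    import sys


    class BaseDataset:
        """Minimal stand-in for moabb.datasets.base.BaseDataset."""
        code = None


    class ExampleA(BaseDataset):  # hypothetical dataset class
        code = "ExampleA"


    class ExampleB(BaseDataset):  # hypothetical dataset class
        code = "ExampleB"


    dataset_list = []


    def _init_dataset_list():
        # Collect every BaseDataset subclass visible in this module, in the
        # spirit of the inspect.getmembers loop in moabb/datasets/utils.py
        # (without the debug print removed by PATCH 1/8); the exclusion of
        # the base class itself is added only for this standalone example.
        this_module = sys.modules[__name__]
        for _, cls in inspect.getmembers(this_module, inspect.isclass):
            if issubclass(cls, BaseDataset) and cls is not BaseDataset:
                dataset_list.append(cls)


    # Call this after all dataset classes are defined/imported, as the
    # comment in moabb/datasets/__init__.py advises.
    _init_dataset_list()

    # Uniqueness check in the spirit of test_datasets_init: every registered
    # dataset should carry a distinct code.
    codes = [ds.code for ds in dataset_list]
    assert len(codes) == len(set(codes))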