From d7bcc1d74065844fe0483dc3ce3fda7d06d07bc0 Mon Sep 17 00:00:00 2001 From: lartpang Date: Fri, 12 Mar 2021 22:13:30 +0800 Subject: [PATCH] =?UTF-8?q?=20=20-=20=E8=BF=99=E4=B8=80=E7=89=88=E6=9C=AC?= =?UTF-8?q?=E6=AD=A3=E5=BC=8F=E5=B0=86sod=E7=9A=84=E8=AF=84=E4=BC=B0?= =?UTF-8?q?=E3=80=81=E7=BB=98=E5=9B=BE=E4=BB=A3=E7=A0=81=E4=B8=8E=E9=85=8D?= =?UTF-8?q?=E7=BD=AE=E5=88=86=E7=A6=BB=EF=BC=8C=E4=B8=BB=E8=A6=81=E8=80=83?= =?UTF-8?q?=E8=99=91=E5=A6=82=E4=B8=8B=20=20=20=20=20-=20=E7=94=A8?= =?UTF-8?q?=E6=88=B7=E7=9A=84=E9=85=8D=E7=BD=AE=E6=98=AF=E9=9C=80=E8=A6=81?= =?UTF-8?q?=E8=B0=83=E6=95=B4=E7=9A=84=EF=BC=8C=E8=BF=99=E9=83=A8=E5=88=86?= =?UTF-8?q?=E4=B8=8D=E9=80=82=E5=AE=9C=E8=A2=ABgit=E4=B8=A5=E6=A0=BC?= =?UTF-8?q?=E7=9A=84=E7=9B=91=E8=A7=86=EF=BC=8C=E4=B9=9F=E4=BE=BF=E4=BA=8E?= =?UTF-8?q?=E6=8F=90=E4=BA=A4=E5=90=8E=E7=BB=AD=E6=9B=B4=E6=96=B0=E7=9A=84?= =?UTF-8?q?=E6=97=B6=E5=80=99=EF=BC=8C=E7=9B=B4=E6=8E=A5=E5=BF=BD=E7=95=A5?= =?UTF-8?q?=E5=85=B3=E4=BA=8E=E9=85=8D=E7=BD=AE=E7=9A=84=E6=9B=B4=E6=94=B9?= =?UTF-8?q?=EF=BC=8C=E5=8D=B3=E5=90=8E=E7=BB=AD=E6=9B=B4=E6=96=B0=E6=97=B6?= =?UTF-8?q?=EF=BC=8C=20=20=20=20=20=20=20=E7=94=A8=E6=88=B7=E9=85=8D?= =?UTF-8?q?=E7=BD=AE=E9=83=A8=E5=88=86=E4=BC=9A=E4=B8=8D=E5=86=8D=E6=9B=B4?= =?UTF-8?q?=E6=96=B0=EF=BC=8C=E8=8B=A5=E6=98=AF=E6=B7=BB=E5=8A=A0=E6=96=B0?= =?UTF-8?q?=E5=8A=9F=E8=83=BD=EF=BC=8C=E7=9B=B4=E6=8E=A5=E8=B0=83=E6=95=B4?= =?UTF-8?q?=E5=8E=9F=E5=A7=8B=E7=9A=84=E5=87=BD=E6=95=B0=EF=BC=8C=E5=85=B6?= =?UTF-8?q?=E5=8F=82=E6=95=B0=E9=BB=98=E8=AE=A4=E5=85=B3=E9=97=AD=E6=96=B0?= =?UTF-8?q?=E5=8A=9F=E8=83=BD=EF=BC=8C=E4=BF=9D=E8=AF=81=E7=94=A8=E6=88=B7?= =?UTF-8?q?=E4=B8=8D=E4=BC=9A=E5=8F=97=E5=88=B0=E5=BD=B1=E5=93=8D=E3=80=82?= =?UTF-8?q?=20=20=20=20=20-=20sod=E5=92=8Ccosod=E8=AF=84=E4=BC=B0=E6=96=B9?= =?UTF-8?q?=E5=BC=8F=E6=9C=89=E5=B7=AE=E5=BC=82=EF=BC=8C=E4=BD=86=E6=98=AF?= =?UTF-8?q?=E7=BB=98=E5=9B=BE=E6=96=B9=E5=BC=8F=E4=B8=80=E8=87=B4=EF=BC=8C?= =?UTF-8?q?=E6=89=80=E4=BB=A5=E7=8E=B0=E5=B0=86=E8=AF=84=E4=BC=B0=E7=BB=98?= 
=?UTF-8?q?=E5=9B=BE=E6=8B=86=E5=88=86=E6=88=90=E7=8B=AC=E7=AB=8B=E9=83=A8?= =?UTF-8?q?=E5=88=86=EF=BC=8C=E7=BD=AE=E4=BA=8Emetrics/sod=E6=96=87?= =?UTF-8?q?=E4=BB=B6=E5=A4=B9=E4=B8=8B=EF=BC=8C=E4=B9=8B=E5=90=8E=E6=88=96?= =?UTF-8?q?=E8=AE=B8=E6=88=96=E8=B0=83=E6=95=B4=E4=BD=8D=E7=BD=AE=EF=BC=8C?= =?UTF-8?q?=20=20=20=20=20=20=20=E4=BD=86=E8=BF=99=E7=A7=8D=E6=8B=86?= =?UTF-8?q?=E5=88=86=E7=AD=96=E7=95=A5=E4=B8=8D=E5=8F=98=E3=80=82=20=20=20?= =?UTF-8?q?-=20=E4=BC=98=E5=8C=96=E4=BA=86cosod=E7=9A=84=E8=AF=84=E4=BC=B0?= =?UTF-8?q?=E4=BB=A3=E7=A0=81=EF=BC=8C=E5=AF=B9sod=E5=92=8Ccosod=E7=9A=84?= =?UTF-8?q?=E6=8C=87=E6=A0=87recorder=E9=83=A8=E5=88=86=E8=BF=9B=E8=A1=8C?= =?UTF-8?q?=E4=BA=86=E7=AE=80=E5=8C=96=E3=80=82=20=20=20-=20=E4=B8=8D?= =?UTF-8?q?=E5=86=8D=E4=BD=BF=E7=94=A8=E7=8B=AC=E7=AB=8B=E7=9A=84sod=5Fmet?= =?UTF-8?q?rics=E4=BB=A3=E7=A0=81=EF=BC=8C=E7=94=B1=E4=BA=8E=E6=88=91?= =?UTF-8?q?=E5=B7=B2=E7=BB=8F=E5=B0=86PySODMetrics=E5=8F=91=E5=B8=83?= =?UTF-8?q?=E5=88=B0=E4=BA=86PyPI=E4=B8=8A=EF=BC=8C=E6=89=80=E4=BB=A5?= =?UTF-8?q?=E5=8F=AF=E4=BB=A5=E7=9B=B4=E6=8E=A5=E9=80=9A=E8=BF=87pip?= =?UTF-8?q?=E5=AE=89=E8=A3=85=E3=80=82=20=20=20-=20=E4=BD=BF=E7=94=A8?= =?UTF-8?q?=E6=B7=BB=E5=8A=A0=E4=BA=86=E5=AF=B9=E4=BA=8Eprint=E7=9A=84?= =?UTF-8?q?=E4=B8=80=E4=B8=AA=E5=BD=A9=E8=89=B2=E5=A2=9E=E5=BC=BA=E7=9A=84?= =?UTF-8?q?=E5=B0=81=E8=A3=85=EF=BC=8C=E5=8F=AF=E8=A7=81`./utils/misc.py`?= =?UTF-8?q?=E4=B8=AD=E7=9A=84`colored=5Fprint`=E3=80=82=20=20=20-=20git?= =?UTF-8?q?=E4=B8=8D=E5=86=8D=E8=B7=9F=E8=B8=AA=E6=96=B9=E6=B3=95=E9=85=8D?= =?UTF-8?q?=E7=BD=AE=E6=96=87=E4=BB=B6=E5=92=8C=E6=95=B0=E6=8D=AE=E9=9B=86?= =?UTF-8?q?=E9=85=8D=E7=BD=AE=E6=96=87=E4=BB=B6=EF=BC=8C=E8=BF=99=E9=83=A8?= =?UTF-8?q?=E5=88=86=E7=8E=B0=E6=9C=89=E7=9A=84=E4=BD=9C=E4=B8=BA=E7=A4=BA?= =?UTF-8?q?=E4=BE=8B=EF=BC=8C=E4=BB=85=E4=BE=9B=E4=BD=BF=E7=94=A8=E8=80=85?= =?UTF-8?q?=E7=8B=AC=E7=AB=8B=E8=A1=A5=E5=85=85=E5=92=8C=E5=8F=82=E8=80=83?= =?UTF-8?q?=E3=80=82=20=20=20-=20=E4=BF=AE=E5=A4=8D=E4=BA=86=E4=B9=8B?= 
=?UTF-8?q?=E5=89=8D=E7=BB=98=E5=88=B6Fm=E6=9B=B2=E7=BA=BF=E6=97=B6x?= =?UTF-8?q?=E7=9A=84=E9=97=AE=E9=A2=98=EF=BC=8C=E4=B9=8B=E5=89=8D=E5=8F=96?= =?UTF-8?q?=E5=8F=8D=E4=BA=86=E3=80=82=E8=AF=A6=E8=A7=81=E3=80=82?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .gitignore | 9 +- configs/datasets/__init__.py | 0 configs/datasets/rgb_cod.py | 23 - configs/datasets/rgb_cosod.py | 65 --- configs/datasets/rgb_sod.py | 53 -- configs/datasets/rgbd_sod.py | 73 --- configs/methods/__init__.py | 0 configs/methods/rgb_cod_methods.py | 414 ------------- configs/methods/rgb_cosod_methods.py | 53 -- configs/methods/rgb_sod_methods.py | 837 --------------------------- configs/methods/rgbd_sod_methods.py | 549 ------------------ eval_cosod_all_methods.py | 346 +++-------- eval_sod_all_methods.py | 320 +++------- eval_sod_all_methods_from_mat.py | 241 ++++---- eval_sod_single_method.py | 67 +-- metrics/sod/__init__.py | 5 + metrics/sod/cal_cosod_matrics.py | 169 ++++++ metrics/sod/cal_sod_matrics.py | 140 +++++ metrics/sod/draw_curves.py | 64 ++ metrics/sod/metrics.py | 443 -------------- readme.md | 26 + utils/misc.py | 20 + utils/recorders/__init__.py | 2 +- utils/recorders/excel_recorder.py | 19 + utils/recorders/metric_recorder.py | 143 ++++- utils/recorders/txt_recorder.py | 7 + 26 files changed, 883 insertions(+), 3205 deletions(-) delete mode 100644 configs/datasets/__init__.py delete mode 100644 configs/datasets/rgb_cod.py delete mode 100644 configs/datasets/rgb_cosod.py delete mode 100644 configs/datasets/rgb_sod.py delete mode 100644 configs/datasets/rgbd_sod.py delete mode 100644 configs/methods/__init__.py delete mode 100644 configs/methods/rgb_cod_methods.py delete mode 100644 configs/methods/rgb_cosod_methods.py delete mode 100644 configs/methods/rgb_sod_methods.py delete mode 100644 configs/methods/rgbd_sod_methods.py mode change 100755 => 100644 metrics/sod/__init__.py create mode 100644 metrics/sod/cal_cosod_matrics.py 
create mode 100644 metrics/sod/cal_sod_matrics.py create mode 100644 metrics/sod/draw_curves.py delete mode 100644 metrics/sod/metrics.py diff --git a/.gitignore b/.gitignore index 7f22bd4..003fea8 100755 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,3 @@ -# private files -/output/ -/untracked/ - # Big files **/*.png **/*.zip @@ -273,4 +269,9 @@ cython_debug/ *.iml out gen + +# private files /output/ +/untracked/ +/configs/methods/ +/configs/datasets/ diff --git a/configs/datasets/__init__.py b/configs/datasets/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/configs/datasets/rgb_cod.py b/configs/datasets/rgb_cod.py deleted file mode 100644 index a577d74..0000000 --- a/configs/datasets/rgb_cod.py +++ /dev/null @@ -1,23 +0,0 @@ -# -*- coding: utf-8 -*- -import os -from collections import OrderedDict - -_RGB_COD_ROOT = "/home/lart/Datasets/Saliency/COD" - -chameleon_path = dict( - root=os.path.join(_RGB_COD_ROOT, "Test", "CHAMELEON"), - image=dict(path=os.path.join(_RGB_COD_ROOT, "Test", "CHAMELEON", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGB_COD_ROOT, "Test", "CHAMELEON", "Mask"), suffix=".png"), -) -camo_path = dict( - root=os.path.join(_RGB_COD_ROOT, "Test", "CAMO"), - image=dict(path=os.path.join(_RGB_COD_ROOT, "Test", "CAMO", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGB_COD_ROOT, "Test", "CAMO", "Mask"), suffix=".png"), -) -cod10k_path = dict( - root=os.path.join(_RGB_COD_ROOT, "Test", "COD10K"), - image=dict(path=os.path.join(_RGB_COD_ROOT, "Test", "COD10K", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGB_COD_ROOT, "Test", "COD10K", "Mask"), suffix=".png"), -) - -rgb_cod_data = OrderedDict({"CHAMELEON": chameleon_path, "CAMO": camo_path, "COD10K": cod10k_path}) diff --git a/configs/datasets/rgb_cosod.py b/configs/datasets/rgb_cosod.py deleted file mode 100644 index 3c6dbb1..0000000 --- a/configs/datasets/rgb_cosod.py +++ /dev/null @@ -1,65 +0,0 @@ -# -*- coding: utf-8 -*- -import os -from 
collections import OrderedDict - -_RGB_CoSOD_ROOT = "/home/lart/Datasets/Saliency/CoSOD" - -COCO9213 = dict( - root=os.path.join(_RGB_CoSOD_ROOT, "COCO9213"), - image=dict(path=os.path.join(_RGB_CoSOD_ROOT, "COCO9213-os", "img"), suffix=".png"), - mask=dict(path=os.path.join(_RGB_CoSOD_ROOT, "COCO9213-os", "gt"), suffix=".png"), -) -CoCA = dict( - root=os.path.join(_RGB_CoSOD_ROOT, "CoCA"), - image=dict(path=os.path.join(_RGB_CoSOD_ROOT, "CoCA", "image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGB_CoSOD_ROOT, "CoCA", "binary"), suffix=".png"), - bbox=dict(path=os.path.join(_RGB_CoSOD_ROOT, "CoCA", "bbox"), suffix=".txt"), - instance=dict(path=os.path.join(_RGB_CoSOD_ROOT, "CoCA", "instance"), suffix=".png"), -) -CoSal2015 = dict( - root=os.path.join(_RGB_CoSOD_ROOT, "CoSal2015"), - image=dict(path=os.path.join(_RGB_CoSOD_ROOT, "CoSal2015", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGB_CoSOD_ROOT, "CoSal2015", "GroundTruth"), suffix=".png"), -) -CoSOD3k = dict( - root=os.path.join(_RGB_CoSOD_ROOT, "CoSOD3k"), - image=dict(path=os.path.join(_RGB_CoSOD_ROOT, "CoSOD3k", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGB_CoSOD_ROOT, "CoSOD3k", "GroundTruth"), suffix=".png"), - bbox=dict(path=os.path.join(_RGB_CoSOD_ROOT, "CoSOD3k", "BoundingBox"), suffix=".txt"), - instance=dict( - path=os.path.join(_RGB_CoSOD_ROOT, "CoSOD3k", "SegmentationObject"), suffix=".png" - ), -) -iCoSeg = dict( - root=os.path.join(_RGB_CoSOD_ROOT, "iCoSeg"), - image=dict(path=os.path.join(_RGB_CoSOD_ROOT, "iCoSeg", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGB_CoSOD_ROOT, "iCoSeg", "GroundTruth"), suffix=".png"), -) -ImagePair = dict( - root=os.path.join(_RGB_CoSOD_ROOT, "ImagePair"), - image=dict(path=os.path.join(_RGB_CoSOD_ROOT, "ImagePair", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGB_CoSOD_ROOT, "ImagePair", "GroundTruth"), suffix=".png"), -) -MSRC = dict( - root=os.path.join(_RGB_CoSOD_ROOT, "MSRC"), - 
image=dict(path=os.path.join(_RGB_CoSOD_ROOT, "MSRC", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGB_CoSOD_ROOT, "MSRC", "GroundTruth"), suffix=".png"), -) -WICOS = dict( - root=os.path.join(_RGB_CoSOD_ROOT, "WICOS"), - image=dict(path=os.path.join(_RGB_CoSOD_ROOT, "WICOS", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGB_CoSOD_ROOT, "WICOS", "GroundTruth"), suffix=".png"), -) - -# [('ImagePair', 210), ('MSRC', 233), ('WICOS', 364), ('iCoSeg', 643), ('CoCA', 1295), ('CoSal2015', 2015), ('CoSOD3k', 3316)] -rgb_cosod_data = OrderedDict( - { - "ImagePair": ImagePair, - "MSRC": MSRC, - "WICOS": WICOS, - "iCoSeg": iCoSeg, - "CoCA": CoCA, - "CoSal2015": CoSal2015, - "CoSOD3k": CoSOD3k, - } -) diff --git a/configs/datasets/rgb_sod.py b/configs/datasets/rgb_sod.py deleted file mode 100644 index 383ff7c..0000000 --- a/configs/datasets/rgb_sod.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -import os -from collections import OrderedDict - -_RGB_SOD_ROOT = "/home/lart/Datasets/Saliency/RGBSOD" - -ECSSD = dict( - root=os.path.join(_RGB_SOD_ROOT, "ECSSD"), - image=dict(path=os.path.join(_RGB_SOD_ROOT, "ECSSD", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGB_SOD_ROOT, "ECSSD", "Mask"), suffix=".png"), -) -DUTOMRON = dict( - root=os.path.join(_RGB_SOD_ROOT, "DUT-OMRON"), - image=dict(path=os.path.join(_RGB_SOD_ROOT, "DUT-OMRON", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGB_SOD_ROOT, "DUT-OMRON", "Mask"), suffix=".png"), -) -HKUIS = dict( - root=os.path.join(_RGB_SOD_ROOT, "HKU-IS"), - image=dict(path=os.path.join(_RGB_SOD_ROOT, "HKU-IS", "Image"), suffix=".png"), - mask=dict(path=os.path.join(_RGB_SOD_ROOT, "HKU-IS", "Mask"), suffix=".png"), -) -PASCALS = dict( - root=os.path.join(_RGB_SOD_ROOT, "PASCAL-S"), - image=dict(path=os.path.join(_RGB_SOD_ROOT, "PASCAL-S", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGB_SOD_ROOT, "PASCAL-S", "Mask"), suffix=".png"), -) -SOC_TE = dict( - 
root=os.path.join(_RGB_SOD_ROOT, "SOC/Test"), - image=dict(path=os.path.join(_RGB_SOD_ROOT, "SOC/Test", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGB_SOD_ROOT, "SOC/Test", "Mask"), suffix=".png"), -) -DUTS_TE = dict( - root=os.path.join(_RGB_SOD_ROOT, "DUTS/Test"), - image=dict(path=os.path.join(_RGB_SOD_ROOT, "DUTS/Test", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGB_SOD_ROOT, "DUTS/Test", "Mask"), suffix=".png"), -) -DUTS_TR = dict( - root=os.path.join(_RGB_SOD_ROOT, "DUTS/Train"), - image=dict(path=os.path.join(_RGB_SOD_ROOT, "DUTS/Train", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGB_SOD_ROOT, "DUTS/Train", "Mask"), suffix=".png"), -) - -rgb_sod_data = OrderedDict( - { - # "DUTS-TR": DUTS_TR, - "PASCAL-S": PASCALS, - "ECSSD": ECSSD, - "HKU-IS": HKUIS, - "DUT-OMRON": DUTOMRON, - "DUTS-TE": DUTS_TE, - "SOC": SOC_TE, - } -) diff --git a/configs/datasets/rgbd_sod.py b/configs/datasets/rgbd_sod.py deleted file mode 100644 index e510802..0000000 --- a/configs/datasets/rgbd_sod.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- -import os -from collections import OrderedDict - -_RGBD_SOD_ROOT = "/home/lart/Datasets/Saliency/RGBDSOD" - -LFSD = dict( - root=os.path.join(_RGBD_SOD_ROOT, "LFSD"), - image=dict(path=os.path.join(_RGBD_SOD_ROOT, "LFSD", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGBD_SOD_ROOT, "LFSD", "Mask"), suffix=".png"), -) -NLPR = dict( - root=os.path.join(_RGBD_SOD_ROOT, "NLPR_FULL"), - image=dict(path=os.path.join(_RGBD_SOD_ROOT, "NLPR_FULL", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGBD_SOD_ROOT, "NLPR_FULL", "Mask"), suffix=".png"), - # index_file=os.path.join(_RGBD_SOD_ROOT, "nlpr_test_jw_name_list.lst"), - # 测试的时候应该使用全部数据来和方法的预测结果计算交集,这样才会测到所有的预测结果,所以就不使用index_file了。 -) -NJUD = dict( - root=os.path.join(_RGBD_SOD_ROOT, "NJUD_FULL"), - image=dict(path=os.path.join(_RGBD_SOD_ROOT, "NJUD_FULL", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGBD_SOD_ROOT, 
"NJUD_FULL", "Mask"), suffix=".png"), - # index_file=os.path.join(_RGBD_SOD_ROOT, "njud_test_jw_name_list.lst"), - # 同上 -) -RGBD135 = dict( - root=os.path.join(_RGBD_SOD_ROOT, "RGBD135"), - image=dict(path=os.path.join(_RGBD_SOD_ROOT, "RGBD135", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGBD_SOD_ROOT, "RGBD135", "Mask"), suffix=".png"), -) -SIP = dict( - root=os.path.join(_RGBD_SOD_ROOT, "SIP"), - image=dict(path=os.path.join(_RGBD_SOD_ROOT, "SIP", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGBD_SOD_ROOT, "SIP", "Mask"), suffix=".png"), -) -SSD = dict( - root=os.path.join(_RGBD_SOD_ROOT, "SSD"), - image=dict(path=os.path.join(_RGBD_SOD_ROOT, "SSD", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGBD_SOD_ROOT, "SSD", "Mask"), suffix=".png"), -) -STEREO797 = dict( - root=os.path.join(_RGBD_SOD_ROOT, "STEREO797"), - image=dict(path=os.path.join(_RGBD_SOD_ROOT, "STEREO797", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGBD_SOD_ROOT, "STEREO797", "Mask"), suffix=".png"), -) -STEREO1000 = dict( - root=os.path.join(_RGBD_SOD_ROOT, "STEREO1000"), - image=dict(path=os.path.join(_RGBD_SOD_ROOT, "STEREO1000", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGBD_SOD_ROOT, "STEREO1000", "Mask"), suffix=".png"), -) -DUTRGBD_TE = dict( - root=os.path.join(_RGBD_SOD_ROOT, "DUT-RGBD/Test"), - image=dict(path=os.path.join(_RGBD_SOD_ROOT, "DUT-RGBD/Test", "Image"), suffix=".jpg"), - mask=dict(path=os.path.join(_RGBD_SOD_ROOT, "DUT-RGBD/Test", "Mask"), suffix=".png"), -) -RGBDSOD_TR = dict( - root=os.path.join(_RGBD_SOD_ROOT), - index_file=os.path.join(_RGBD_SOD_ROOT, "rgbd_train_jw_name_list.lst"), -) - -rgbd_sod_data = OrderedDict( - { - "LFSD": LFSD, - "NJUD": NJUD, - "NLPR": NLPR, - "RGBD135": RGBD135, - "SIP": SIP, - "SSD": SSD, - "STEREO797": STEREO797, - "STEREO1000": STEREO1000, - "DUTRGBD": DUTRGBD_TE, - } -) diff --git a/configs/methods/__init__.py b/configs/methods/__init__.py deleted file mode 100644 index 
e69de29..0000000 diff --git a/configs/methods/rgb_cod_methods.py b/configs/methods/rgb_cod_methods.py deleted file mode 100644 index 3969b25..0000000 --- a/configs/methods/rgb_cod_methods.py +++ /dev/null @@ -1,414 +0,0 @@ -# -*- coding: utf-8 -*- -import os -from collections import OrderedDict - -from configs.utils.config_generator import curve_info_generator, simple_info_generator - -_COD_METHODS_MAT_ROOT = "/home/lart/Coding/GIT/CODToolbox/Onekey_Evaluation_Code/OnekeyEvaluationCode/Results/Result-COD10K-test" -_COD_METHODS_PRED_ROOT = "/home/lart/Datasets/Saliency/PaperResults/COD" -_COD_DATASETS = ["CAMO-mat", "CHAMELEON-mat", "COD10K-mat"] -_COD_METHODS = { - "FPN": "2017-CVPR-FPN.mat", - "MaskRCNN": "2017-CVPR-MaskRCNN.mat", - "PSPNet": "2017-CVPR-PSPNet.mat", - "UNet++": "2018-DLMIA-UNet++.mat", - "MSRCNN": "2019-CVPR-MSRCNN.mat", - "HTC": "2019-CVPR-HTC.mat", - "PiCANet": "2018-CVPR-PiCANet.mat", - "BASNet": "2019-CVPR-BASNet.mat", - "CPD_ResNet": "2019-CVPR-CPD_ResNet.mat", - "PFANet": "2019-CVPR-PFANet.mat", - "PoolNet": "2019-CVPR-PoolNet.mat", - "EGNet": "2019-ICCV-EGNet.mat", - "F3Net": "F3Net.mat", - "MINet": "MINet_Res50_COD.mat", - "GateNet": "GateNet_Res50_COD.mat", - "ITSD": "ITSD_CVPR2020_COD.mat", - "ANet_SRM": "2019-CVIU-ANet_SRM.mat", - "SINet": "2020-CVPR-SINet.mat", - "Ours": "Ours", -} - -FPN = { - "CAMO": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2017-CVPR-FPN", "CAMO"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[0], _COD_METHODS["FPN"]), - }, - "CHAMELEON": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2017-CVPR-FPN", "CHAMELEON"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[1], _COD_METHODS["FPN"]), - }, - "COD10K": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2017-CVPR-FPN", "COD10K"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[2], _COD_METHODS["FPN"]), - }, -} - -MaskRCNN = { - "CAMO": { - "path": 
os.path.join(_COD_METHODS_PRED_ROOT, "2017-CVPR-MaskRCNN", "CAMO"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[0], _COD_METHODS["MaskRCNN"]), - }, - "CHAMELEON": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2017-CVPR-MaskRCNN", "CHAMELEON"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[1], _COD_METHODS["MaskRCNN"]), - }, - "COD10K": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2017-CVPR-MaskRCNN", "COD10K"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[2], _COD_METHODS["MaskRCNN"]), - }, -} - -PSPNet = { - "CAMO": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2017-CVPR-PSPNet", "CAMO"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[0], _COD_METHODS["PSPNet"]), - }, - "CHAMELEON": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2017-CVPR-PSPNet", "CHAMELEON"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[1], _COD_METHODS["PSPNet"]), - }, - "COD10K": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2017-CVPR-PSPNet", "COD10K"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[2], _COD_METHODS["PSPNet"]), - }, -} - -PiCANet = { - "CAMO": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2018-CVPR-PiCANet", "CAMO"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[0], _COD_METHODS["PiCANet"]), - }, - "CHAMELEON": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2018-CVPR-PiCANet", "CHAMELEON"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[1], _COD_METHODS["PiCANet"]), - }, - "COD10K": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2018-CVPR-PiCANet", "COD10K"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[2], _COD_METHODS["PiCANet"]), - }, -} - -UNetPP = { - "CAMO": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2018-DLMIA-UNet++", "CAMO"), - "suffix": 
".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[0], _COD_METHODS["UNet++"]), - }, - "CHAMELEON": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2018-DLMIA-UNet++", "CHAMELEON"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[1], _COD_METHODS["UNet++"]), - }, - "COD10K": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2018-DLMIA-UNet++", "COD10K"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[2], _COD_METHODS["UNet++"]), - }, -} - -ANet_SRM = { - "CAMO": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-CVIU-ANet_SRM", "CAMO"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[0], _COD_METHODS["ANet_SRM"]), - }, - "chameleon": None, - "cod10k": None, -} - -BASNet = { - "CAMO": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-CVPR-BASNet", "CAMO"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[0], _COD_METHODS["BASNet"]), - }, - "CHAMELEON": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-CVPR-BASNet", "CHAMELEON"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[1], _COD_METHODS["BASNet"]), - }, - "COD10K": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-CVPR-BASNet", "COD10K"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[2], _COD_METHODS["BASNet"]), - }, -} - -CPD = { - "CAMO": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-CVPR-CPD_ResNet", "CAMO"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[0], _COD_METHODS["CPD_ResNet"]), - }, - "CHAMELEON": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-CVPR-CPD_ResNet", "CHAMELEON"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[1], _COD_METHODS["CPD_ResNet"]), - }, - "COD10K": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-CVPR-CPD_ResNet", "COD10K"), - "suffix": ".png", - "mat": 
os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[2], _COD_METHODS["CPD_ResNet"]), - }, -} - -HTC = { - "CAMO": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-CVPR-HTC", "CAMO"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[0], _COD_METHODS["HTC"]), - }, - "CHAMELEON": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-CVPR-HTC", "CHAMELEON"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[1], _COD_METHODS["HTC"]), - }, - "COD10K": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-CVPR-HTC", "COD10K"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[2], _COD_METHODS["HTC"]), - }, -} - -MSRCNN = { - "CAMO": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-CVPR-MSRCNN", "CAMO"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[0], _COD_METHODS["MSRCNN"]), - }, - "CHAMELEON": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-CVPR-MSRCNN", "CHAMELEON"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[1], _COD_METHODS["MSRCNN"]), - }, - "COD10K": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-CVPR-MSRCNN", "COD10K"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[2], _COD_METHODS["MSRCNN"]), - }, -} - -PFANet = { - "CAMO": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-CVPR-PFANet", "CAMO"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[0], _COD_METHODS["PFANet"]), - }, - "CHAMELEON": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-CVPR-PFANet", "CHAMELEON"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[1], _COD_METHODS["PFANet"]), - }, - "COD10K": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-CVPR-PFANet", "COD10K"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[2], _COD_METHODS["PFANet"]), - }, -} - -PoolNet = { - "CAMO": { 
- "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-CVPR-PoolNet", "CAMO"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[0], _COD_METHODS["PoolNet"]), - }, - "CHAMELEON": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-CVPR-PoolNet", "CHAMELEON"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[1], _COD_METHODS["PoolNet"]), - }, - "COD10K": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-CVPR-PoolNet", "COD10K"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[2], _COD_METHODS["PoolNet"]), - }, -} - -EGNet = { - "CAMO": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-ICCV-EGNet", "CAMO"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[0], _COD_METHODS["EGNet"]), - }, - "CHAMELEON": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-ICCV-EGNet", "CHAMELEON"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[1], _COD_METHODS["EGNet"]), - }, - "COD10K": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2019-ICCV-EGNet", "COD10K"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[2], _COD_METHODS["EGNet"]), - }, -} - -F3Net = { - "CAMO": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "F3Net", "CAMO"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[0], _COD_METHODS["F3Net"]), - }, - "CHAMELEON": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "F3Net", "CHAMELEON"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[1], _COD_METHODS["F3Net"]), - }, - "COD10K": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "F3Net", "COD10K"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[2], _COD_METHODS["F3Net"]), - }, -} - -MINet = { - "CAMO": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "MINet_Res50_COD", "camo"), - "suffix": ".png", - "mat": 
os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[0], _COD_METHODS["MINet"]), - }, - "CHAMELEON": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "MINet_Res50_COD", "chameleon"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[1], _COD_METHODS["MINet"]), - }, - "COD10K": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "MINet_Res50_COD", "cod10k"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[2], _COD_METHODS["MINet"]), - }, -} - -GateNet = { - "CAMO": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "GateNet_Res50_COD", "CAMO"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[0], _COD_METHODS["GateNet"]), - }, - "CHAMELEON": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "GateNet_Res50_COD", "CHAMELEON"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[1], _COD_METHODS["GateNet"]), - }, - "COD10K": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "GateNet_Res50_COD", "COD10K"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[2], _COD_METHODS["GateNet"]), - }, -} - -ITSD = { - "CAMO": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "ITSD_CVPR2020_COD", "CAMO"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[0], _COD_METHODS["ITSD"]), - }, - "CHAMELEON": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "ITSD_CVPR2020_COD", "CHAMELEON"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[1], _COD_METHODS["ITSD"]), - }, - "COD10K": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "ITSD_CVPR2020_COD", "COD10K"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[2], _COD_METHODS["ITSD"]), - }, -} - -SINet = { - "CAMO": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2020-CVPR-SINet", "CAMO"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[0], _COD_METHODS["SINet"]), - }, - "CHAMELEON": { - 
"path": os.path.join(_COD_METHODS_PRED_ROOT, "2020-CVPR-SINet", "CHAMELEON"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[1], _COD_METHODS["SINet"]), - }, - "COD10K": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "2020-CVPR-SINet", "COD10K"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[2], _COD_METHODS["SINet"]), - }, -} - -Ours = { - "CAMO": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "Ours", "camo"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[0], _COD_METHODS["Ours"]), - }, - "CHAMELEON": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "Ours", "chameleon"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[1], _COD_METHODS["Ours"]), - }, - "COD10K": { - "path": os.path.join(_COD_METHODS_PRED_ROOT, "Ours", "cod10k"), - "suffix": ".png", - "mat": os.path.join(_COD_METHODS_MAT_ROOT, _COD_DATASETS[2], _COD_METHODS["Ours"]), - }, -} - -curve_info = curve_info_generator() -methods_info_for_drawing = OrderedDict( - { - "FPN": curve_info(FPN, "FPN"), - "MaskRCNN": curve_info(MaskRCNN, "MaskRCNN"), - "PSPNet": curve_info(PSPNet, "PSPNet"), - "UNet++": curve_info(UNetPP, "UNet++"), - "MSRCNN": curve_info(MSRCNN, "MSRCNN"), - "HTC": curve_info(HTC, "HTC"), - "PiCANet": curve_info(PiCANet, "PiCANet"), - "BASNet": curve_info(BASNet, "BASNet"), - "CPD": curve_info(CPD, "CPD"), - "PFANet": curve_info(PFANet, "PFANet"), - "PoolNet": curve_info(PoolNet, "PoolNet"), - "EGNet": curve_info(EGNet, "EGNet"), - "F3Net": curve_info(F3Net, "F3Net"), - "MINet": curve_info(MINet, "MINet"), - "ITSD": curve_info(ITSD, "ITSD"), - "GateNet": curve_info(GateNet, "GateNet"), - "ANet_SRM": curve_info(ANet_SRM, "ANet_SRM"), - "SINet": curve_info(SINet, "SINet"), - "Ours": curve_info(Ours, "Ours", line_color="red", line_width=3), - } -) - -simple_info = simple_info_generator() -methods_info_for_selecting = OrderedDict( - { - "FPN": simple_info(FPN, "FPN"), 
- "MaskRCNN": simple_info(MaskRCNN, "MaskRCNN"), - "PSPNet": simple_info(PSPNet, "PSPNet"), - "UNet++": simple_info(UNetPP, "UNet++"), - "MSRCNN": simple_info(MSRCNN, "MSRCNN"), - "HTC": simple_info(HTC, "HTC"), - "PiCANet": simple_info(PiCANet, "PiCANet"), - "BASNet": simple_info(BASNet, "BASNet"), - "CPD": simple_info(CPD, "CPD"), - "PFANet": simple_info(PFANet, "PFANet"), - "PoolNet": simple_info(PoolNet, "PoolNet"), - "EGNet": simple_info(EGNet, "EGNet"), - "F3Net": simple_info(F3Net, "F3Net"), - "MINet": simple_info(MINet, "MINet"), - "ITSD": simple_info(ITSD, "ITSD"), - "GateNet": simple_info(GateNet, "GateNet"), - "ANet_SRM": simple_info(ANet_SRM, "ANet_SRM"), - "SINet": simple_info(SINet, "SINet"), - "Ours": simple_info(Ours, "Ours"), - } -) diff --git a/configs/methods/rgb_cosod_methods.py b/configs/methods/rgb_cosod_methods.py deleted file mode 100644 index a82ac6c..0000000 --- a/configs/methods/rgb_cosod_methods.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -import os -from collections import OrderedDict - -from configs.utils.config_generator import curve_info_generator, simple_info_generator - -_CoSOD_METHODS_ROOT = "/home/lart/Datasets/Saliency/PaperResults/CoSOD" -ICNet = { - "CoCA": dict( - path=os.path.join(_CoSOD_METHODS_ROOT, "NIPS2020_ICNet_os", "CoCA"), suffix=".png" - ), - "CoSal2015": dict( - path=os.path.join(_CoSOD_METHODS_ROOT, "NIPS2020_ICNet_os", "CoSal2015"), suffix=".png" - ), - "CoSOD3k": dict( - path=os.path.join(_CoSOD_METHODS_ROOT, "NIPS2020_ICNet_os", "CoSOD3k"), suffix=".png" - ), - "iCoSeg": dict( - path=os.path.join(_CoSOD_METHODS_ROOT, "NIPS2020_ICNet_os", "iCoSeg"), suffix=".png" - ), - "ImagePair": None, - "MSRC": dict( - path=os.path.join(_CoSOD_METHODS_ROOT, "NIPS2020_ICNet_os", "MSRC"), suffix=".png" - ), - "WICOS": None, -} - -_Ours_ROOT = "/home/lart/Coding/CoCODProj/output" -_Ours_FPN_ROOT = os.path.join(_Ours_ROOT, "Ours") -Ours_FPN = { - "CoCA": dict(path=os.path.join(_Ours_FPN_ROOT, "pre/coca"), 
suffix=".png"), - "CoSal2015": dict(path=os.path.join(_Ours_FPN_ROOT, "pre/cosal2015"), suffix=".png"), - "CoSOD3k": dict(path=os.path.join(_Ours_FPN_ROOT, "pre/cosod3k"), suffix=".png"), - "iCoSeg": dict(path=os.path.join(_Ours_FPN_ROOT, "pre/icoseg"), suffix=".png"), - "ImagePair": dict(path=os.path.join(_Ours_FPN_ROOT, "pre/imagepair"), suffix=".png"), - "MSRC": dict(path=os.path.join(_Ours_FPN_ROOT, "pre/msrc"), suffix=".png"), - "WICOS": dict(path=os.path.join(_Ours_FPN_ROOT, "pre/wicos"), suffix=".png"), -} - -curve_info = curve_info_generator() -methods_info_for_drawing = OrderedDict( - { - "ICNet": curve_info(ICNet, "ICNet"), - "Ours_FPN": curve_info(Ours_FPN, "Ours_FPN"), - } -) -simple_info = simple_info_generator() -methods_info_for_selecting = OrderedDict( - { - "ICNet": simple_info(ICNet, "ICNet"), - "Ours_FPN": simple_info(Ours_FPN, "Ours_FPN"), - } -) diff --git a/configs/methods/rgb_sod_methods.py b/configs/methods/rgb_sod_methods.py deleted file mode 100644 index 8f4f6d5..0000000 --- a/configs/methods/rgb_sod_methods.py +++ /dev/null @@ -1,837 +0,0 @@ -# -*- coding: utf-8 -*- -import os -from collections import OrderedDict - -from configs.utils.config_generator import curve_info_generator, simple_info_generator - -_RGBSOD_METHODS_ROOT = "/home/lart/Datasets/Saliency/PaperResults/RGBSOD" -_RGBSOD_DATASET_NAMES = ["PASCAL-S", "ECSSD", "HKU-IS", "DUT-OMRON", "DUTS-TE"] - -_METHODS_DIR_NAMES = { - "DGRL_2018": "DGRL", - "PAGRN_2018": "PAGRN18", - "PiCANet_R_2018": "PiCANet-R", - "RAS_2018": "RAS", - "AFNet_2019": "AFNet", - "BASNet_2019": "BASNet", - "CPD_R_2019": "CPD-R", - "PoolNet_R_2019": "PoolNet", - "EGNet_R_2019": "EGNet-R", - "HRS_D_2019": "HRS-D", - "ICNet_2019": "ICNet", - "MLMSNet_2019": "MLMSNet", - "PAGENet_2019": "PAGE-Net", - "SCRN_R_2019": "SCRN", - "F3Net_R_2020": "F3", - "R3Net_R_2020": "R3Net", - "GCPANet_2020": "GCPANet_AAAI20", - "LDF_2020": "LDF_CVPR20", - "DFI_2020": "DFI_TIP2020", - "GateNet_2020": "GateNet20", - "ITSD_2020": 
"ITSD20", - "MINet_R_2020": "MINetR20", -} - - -DGRL_2018 = { - "PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["DGRL_2018"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["DGRL_2018"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], _METHODS_DIR_NAMES["DGRL_2018"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["DGRL_2018"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["DGRL_2018"] - ), - suffix=".png", - ), - "SOC": None, -} - -PAGRN_2018 = { - "PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["PAGRN_2018"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["PAGRN_2018"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], _METHODS_DIR_NAMES["PAGRN_2018"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["PAGRN_2018"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["PAGRN_2018"] - ), - suffix=".png", - ), - "SOC": None, -} - -PiCANet_R_2018 = { - "PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["PiCANet_R_2018"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["PiCANet_R_2018"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - 
_RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], _METHODS_DIR_NAMES["PiCANet_R_2018"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["PiCANet_R_2018"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["PiCANet_R_2018"] - ), - suffix=".png", - ), - "SOC": None, -} - -RAS_2018 = { - "PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["RAS_2018"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["RAS_2018"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], _METHODS_DIR_NAMES["RAS_2018"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["RAS_2018"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["RAS_2018"] - ), - suffix=".png", - ), - "SOC": None, -} - -AFNet_2019 = { - "PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["AFNet_2019"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["AFNet_2019"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], _METHODS_DIR_NAMES["AFNet_2019"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["AFNet_2019"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["AFNet_2019"] - ), - suffix=".png", - ), - "SOC": None, -} 
- -BASNet_2019 = { - "PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["BASNet_2019"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["BASNet_2019"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], _METHODS_DIR_NAMES["BASNet_2019"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["BASNet_2019"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["BASNet_2019"] - ), - suffix=".png", - ), - "SOC": None, -} - -CPD_R_2019 = { - "PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["CPD_R_2019"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["CPD_R_2019"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], _METHODS_DIR_NAMES["CPD_R_2019"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["CPD_R_2019"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["CPD_R_2019"] - ), - suffix=".png", - ), - "SOC": None, -} - -PoolNet_R_2019 = { - "PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["PoolNet_R_2019"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["PoolNet_R_2019"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], 
_METHODS_DIR_NAMES["PoolNet_R_2019"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["PoolNet_R_2019"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["PoolNet_R_2019"] - ), - suffix=".png", - ), - "SOC": None, -} - -EGNet_R_2019 = { - "PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["EGNet_R_2019"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["EGNet_R_2019"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], _METHODS_DIR_NAMES["EGNet_R_2019"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["EGNet_R_2019"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["EGNet_R_2019"] - ), - suffix=".png", - ), - "SOC": None, -} - -HRS_D_2019 = { - "PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["HRS_D_2019"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["HRS_D_2019"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], _METHODS_DIR_NAMES["HRS_D_2019"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["HRS_D_2019"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["HRS_D_2019"] - ), - suffix=".png", - ), - "SOC": None, -} - -ICNet_2019 = { - 
"PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["ICNet_2019"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["ICNet_2019"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], _METHODS_DIR_NAMES["ICNet_2019"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["ICNet_2019"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["ICNet_2019"] - ), - suffix=".png", - ), - "SOC": None, -} - -MLMSNet_2019 = { - "PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["MLMSNet_2019"] - ), - suffix=".jpg", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["MLMSNet_2019"] - ), - suffix=".jpg", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], _METHODS_DIR_NAMES["MLMSNet_2019"] - ), - suffix=".jpg", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["MLMSNet_2019"] - ), - suffix=".jpg", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["MLMSNet_2019"] - ), - suffix=".jpg", - ), - "SOC": None, -} - -PAGENet_2019 = { - "PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["PAGENet_2019"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["PAGENet_2019"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], 
_METHODS_DIR_NAMES["PAGENet_2019"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["PAGENet_2019"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["PAGENet_2019"] - ), - suffix=".png", - ), - "SOC": None, -} - -SCRN_R_2019 = { - "PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["SCRN_R_2019"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["SCRN_R_2019"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], _METHODS_DIR_NAMES["SCRN_R_2019"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["SCRN_R_2019"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["SCRN_R_2019"] - ), - suffix=".png", - ), - "SOC": None, -} - -F3Net_R_2020 = { - "PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["F3Net_R_2020"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["F3Net_R_2020"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], _METHODS_DIR_NAMES["F3Net_R_2020"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["F3Net_R_2020"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["F3Net_R_2020"] - ), - suffix=".png", - ), - "SOC": None, -} - -R3Net_R_2020 = { - 
"PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["R3Net_R_2020"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["R3Net_R_2020"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], _METHODS_DIR_NAMES["R3Net_R_2020"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["R3Net_R_2020"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["R3Net_R_2020"] - ), - suffix=".png", - ), - "SOC": None, -} - -GCPANet_2020 = { - "PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["GCPANet_2020"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["GCPANet_2020"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], _METHODS_DIR_NAMES["GCPANet_2020"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["GCPANet_2020"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["GCPANet_2020"] - ), - suffix=".png", - ), - "SOC": None, -} - -LDF_2020 = { - "PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["LDF_2020"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["LDF_2020"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], 
_METHODS_DIR_NAMES["LDF_2020"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["LDF_2020"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["LDF_2020"] - ), - suffix=".png", - ), - "SOC": None, -} - -DFI_2020 = { - "PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["DFI_2020"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["DFI_2020"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], _METHODS_DIR_NAMES["DFI_2020"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["DFI_2020"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["DFI_2020"] - ), - suffix=".png", - ), - "SOC": None, -} - -GateNet_2020 = { - "PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["GateNet_2020"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["GateNet_2020"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], _METHODS_DIR_NAMES["GateNet_2020"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["GateNet_2020"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["GateNet_2020"] - ), - suffix=".png", - ), - "SOC": None, -} - -ITSD_2020 = { - "PASCAL-S": dict( - 
path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["ITSD_2020"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["ITSD_2020"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], _METHODS_DIR_NAMES["ITSD_2020"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["ITSD_2020"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["ITSD_2020"] - ), - suffix=".png", - ), - "SOC": None, -} - -MINet_R_2020 = { - "PASCAL-S": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[0], _METHODS_DIR_NAMES["MINet_R_2020"] - ), - suffix=".png", - ), - "ECSSD": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[1], _METHODS_DIR_NAMES["MINet_R_2020"] - ), - suffix=".png", - ), - "HKU-IS": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[2], _METHODS_DIR_NAMES["MINet_R_2020"] - ), - suffix=".png", - ), - "DUT-OMRON": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[3], _METHODS_DIR_NAMES["MINet_R_2020"] - ), - suffix=".png", - ), - "DUTS-TE": dict( - path=os.path.join( - _RGBSOD_METHODS_ROOT, _RGBSOD_DATASET_NAMES[4], _METHODS_DIR_NAMES["MINet_R_2020"] - ), - suffix=".png", - ), - "SOC": None, -} - -curve_info = curve_info_generator() -methods_info_for_drawing = OrderedDict( - { - "DGRL_2018": curve_info(DGRL_2018, "DGRL_2018"), - "PAGRN_2018": curve_info(PAGRN_2018, "PAGRN_2018"), - "PiCANet_R_2018": curve_info(PiCANet_R_2018, "PiCANet_R_2018"), - "RAS_2018": curve_info(RAS_2018, "RAS_2018"), - "AFNet_2019": curve_info(AFNet_2019, "AFNet_2019"), - "BASNet_2019": curve_info(BASNet_2019, "BASNet_2019"), - "CPD_R_2019": curve_info(CPD_R_2019, 
"CPD_R_2019"), - "PoolNet_R_2019": curve_info(PoolNet_R_2019, "PoolNet_R_2019"), - "EGNet_R_2019": curve_info(EGNet_R_2019, "EGNet_R_2019"), - "HRS_D_2019": curve_info(HRS_D_2019, "HRS_D_2019"), - "ICNet_2019": curve_info(ICNet_2019, "ICNet_2019"), - "MLMSNet_2019": curve_info(MLMSNet_2019, "MLMSNet_2019"), - "PAGENet_2019": curve_info(PAGENet_2019, "PAGENet_2019"), - "SCRN_R_2019": curve_info(SCRN_R_2019, "SCRN_R_2019"), - "F3Net_R_2020": curve_info(F3Net_R_2020, "F3Net_R_2020"), - "R3Net_R_2020": curve_info(R3Net_R_2020, "R3Net_R_2020"), - "GCPANet_2020": curve_info(GCPANet_2020, "GCPANet_2020"), - "LDF_2020": curve_info(LDF_2020, "LDF_2020"), - "DFI_2020": curve_info(DFI_2020, "DFI_2020"), - "GateNet_2020": curve_info(GateNet_2020, "GateNet_2020"), - "ITSD_2020": curve_info(ITSD_2020, "ITSD_2020"), - "MINet_R_2020": curve_info(MINet_R_2020, "MINet_R_2020"), - } -) -simple_info = simple_info_generator() -methods_info_for_selecting = OrderedDict( - { - "DGRL_2018": simple_info(MINet_R_2020, "DGRL_2018"), - "PAGRN_2018": simple_info(MINet_R_2020, "PAGRN_2018"), - "PiCANet_R_2018": simple_info(MINet_R_2020, "PiCANet_R_2018"), - "RAS_2018": simple_info(MINet_R_2020, "RAS_2018"), - "AFNet_2019": simple_info(MINet_R_2020, "AFNet_2019"), - "BASNet_2019": simple_info(MINet_R_2020, "BASNet_2019"), - "CPD_R_2019": simple_info(MINet_R_2020, "CPD_R_2019"), - "PoolNet_R_2019": simple_info(MINet_R_2020, "PoolNet_R_2019"), - "EGNet_R_2019": simple_info(MINet_R_2020, "EGNet_R_2019"), - "HRS_D_2019": simple_info(MINet_R_2020, "HRS_D_2019"), - "ICNet_2019": simple_info(MINet_R_2020, "ICNet_2019"), - "MLMSNet_2019": simple_info(MINet_R_2020, "MLMSNet_2019"), - "PAGENet_2019": simple_info(MINet_R_2020, "PAGENet_2019"), - "SCRN_R_2019": simple_info(MINet_R_2020, "SCRN_R_2019"), - "F3Net_R_2020": simple_info(MINet_R_2020, "F3Net_R_2020"), - "R3Net_R_2020": simple_info(MINet_R_2020, "R3Net_R_2020"), - "GCPANet_2020": simple_info(MINet_R_2020, "GCPANet_2020"), - "LDF_2020": 
simple_info(MINet_R_2020, "LDF_2020"), - "DFI_2020": simple_info(MINet_R_2020, "DFI_2020"), - "GateNet_2020": simple_info(MINet_R_2020, "GateNet_2020"), - "ITSD_2020": simple_info(MINet_R_2020, "ITSD_2020"), - "MINet_R_2020": simple_info(MINet_R_2020, "MINet_R_2020"), - } -) diff --git a/configs/methods/rgbd_sod_methods.py b/configs/methods/rgbd_sod_methods.py deleted file mode 100644 index a81e58c..0000000 --- a/configs/methods/rgbd_sod_methods.py +++ /dev/null @@ -1,549 +0,0 @@ -# -*- coding: utf-8 -*- -import os -from collections import OrderedDict - -from configs.utils.config_generator import curve_info_generator, simple_info_generator - -HDFNet_VGG16_root = "/home/lart/Coding/HDFFile/output/HDFNet/HDFNet_VGG16" -HDFNet_VGG16 = { - "LFSD": dict(path=os.path.join(HDFNet_VGG16_root, "lfsd"), suffix=".png"), - "NJUD": dict(path=os.path.join(HDFNet_VGG16_root, "njud"), suffix=".png"), - "NLPR": dict(path=os.path.join(HDFNet_VGG16_root, "nlpr"), suffix=".png"), - "RGBD135": dict(path=os.path.join(HDFNet_VGG16_root, "rgbd135"), suffix=".png"), - "SIP": dict(path=os.path.join(HDFNet_VGG16_root, "sip"), suffix=".png"), - "SSD": dict(path=os.path.join(HDFNet_VGG16_root, "ssd"), suffix=".png"), - "STEREO797": dict(path=os.path.join(HDFNet_VGG16_root, "stereo797"), suffix=".png"), - "STEREO1000": dict(path=os.path.join(HDFNet_VGG16_root, "stereo1000"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(HDFNet_VGG16_root, "dutrgbd"), suffix=".png"), -} - -HDFNet_VGG19_root = "/home/lart/Coding/HDFFile/output/HDFNet/HDFNet_VGG19" -HDFNet_VGG19 = { - "LFSD": dict(path=os.path.join(HDFNet_VGG19_root, "lfsd"), suffix=".png"), - "NJUD": dict(path=os.path.join(HDFNet_VGG19_root, "njud"), suffix=".png"), - "NLPR": dict(path=os.path.join(HDFNet_VGG19_root, "nlpr"), suffix=".png"), - "RGBD135": dict(path=os.path.join(HDFNet_VGG19_root, "rgbd135"), suffix=".png"), - "SIP": dict(path=os.path.join(HDFNet_VGG19_root, "sip"), suffix=".png"), - "SSD": 
dict(path=os.path.join(HDFNet_VGG19_root, "ssd"), suffix=".png"), - "STEREO797": dict(path=os.path.join(HDFNet_VGG19_root, "stereo797"), suffix=".png"), - "STEREO1000": dict(path=os.path.join(HDFNet_VGG19_root, "stereo1000"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(HDFNet_VGG19_root, "dutrgbd"), suffix=".png"), -} - -HDFNet_Res50_root = "/home/lart/Coding/HDFFile/output/HDFNet/HDFNet_Res50" -HDFNet_Res50 = { - "LFSD": dict(path=os.path.join(HDFNet_Res50_root, "lfsd"), suffix=".png"), - "NJUD": dict(path=os.path.join(HDFNet_Res50_root, "njud"), suffix=".png"), - "NLPR": dict(path=os.path.join(HDFNet_Res50_root, "nlpr"), suffix=".png"), - "RGBD135": dict(path=os.path.join(HDFNet_Res50_root, "rgbd135"), suffix=".png"), - "SIP": dict(path=os.path.join(HDFNet_Res50_root, "sip"), suffix=".png"), - "SSD": dict(path=os.path.join(HDFNet_Res50_root, "ssd"), suffix=".png"), - "STEREO797": dict(path=os.path.join(HDFNet_Res50_root, "stereo797"), suffix=".png"), - "STEREO1000": dict(path=os.path.join(HDFNet_Res50_root, "stereo1000"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(HDFNet_Res50_root, "dutrgbd"), suffix=".png"), -} - -JLDCF_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/CVPR2020_JL-DCF" -JLDCF = { - "LFSD": dict(path=os.path.join(JLDCF_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(JLDCF_root, "NJU2K"), suffix=".png"), - "NLPR": dict(path=os.path.join(JLDCF_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(JLDCF_root, "RGBD135"), suffix=".png"), - "SIP": dict(path=os.path.join(JLDCF_root, "SIP"), suffix=".png"), - "SSD": None, - "STEREO797": None, - "STEREO1000": dict(path=os.path.join(JLDCF_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(JLDCF_root, "DUT-RGBD-testing"), suffix=".png"), -} - -CoNet_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/2020-ECCV-CoNet" -CoNet = { - "LFSD": dict(path=os.path.join(CoNet_root, "LFSD"), suffix=".png"), - "NJUD": 
dict(path=os.path.join(CoNet_root, "NJUD"), suffix=".png"), - "NLPR": dict(path=os.path.join(CoNet_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(CoNet_root, "RGBD135"), suffix=".png"), - "SIP": dict(path=os.path.join(CoNet_root, "SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(CoNet_root, "SSD"), suffix=".png"), - "STEREO797": dict(path=os.path.join(CoNet_root, "STEREO"), suffix=".png"), - "STEREO1000": dict(path=os.path.join(CoNet_root, "STERE1000"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(CoNet_root, "DUT-RGBD"), suffix=".png"), -} - -BBSNet_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/ECCV2020_BBSNet" -BBSNet = { - "LFSD": dict(path=os.path.join(BBSNet_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(BBSNet_root, "NJU2K"), suffix=".png"), - "NLPR": dict(path=os.path.join(BBSNet_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(BBSNet_root, "DES"), suffix=".png"), - "SIP": dict(path=os.path.join(BBSNet_root, "SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(BBSNet_root, "SSD"), suffix=".png"), - "STEREO797": None, - "STEREO1000": dict(path=os.path.join(BBSNet_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(BBSNet_root, "DUT"), suffix=".png"), -} - -CMWNet_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/ECCV2020_CMWNet" -CMWNet = { - "LFSD": dict(path=os.path.join(CMWNet_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(CMWNet_root, "NJU2K"), suffix=".png"), - "NLPR": dict(path=os.path.join(CMWNet_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(CMWNet_root, "DES"), suffix=".png"), - "SIP": dict(path=os.path.join(CMWNet_root, "SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(CMWNet_root, "SSD"), suffix=".png"), - "STEREO797": None, - "STEREO1000": dict(path=os.path.join(CMWNet_root, "STEREO"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(CMWNet_root, "DUT-RGBD"), suffix=".png"), -} - -FRDT_root = 
"/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/2020-ACMMM-FRDT" -FRDT = { - "LFSD": dict(path=os.path.join(FRDT_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(FRDT_root, "NJUD"), suffix=".png"), - "NLPR": dict(path=os.path.join(FRDT_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(FRDT_root, "RGBD-135"), suffix=".png"), - "SIP": None, - "SSD": dict(path=os.path.join(FRDT_root, "SSD"), suffix=".png"), - "STEREO797": dict(path=os.path.join(FRDT_root, "STEREO"), suffix=".png"), - "STEREO1000": None, - "DUTRGBD": dict(path=os.path.join(FRDT_root, "DUT"), suffix=".png"), -} - -S2MA_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/2020-CVPR-S2MA" -S2MA = { - "LFSD": dict(path=os.path.join(S2MA_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(S2MA_root, "NJU2K"), suffix=".png"), - "NLPR": dict(path=os.path.join(S2MA_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(S2MA_root, "RGBD135"), suffix=".png"), - "SIP": dict(path=os.path.join(S2MA_root, "SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(S2MA_root, "SSD100"), suffix=".png"), - "STEREO797": None, - "STEREO1000": dict(path=os.path.join(S2MA_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(S2MA_root, "DUT-RGBD"), suffix=".png"), -} - -UCNet_root = ( - "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/2020-CVPR-UCNet_Res50/CVPR-UCNet_R50" -) -UCNet = { - "LFSD": dict(path=os.path.join(UCNet_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(UCNet_root, "NJU2K"), suffix=".png"), - "NLPR": dict(path=os.path.join(UCNet_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(UCNet_root, "DES"), suffix=".png"), - "SIP": dict(path=os.path.join(UCNet_root, "SIP"), suffix=".png"), - "SSD": None, - "STEREO797": None, - "STEREO1000": dict(path=os.path.join(UCNet_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(UCNet_root, "DUT"), suffix=".png"), -} - -UCNet_ABP_root = ( - 
"/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/2020-CVPR-UCNet_Res50/TPAMI_UCNet_R50_ABP" -) -UCNet_ABP = { - "LFSD": dict(path=os.path.join(UCNet_ABP_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(UCNet_ABP_root, "NJU2K"), suffix=".png"), - "NLPR": dict(path=os.path.join(UCNet_ABP_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(UCNet_ABP_root, "DES"), suffix=".png"), - "SIP": dict(path=os.path.join(UCNet_ABP_root, "SIP"), suffix=".png"), - "SSD": None, - "STEREO797": None, - "STEREO1000": dict(path=os.path.join(UCNet_ABP_root, "STERE"), suffix=".png"), - "DUTRGBD": None, -} - -UCNet_CVAE_root = ( - "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/2020-CVPR-UCNet_Res50/TPAMI_UCNet_R50_CVAE" -) -UCNet_CVAE = { - "LFSD": dict(path=os.path.join(UCNet_CVAE_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(UCNet_CVAE_root, "NJU2K"), suffix=".png"), - "NLPR": dict(path=os.path.join(UCNet_CVAE_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(UCNet_CVAE_root, "DES"), suffix=".png"), - "SIP": dict(path=os.path.join(UCNet_CVAE_root, "SIP"), suffix=".png"), - "SSD": None, - "STEREO797": None, - "STEREO1000": dict(path=os.path.join(UCNet_CVAE_root, "STERE"), suffix=".png"), - "DUTRGBD": None, -} - -CasGNN_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/2020-ECCV-CasGNN" -CasGNN = { - "LFSD": dict(path=os.path.join(CasGNN_root, "LFSD", "pred"), suffix=".png"), - "NJUD": dict(path=os.path.join(CasGNN_root, "NJUD", "pred"), suffix=".png"), - "NLPR": dict(path=os.path.join(CasGNN_root, "NLPR", "pred"), suffix=".png"), - "RGBD135": dict(path=os.path.join(CasGNN_root, "DES", "pred"), suffix=".png"), - "SIP": None, - "SSD": dict(path=os.path.join(CasGNN_root, "SSD", "pred"), suffix=".png"), - "STEREO797": None, - "STEREO1000": dict(path=os.path.join(CasGNN_root, "STERE", "pred"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(CasGNN_root, "DUT-RGBD", "pred"), suffix=".png"), -} - 
-DANet_VGG16_root = ( - "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/2020-ECCV-DANet_VGG/DANet_vgg16" -) -DANet_VGG16 = { - "LFSD": dict(path=os.path.join(DANet_VGG16_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(DANet_VGG16_root, "NJUD"), suffix=".png"), - "NLPR": dict(path=os.path.join(DANet_VGG16_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(DANet_VGG16_root, "RGBD135"), suffix=".png"), - "SIP": dict(path=os.path.join(DANet_VGG16_root, "SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(DANet_VGG16_root, "SSD"), suffix=".png"), - "STEREO797": None, - "STEREO1000": dict(path=os.path.join(DANet_VGG16_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(DANet_VGG16_root, "DUT-RGBD"), suffix=".png"), -} - -DANet_VGG19_root = ( - "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/2020-ECCV-DANet_VGG/DANet_vgg19" -) -DANet_VGG19 = { - "LFSD": dict(path=os.path.join(DANet_VGG19_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(DANet_VGG19_root, "NJUD"), suffix=".png"), - "NLPR": dict(path=os.path.join(DANet_VGG19_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(DANet_VGG19_root, "RGBD135"), suffix=".png"), - "SIP": dict(path=os.path.join(DANet_VGG19_root, "SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(DANet_VGG19_root, "SSD"), suffix=".png"), - "STEREO797": None, - "STEREO1000": dict(path=os.path.join(DANet_VGG19_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(DANet_VGG19_root, "DUT-RGBD"), suffix=".png"), -} - -PGAR_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/2020-ECCV-PGAR" -PGAR = { - "LFSD": dict(path=os.path.join(PGAR_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(PGAR_root, "NJUD_test"), suffix=".png"), - "NLPR": dict(path=os.path.join(PGAR_root, "NLPR_test"), suffix=".png"), - "RGBD135": dict(path=os.path.join(PGAR_root, "RGBD135"), suffix=".png"), - "SIP": dict(path=os.path.join(PGAR_root, "SIP"), 
suffix=".png"), - "SSD": None, - "STEREO797": None, - "STEREO1000": dict(path=os.path.join(PGAR_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(PGAR_root, "DUT-RGBD"), suffix=".png"), -} - -DisenFuse_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/2020-TIP-DisenFuse_VGG16" -DisenFuse = { - "LFSD": dict(path=os.path.join(DisenFuse_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(DisenFuse_root, "NJUD"), suffix=".png"), - "NLPR": dict(path=os.path.join(DisenFuse_root, "NLPR"), suffix=".jpg"), - "RGBD135": dict(path=os.path.join(DisenFuse_root, "DES"), suffix=".bmp"), - "SIP": dict(path=os.path.join(DisenFuse_root, "SIP"), suffix=".png"), - "SSD": None, - "STEREO797": None, - "STEREO1000": dict(path=os.path.join(DisenFuse_root, "STEREO1000"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(DisenFuse_root, "DUT"), suffix=".png"), -} - -DPANet_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/2020-TIP-DPANet" -DPANet = { - "LFSD": dict(path=os.path.join(DPANet_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(DPANet_root, "NJUD"), suffix=".png"), - "NLPR": dict(path=os.path.join(DPANet_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(DPANet_root, "RGBD135"), suffix=".png"), - "SIP": dict(path=os.path.join(DPANet_root, "SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(DPANet_root, "SSD100"), suffix=".png"), - "STEREO797": dict(path=os.path.join(DPANet_root, "STEREO797"), suffix=".png"), - "STEREO1000": None, - "DUTRGBD": dict(path=os.path.join(DPANet_root, "DUT"), suffix=".png"), -} - -ICNet_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/2020-TIP-ICNet" -ICNet = { - "LFSD": dict(path=os.path.join(ICNet_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(ICNet_root, "NJU2K"), suffix=".png"), - "NLPR": dict(path=os.path.join(ICNet_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(ICNet_root, "DES"), suffix=".png"), - "SIP": 
dict(path=os.path.join(ICNet_root, "SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(ICNet_root, "SSD"), suffix=".png"), - "STEREO797": None, - "STEREO1000": dict(path=os.path.join(ICNet_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(ICNet_root, "DUT-RGBD"), suffix=".png"), -} - -D3Net_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/2020-TNNLS-D3Net" -D3Net = { - "LFSD": dict(path=os.path.join(D3Net_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(D3Net_root, "NJU2K_TEST"), suffix=".png"), - "NLPR": dict(path=os.path.join(D3Net_root, "NLPR_TEST"), suffix=".png"), - "RGBD135": dict(path=os.path.join(D3Net_root, "DES"), suffix=".png"), - "SIP": dict(path=os.path.join(D3Net_root, "SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(D3Net_root, "SSD"), suffix=".png"), - "STEREO797": None, - "STEREO1000": dict(path=os.path.join(D3Net_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(D3Net_root, "DUT-RGBD_TEST"), suffix=".png"), -} - -RD3D_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/2021-AAAI-RD3D" -RD3D = { - "LFSD": dict(path=os.path.join(RD3D_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(RD3D_root, "NJU2000"), suffix=".png"), - "NLPR": dict(path=os.path.join(RD3D_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(RD3D_root, "RGBD135"), suffix=".png"), - "SIP": dict(path=os.path.join(RD3D_root, "SIP"), suffix=".png"), - "SSD": None, - "STEREO797": None, - "STEREO1000": dict(path=os.path.join(RD3D_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(RD3D_root, "DUT"), suffix=".png"), -} - -AFNet_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/AFNet" -AFNet = { - "LFSD": dict(path=os.path.join(AFNet_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(AFNet_root, "NJU2K-TEST"), suffix=".png"), - "NLPR": dict(path=os.path.join(AFNet_root, "NLPR-TEST"), suffix=".png"), - "RGBD135": dict(path=os.path.join(AFNet_root, 
"DES"), suffix=".png"), - "SIP": dict(path=os.path.join(AFNet_root, "SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(AFNet_root, "SSD"), suffix=".png"), - "STEREO797": dict(path=os.path.join(AFNet_root, "STEREO"), suffix=".png"), - "STEREO1000": dict(path=os.path.join(AFNet_root, "STERE"), suffix=".png"), - "DUTRGBD": None, -} - -CDCP_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/CDCP" -CDCP = { - "LFSD": dict(path=os.path.join(CDCP_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(CDCP_root, "NJUD"), suffix=".png"), - "NLPR": dict(path=os.path.join(CDCP_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(CDCP_root, "RGBD135"), suffix=".png"), - "SIP": dict(path=os.path.join(CDCP_root, "SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(CDCP_root, "SSD"), suffix=".png"), - "STEREO797": None, - "STEREO1000": dict(path=os.path.join(CDCP_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(CDCP_root, "DUT-RGBD"), suffix=".png"), -} - -CPFP_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/CPFP" -CPFP = { - "LFSD": dict(path=os.path.join(CPFP_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(CPFP_root, "NJUD"), suffix=".png"), - "NLPR": dict(path=os.path.join(CPFP_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(CPFP_root, "RGBD135"), suffix=".png"), - "SIP": dict(path=os.path.join(CPFP_root, "SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(CPFP_root, "SSD"), suffix=".png"), - "STEREO797": None, - "STEREO1000": dict(path=os.path.join(CPFP_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(CPFP_root, "DUT-RGBD"), suffix=".png"), -} - -CTMF_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/CTMF" -CTMF = { - "LFSD": dict(path=os.path.join(CTMF_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(CTMF_root, "NJUD"), suffix=".png"), - "NLPR": dict(path=os.path.join(CTMF_root, "NLPR"), suffix=".jpg"), - "RGBD135": 
dict(path=os.path.join(CTMF_root, "RGBD135"), suffix=".bmp"), - "SIP": dict(path=os.path.join(CTMF_root, "SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(CTMF_root, "SSD"), suffix=".png"), - "STEREO797": None, - "STEREO1000": dict(path=os.path.join(CTMF_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(CTMF_root, "DUT-RGBD"), suffix=".png"), -} - -DCMC_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/DCMC" -DCMC = { - "LFSD": dict(path=os.path.join(DCMC_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(DCMC_root, "NJUD"), suffix=".png"), - "NLPR": dict(path=os.path.join(DCMC_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(DCMC_root, "RGBD135"), suffix=".png"), - "SIP": dict(path=os.path.join(DCMC_root, "SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(DCMC_root, "SSD"), suffix=".png"), - "STEREO797": dict(path=os.path.join(DCMC_root, "STEREO"), suffix=".png"), - "STEREO1000": dict(path=os.path.join(DCMC_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(DCMC_root, "DUT-RGBD"), suffix=".png"), -} - -DES_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/DES" -DES = { - "LFSD": dict(path=os.path.join(DES_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(DES_root, "NJUD"), suffix=".png"), - "NLPR": dict(path=os.path.join(DES_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(DES_root, "RGBD135"), suffix=".png"), - "SIP": dict(path=os.path.join(DES_root, "SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(DES_root, "SSD"), suffix=".png"), - "STEREO797": dict(path=os.path.join(DES_root, "STEREO"), suffix=".png"), - "STEREO1000": dict(path=os.path.join(DES_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(DES_root, "DUT-RGBD"), suffix=".png"), -} - -DF_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/DF" -DF = { - "LFSD": dict(path=os.path.join(DF_root, "LFSD"), suffix=".png"), - "NJUD": 
dict(path=os.path.join(DF_root, "NJUD"), suffix=".png"), - "NLPR": dict(path=os.path.join(DF_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(DF_root, "RGBD135"), suffix=".png"), - "SIP": dict(path=os.path.join(DF_root, "SIP/SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(DF_root, "SSD"), suffix=".png"), - "STEREO797": dict(path=os.path.join(DF_root, "STEREO"), suffix=".png"), - "STEREO1000": dict(path=os.path.join(DF_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(DF_root, "DUT-RGBD"), suffix=".png"), -} - -DMRA_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/DMRA" -DMRA = { - "LFSD": dict(path=os.path.join(DMRA_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(DMRA_root, "NJUD"), suffix=".png"), - "NLPR": dict(path=os.path.join(DMRA_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(DMRA_root, "RGBD135"), suffix=".png"), - "SIP": dict(path=os.path.join(DMRA_root, "SIP_FromAuthor"), suffix=".png"), - "SSD": dict(path=os.path.join(DMRA_root, "SSD"), suffix=".png"), - "STEREO797": dict(path=os.path.join(DMRA_root, "STEREO"), suffix=".png"), - "STEREO1000": dict(path=os.path.join(DMRA_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(DMRA_root, "DUT-RGBD"), suffix=".png"), -} - -MB_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/MB" -MB = { - "LFSD": dict(path=os.path.join(MB_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(MB_root, "NJUD"), suffix=".png"), - "NLPR": dict(path=os.path.join(MB_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(MB_root, "RGBD135"), suffix=".png"), - "SIP": None, - "SSD": dict(path=os.path.join(MB_root, "SSD"), suffix=".png"), - "STEREO797": dict(path=os.path.join(MB_root, "STEREO"), suffix=".png"), - "STEREO1000": None, - "DUTRGBD": dict(path=os.path.join(MB_root, "DUT-RGBD"), suffix=".png"), -} - -MMCI_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/MMCI" -MMCI = { - "LFSD": 
dict(path=os.path.join(MMCI_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(MMCI_root, "NJUD"), suffix=".png"), - "NLPR": dict(path=os.path.join(MMCI_root, "NLPR"), suffix=".jpg"), - "RGBD135": dict(path=os.path.join(MMCI_root, "RGBD135"), suffix=".bmp"), - "SIP": dict(path=os.path.join(MMCI_root, "SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(MMCI_root, "SSD"), suffix=".png"), - "STEREO797": dict(path=os.path.join(MMCI_root, "STEREO"), suffix=".png"), - "STEREO1000": dict(path=os.path.join(MMCI_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(MMCI_root, "DUT-RGBD"), suffix=".png"), -} - -NLPR_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/NLPR" -NLPR = { - "LFSD": dict(path=os.path.join(NLPR_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(NLPR_root, "NJUD"), suffix=".png"), - "NLPR": dict(path=os.path.join(NLPR_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(NLPR_root, "RGBD135"), suffix=".png"), - "SIP": dict(path=os.path.join(NLPR_root, "SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(NLPR_root, "SSD"), suffix=".png"), - "STEREO797": dict(path=os.path.join(NLPR_root, "STEREO-797"), suffix=".png"), - "STEREO1000": dict(path=os.path.join(NLPR_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(NLPR_root, "DUT-RGBD"), suffix=".png"), -} - -PCANet_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/PCANet" -PCANet = { - "LFSD": dict(path=os.path.join(PCANet_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(PCANet_root, "NJUD"), suffix=".png"), - "NLPR": dict(path=os.path.join(PCANet_root, "NLPR"), suffix=".jpg"), - "RGBD135": dict(path=os.path.join(PCANet_root, "RGBD135"), suffix=".png"), - "SIP": dict(path=os.path.join(PCANet_root, "SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(PCANet_root, "SSD"), suffix=".png"), - "STEREO797": dict(path=os.path.join(PCANet_root, "STEREO"), suffix=".png"), - "STEREO1000": 
dict(path=os.path.join(PCANet_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(PCANet_root, "DUT-RGBD"), suffix=".png"), -} - -# 当前数据有问题暂时不测 -PDNet_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/PDNet" -PDNet = { - "LFSD": dict(path=os.path.join(PDNet_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(PDNet_root, "NJUD"), suffix=".png"), - "NLPR": dict(path=os.path.join(PDNet_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(PDNet_root, "RGBD135"), suffix=".png"), - "SIP": None, - "SSD": dict(path=os.path.join(PDNet_root, "SSD"), suffix=".png"), - "STEREO797": dict(path=os.path.join(PDNet_root, "STEREO"), suffix=".png"), - "STEREO1000": None, - "DUTRGBD": dict(path=os.path.join(PDNet_root, "DUT-RGBD"), suffix=".png"), -} - -TANet_root = "/home/lart/Datasets/Saliency/PaperResults/RGBDSOD/TANet" -TANet = { - "LFSD": dict(path=os.path.join(TANet_root, "LFSD"), suffix=".png"), - "NJUD": dict(path=os.path.join(TANet_root, "NJUD"), suffix=".png"), - "NLPR": dict(path=os.path.join(TANet_root, "NLPR"), suffix=".png"), - "RGBD135": dict(path=os.path.join(TANet_root, "RGBD135"), suffix=".png"), - "SIP": dict(path=os.path.join(TANet_root, "SIP"), suffix=".png"), - "SSD": dict(path=os.path.join(TANet_root, "SSD"), suffix=".png"), - "STEREO797": dict(path=os.path.join(TANet_root, "STEREO"), suffix=".png"), - "STEREO1000": dict(path=os.path.join(TANet_root, "STERE"), suffix=".png"), - "DUTRGBD": dict(path=os.path.join(TANet_root, "DUT-RGBD"), suffix=".png"), -} - -curve_info = curve_info_generator() -methods_info_for_drawing = OrderedDict( - { - "HDFNet_VGG16": curve_info(HDFNet_VGG16, "HDFNet_VGG16"), - "HDFNet_VGG19": curve_info(HDFNet_VGG19, "HDFNet_VGG19"), - "HDFNet_Res50": curve_info(HDFNet_Res50, "HDFNet_Res50"), - "JLDCF": curve_info(JLDCF, "JLDCF"), - "CoNet": curve_info(CoNet, "CoNet"), - "BBSNet": curve_info(BBSNet, "BBSNet"), - "CMWNet": curve_info(CMWNet, "CMWNet"), - "FRDT": curve_info(FRDT, "FRDT"), - 
"S2MA": curve_info(S2MA, "S2MA"), - "UCNet": curve_info(UCNet, "UCNet"), - "UCNet_ABP": curve_info(UCNet_ABP, "UCNet_ABP"), - "UCNet_CVAE": curve_info(UCNet_CVAE, "UCNet_CVAE"), - "CasGNN": curve_info(CasGNN, "CasGNN"), - "DANet_VGG16": curve_info(DANet_VGG16, "DANet_VGG16"), - "DANet_VGG19": curve_info(DANet_VGG19, "DANet_VGG19"), - "PGAR": curve_info(PGAR, "PGAR"), - "DisenFuse": curve_info(DisenFuse, "DisenFuse"), - "DPANet": curve_info(DPANet, "DPANet"), - "ICNet": curve_info(ICNet, "ICNet"), - "D3Net": curve_info(D3Net, "D3Net"), - "RD3D": curve_info(RD3D, "RD3D"), - "AFNet": curve_info(AFNet, "AFNet"), - "CDCP": curve_info(CDCP, "CDCP"), - "CTMF": curve_info(CTMF, "CTMF"), - "DCMC": curve_info(DCMC, "DCMC"), - "DES": curve_info(DES, "DES"), - "DF": curve_info(DF, "DF"), - "DMRA": curve_info(DMRA, "DMRA"), - "MB": curve_info(MB, "MB"), - "MMCI": curve_info(MMCI, "MMCI"), - "NLPR": curve_info(NLPR, "NLPR"), - "PCANet": curve_info(PCANet, "PCANet"), - "TANet": curve_info(TANet, "TANet"), - } -) - -simple_info = simple_info_generator() -methods_info_for_selecting = OrderedDict( - { - "HDFNet_VGG16": simple_info(HDFNet_VGG16, "HDFNet_VGG16"), - "HDFNet_VGG19": simple_info(HDFNet_VGG19, "HDFNet_VGG19"), - "HDFNet_Res50": simple_info(HDFNet_Res50, "HDFNet_Res50"), - "JLDCF": simple_info(JLDCF, "JLDCF"), - "CoNet": simple_info(CoNet, "CoNet"), - "BBSNet": simple_info(BBSNet, "BBSNet"), - "CMWNet": simple_info(CMWNet, "CMWNet"), - "FRDT": simple_info(FRDT, "FRDT"), - "S2MA": simple_info(S2MA, "S2MA"), - "UCNet": simple_info(UCNet, "UCNet"), - "UCNet_ABP": simple_info(UCNet_ABP, "UCNet_ABP"), - "UCNet_CVAE": simple_info(UCNet_CVAE, "UCNet_CVAE"), - "CasGNN": simple_info(CasGNN, "CasGNN"), - "DANet_VGG16": simple_info(DANet_VGG16, "DANet_VGG16"), - "DANet_VGG19": simple_info(DANet_VGG19, "DANet_VGG19"), - "PGAR": simple_info(PGAR, "PGAR"), - "DisenFuse": simple_info(DisenFuse, "DisenFuse"), - "DPANet": simple_info(DPANet, "DPANet"), - "ICNet": simple_info(ICNet, 
"ICNet"), - "D3Net": simple_info(D3Net, "D3Net"), - "RD3D": simple_info(RD3D, "RD3D"), - "AFNet": simple_info(AFNet, "AFNet"), - "CDCP": simple_info(CDCP, "CDCP"), - "CTMF": simple_info(CTMF, "CTMF"), - "DCMC": simple_info(DCMC, "DCMC"), - "DES": simple_info(DES, "DES"), - "DF": simple_info(DF, "DF"), - "DMRA": simple_info(DMRA, "DMRA"), - "MB": simple_info(MB, "MB"), - "MMCI": simple_info(MMCI, "MMCI"), - "NLPR": simple_info(NLPR, "NLPR"), - "PCANet": simple_info(PCANet, "PCANet"), - "TANet": simple_info(TANet, "TANet"), - } -) diff --git a/eval_cosod_all_methods.py b/eval_cosod_all_methods.py index e40e4f3..5cda0bb 100755 --- a/eval_cosod_all_methods.py +++ b/eval_cosod_all_methods.py @@ -1,20 +1,8 @@ # -*- coding: utf-8 -*- -import math import os -from collections import defaultdict -from pprint import pprint - -import numpy as np -from tqdm import tqdm from configs import total_info -from utils.misc import get_gt_pre_with_name, get_name_with_group_list, make_dir -from utils.recorders import ( - CurveDrawer, - MetricExcelRecorder, - MetricRecorder, - TxtRecorder, -) +from metrics.sod import cal_cosod_matrics, draw_curves """ Include: Fm Curve/PR Curves/MAE/(max/mean/weighted) Fmeasure/Smeasure/Emeasure @@ -24,255 +12,85 @@ But it needs to have uniform naming rules for `pre` and `gt`. 
""" +for_pr = True # 是否绘制pr曲线 + +# 当前支持rgb_cosod +data_type = "rgb_cosod" +data_info = total_info[data_type] + +# 存放输出文件的文件夹 +output_path = "./output" + +# 针对多个模型评估比较的设置 +dataset_info = data_info["dataset"] +# 包含所有待比较模型结果的信息和绘图配置的字典 +drawing_info = data_info["method"]["drawing"] + +# 用来保存测试结果的文件的路径 +txt_path = os.path.join(output_path, f"{data_type}.txt") +xlsx_path = os.path.join(output_path, f"{data_type}.xlsx") + +# 是否将评估结果到npy文件中,该文件可用来绘制pr和fm曲线 +save_npy = True +# 保存曲线指标数据的文件路径 +curves_npy_path = os.path.join(output_path, data_type + "_" + "curves.npy") +metrics_npy_path = os.path.join(output_path, data_type + "_" + "metrics.npy") + +row_num = 2 + +# 不同曲线的绘图配置 +axes_setting = { + # pr曲线的配置 + "pr": { + # 横坐标标签 + "x_label": "Recall", + # 纵坐标标签 + "y_label": "Precision", + # 横坐标显示范围 + "x_lim": (0.1, 1), + # 纵坐标显示范围 + "y_lim": (0.1, 1), + }, + # fm曲线的配置 + "fm": { + # 横坐标标签 + "x_label": "Threshold", + # 纵坐标标签 + "y_label": r"F$_{\beta}$", + # 横坐标显示范围 + "x_lim": (0, 1), + # 纵坐标显示范围 + "y_lim": (0, 0.9), + }, +} +# 评估结果保留的小数点后数据的位数 +num_bits = 3 + +# 是否保留之前的评估记录(针对record_path文件有效) +resume_record = True + +# 在计算指标的时候要跳过的数据集 +skipped_datasets = [] + +cal_cosod_matrics( + data_type=data_type, + txt_path=txt_path, + resume_record=resume_record, + xlsx_path=xlsx_path, + drawing_info=drawing_info, + dataset_info=dataset_info, + skipped_datasets=skipped_datasets, + save_npy=save_npy, + curves_npy_path=curves_npy_path, + metrics_npy_path=metrics_npy_path, + num_bits=num_bits, +) -def group_names(names: list) -> dict: - grouped_name_list = defaultdict(list) - for name in names: - group_name, file_name = name.split("/") - grouped_name_list[group_name].append(file_name) - return grouped_name_list - - -def mean_all_group_metrics(group_metric_recorder: dict): - recorder = defaultdict(list) - for group_name, metrics in group_metric_recorder.items(): - for metric_name, metric_array in metrics.items(): - recorder[metric_name].append(metric_array) - results = {k: np.mean(np.vstack(v), 
axis=0) for k, v in recorder.items()} - return results - - -def cal_all_metrics(): - """ - Save the results of all models on different datasets in a `npy` file in the form of a - dictionary. - { - dataset1:{ - method1:[(ps, rs), fs], - method2:[(ps, rs), fs], - ..... - }, - dataset2:{ - method1:[(ps, rs), fs], - method2:[(ps, rs), fs], - ..... - }, - .... - } - """ - qualitative_results = defaultdict(dict) # Two curve metrics - quantitative_results = defaultdict(dict) # Six numerical metrics - - txt_recoder = TxtRecorder( - txt_path=cfg["record_path"], - resume=cfg["resume_record"], - max_method_name_width=max([len(x) for x in cfg["drawing_info"].keys()]), # 显示完整名字 - # max_method_name_width=10, # 指定长度 - ) - excel_recorder = MetricExcelRecorder( - xlsx_path=cfg["xlsx_path"], - sheet_name=data_type, - row_header=["methods"], - dataset_names=sorted(list(cfg["dataset_info"].keys())), - metric_names=["sm", "wfm", "mae", "adpf", "avgf", "maxf", "adpe", "avge", "maxe"], - ) - - for dataset_name, dataset_path in cfg["dataset_info"].items(): - if dataset_name in cfg["skipped_names"]: - print(f" ++>> {dataset_name} will be skipped.") - continue - - txt_recoder.add_row(row_name="Dataset", row_data=dataset_name, row_start_str="\n") - - # 获取真值图片信息 - gt_info = dataset_path["mask"] - gt_root = gt_info["path"] - gt_ext = gt_info["suffix"] - # 真值名字列表 - gt_index_file = dataset_path.get("index_file") - if gt_index_file: - gt_name_list = get_name_with_group_list(data_path=gt_index_file, file_ext=gt_ext) - else: - gt_name_list = get_name_with_group_list(data_path=gt_root, file_ext=gt_ext) - assert len(gt_name_list) > 0, "there is not ground truth." 
- - # ==>> test the intersection between pre and gt for each method <<== - for method_name, method_info in cfg["drawing_info"].items(): - method_root = method_info["path_dict"] - method_dataset_info = method_root.get(dataset_name, None) - if method_dataset_info is None: - print(f" ==>> {method_name} does not have results on {dataset_name} <<== ") - continue - - # 预测结果存放路径下的图片文件名字列表和扩展名称 - pre_ext = method_dataset_info["suffix"] - pre_root = method_dataset_info["path"] - pre_name_list = get_name_with_group_list(data_path=pre_root, file_ext=pre_ext) - - # get the intersection - eval_name_list = sorted(list(set(gt_name_list).intersection(set(pre_name_list)))) - if len(eval_name_list) == 0: - print(f" ==>> {method_name} does not have results on {dataset_name} <<== ") - continue - - grouped_name_list = group_names(names=eval_name_list) - print( - f" ==>> It is evaluating {method_name} with" - f" {len(eval_name_list)} images and {len(grouped_name_list)} groups" - f" (G:{len(gt_name_list)},P:{len(pre_name_list)}) images <<== " - ) - - total_metric_recorder = {} - inter_group_bar = tqdm( - grouped_name_list.items(), total=len(grouped_name_list), leave=False, ncols=119 - ) - for group_name, names_in_group in inter_group_bar: - inter_group_bar.set_description(f"({dataset_name}) group => {group_name}") - - metric_recoder = MetricRecorder() - intra_group_bar = tqdm( - names_in_group, total=len(names_in_group), leave=False, ncols=119 - ) - for img_name in intra_group_bar: - intra_group_bar.set_description(f"processing => {img_name}") - img_name = "/".join([group_name, img_name]) - gt, pre = get_gt_pre_with_name( - gt_root=gt_root, - pre_root=pre_root, - img_name=img_name, - pre_ext=pre_ext, - gt_ext=gt_ext, - to_normalize=False, - ) - metric_recoder.update(pre=pre, gt=gt) - total_metric_recorder[group_name] = metric_recoder.show(bit_num=None) - # 保留原始数据每组的结果 - all_results = mean_all_group_metrics(group_metric_recorder=total_metric_recorder) - all_results["meanFm"] = 
all_results["fm"].mean() - all_results["maxFm"] = all_results["fm"].max() - all_results["meanEm"] = all_results["em"].mean() - all_results["maxEm"] = all_results["em"].max() - all_results = {k: v.round(cfg["bit_num"]) for k, v in all_results.items()} - - method_curve = { - "prs": (np.flip(all_results["p"]), np.flip(all_results["r"])), - "fm": np.flip(all_results["fm"]), - "em": np.flip(all_results["em"]), - } - method_metric = { - "maxF": all_results["maxFm"], - "avgF": all_results["meanFm"], - "adpF": all_results["adpFm"].tolist(), - "maxE": all_results["maxEm"], - "avgE": all_results["meanEm"], - "adpE": all_results["adpEm"].tolist(), - "wFm": all_results["wFm"].tolist(), - "MAE": all_results["MAE"].tolist(), - "SM": all_results["Sm"].tolist(), - } - qualitative_results[dataset_name][method_name] = method_curve - quantitative_results[dataset_name][method_name] = method_metric - - excel_recorder( - row_data=method_metric, dataset_name=dataset_name, method_name=method_name - ) - txt_recoder(method_results=method_metric, method_name=method_name) - - if cfg["save_npy"]: - np.save(cfg["qualitative_npy_path"], qualitative_results) - np.save(cfg["quantitative_npy_path"], quantitative_results) - print( - f" ==>> all methods have been saved in {cfg['qualitative_npy_path']} and " - f"{cfg['quantitative_npy_path']} <<== " - ) - - print(f" ==>> all methods have been tested:") - pprint(quantitative_results, indent=2, width=119) - - -def draw_pr_fm_curve(for_pr: bool = True): - mode = "pr" if for_pr else "fm" - mode_axes_setting = cfg["axes_setting"][mode] - - x_label, y_label = mode_axes_setting["x_label"], mode_axes_setting["y_label"] - x_lim, y_lim = mode_axes_setting["x_lim"], mode_axes_setting["y_lim"] - - qualitative_results = np.load( - os.path.join(cfg["qualitative_npy_path"]), allow_pickle=True - ).item() - - row_num = 2 - curve_drawer = CurveDrawer( - row_num=row_num, col_num=math.ceil(len(cfg["dataset_info"].keys()) / row_num) - ) - - for idx, dataset_name in 
enumerate(cfg["dataset_info"].keys()): - # 与cfg[dataset_info]中的key保持一致 - dataset_results = qualitative_results[dataset_name] - for method_name, method_info in cfg["drawing_info"].items(): - # 与cfg[drawing_info]中的key保持一致 - method_results = dataset_results.get(method_name, None) - if method_results is None: - print(f" ==>> {method_name} does not have results on {dataset_name} <<== ") - continue - - if mode == "pr": - assert isinstance(method_results["prs"], (list, tuple)) - y_data, x_data = method_results["prs"] - else: - y_data, x_data = method_results["fm"], np.linspace(1, 0, 256) - - curve_drawer.draw_method_curve( - curr_idx=idx, - dataset_name=dataset_name.upper(), - method_curve_setting=method_info["curve_setting"], - x_label=x_label, - y_label=y_label, - x_data=x_data, - y_data=y_data, - x_lim=x_lim, - y_lim=y_lim, - ) - curve_drawer.show() - - -if __name__ == "__main__": - data_type = "rgb_cosod" - data_info = total_info[data_type] - output_path = "./output" # 存放输出文件的文件夹 - - cfg = { # 针对多个模型评估比较的设置 - "dataset_info": data_info["dataset"], - "drawing_info": data_info["method"]["drawing"], # 包含所有待比较模型结果的信息和绘图配置的字典 - "record_path": os.path.join(output_path, f"{data_type}.txt"), # 用来保存测试结果的文件的路径 - "xlsx_path": os.path.join(output_path, f"{data_type}.xlsx"), - "save_npy": True, # 是否将评估结果到npy文件中,该文件可用来绘制pr和fm曲线 - # 保存曲线指标数据的文件路径 - "qualitative_npy_path": os.path.join( - output_path, data_type + "_" + "qualitative_results.npy" - ), - "quantitative_npy_path": os.path.join( - output_path, data_type + "_" + "quantitative_results.npy" - ), - "axes_setting": { # 不同曲线的绘图配置 - "pr": { # pr曲线的配置 - "x_label": "Recall", # 横坐标标签 - "y_label": "Precision", # 纵坐标标签 - "x_lim": (0.1, 1), # 横坐标显示范围 - "y_lim": (0.1, 1), # 纵坐标显示范围 - }, - "fm": { # fm曲线的配置 - "x_label": "Threshold", # 横坐标标签 - "y_label": r"F$_{\beta}$", # 纵坐标标签 - "x_lim": (0, 1), # 横坐标显示范围 - "y_lim": (0, 0.9), # 纵坐标显示范围 - }, - }, - "bit_num": 3, # 评估结果保留的小数点后数据的位数 - "resume_record": True, # 是否保留之前的评估记录(针对record_path文件有效) - 
"skipped_names": [], - } - - make_dir(output_path) - cal_all_metrics() - # draw_pr_fm_curve(for_pr=True) +draw_curves( + for_pr=True, + axes_setting=axes_setting, + curves_npy_path=curves_npy_path, + row_num=row_num, + drawing_info=drawing_info, + dataset_info=dataset_info, +) diff --git a/eval_sod_all_methods.py b/eval_sod_all_methods.py index 1ffa560..ead9869 100755 --- a/eval_sod_all_methods.py +++ b/eval_sod_all_methods.py @@ -1,20 +1,8 @@ # -*- coding: utf-8 -*- -import math import os -from collections import defaultdict - -import numpy as np -from tqdm import tqdm from configs import total_info -from utils.misc import get_gt_pre_with_name, get_name_list, make_dir -from utils.print_formatter import print_formatter -from utils.recorders import ( - CurveDrawer, - MetricExcelRecorder, - MetricRecorder, - TxtRecorder, -) +from metrics.sod import cal_sod_matrics, draw_curves """ Include: Fm Curve/PR Curves/MAE/(max/mean/weighted) Fmeasure/Smeasure/Emeasure @@ -24,230 +12,84 @@ But it needs to have uniform naming rules for `pre` and `gt`. 
""" +for_pr = True # 绘制pr曲线还是fm曲线 + +# 当前支持rgb_cod, rgb_sod, rgbd_sod +data_type = "rgb_cod" +data_info = total_info[data_type] + +# 存放输出文件的文件夹 +output_path = "./output" + +# 包含所有待比较模型结果的信息和绘图配置的字典 +dataset_info = data_info["dataset"] +drawing_info = data_info["method"]["drawing"] + +# 用来保存测试结果的文件的路径 +txt_path = os.path.join(output_path, f"{data_type}.txt") +xlsx_path = os.path.join(output_path, f"{data_type}.xlsx") + +# 是否将评估结果到npy文件中,该文件可用来绘制pr和fm曲线 +save_npy = True +# 保存曲线指标数据的文件路径 +curves_npy_path = os.path.join(output_path, data_type + "_" + "curves.npy") +metrics_npy_path = os.path.join(output_path, data_type + "_" + "metrics.npy") + +row_num = 1 + +# 不同曲线的绘图配置 +axes_setting = { + # pr曲线的配置 + "pr": { + # 横坐标标签 + "x_label": "Recall", + # 纵坐标标签 + "y_label": "Precision", + # 横坐标显示范围 + "x_lim": (0.1, 1), + # 纵坐标显示范围 + "y_lim": (0.1, 1), + }, + # fm曲线的配置 + "fm": { + # 横坐标标签 + "x_label": "Threshold", + # 纵坐标标签 + "y_label": r"F$_{\beta}$", + # 横坐标显示范围 + "x_lim": (0, 1), + # 纵坐标显示范围 + "y_lim": (0, 0.9), + }, +} +# 评估结果保留的小数点后数据的位数 +num_bits = 3 + +# 是否保留之前的评估记录(针对txt_path文件有效) +resume_record = True + +# 在计算指标的时候要跳过的数据集 +skipped_datasets = ["COD10K"] + +cal_sod_matrics( + data_type=data_type, + txt_path=txt_path, + resume_record=resume_record, + xlsx_path=xlsx_path, + drawing_info=drawing_info, + dataset_info=dataset_info, + skipped_datasets=skipped_datasets, + save_npy=save_npy, + curves_npy_path=curves_npy_path, + metrics_npy_path=metrics_npy_path, + num_bits=num_bits, +) -def cal_all_metrics(): - """ - Save the results of all models on different datasets in a `npy` file in the form of a - dictionary. - { - dataset1:{ - method1:[(ps, rs), fs], - method2:[(ps, rs), fs], - ..... - }, - dataset2:{ - method1:[(ps, rs), fs], - method2:[(ps, rs), fs], - ..... - }, - .... 
- } - """ - qualitative_results = defaultdict(dict) # Two curve metrics - quantitative_results = defaultdict(dict) # Six numerical metrics - - txt_recoder = TxtRecorder( - txt_path=cfg["record_path"], - resume=cfg["resume_record"], - max_method_name_width=max([len(x) for x in cfg["drawing_info"].keys()]), # 显示完整名字 - # max_method_name_width=10, # 指定长度 - ) - excel_recorder = MetricExcelRecorder( - xlsx_path=cfg["xlsx_path"], - sheet_name=data_type, - row_header=["methods"], - dataset_names=sorted(list(cfg["dataset_info"].keys())), - metric_names=["sm", "wfm", "mae", "adpf", "avgf", "maxf", "adpe", "avge", "maxe"], - ) - - for dataset_name, dataset_path in cfg["dataset_info"].items(): - if dataset_name in cfg["skipped_datasets"]: - print(f" ++>> {dataset_name} will be skipped.") - continue - - txt_recoder.add_row(row_name="Dataset", row_data=dataset_name, row_start_str="\n") - - # 获取真值图片信息 - gt_info = dataset_path["mask"] - gt_root = gt_info["path"] - gt_ext = gt_info["suffix"] - # 真值名字列表 - gt_index_file = dataset_path.get("index_file") - if gt_index_file: - gt_name_list = get_name_list(data_path=gt_index_file, file_ext=gt_ext) - else: - gt_name_list = get_name_list(data_path=gt_root, file_ext=gt_ext) - assert len(gt_name_list) > 0, "there is not ground truth." 
- - # ==>> test the intersection between pre and gt for each method <<== - for method_name, method_info in cfg["drawing_info"].items(): - method_root = method_info["path_dict"] - method_dataset_info = method_root.get(dataset_name, None) - if method_dataset_info is None: - print(f" ==>> {method_name} does not have results on {dataset_name} <<== ") - continue - - # 预测结果存放路径下的图片文件名字列表和扩展名称 - pre_ext = method_dataset_info["suffix"] - pre_root = method_dataset_info["path"] - pre_name_list = get_name_list(data_path=pre_root, file_ext=pre_ext) - - # get the intersection - eval_name_list = sorted(list(set(gt_name_list).intersection(set(pre_name_list)))) - if len(eval_name_list) == 0: - print(f" ==>> {method_name} does not have results on {dataset_name} <<== ") - continue - - print( - f" ==>> It is evaluating {method_name} with {len(eval_name_list)} images" - f" (G:{len(gt_name_list)},P:{len(pre_name_list)}) images on dataset {dataset_name} <<== " - ) - - metric_recoder = MetricRecorder() - tqdm_bar = tqdm( - eval_name_list, - total=len(eval_name_list), - leave=False, - ncols=119, - desc=f"({dataset_name})", - ) - for img_name in tqdm_bar: - # tqdm_bar.set_description(f"({dataset_name})") - gt, pre = get_gt_pre_with_name( - gt_root=gt_root, - pre_root=pre_root, - img_name=img_name, - pre_ext=pre_ext, - gt_ext=gt_ext, - to_normalize=False, - ) - metric_recoder.update(pre=pre, gt=gt) - all_results = metric_recoder.show(bit_num=None) # 保留原始数据 - all_results["meanFm"] = all_results["fm"].mean() - all_results["maxFm"] = all_results["fm"].max() - all_results["meanEm"] = all_results["em"].mean() - all_results["maxEm"] = all_results["em"].max() - all_results = {k: v.round(cfg["bit_num"]) for k, v in all_results.items()} - - method_curve = { - "prs": (np.flip(all_results["p"]), np.flip(all_results["r"])), - "fm": np.flip(all_results["fm"]), - "em": np.flip(all_results["em"]), - } - method_metric = { - "maxF": all_results["maxFm"].item(), - "avgF": all_results["meanFm"].item(), - 
"adpF": all_results["adpFm"].item(), - "maxE": all_results["maxEm"].item(), - "avgE": all_results["meanEm"].item(), - "adpE": all_results["adpEm"].item(), - "wFm": all_results["wFm"].item(), - "MAE": all_results["MAE"].item(), - "SM": all_results["Sm"].item(), - } - qualitative_results[dataset_name][method_name] = method_curve - quantitative_results[dataset_name][method_name] = method_metric - - excel_recorder( - row_data=method_metric, dataset_name=dataset_name, method_name=method_name - ) - txt_recoder(method_results=method_metric, method_name=method_name) - - if cfg["save_npy"]: - np.save(cfg["qualitative_npy_path"], qualitative_results) - np.save(cfg["quantitative_npy_path"], quantitative_results) - print( - f" ==>> all methods have been saved in {cfg['qualitative_npy_path']} and " - f"{cfg['quantitative_npy_path']} <<== " - ) - - formatted_string = print_formatter(quantitative_results) - print(f" ==>> all methods have been tested:\n{formatted_string}") - - -def draw_pr_fm_curve(for_pr: bool = True): - mode = "pr" if for_pr else "fm" - mode_axes_setting = cfg["axes_setting"][mode] - - x_label, y_label = mode_axes_setting["x_label"], mode_axes_setting["y_label"] - x_lim, y_lim = mode_axes_setting["x_lim"], mode_axes_setting["y_lim"] - - qualitative_results = np.load( - os.path.join(cfg["qualitative_npy_path"]), allow_pickle=True - ).item() - - row_num = 2 - curve_drawer = CurveDrawer( - row_num=row_num, col_num=math.ceil(len(cfg["dataset_info"].keys()) / row_num) - ) - - for idx, dataset_name in enumerate(cfg["dataset_info"].keys()): - # 与cfg[dataset_info]中的key保持一致 - dataset_results = qualitative_results[dataset_name] - for method_name, method_info in cfg["drawing_info"].items(): - # 与cfg[drawing_info]中的key保持一致 - method_results = dataset_results.get(method_name, None) - if method_results is None: - print(f" ==>> {method_name} does not have results on {dataset_name} <<== ") - continue - - if mode == "pr": - assert isinstance(method_results["prs"], (list, tuple)) 
- y_data, x_data = method_results["prs"] - else: - y_data, x_data = method_results["fm"], np.linspace(1, 0, 256) - - curve_drawer.draw_method_curve( - curr_idx=idx, - dataset_name=dataset_name.upper(), - method_curve_setting=method_info["curve_setting"], - x_label=x_label, - y_label=y_label, - x_data=x_data, - y_data=y_data, - x_lim=x_lim, - y_lim=y_lim, - ) - curve_drawer.show() - - -if __name__ == "__main__": - data_type = "rgbd_sod" - data_info = total_info[data_type] - output_path = "./output" # 存放输出文件的文件夹 - - cfg = { # 针对多个模型评估比较的设置 - "dataset_info": data_info["dataset"], - "drawing_info": data_info["method"]["drawing"], # 包含所有待比较模型结果的信息和绘图配置的字典 - "record_path": os.path.join(output_path, f"{data_type}.txt"), # 用来保存测试结果的文件的路径 - "xlsx_path": os.path.join(output_path, f"{data_type}.xlsx"), - "save_npy": True, # 是否将评估结果到npy文件中,该文件可用来绘制pr和fm曲线 - # 保存曲线指标数据的文件路径 - "qualitative_npy_path": os.path.join( - output_path, data_type + "_" + "qualitative_results.npy" - ), - "quantitative_npy_path": os.path.join( - output_path, data_type + "_" + "quantitative_results.npy" - ), - "axes_setting": { # 不同曲线的绘图配置 - "pr": { # pr曲线的配置 - "x_label": "Recall", # 横坐标标签 - "y_label": "Precision", # 纵坐标标签 - "x_lim": (0.1, 1), # 横坐标显示范围 - "y_lim": (0.1, 1), # 纵坐标显示范围 - }, - "fm": { # fm曲线的配置 - "x_label": "Threshold", # 横坐标标签 - "y_label": r"F$_{\beta}$", # 纵坐标标签 - "x_lim": (0, 1), # 横坐标显示范围 - "y_lim": (0, 0.9), # 纵坐标显示范围 - }, - }, - "bit_num": 3, # 评估结果保留的小数点后数据的位数 - "resume_record": True, # 是否保留之前的评估记录(针对record_path文件有效) - "skipped_datasets": [], - } - - make_dir(output_path) - cal_all_metrics() - # draw_pr_fm_curve(for_pr=True) +draw_curves( + for_pr=for_pr, + axes_setting=axes_setting, + curves_npy_path=curves_npy_path, + row_num=row_num, + drawing_info=drawing_info, + dataset_info=dataset_info, +) diff --git a/eval_sod_all_methods_from_mat.py b/eval_sod_all_methods_from_mat.py index 9a96786..0ab1c82 100755 --- a/eval_sod_all_methods_from_mat.py +++ b/eval_sod_all_methods_from_mat.py @@ 
-1,15 +1,15 @@ # -*- coding: utf-8 -*- -import math import os from collections import defaultdict -from pprint import pprint import numpy as np import scipy.io as scio from configs import total_info -from utils.misc import make_dir -from utils.recorders import CurveDrawer, MetricExcelRecorder, TxtRecorder +from metrics.sod import draw_curves +from utils.misc import colored_print, make_dir +from utils.print_formatter import print_formatter +from utils.recorders import MetricExcelRecorder, TxtRecorder """ This file can be used to plot curves with the 'mat' files from Fan's project: @@ -35,182 +35,151 @@ def export_valid_npy(): form of a dictionary. { dataset1:{ - method1:[(ps, rs), fs], - method2:[(ps, rs), fs], + method1:[fm, em, p, r], + method2:[fm, em, p, r], ..... }, dataset2:{ - method1:[(ps, rs), fs], - method2:[(ps, rs), fs], + method1:[fm, em, p, r], + method2:[fm, em, p, r], ..... }, .... } """ - qualitative_results = defaultdict(dict) # Two curve metrics - quantitative_results = defaultdict(dict) # Six numerical metrics + curves = defaultdict(dict) # Two curve metrics + metrics = defaultdict(dict) # Six numerical metrics txt_recoder = TxtRecorder( - txt_path=cfg["record_path"], - resume=cfg["resume_record"], - max_method_name_width=max([len(x) for x in cfg["drawing_info"].keys()]), # 显示完整名字 - # max_method_name_width=10, # 指定长度 + txt_path=txt_path, + resume=resume_record, + max_method_name_width=max([len(x) for x in drawing_info.keys()]), # 显示完整名字 ) excel_recorder = MetricExcelRecorder( - xlsx_path=cfg["xlsx_path"], + xlsx_path=xlsx_path, sheet_name=data_type, row_header=["methods"], - dataset_names=sorted(list(cfg["dataset_info"].keys())), - metric_names=["sm", "wfm", "mae", "adpfm", "avgfm", "maxfm", "adpem", "avgem", "maxem"], + dataset_names=sorted(list(dataset_info.keys())), + metric_names=["sm", "wfm", "mae", "adpf", "avgf", "maxf", "adpe", "avge", "maxe"], ) - for dataset_name in cfg["dataset_info"].keys(): + for dataset_name in dataset_info.keys(): 
# 使用dataset_name索引各个方法在不同数据集上的结果 - for method_name, method_info in cfg["drawing_info"].items(): + for method_name, method_info in drawing_info.items(): method_result_path = method_info["path_dict"] # if dataset_name is None, `.get(dataset_name, other_value)` will return `other_value`. info_for_dataset = method_result_path.get(dataset_name, None) if info_for_dataset is None: - print(f" ==>> {method_name} does not have results on {dataset_name} <<== ") + colored_print( + msg=f"{method_name} does not have results on {dataset_name}", mode="warning" + ) continue mat_path = info_for_dataset.get("mat", None) if mat_path is None: - print(f" ==>> {method_name} does not have results on {dataset_name} <<== ") + colored_print( + msg=f"{method_name} does not have results on {dataset_name}", mode="warning" + ) continue method_result = scio.loadmat(mat_path) - - ps = method_result["column_Pr"].reshape(-1).round(cfg["bit_num"]).tolist() - rs = method_result["column_Rec"].reshape(-1).round(cfg["bit_num"]).tolist() - fm = method_result["column_F"].reshape(-1).round(cfg["bit_num"]).tolist() - - maxf = method_result["maxFm"].reshape(-1).round(cfg["bit_num"]).item() - meanf = method_result["meanFm"].reshape(-1).round(cfg["bit_num"]).item() - adpf = method_result["adpFm"].reshape(-1).round(cfg["bit_num"]).item() - maxe = method_result["maxEm"].reshape(-1).round(cfg["bit_num"]).item() - meane = method_result["meanEm"].reshape(-1).round(cfg["bit_num"]).item() - adpe = method_result["adpEm"].reshape(-1).round(cfg["bit_num"]).item() - wfm = method_result["wFm"].reshape(-1).round(cfg["bit_num"]).item() - mae = method_result["mae"].reshape(-1).round(cfg["bit_num"]).item() - sm = method_result["Sm"].reshape(-1).round(cfg["bit_num"]).item() - - method_curve = {"prs": (ps, rs), "fm": fm} - method_metric = { - "maxFm": maxf, - "meanFm": meanf, - "adpFm": adpf, - "maxEm": maxe, - "meanEm": meane, - "adpEm": adpe, - "wFm": wfm, - "MAE": mae, - "Sm": sm, + method_curves = { + "p": 
method_result["column_Pr"].reshape(-1).round(num_bits).tolist(), + "r": method_result["column_Rec"].reshape(-1).round(num_bits).tolist(), + "fm": method_result["column_F"].reshape(-1).round(num_bits).tolist(), + } + method_metrics = { + "maxF": method_result["maxFm"].reshape(-1).round(num_bits).item(), + "avgF": method_result["meanFm"].reshape(-1).round(num_bits).item(), + "adpF": method_result["adpFm"].reshape(-1).round(num_bits).item(), + "maxE": method_result["maxEm"].reshape(-1).round(num_bits).item(), + "avgE": method_result["meanEm"].reshape(-1).round(num_bits).item(), + "adpE": method_result["adpEm"].reshape(-1).round(num_bits).item(), + "wFm": method_result["wFm"].reshape(-1).round(num_bits).item(), + "MAE": method_result["mae"].reshape(-1).round(num_bits).item(), + "SM": method_result["Sm"].reshape(-1).round(num_bits).item(), } - qualitative_results[dataset_name][method_name] = method_curve - quantitative_results[dataset_name][method_name] = method_metric + curves[dataset_name][method_name] = method_curves + metrics[dataset_name][method_name] = method_metrics excel_recorder( - row_data=method_metric, dataset_name=dataset_name, method_name=method_name + row_data=method_metrics, dataset_name=dataset_name, method_name=method_name ) - txt_recoder(method_results=method_metric, method_name=method_name) - - if cfg["save_npy"]: - np.save(cfg["qualitative_npy_path"], qualitative_results) - np.save(cfg["quantitative_npy_path"], quantitative_results) - print( - f" ==>> all methods have been saved in {cfg['qualitative_npy_path']} and " - f"{cfg['quantitative_npy_path']} <<== " - ) + txt_recoder(method_results=method_metrics, method_name=method_name) - print(f" ==>> all methods have been tested:") - pprint(quantitative_results, indent=2, width=119) + if save_npy: + make_dir(os.path.dirname(curves_npy_path)) + np.save(curves_npy_path, curves) + np.save(metrics_npy_path, metrics) + colored_print(f"all methods have been saved in {curves_npy_path} and {metrics_npy_path}") 
+ formatted_string = print_formatter(metrics) + colored_print(f"all methods have been tested:\n{formatted_string}") -def draw_pr_fm_curve(for_pr: bool = True): - mode = "pr" if for_pr else "fm" - mode_axes_setting = cfg["axes_setting"][mode] +if __name__ == "__main__": + for_pr = True # 绘制pr还是fm曲线 - x_label, y_label = mode_axes_setting["x_label"], mode_axes_setting["y_label"] - x_lim, y_lim = mode_axes_setting["x_lim"], mode_axes_setting["y_lim"] + data_type = "rgbd_sod" + data_info = total_info[data_type] - qualitative_results = np.load( - os.path.join(cfg["qualitative_npy_path"]), allow_pickle=True - ).item() + # 存放输出文件的文件夹 + output_path = "./output" - row_num = 1 - curve_drawer = CurveDrawer( - row_num=row_num, col_num=math.ceil(len(cfg["dataset_info"].keys()) / row_num) - ) + # 包含所有待比较模型结果的信息和绘图配置的字典 + dataset_info = data_info["dataset"] + drawing_info = data_info["method"]["drawing"] - for idx, dataset_name in enumerate(cfg["dataset_info"].keys()): - # 与cfg[dataset_info]中的key保持一致 - dataset_results = qualitative_results[dataset_name] - for method_name, method_info in cfg["drawing_info"].items(): - # 与cfg[drawing_info]中的key保持一致 - method_results = dataset_results.get(method_name, None) - if method_results is None: - print(f" ==>> {method_name} does not have results on {dataset_name} <<== ") - continue + # 用来保存测试结果的文件的路径 + txt_path = os.path.join(output_path, f"{data_type}.txt") + xlsx_path = os.path.join(output_path, f"{data_type}.xlsx") - if mode == "pr": - assert isinstance(method_results["prs"], (list, tuple)) - y_data, x_data = method_results["prs"] - else: - y_data, x_data = method_results["fm"], np.linspace(1, 0, 256) - - curve_drawer.draw_method_curve( - curr_idx=idx, - dataset_name=dataset_name.upper(), - method_curve_setting=method_info["curve_setting"], - x_label=x_label, - y_label=y_label, - x_data=x_data, - y_data=y_data, - x_lim=x_lim, - y_lim=y_lim, - ) - curve_drawer.show() + # 是否将评估结果到npy文件中,该文件可用来绘制pr和fm曲线 + save_npy = True + # 保存曲线指标数据的文件路径 + 
curves_npy_path = os.path.join(output_path, data_type + "_" + "curves.npy") + metrics_npy_path = os.path.join(output_path, data_type + "_" + "metrics.npy") + row_num = 1 -if __name__ == "__main__": - data_type = "rgbd_sod" - data_info = total_info[data_type] - output_path = "./output" # 存放输出文件的文件夹 - - cfg = { # 针对多个模型评估比较的设置 - "dataset_info": data_info["dataset"], - "drawing_info": data_info["method"]["drawing"], # 包含所有待比较模型结果的信息和绘图配置的字典 - "record_path": os.path.join(output_path, f"{data_type}.txt"), # 用来保存测试结果的文件的路径 - "xlsx_path": os.path.join(output_path, f"{data_type}.xlsx"), - "save_npy": True, # 是否将评估结果到npy文件中,该文件可用来绘制pr和fm曲线 - # 保存曲线指标数据的文件路径 - "qualitative_npy_path": os.path.join( - output_path, data_type + "_" + "qualitative_results.npy" - ), - "quantitative_npy_path": os.path.join( - output_path, data_type + "_" + "quantitative_results.npy" - ), - "axes_setting": { # 不同曲线的绘图配置 - "pr": { # pr曲线的配置 - "x_label": "Recall", # 横坐标标签 - "y_label": "Precision", # 纵坐标标签 - "x_lim": (0.1, 1), # 横坐标显示范围 - "y_lim": (0.1, 1), # 纵坐标显示范围 - }, - "fm": { # fm曲线的配置 - "x_label": "Threshold", # 横坐标标签 - "y_label": r"F$_{\beta}$", # 纵坐标标签 - "x_lim": (0, 1), # 横坐标显示范围 - "y_lim": (0, 0.9), # 纵坐标显示范围 - }, + # 不同曲线的绘图配置 + axes_setting = { + # pr曲线的配置 + "pr": { + # 横坐标标签 + "x_label": "Recall", + # 纵坐标标签 + "y_label": "Precision", + # 横坐标显示范围 + "x_lim": (0.1, 1), + # 纵坐标显示范围 + "y_lim": (0.1, 1), + }, + # fm曲线的配置 + "fm": { + # 横坐标标签 + "x_label": "Threshold", + # 纵坐标标签 + "y_label": r"F$_{\beta}$", + # 横坐标显示范围 + "x_lim": (0, 1), + # 纵坐标显示范围 + "y_lim": (0, 0.9), }, - "bit_num": 3, # 评估结果保留的小数点后数据的位数 - "resume_record": False, # 是否保留之前的评估记录(针对record_path文件有效) - "skipped_names": [], } + # 评估结果保留的小数点后数据的位数 + num_bits = 3 + + # 是否保留之前的评估记录(针对txt_path文件有效) + resume_record = True - make_dir(output_path) export_valid_npy() - # draw_pr_fm_curve(for_pr=True) + + draw_curves( + for_pr=for_pr, + axes_setting=axes_setting, + curves_npy_path=curves_npy_path, + row_num=row_num, + 
drawing_info=drawing_info, + dataset_info=dataset_info, + ) diff --git a/eval_sod_single_method.py b/eval_sod_single_method.py index 9d37afb..d8fef05 100644 --- a/eval_sod_single_method.py +++ b/eval_sod_single_method.py @@ -1,12 +1,12 @@ # -*- coding: utf-8 -*- -import copy + import os -from pprint import pprint from tqdm import tqdm from configs import total_info -from utils.misc import get_gt_pre_with_name, get_name_list, make_dir +from utils.misc import colored_print, get_gt_pre_with_name, get_name_list, make_dir +from utils.print_formatter import print_formatter from utils.recorders import MetricExcelRecorder, MetricRecorder @@ -19,10 +19,10 @@ def cal_all_metrics(): metric_names=["sm", "wfm", "mae", "adpf", "avgf", "maxf", "adpe", "avge", "maxe"], ) - method_perf = {} + metrics = {} for dataset_name, dataset_path in dataset_info.items(): if dataset_name in skipped_names: - print(f" ++>> {dataset_name} will be skipped.") + colored_print(msg=f"{dataset_name} will be skipped.", mode="warning") continue # 获取真值图片信息 @@ -40,7 +40,9 @@ def cal_all_metrics(): # ==>> test the intersection between pre and gt for each method <<== method_dataset_info = pred_path.get(dataset_name, None) if method_dataset_info is None: - print(f" ==>> {model_name} does not have results on {dataset_name} <<== ") + colored_print( + msg=f"{model_name} does not have results on {dataset_name}", mode="warning" + ) continue # 预测结果存放路径下的图片文件名字列表和扩展名称 @@ -50,18 +52,22 @@ def cal_all_metrics(): # get the intersection eval_name_list = sorted(list(set(gt_name_list).intersection(set(pre_name_list)))) - print( - f" ==>> It is evaluating {model_name} with {len(eval_name_list)} images" - f" (G:{len(gt_name_list)},P:{len(pre_name_list)}) images on dataset {dataset_name} <<== " + num_names = len(eval_name_list) + + if num_names == 0: + colored_print( + msg=f"{model_name} does not have results on {dataset_name}", mode="warning" + ) + continue + + colored_print( + f"Evaluating {model_name} with 
{len(eval_name_list)} images" + f" (G:{len(gt_name_list)},P:{len(pre_name_list)}) images on dataset {dataset_name}" ) metric_recoder = MetricRecorder() tqdm_bar = tqdm( - eval_name_list, - total=len(eval_name_list), - leave=False, - ncols=119, - desc=f"({dataset_name})", + eval_name_list, total=num_names, leave=False, ncols=119, desc=f"({dataset_name})" ) for img_name in tqdm_bar: gt, pre = get_gt_pre_with_name( @@ -73,27 +79,16 @@ def cal_all_metrics(): to_normalize=False, ) metric_recoder.update(pre=pre, gt=gt) - metric_results = metric_recoder.show(bit_num=None) # 保留原始数据 - - perf_on_dataset = copy.deepcopy(metric_results) - del perf_on_dataset["fm"] - del perf_on_dataset["em"] - del perf_on_dataset["p"] - del perf_on_dataset["r"] - - perf_on_dataset["meanFm"] = metric_results["fm"].mean() - perf_on_dataset["maxFm"] = metric_results["fm"].max() - perf_on_dataset["meanEm"] = metric_results["em"].mean() - perf_on_dataset["maxEm"] = metric_results["em"].max() - perf_on_dataset = {k: v.round(bit_num) for k, v in perf_on_dataset.items()} - print(perf_on_dataset) - method_perf[dataset_name] = perf_on_dataset - excel_recorder( - row_data=method_perf[dataset_name], - dataset_name=dataset_name, - method_name=model_name, - ) - pprint(method_perf) + method_results = metric_recoder.show(num_bits=num_bits, return_ndarray=False) + method_metrics = method_results["numerical"] + metrics[dataset_name] = method_metrics + + excel_recorder(row_data=method_metrics, dataset_name=dataset_name, method_name=model_name) + + print(method_metrics) + + formatted_string = print_formatter(metrics) + colored_print(f"all methods have been tested:\n{formatted_string}") if __name__ == "__main__": @@ -106,6 +101,6 @@ def cal_all_metrics(): dataset_info = data_info["dataset"] export_xlsx = False # 是否导出xlsx文件 xlsx_path = os.path.join(output_path, "resutls.xlsx") # xlsx文件的路径 - bit_num = 3 # 评估结果保留的小数点后数据的位数 + num_bits = 3 # 评估结果保留的小数点后数据的位数 skipped_names = [] # 可以跳过指定的数据集 cal_all_metrics() diff --git 
a/metrics/sod/__init__.py b/metrics/sod/__init__.py old mode 100755 new mode 100644 index e69de29..2c40330 --- a/metrics/sod/__init__.py +++ b/metrics/sod/__init__.py @@ -0,0 +1,5 @@ +# -*- coding: utf-8 -*- + +from .cal_cosod_matrics import cal_cosod_matrics +from .cal_sod_matrics import cal_sod_matrics +from .draw_curves import draw_curves diff --git a/metrics/sod/cal_cosod_matrics.py b/metrics/sod/cal_cosod_matrics.py new file mode 100644 index 0000000..5a04c30 --- /dev/null +++ b/metrics/sod/cal_cosod_matrics.py @@ -0,0 +1,169 @@ +# -*- coding: utf-8 -*- + +import os +from collections import defaultdict + +import numpy as np +from tqdm import tqdm + +from utils.misc import ( + colored_print, + get_gt_pre_with_name, + get_name_with_group_list, + make_dir, +) +from utils.print_formatter import print_formatter +from utils.recorders import GroupedMetricRecorder, MetricExcelRecorder, TxtRecorder + + +def group_names(names: list) -> dict: + grouped_data = defaultdict(list) + for name in names: + group_name, file_name = name.split("/") + grouped_data[group_name].append(file_name) + return grouped_data + + +def cal_cosod_matrics( + data_type: str = "rgb_sod", + txt_path: str = "", + resume_record: bool = True, + xlsx_path: str = "", + drawing_info: dict = None, + dataset_info: dict = None, + skipped_datasets: list = None, + save_npy: bool = True, + curves_npy_path: str = "./curves.npy", + metrics_npy_path: str = "./metrics.npy", + num_bits: int = 3, +): + """ + Save the results of all models on different datasets in a `npy` file in the form of a + dictionary. + { + dataset1:{ + method1:[fm, em, p, r], + method2:[fm, em, p, r], + ..... + }, + dataset2:{ + method1:[fm, em, p, r], + method2:[fm, em, p, r], + ..... + }, + .... 
+ } + """ + curves = defaultdict(dict) # Two curve metrics + metrics = defaultdict(dict) # Six numerical metrics + + txt_recoder = TxtRecorder( + txt_path=txt_path, + resume=resume_record, + max_method_name_width=max([len(x) for x in drawing_info.keys()]), # 显示完整名字 + ) + excel_recorder = MetricExcelRecorder( + xlsx_path=xlsx_path, + sheet_name=data_type, + row_header=["methods"], + dataset_names=sorted(list(dataset_info.keys())), + metric_names=["sm", "wfm", "mae", "adpf", "avgf", "maxf", "adpe", "avge", "maxe"], + ) + + for dataset_name, dataset_path in dataset_info.items(): + if dataset_name in skipped_datasets: + colored_print(msg=f"{dataset_name} will be skipped.", mode="warning") + continue + + txt_recoder.add_row(row_name="Dataset", row_data=dataset_name, row_start_str="\n") + + # 获取真值图片信息 + gt_info = dataset_path["mask"] + gt_root = gt_info["path"] + gt_ext = gt_info["suffix"] + # 真值名字列表 + gt_index_file = dataset_path.get("index_file") + if gt_index_file: + gt_name_list = get_name_with_group_list(data_path=gt_index_file, file_ext=gt_ext) + else: + gt_name_list = get_name_with_group_list(data_path=gt_root, file_ext=gt_ext) + assert len(gt_name_list) > 0, "there is not ground truth." 
+ + # ==>> test the intersection between pre and gt for each method <<== + for method_name, method_info in drawing_info.items(): + method_root = method_info["path_dict"] + method_dataset_info = method_root.get(dataset_name, None) + if method_dataset_info is None: + colored_print( + msg=f"{method_name} does not have results on {dataset_name}", mode="warning" + ) + continue + + # 预测结果存放路径下的图片文件名字列表和扩展名称 + pre_ext = method_dataset_info["suffix"] + pre_root = method_dataset_info["path"] + pre_name_list = get_name_with_group_list(data_path=pre_root, file_ext=pre_ext) + + # get the intersection + eval_name_list = sorted(list(set(gt_name_list).intersection(set(pre_name_list)))) + num_names = len(eval_name_list) + + if num_names == 0: + colored_print( + msg=f"{method_name} does not have results on {dataset_name}", mode="warning" + ) + continue + + grouped_data = group_names(names=eval_name_list) + num_groups = len(grouped_data) + + colored_print( + f"Evaluating {method_name} with {num_names} images and {num_groups} groups" + f" (G:{len(gt_name_list)},P:{len(pre_name_list)}) images on dataset {dataset_name}" + ) + + group_metric_recorder = GroupedMetricRecorder() + inter_group_bar = tqdm( + grouped_data.items(), + total=num_groups, + leave=False, + ncols=119, + desc=f"[{dataset_name}]", + ) + for group_name, names_in_group in inter_group_bar: + intra_group_bar = tqdm( + names_in_group, + total=len(names_in_group), + leave=False, + ncols=119, + desc=f"({group_name})", + ) + for img_name in intra_group_bar: + img_name_with_group = os.path.join(group_name, img_name) + gt, pre = get_gt_pre_with_name( + gt_root=gt_root, + pre_root=pre_root, + img_name=img_name_with_group, + pre_ext=pre_ext, + gt_ext=gt_ext, + to_normalize=False, + ) + group_metric_recorder.update(group_name=group_name, pre=pre, gt=gt) + method_results = group_metric_recorder.show(num_bits=num_bits, return_ndarray=False) + method_curves = method_results["sequential"] + method_metrics = method_results["numerical"] 
+ curves[dataset_name][method_name] = method_curves + metrics[dataset_name][method_name] = method_metrics + + excel_recorder( + row_data=method_metrics, dataset_name=dataset_name, method_name=method_name + ) + txt_recoder(method_results=method_metrics, method_name=method_name) + + if save_npy: + make_dir(os.path.basename(curves_npy_path)) + np.save(curves_npy_path, curves) + np.save(metrics_npy_path, metrics) + colored_print(f"all methods have been saved in {curves_npy_path} and {metrics_npy_path}") + formatted_string = print_formatter(metrics) + colored_print(f"all methods have been tested:\n{formatted_string}") diff --git a/metrics/sod/cal_sod_matrics.py b/metrics/sod/cal_sod_matrics.py new file mode 100644 index 0000000..7dd1c72 --- /dev/null +++ b/metrics/sod/cal_sod_matrics.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- + +import os +from collections import defaultdict + +import numpy as np +from tqdm import tqdm + +from utils.misc import colored_print, get_gt_pre_with_name, get_name_list, make_dir +from utils.print_formatter import print_formatter +from utils.recorders import MetricExcelRecorder, MetricRecorder, TxtRecorder + + +def cal_sod_matrics( + data_type: str = "rgb_sod", + txt_path: str = "", + resume_record: bool = True, + xlsx_path: str = "", + drawing_info: dict = None, + dataset_info: dict = None, + skipped_datasets: list = None, + save_npy: bool = True, + curves_npy_path: str = "./curves.npy", + metrics_npy_path: str = "./metrics.npy", + num_bits: int = 3, +): + """ + Save the results of all models on different datasets in a `npy` file in the form of a + dictionary. + { + dataset1:{ + method1:[fm, em, p, r], + method2:[fm, em, p, r], + ..... + }, + dataset2:{ + method1:[fm, em, p, r], + method2:[fm, em, p, r], + ..... + }, + .... 
+ } + """ + curves = defaultdict(dict) # Two curve metrics + metrics = defaultdict(dict) # Six numerical metrics + + txt_recoder = TxtRecorder( + txt_path=txt_path, + resume=resume_record, + max_method_name_width=max([len(x) for x in drawing_info.keys()]), # 显示完整名字 + ) + excel_recorder = MetricExcelRecorder( + xlsx_path=xlsx_path, + sheet_name=data_type, + row_header=["methods"], + dataset_names=sorted(list(dataset_info.keys())), + metric_names=["sm", "wfm", "mae", "adpf", "avgf", "maxf", "adpe", "avge", "maxe"], + ) + + for dataset_name, dataset_path in dataset_info.items(): + if dataset_name in skipped_datasets: + colored_print(msg=f"{dataset_name} will be skipped.", mode="warning") + continue + + txt_recoder.add_row(row_name="Dataset", row_data=dataset_name, row_start_str="\n") + + # 获取真值图片信息 + gt_info = dataset_path["mask"] + gt_root = gt_info["path"] + gt_ext = gt_info["suffix"] + # 真值名字列表 + gt_index_file = dataset_path.get("index_file") + if gt_index_file: + gt_name_list = get_name_list(data_path=gt_index_file, file_ext=gt_ext) + else: + gt_name_list = get_name_list(data_path=gt_root, file_ext=gt_ext) + assert len(gt_name_list) > 0, "there is not ground truth." 
+ + # ==>> test the intersection between pre and gt for each method <<== + for method_name, method_info in drawing_info.items(): + method_root = method_info["path_dict"] + method_dataset_info = method_root.get(dataset_name, None) + if method_dataset_info is None: + colored_print( + msg=f"{method_name} does not have results on {dataset_name}", mode="warning" + ) + continue + + # 预测结果存放路径下的图片文件名字列表和扩展名称 + pre_ext = method_dataset_info["suffix"] + pre_root = method_dataset_info["path"] + pre_name_list = get_name_list(data_path=pre_root, file_ext=pre_ext) + + # get the intersection + eval_name_list = sorted(list(set(gt_name_list).intersection(set(pre_name_list)))) + num_names = len(eval_name_list) + + if num_names == 0: + colored_print( + msg=f"{method_name} does not have results on {dataset_name}", mode="warning" + ) + continue + + colored_print( + f"Evaluating {method_name} with {num_names} images" + f" (G:{len(gt_name_list)},P:{len(pre_name_list)}) images on dataset {dataset_name}" + ) + + metric_recoder = MetricRecorder() + tqdm_bar = tqdm( + eval_name_list, total=num_names, leave=False, ncols=119, desc=f"[{dataset_name}]" + ) + for img_name in tqdm_bar: + gt, pre = get_gt_pre_with_name( + gt_root=gt_root, + pre_root=pre_root, + img_name=img_name, + pre_ext=pre_ext, + gt_ext=gt_ext, + to_normalize=False, + ) + metric_recoder.update(pre=pre, gt=gt) + method_results = metric_recoder.show(num_bits=num_bits, return_ndarray=False) + method_curves = method_results["sequential"] + method_metrics = method_results["numerical"] + curves[dataset_name][method_name] = method_curves + metrics[dataset_name][method_name] = method_metrics + + excel_recorder( + row_data=method_metrics, dataset_name=dataset_name, method_name=method_name + ) + txt_recoder(method_results=method_metrics, method_name=method_name) + + if save_npy: + make_dir(os.path.dirname(curves_npy_path)) + np.save(curves_npy_path, curves) + np.save(metrics_npy_path, metrics) + colored_print(f"all methods have been 
saved in {curves_npy_path} and {metrics_npy_path}") + formatted_string = print_formatter(metrics) + colored_print(f"all methods have been tested:\n{formatted_string}") diff --git a/metrics/sod/draw_curves.py b/metrics/sod/draw_curves.py new file mode 100644 index 0000000..d69e559 --- /dev/null +++ b/metrics/sod/draw_curves.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- + +import math + +import numpy as np + +from utils.misc import colored_print +from utils.recorders import CurveDrawer + + +def draw_curves( + for_pr: bool = True, + axes_setting: dict = None, + curves_npy_path: str = "", + row_num: int = 1, + drawing_info: dict = None, + dataset_info: dict = None, +): + mode = "pr" if for_pr else "fm" + mode_axes_setting = axes_setting[mode] + + x_label, y_label = mode_axes_setting["x_label"], mode_axes_setting["y_label"] + x_lim, y_lim = mode_axes_setting["x_lim"], mode_axes_setting["y_lim"] + + curves = np.load(curves_npy_path, allow_pickle=True).item() + + curve_drawer = CurveDrawer( + row_num=row_num, col_num=math.ceil(len(dataset_info.keys()) / row_num) + ) + + for idx, dataset_name in enumerate(dataset_info.keys()): + # 与cfg[dataset_info]中的key保持一致 + dataset_results = curves[dataset_name] + for method_name, method_info in drawing_info.items(): + # 与cfg[drawing_info]中的key保持一致 + method_results = dataset_results.get(method_name, None) + if method_results is None: + colored_print( + msg=f"{method_name} does not have results on {dataset_name}", mode="warning" + ) + continue + + if mode == "pr": + assert isinstance(method_results["p"], (list, tuple)) + assert isinstance(method_results["r"], (list, tuple)) + y_data = method_results["p"] + x_data = method_results["r"] + else: + assert isinstance(method_results["fm"], (list, tuple)) + y_data = method_results["fm"] + x_data = np.linspace(0, 1, 256) + + curve_drawer.draw_method_curve( + curr_idx=idx, + dataset_name=dataset_name.upper(), + method_curve_setting=method_info["curve_setting"], + x_label=x_label, + 
y_label=y_label, + x_data=x_data, + y_data=y_data, + x_lim=x_lim, + y_lim=y_lim, + ) + curve_drawer.show() diff --git a/metrics/sod/metrics.py b/metrics/sod/metrics.py deleted file mode 100644 index 8cf514a..0000000 --- a/metrics/sod/metrics.py +++ /dev/null @@ -1,443 +0,0 @@ -# -*- coding: utf-8 -*- -import numpy as np -from scipy.ndimage import convolve -from scipy.ndimage import distance_transform_edt as bwdist - -__version__ = "1.2.1" - -_EPS = 1e-16 -_TYPE = np.float64 - - -def _prepare_data(pred: np.ndarray, gt: np.ndarray) -> tuple: - gt = gt > 128 - # im2double, mapminmax - pred = pred / 255 - if pred.max() != pred.min(): - pred = (pred - pred.min()) / (pred.max() - pred.min()) - return pred, gt - - -def _get_adaptive_threshold(matrix: np.ndarray, max_value: float = 1) -> float: - return min(2 * matrix.mean(), max_value) - - -class Fmeasure(object): - def __init__(self, beta: float = 0.3): - self.beta = beta - self.precisions = [] - self.recalls = [] - self.adaptive_fms = [] - self.changeable_fms = [] - - def step(self, pred: np.ndarray, gt: np.ndarray): - pred, gt = _prepare_data(pred, gt) - - adaptive_fm = self.cal_adaptive_fm(pred=pred, gt=gt) - self.adaptive_fms.append(adaptive_fm) - - precisions, recalls, changeable_fms = self.cal_pr(pred=pred, gt=gt) - self.precisions.append(precisions) - self.recalls.append(recalls) - self.changeable_fms.append(changeable_fms) - - def cal_adaptive_fm(self, pred: np.ndarray, gt: np.ndarray) -> float: - # 快速统计numpy数组的非零值建议使用np.count_nonzero, - # 一个简单的小实验可见tests/test_speed_for_count_nonzero.py - adaptive_threshold = _get_adaptive_threshold(pred, max_value=1) - binary_predcition = pred >= adaptive_threshold - area_intersection = binary_predcition[gt].sum() - if area_intersection == 0: - adaptive_fm = 0 - else: - pre = area_intersection / np.count_nonzero(binary_predcition) - rec = area_intersection / np.count_nonzero(gt) - adaptive_fm = (1 + self.beta) * pre * rec / (self.beta * pre + rec) - return adaptive_fm - - def 
cal_pr(self, pred: np.ndarray, gt: np.ndarray) -> tuple: - # 1. 获取预测结果在真值前背景区域中的直方图 - pred = (pred * 255).astype(np.uint8) - bins = np.linspace(0, 256, 257) - fg_hist, _ = np.histogram(pred[gt], bins=bins) # 最后一个bin为[255, 256] - bg_hist, _ = np.histogram(pred[~gt], bins=bins) - # 2. 使用累积直方图(Cumulative Histogram)获得对应真值前背景中大于不同阈值的像素数量 - # 这里使用累加(cumsum)就是为了一次性得出 >=不同阈值 的像素数量, 这里仅计算了前景区域 - fg_w_thrs = np.cumsum(np.flip(fg_hist), axis=0) - bg_w_thrs = np.cumsum(np.flip(bg_hist), axis=0) - # 3. 使用不同阈值的结果计算对应的precision和recall - # p和r的计算的真值是pred==1>==1,二者仅有分母不同,分母前者是pred==1,后者是gt==1 - # 为了同时计算不同阈值的结果,这里使用hsitogram&flip&cumsum 获得了不同各自的前景像素数量 - TPs = fg_w_thrs - Ps = fg_w_thrs + bg_w_thrs - # 为防止除0,这里针对除0的情况分析后直接对于0分母设为1,因为此时分子必为0 - Ps[Ps == 0] = 1 - T = max(np.count_nonzero(gt), 1) - # TODO: T=0 或者 特定阈值下fg_w_thrs=0或者bg_w_thrs=0,这些都会包含在TPs[i]=0的情况中, - # 但是这里使用TPs不便于处理列表 - # T=0 -> fg_w_thrs=[0, ..., 0] -> TPs=[0, ..., 0] 解决办法:T重新赋值为1 - # Ps[i] = 0 -> fg_w_thrs[i] = 0, bg_w_thrs[i] = 0 - precisions = TPs / Ps - recalls = TPs / T - - numerator = (1 + self.beta) * precisions * recalls - denominator = np.where(numerator == 0, 1, self.beta * precisions + recalls) - changeable_fms = numerator / denominator - return precisions, recalls, changeable_fms - - def get_results(self) -> dict: - adaptive_fm = np.mean(np.array(self.adaptive_fms, _TYPE)) - changeable_fm = np.mean(np.array(self.changeable_fms, dtype=_TYPE), axis=0) - precision = np.mean(np.array(self.precisions, dtype=_TYPE), axis=0) # N, 256 - recall = np.mean(np.array(self.recalls, dtype=_TYPE), axis=0) # N, 256 - return dict(fm=dict(adp=adaptive_fm, curve=changeable_fm), pr=dict(p=precision, r=recall)) - - -class MAE(object): - def __init__(self): - self.maes = [] - - def step(self, pred: np.ndarray, gt: np.ndarray): - pred, gt = _prepare_data(pred, gt) - - mae = self.cal_mae(pred, gt) - self.maes.append(mae) - - def cal_mae(self, pred: np.ndarray, gt: np.ndarray) -> float: - mae = np.mean(np.abs(pred - gt)) - return mae 
class Smeasure(object):
    """S-measure: structural similarity between a prediction and a binary mask.

    Blends an object-aware term and a region-aware term, weighted by ``alpha``.
    """

    def __init__(self, alpha: float = 0.5):
        self.sms = []
        self.alpha = alpha

    def step(self, pred: np.ndarray, gt: np.ndarray):
        """Normalize one pred/gt pair and accumulate its S-measure."""
        pred, gt = _prepare_data(pred=pred, gt=gt)

        self.sms.append(self.cal_sm(pred, gt))

    def cal_sm(self, pred: np.ndarray, gt: np.ndarray) -> float:
        """S-measure for a single pair.

        Degenerate masks (all background / all foreground) are scored directly
        from the mean prediction, following the original MATLAB code.
        """
        gt_mean = np.mean(gt)
        if gt_mean == 0:  # mask is entirely background
            return 1 - np.mean(pred)
        if gt_mean == 1:  # mask is entirely foreground
            return np.mean(pred)
        score = self.alpha * self.object(pred, gt) + (1 - self.alpha) * self.region(pred, gt)
        return max(0, score)

    def object(self, pred: np.ndarray, gt: np.ndarray) -> float:
        """Object-aware similarity: foreground and background scored separately."""
        fg_part = pred * gt
        bg_part = (1 - pred) * (1 - gt)
        fg_weight = np.mean(gt)
        return fg_weight * self.s_object(fg_part, gt) + (1 - fg_weight) * self.s_object(bg_part, 1 - gt)

    def s_object(self, pred: np.ndarray, gt: np.ndarray) -> float:
        """Score one region from the mean/std of predictions inside the mask."""
        region_vals = pred[gt == 1]
        mean_val = np.mean(region_vals)
        std_val = np.std(region_vals)
        return 2 * mean_val / (np.power(mean_val, 2) + 1 + std_val + _EPS)

    def region(self, pred: np.ndarray, gt: np.ndarray) -> float:
        """Region-aware similarity: SSIM over four quadrants around the GT centroid."""
        cx, cy = self.centroid(gt)
        parts = self.divide_with_xy(pred, gt, cx, cy)

        quadrant_scores = [self.ssim(p, g) for p, g in zip(parts["pred"], parts["gt"])]
        return sum(w * s for w, s in zip(parts["weight"], quadrant_scores))

    def centroid(self, matrix: np.ndarray) -> tuple:
        """Return the 1-based (x, y) centroid of ``matrix``.

        The +1 keeps the coordinates consistent with the MATLAB reference
        implementation (MATLAB ranges like 1:X include X), so the later
        quadrant split needs no extra offset.
        """
        h, w = matrix.shape
        total = matrix.sum()
        if total == 0:
            # empty mask: fall back to the geometric center
            cx = np.round(w / 2)
            cy = np.round(h / 2)
        else:
            cols = np.arange(w)
            rows = np.arange(h)
            cx = np.round(np.sum(np.sum(matrix, axis=0) * cols) / total)
            cy = np.round(np.sum(np.sum(matrix, axis=1) * rows) / total)
        return int(cx) + 1, int(cy) + 1

    def divide_with_xy(self, pred: np.ndarray, gt: np.ndarray, x, y) -> dict:
        """Split pred/gt into four quadrants at (x, y) and compute area weights."""
        h, w = gt.shape
        area = h * w

        # quadrant order: left-top, right-top, left-bottom, right-bottom
        gt_parts = (gt[0:y, 0:x], gt[0:y, x:w], gt[y:h, 0:x], gt[y:h, x:w])
        pred_parts = (pred[0:y, 0:x], pred[0:y, x:w], pred[y:h, 0:x], pred[y:h, x:w])

        w1 = x * y / area
        w2 = y * (w - x) / area
        w3 = (h - y) * x / area
        # last weight is the complement so the four always sum to exactly 1
        w4 = 1 - w1 - w2 - w3

        return dict(gt=gt_parts, pred=pred_parts, weight=(w1, w2, w3, w4))

    def ssim(self, pred: np.ndarray, gt: np.ndarray) -> float:
        """SSIM between two same-shaped regions (sample statistics, N-1)."""
        h, w = pred.shape
        n = h * w

        mean_p = np.mean(pred)
        mean_g = np.mean(gt)
        var_p = np.sum((pred - mean_p) ** 2) / (n - 1)
        var_g = np.sum((gt - mean_g) ** 2) / (n - 1)
        cov = np.sum((pred - mean_p) * (gt - mean_g)) / (n - 1)

        numerator = 4 * mean_p * mean_g * cov
        denominator = (mean_p ** 2 + mean_g ** 2) * (var_p + var_g)

        if numerator != 0:
            return numerator / (denominator + _EPS)
        if denominator == 0:  # numerator is also 0 here: both regions constant
            return 1
        return 0

    def get_results(self) -> dict:
        """Average the accumulated per-image scores."""
        return dict(sm=np.mean(np.array(self.sms, dtype=_TYPE)))
class Emeasure(object):
    """E-measure: enhanced alignment between a prediction and a binary mask.

    ``step`` records both the adaptive-threshold score and the full
    256-threshold curve for each pred/gt pair.
    """

    def __init__(self):
        self.adaptive_ems = []
        self.changeable_ems = []

    def step(self, pred: np.ndarray, gt: np.ndarray):
        """Normalize one pred/gt pair, cache its GT statistics, record scores."""
        pred, gt = _prepare_data(pred=pred, gt=gt)
        # per-image statistics reused by all computations below
        self.gt_fg_numel = np.count_nonzero(gt)
        self.gt_size = gt.shape[0] * gt.shape[1]

        self.changeable_ems.append(self.cal_changeable_em(pred, gt))
        self.adaptive_ems.append(self.cal_adaptive_em(pred, gt))

    def cal_adaptive_em(self, pred: np.ndarray, gt: np.ndarray) -> float:
        """E-measure at the adaptive threshold."""
        threshold = _get_adaptive_threshold(pred, max_value=1)
        return self.cal_em_with_threshold(pred, gt, threshold=threshold)

    def cal_changeable_em(self, pred: np.ndarray, gt: np.ndarray) -> np.ndarray:
        """E-measure curve over all 256 uint8 thresholds."""
        return self.cal_em_with_cumsumhistogram(pred, gt)

    def cal_em_with_threshold(self, pred: np.ndarray, gt: np.ndarray, threshold: float) -> float:
        """E-measure for one fixed binarization threshold.

        Naming scheme for counters: ``<pred region>_<gt region>_numel``; an
        underscore run in place of a region means "don't care" about it.
        """
        binary_pred = pred >= threshold
        fg_fg = np.count_nonzero(binary_pred & gt)
        fg_bg = np.count_nonzero(binary_pred & ~gt)

        pred_fg = fg_fg + fg_bg
        pred_bg = self.gt_size - pred_fg

        if self.gt_fg_numel == 0:
            # empty GT: only predicted-background pixels align
            enhanced_sum = pred_bg
        elif self.gt_fg_numel == self.gt_size:
            # all-foreground GT: only predicted-foreground pixels align
            enhanced_sum = pred_fg
        else:
            part_counts, demeaned_pairs = self.generate_parts_numel_combinations(
                fg_fg_numel=fg_fg,
                fg_bg_numel=fg_bg,
                pred_fg_numel=pred_fg,
                pred_bg_numel=pred_bg,
            )
            enhanced_sum = 0
            for count, (dp, dg) in zip(part_counts, demeaned_pairs):
                align = 2 * (dp * dg) / (dp ** 2 + dg ** 2 + _EPS)
                enhanced_sum += ((align + 1) ** 2 / 4) * count

        return enhanced_sum / (self.gt_size - 1 + _EPS)

    def cal_em_with_cumsumhistogram(self, pred: np.ndarray, gt: np.ndarray) -> np.ndarray:
        """Vectorized counterpart of ``cal_em_with_threshold`` for all 256 thresholds.

        Uses flipped cumulative histograms so each entry holds the number of
        pixels whose value is >= the corresponding threshold.
        """
        pred = (pred * 255).astype(np.uint8)
        bins = np.linspace(0, 256, 257)
        fg_fg_hist, _ = np.histogram(pred[gt], bins=bins)
        fg_bg_hist, _ = np.histogram(pred[~gt], bins=bins)
        fg_fg_w_thrs = np.cumsum(np.flip(fg_fg_hist), axis=0)
        fg_bg_w_thrs = np.cumsum(np.flip(fg_bg_hist), axis=0)

        pred_fg_w_thrs = fg_fg_w_thrs + fg_bg_w_thrs
        pred_bg_w_thrs = self.gt_size - pred_fg_w_thrs

        if self.gt_fg_numel == 0:
            enhanced_sum = pred_bg_w_thrs
        elif self.gt_fg_numel == self.gt_size:
            enhanced_sum = pred_fg_w_thrs
        else:
            part_counts, demeaned_pairs = self.generate_parts_numel_combinations(
                fg_fg_numel=fg_fg_w_thrs,
                fg_bg_numel=fg_bg_w_thrs,
                pred_fg_numel=pred_fg_w_thrs,
                pred_bg_numel=pred_bg_w_thrs,
            )
            parts = np.empty(shape=(4, 256), dtype=np.float64)
            for idx, (count, (dp, dg)) in enumerate(zip(part_counts, demeaned_pairs)):
                align = 2 * (dp * dg) / (dp ** 2 + dg ** 2 + _EPS)
                parts[idx] = ((align + 1) ** 2 / 4) * count
            enhanced_sum = parts.sum(axis=0)

        return enhanced_sum / (self.gt_size - 1 + _EPS)

    def generate_parts_numel_combinations(
        self, fg_fg_numel, fg_bg_numel, pred_fg_numel, pred_bg_numel
    ):
        """Pixel counts of the four pred/gt combinations plus demeaned value pairs.

        Order of parts/pairs: (fg,fg), (fg,bg), (bg,fg), (bg,bg).
        """
        bg_fg_numel = self.gt_fg_numel - fg_fg_numel
        bg_bg_numel = pred_bg_numel - bg_fg_numel

        parts_numel = [fg_fg_numel, fg_bg_numel, bg_fg_numel, bg_bg_numel]

        mean_pred = pred_fg_numel / self.gt_size
        mean_gt = self.gt_fg_numel / self.gt_size

        # demeaned binary values: fg pixels carry 1 - mean, bg pixels carry -mean
        pred_fg_demeaned = 1 - mean_pred
        pred_bg_demeaned = 0 - mean_pred
        gt_fg_demeaned = 1 - mean_gt
        gt_bg_demeaned = 0 - mean_gt

        combinations = [
            (pred_fg_demeaned, gt_fg_demeaned),
            (pred_fg_demeaned, gt_bg_demeaned),
            (pred_bg_demeaned, gt_fg_demeaned),
            (pred_bg_demeaned, gt_bg_demeaned),
        ]
        return parts_numel, combinations

    def get_results(self) -> dict:
        """Average adaptive scores and threshold curves over recorded images."""
        adaptive_em = np.mean(np.array(self.adaptive_ems, dtype=_TYPE))
        changeable_em = np.mean(np.array(self.changeable_ems, dtype=_TYPE), axis=0)
        return dict(em=dict(adp=adaptive_em, curve=changeable_em))
dict(em=dict(adp=adaptive_em, curve=changeable_em)) - - -class WeightedFmeasure(object): - def __init__(self, beta: float = 1): - self.beta = beta - self.weighted_fms = [] - - def step(self, pred: np.ndarray, gt: np.ndarray): - pred, gt = _prepare_data(pred=pred, gt=gt) - - if np.all(~gt): - wfm = 0 - else: - wfm = self.cal_wfm(pred, gt) - self.weighted_fms.append(wfm) - - def cal_wfm(self, pred: np.ndarray, gt: np.ndarray) -> float: - # [Dst,IDXT] = bwdist(dGT); - Dst, Idxt = bwdist(gt == 0, return_indices=True) - - # %Pixel dependency - # E = abs(FG-dGT); - E = np.abs(pred - gt) - # Et = E; - # Et(~GT)=Et(IDXT(~GT)); %To deal correctly with the edges of the foreground region - Et = np.copy(E) - Et[gt == 0] = Et[Idxt[0][gt == 0], Idxt[1][gt == 0]] - - # K = fspecial('gaussian',7,5); - # EA = imfilter(Et,K); - K = self.matlab_style_gauss2D((7, 7), sigma=5) - EA = convolve(Et, weights=K, mode="constant", cval=0) - # MIN_E_EA = E; - # MIN_E_EA(GT & EA np.ndarray: - """ - 2D gaussian mask - should give the same result as MATLAB's - fspecial('gaussian',[shape],[sigma]) - """ - m, n = [(ss - 1) / 2 for ss in shape] - y, x = np.ogrid[-m : m + 1, -n : n + 1] - h = np.exp(-(x * x + y * y) / (2 * sigma * sigma)) - h[h < np.finfo(h.dtype).eps * h.max()] = 0 - sumh = h.sum() - if sumh != 0: - h /= sumh - return h - - def get_results(self) -> dict: - weighted_fm = np.mean(np.array(self.weighted_fms, dtype=_TYPE)) - return dict(wfm=weighted_fm) diff --git a/readme.md b/readme.md index 394b74f..f5223b1 100644 --- a/readme.md +++ b/readme.md @@ -4,6 +4,26 @@ A Python-based salient object detection and video object segmentation evaluation > **重要提示**,最近基于Fan的matlab代码,实现了一份更加快速和准确的指标代码,已经整合到该代码中。 +## TODO + +- [ ] 添加更详细的注释 +- [ ] 优化xlsx导出的代码 +- [ ] 剥离USVOS部分的代码,让本仓库更专注一些 + +## 重要提示 + +- 2021年03月12日 + - 这一版本正式将sod的评估、绘图代码与配置分离,主要考虑如下 + - 用户的配置是需要调整的,这部分不适宜被git严格的监视,也便于提交后续更新的时候,直接忽略关于配置的更改,即后续更新时, + 用户配置部分会不再更新,若是添加新功能,直接调整原始的函数,其参数默认关闭新功能,保证用户不会受到影响。 + - 
def colored_print(msg: str, mode: str = "general"):
    """Print ``msg``, optionally wrapped in an ANSI color/style escape sequence.

    :param msg: the message to print
    :param mode: display mode; one of "general" (plain), "warning"
        (blinking red) or "error" (bold red)
    :raises ValueError: if ``mode`` is not a supported name
    """
    templates = {
        "general": "{}",
        "warning": "\033[5;31m{}\033[0m",  # blinking red
        "error": "\033[1;31m{}\033[0m",  # bold red
    }
    if mode not in templates:
        raise ValueError(f"{mode} is invalid mode.")
    print(templates[mode].format(msg))
def ndarray_to_basetype(data):
    """Convert ndarray values into plain Python types.

    Accepts a single ndarray, or a tuple/list/dict whose values are ndarrays.
    Each array becomes a Python list via ``tolist()``; single-element lists
    collapse to the bare scalar. Tuples and lists both come back as lists.
    """

    def _unbox(array):
        basic = array.tolist()
        # collapse [x] -> x so scalar-like arrays read naturally
        if isinstance(basic, list) and len(basic) == 1:
            return basic[0]
        return basic

    if isinstance(data, (tuple, list)):
        return [_unbox(item) for item in data]
    if isinstance(data, dict):
        return {key: _unbox(value) for key, value in data.items()}
    assert isinstance(data, np.ndarray)
    return _unbox(data)
class MetricRecorder(object):
    """Bundles the five SOD metrics behind a single update/show interface."""

    def __init__(self):
        self.mae = MAE()
        self.fm = Fmeasure()
        self.sm = Smeasure()
        self.em = Emeasure()
        self.wfm = WeightedFmeasure()

    def update(self, pre: np.ndarray, gt: np.ndarray):
        """Feed one uint8 prediction/ground-truth pair to every metric."""
        assert pre.shape == gt.shape
        assert pre.dtype == np.uint8
        assert gt.dtype == np.uint8

        self.mae.step(pre, gt)
        self.sm.step(pre, gt)
        self.fm.step(pre, gt)
        self.em.step(pre, gt)
        self.wfm.step(pre, gt)

    def show(self, num_bits: int = 3, return_ndarray: bool = False) -> dict:
        """Collect every metric result.

        Returns a dict with two sub-dicts:

        - "sequential": threshold curves (fm/em/p/r), flipped into
          ascending-threshold order
        - "numerical": scalar scores (SM/MAE/maxE/avgE/adpE/maxF/avgF/adpF/wFm)

        :param num_bits: round scalar scores to this many digits (ignored
            unless it is an int; pass None to disable rounding)
        :param return_ndarray: keep numpy types instead of converting
            everything to plain Python lists/scalars
        """
        fm_info = self.fm.get_results()
        fm, pr = fm_info["fm"], fm_info["pr"]
        em = self.em.get_results()["em"]
        sm = self.sm.get_results()["sm"]
        mae = self.mae.get_results()["mae"]
        wfm = self.wfm.get_results()["wfm"]

        sequential_results = {
            "fm": np.flip(fm["curve"]),
            "em": np.flip(em["curve"]),
            "p": np.flip(pr["p"]),
            "r": np.flip(pr["r"]),
        }
        numerical_results = {
            "SM": sm,
            "MAE": mae,
            "maxE": em["curve"].max(),
            "avgE": em["curve"].mean(),
            "adpE": em["adp"],
            "maxF": fm["curve"].max(),
            "avgF": fm["curve"].mean(),
            "adpF": fm["adp"],
            "wFm": wfm,
        }
        if num_bits is not None and isinstance(num_bits, int):
            numerical_results = {k: v.round(num_bits) for k, v in numerical_results.items()}
        if not return_ndarray:
            sequential_results = ndarray_to_basetype(sequential_results)
            numerical_results = ndarray_to_basetype(numerical_results)
        return {"sequential": sequential_results, "numerical": numerical_results}
class GroupedMetricRecorder(object):
    """MetricRecorder variant that averages results over named groups.

    Each group gets its own :class:`MetricRecorder`; ``show`` averages the
    per-group curves/scalars, then re-derives the curve-based scalars
    (max/avg E and F) from the averaged curves.
    """

    def __init__(self):
        self.metric_recorders = {}
        # these scalars are recomputed from the group-averaged curves
        self.re_cal_metrics = ["maxE", "avgE", "maxF", "avgF"]

    def update(self, group_name: str, pre: np.ndarray, gt: np.ndarray):
        """Route one pred/gt pair to the recorder of ``group_name`` (lazily created)."""
        if group_name not in self.metric_recorders:
            self.metric_recorders[group_name] = MetricRecorder()
        self.metric_recorders[group_name].update(pre, gt)

    def show(self, num_bits: int = 3, return_ndarray: bool = False) -> dict:
        """Collect group-averaged metric results.

        Returns a dict with two sub-dicts:

        - "sequential": averaged threshold curves (fm/em/p/r)
        - "numerical": scalar scores (SM/MAE/maxE/avgE/adpE/maxF/avgF/adpF/wFm)
        """
        group_metrics = {}
        for name, recorder in self.metric_recorders.items():
            raw = recorder.show(num_bits=None, return_ndarray=True)
            # keep curves plus the scalars that are directly averageable
            kept_scalars = {
                metric_name: metric_value
                for metric_name, metric_value in raw["numerical"].items()
                if metric_name not in self.re_cal_metrics
            }
            group_metrics[name] = {**raw["sequential"], **kept_scalars}
        avg_results = self.average_group_metrics(group_metrics=group_metrics)

        sequential_results = {
            "fm": avg_results["fm"],
            "em": avg_results["em"],
            "p": avg_results["p"],
            "r": avg_results["r"],
        }
        numerical_results = {
            "SM": avg_results["SM"],
            "MAE": avg_results["MAE"],
            "maxE": avg_results["em"].max(),
            "avgE": avg_results["em"].mean(),
            "adpE": avg_results["adpE"],
            "maxF": avg_results["fm"].max(),
            "avgF": avg_results["fm"].mean(),
            "adpF": avg_results["adpF"],
            "wFm": avg_results["wFm"],
        }
        if num_bits is not None and isinstance(num_bits, int):
            numerical_results = {k: v.round(num_bits) for k, v in numerical_results.items()}
        if not return_ndarray:
            sequential_results = ndarray_to_basetype(sequential_results)
            numerical_results = ndarray_to_basetype(numerical_results)
        return {"sequential": sequential_results, "numerical": numerical_results}

    @staticmethod
    def average_group_metrics(group_metrics: dict) -> dict:
        """Element-wise mean of each metric across all groups."""
        collected = defaultdict(list)
        for metrics in group_metrics.values():
            for metric_name, metric_array in metrics.items():
                collected[metric_name].append(metric_array)
        return {name: np.mean(np.vstack(values), axis=0) for name, values in collected.items()}