diff --git a/src/tagstudio/core/library/alchemy/registries/dupe_files_registry.py b/src/tagstudio/core/library/alchemy/registries/dupe_files_registry.py
index d99a4f498..4c1c8ab5b 100644
--- a/src/tagstudio/core/library/alchemy/registries/dupe_files_registry.py
+++ b/src/tagstudio/core/library/alchemy/registries/dupe_files_registry.py
@@ -7,6 +7,7 @@
 from tagstudio.core.library.alchemy.enums import BrowsingState
 from tagstudio.core.library.alchemy.library import Library
 from tagstudio.core.library.alchemy.models import Entry
+from tagstudio.core.utils.types import unwrap
 
 logger = structlog.get_logger()
 
@@ -28,7 +29,7 @@ def refresh_dupe_files(self, results_filepath: str | Path):
         A duplicate file is defined as an identical or near-identical file as determined
         by a DupeGuru results file.
         """
-        library_dir = self.library.library_dir
+        library_dir = unwrap(self.library.library_dir)
 
         if not isinstance(results_filepath, Path):
             results_filepath = Path(results_filepath)
@@ -43,7 +44,7 @@
         files: list[Entry] = []
         for element in group:
             if element.tag == "file":
-                file_path = Path(element.attrib.get("path"))
+                file_path = Path(unwrap(element.attrib.get("path")))
 
                 try:
                     path_relative = file_path.relative_to(library_dir)
@@ -82,5 +83,5 @@
         for i, entries in enumerate(self.groups):
             remove_ids = entries[1:]
             logger.info("Removing entries group", ids=remove_ids)
-            self.library.remove_entries(remove_ids)
+            self.library.remove_entries([e.id for e in remove_ids])
             yield i - 1  # The -1 waits for the next step to finish