Skip to content

Commit

Permalink
Clarify some control flow
Browse files Browse the repository at this point in the history
  • Loading branch information
sampsyo committed Aug 21, 2022
1 parent bf9bf48 commit ca38486
Showing 1 changed file with 13 additions and 8 deletions.
21 changes: 13 additions & 8 deletions beets/importer.py
Original file line number Diff line number Diff line change
Expand Up @@ -669,25 +669,29 @@ def find_duplicates(self, lib):
album name as the task.
"""
info = self.chosen_info()
info['albumartist'] = info['artist']

if info['artist'] is None:
# As-is import with no artist. Skip check.
return []

duplicates = []
task_paths = {i.path for i in self.items if i}
keys = config['import']['duplicate_keys']['album'].as_str_seq()
info['albumartist'] = info['artist']
# Create an Album object so that flexible attributes can be used.
# Create a temporary Album so computed fields are available for
# duplicate detection.
tmp_album = library.Album(lib, **info)

# Don't count albums with the same files as duplicates.
task_paths = {i.path for i in self.items if i}

duplicates = []
keys = config['import']['duplicate_keys']['album'].as_str_seq()
for album in tmp_album.duplicates(*keys):
        # Check whether all of the album's paths are present in the task,
        # i.e., the album is being completely re-imported by this task, in
        # which case it is not a duplicate (it will be replaced).
album_paths = {i.path for i in album.items()}
if not (album_paths <= task_paths):
duplicates.append(album)

return duplicates

def align_album_level_fields(self):
Expand Down Expand Up @@ -926,15 +930,16 @@ def find_duplicates(self, lib):
"""
info = self.chosen_info()

found_items = []
keys = config['import']['duplicate_keys']['single'].as_str_seq()
# Create an Item object so that flexible attributes can be used.
# Use a temporary Item to provide computed fields.
tmp_item = library.Item(lib, **info)

found_items = []
keys = config['import']['duplicate_keys']['single'].as_str_seq()
for other_item in tmp_item.duplicates(*keys):
            # An existing item with the same path is not considered a duplicate.
if other_item.path != self.item.path:
found_items.append(other_item)

return found_items

duplicate_items = find_duplicates
Expand Down

0 comments on commit ca38486

Please sign in to comment.