bugfix: plugins.extract.pipeline - Exclude CPU plugins from vram calculations
torzdf committed Oct 29, 2019
1 parent cc576bc commit 68109fc
Showing 2 changed files with 17 additions and 9 deletions.
6 changes: 3 additions & 3 deletions lib/alignments.py
@@ -206,10 +206,10 @@ def update_face(self, frame, idx, alignment):
         self.data[frame][idx] = alignment

     def filter_hashes(self, hashlist, filter_out=False):
-        """ Filter in or out faces that match the hashlist
-        filter_out=True: Remove faces that match in the hashlist
-        filter_out=False: Remove faces that are not in the hashlist
+        """ Filter in or out faces that match the hash list
+        filter_out=True: Remove faces that match in the hash list
+        filter_out=False: Remove faces that are not in the hash list
         """
         hashset = set(hashlist)
         for filename, frame in self.data.items():
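For context, the docstring corrected above describes the filter's semantics: keep or drop faces depending on whether their hash appears in the supplied hash list. A minimal sketch of that behaviour, separate from the real Alignments class (the flat data layout and the face["hash"] access here are assumptions for illustration, not the repository's actual structures):

# Illustrative sketch only; not the actual Alignments.filter_hashes method.
def filter_hashes(data, hashlist, filter_out=False):
    """ filter_out=True:  remove faces whose hash IS in the hash list
        filter_out=False: remove faces whose hash is NOT in the hash list """
    hashset = set(hashlist)  # set membership checks are O(1)
    for frame, faces in data.items():
        # Keep a face when its membership in the set differs from filter_out
        data[frame] = [face for face in faces
                       if (face["hash"] in hashset) != filter_out]

data = {"frame1.png": [{"hash": "a1"}, {"hash": "b2"}]}
filter_hashes(data, ["a1"], filter_out=True)   # drops the "a1" face, keeps "b2"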
20 changes: 14 additions & 6 deletions plugins/extract/pipeline.py
@@ -443,12 +443,16 @@ def _set_extractor_batchsize(self):
                 logger.debug("Plugin requirements within threshold: (plugin_required: %sMB, "
                              "vram_free: %sMB)", plugin_required, vram_free)
                 return
-            # Hacky split across 3 plugins
-            available_vram = (vram_free - self._total_vram_required) // 3
+            # Hacky split across plugins that use vram
+            gpu_plugin_count = sum([1 for plugin in self._all_plugins if plugin.vram != 0])
+            available_vram = (vram_free - self._total_vram_required) // gpu_plugin_count
             for plugin in self._all_plugins:
-                self._set_plugin_batchsize(plugin, available_vram)
+                if plugin.vram != 0:
+                    self._set_plugin_batchsize(plugin, available_vram)
         else:
             for plugin in self._all_plugins:
+                if plugin.vram == 0:
+                    continue
                 vram_required = plugin.vram + self._vram_buffer
                 batch_required = plugin.vram_per_batch * plugin.batchsize
                 plugin_required = vram_required + batch_required
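What this hunk changes, with illustrative numbers (not taken from the commit): the spare VRAM used to be divided by a hard-coded 3, so a CPU-only plugin (vram == 0) diluted the share handed to the plugins that actually run on the GPU. The divisor now counts only VRAM-using plugins:

# Illustrative figures only: how the spare-VRAM split changes when
# CPU-only plugins are excluded from the divisor.
plugins = [("detect", 1792), ("align", 2240), ("mask-cpu", 0)]  # (name, vram MB)
vram_free = 6000
total_vram_required = 4032  # sum of the plugins' base vram figures

old_share = (vram_free - total_vram_required) // 3             # 656MB per plugin
gpu_plugin_count = sum(1 for _, vram in plugins if vram != 0)  # 2
new_share = (vram_free - total_vram_required) // gpu_plugin_count  # 984MB per GPU plugin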
@@ -461,9 +465,13 @@ def _set_extractor_batchsize(self):

     @staticmethod
     def _set_plugin_batchsize(plugin, available_vram):
-        """ Set the batch size for the given plugin based on given available vram """
-        plugin.batchsize = int(max(1, available_vram // plugin.vram_per_batch))
-        logger.verbose("Reset batchsize for %s to %s", plugin.name, plugin.batchsize)
+        """ Set the batch size for the given plugin based on given available vram.
+            Do not update plugins which have a vram_per_batch of 0 (CPU plugins) due to
+            zero division error.
+        """
+        if plugin.vram_per_batch != 0:
+            plugin.batchsize = int(max(1, available_vram // plugin.vram_per_batch))
+            logger.verbose("Reset batchsize for %s to %s", plugin.name, plugin.batchsize)

     def _join_threads(self):
         """ Join threads for current pass """
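The guard in the second hunk exists because a CPU plugin reports a vram_per_batch of 0, and floor division by zero raises ZeroDivisionError. A self-contained sketch of the guarded calculation (the Plugin class below is a hypothetical stand-in for illustration, not faceswap's plugin API):

# Hypothetical stand-in for a plugin object; attribute names mirror the diff.
class Plugin:
    def __init__(self, name, vram_per_batch, batchsize=1):
        self.name = name
        self.vram_per_batch = vram_per_batch  # 0 for CPU-only plugins
        self.batchsize = batchsize

def set_plugin_batchsize(plugin, available_vram):
    # Skip CPU plugins: dividing by vram_per_batch == 0 would raise
    # ZeroDivisionError, which is exactly the bug this commit fixes.
    if plugin.vram_per_batch != 0:
        plugin.batchsize = int(max(1, available_vram // plugin.vram_per_batch))

set_plugin_batchsize(Plugin("align", 64), 984)    # batchsize becomes 15
set_plugin_batchsize(Plugin("mask-cpu", 0), 984)  # left untouched, no crash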
