
added some comments to batch caching
qmac committed Apr 3, 2018
1 parent a42f14d commit 36b93ff
Showing 1 changed file with 5 additions and 0 deletions.
pliers/transformers/base.py

@@ -240,17 +240,21 @@ def _iterate(self, stims, validation='strict', *args, **kwargs):
             non_cached = []
             for stim in batch:
                 key = hash((hash(self), hash(stim)))
+                # If using the cache, only transform stims that aren't in the
+                # cache and haven't already appeared in the batch
                 if not (use_cache and (key in _cache or key in target_inds)):
                     target_inds[key] = len(non_cached)
                     non_cached.append(stim)
 
+            # _transform will likely fail if given an empty list
             if len(non_cached) > 0:
                 batch_results = self._transform(non_cached, *args, **kwargs)
             else:
                 batch_results = []
 
             for i, stim in enumerate(batch):
                 key = hash((hash(self), hash(stim)))
+                # Use the target index to get the result from batch_results
                 if key in target_inds:
                     result = batch_results[target_inds[key]]
                     result = _log_transformation(stim, result, self)
@@ -260,6 +264,7 @@ def _iterate(self, stims, validation='strict', *args, **kwargs):
                         result = list(result)
                     _cache[key] = result
                     results.append(result)
+                # Otherwise, the result should be in the cache
                 else:
                     results.append(_cache[key])
         return results
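For context, the batch-caching pattern these comments document can be sketched in isolation. Below is a minimal, self-contained illustration: `transform_batch`, the `transformer_id` parameter, and the toy upper-casing transform are hypothetical stand-ins, and only the keying and within-batch deduplication structure mirror the pliers code in the diff above.

# Minimal sketch of the batch-caching pattern shown in the diff.
# `transform_batch` and its toy upper-casing transform are hypothetical;
# only the keying/dedup logic mirrors pliers' _iterate.
def transform_batch(transformer_id, batch, cache, use_cache=True):
    target_inds = {}   # key -> index into the freshly computed results
    non_cached = []
    for stim in batch:
        key = hash((hash(transformer_id), hash(stim)))
        # Only transform stims that aren't cached and haven't already
        # appeared earlier in this batch (deduplication within the batch)
        if not (use_cache and (key in cache or key in target_inds)):
            target_inds[key] = len(non_cached)
            non_cached.append(stim)

    # Transforming an empty list may fail, so guard against it
    batch_results = [s.upper() for s in non_cached] if non_cached else []

    results = []
    for stim in batch:
        key = hash((hash(transformer_id), hash(stim)))
        if key in target_inds:
            # Freshly computed in this batch; cache and return it
            result = batch_results[target_inds[key]]
            cache[key] = result
            results.append(result)
        else:
            # Otherwise the result must already be in the cache
            results.append(cache[key])
    return results

cache = {}
transform_batch('upper', ['a', 'b', 'a'], cache)  # ['A', 'B', 'A']; 'a' computed once
transform_batch('upper', ['a', 'c'], cache)       # ['A', 'C']; 'a' served from cache

Note how the duplicate 'a' in the first batch is transformed only once, and the second batch reuses the cached result rather than recomputing it.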
