Skip to content

Commit

Permalink
Merge pull request #218 from prabhuramachandran/add_gpu_pull_for_equations
Browse files Browse the repository at this point in the history

Fix subtle issue with automatic syncing on gpu.
  • Loading branch information
prabhuramachandran committed May 31, 2019
2 parents 167aba3 + a14c651 commit f6d4bb4
Show file tree
Hide file tree
Showing 3 changed files with 19 additions and 2 deletions.
1 change: 0 additions & 1 deletion pysph/sph/acceleration_eval_gpu_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -239,7 +239,6 @@ def _sync_from_gpu(self, eq):

def _converged(self, equations):
for eq in equations:
self._sync_from_gpu(eq)
if not (eq.converged() > 0):
return False
return True
Expand Down
17 changes: 16 additions & 1 deletion pysph/sph/equation.py
Original file line number Diff line number Diff line change
Expand Up @@ -418,6 +418,21 @@ def converged(self):
"""
return 1.0

def _pull(self, *args):
"""Pull attributes from the GPU if needed.
The GPU reduce and converged methods run on the host and not on
the device and this is useful to call there. This is not useful
on the CPU as this does not matter which is why this is a
private method.
"""
if hasattr(self, '_gpu'):
ary = self._gpu.get()
if len(args) == 0:
args = ary.dtype.names
for arg in args:
setattr(self, arg, ary[arg][0])


###############################################################################
# `Group` class.
Expand Down Expand Up @@ -878,7 +893,7 @@ def get_equation_wrappers(self, known_types={}):
modified_classes = self._update_for_local_memory(predefined, eqs)

code_gen = self._Converter_Class(known_types=predefined)
ignore = ['reduce']
ignore = ['reduce', 'converged']
for cls in sorted(classes.keys()):
src = code_gen.parse_instance(eqs[cls], ignore_methods=ignore)
wrappers.append(src)
Expand Down
3 changes: 3 additions & 0 deletions pysph/sph/tests/test_acceleration_eval.py
Original file line number Diff line number Diff line change
Expand Up @@ -721,6 +721,9 @@ def post_loop(self, d_idx, d_au):
self.conv = 1

def converged(self):
    """Return the convergence flag, syncing it from the GPU if possible."""
    pull = getattr(self, '_pull', None)
    if pull is not None:
        # ``_pull`` exists only on the GPU path; on the CPU ``conv`` is
        # already up to date.
        pull('conv')
    return self.conv

equations = [Group(
Expand Down

0 comments on commit f6d4bb4

Please sign in to comment.