GhostVector.py — executable file, 107 lines (90 loc), 3.23 KB
#!/usr/bin/env python-mpi
import numpy as np
from mpi4py import MPI
class GhostVector:
    """Distributed 1-D vector with one-cell ghost (halo) regions, built on
    mpi4py point-to-point messaging.

    Local storage layout per rank after createGhostVector():
      - rank 0:         [local cells | right ghost]
      - last rank:      [left ghost | local cells]
      - interior ranks: [left ghost | local cells | right ghost]

    NOTE(review): with a single process the code still allocates one ghost
    cell (rank 0 is also the last rank), and dotProduct() then includes that
    cell — harmless while it stays zero, but a caller who writes into it
    would skew the single-process dot product.
    """
    def __init__(self):
        # All state (globalSize, localSize, ghostSize, ghostVector) is
        # created lazily by createGhostVector().
        pass
    def createGhostVector(self, comm, globalSize):
        """Partition a vector of globalSize entries across comm's ranks and
        allocate this rank's zero-initialized local array, ghosts included.

        Returns the local numpy array (also kept as self.ghostVector).
        """
        rank = comm.Get_rank()
        nproc = comm.Get_size()
        self.globalSize = globalSize
        # Floor division is required: with Python 3 true division `/` the
        # indices below become floats and np.zeros() would reject ghostSize.
        # (On Python 2 ints, // is identical to the original /.)
        lengthPerProc = self.globalSize // nproc
        startIndex = rank*lengthPerProc
        if (rank < nproc - 1):
            endIndex = (rank + 1)*lengthPerProc - 1
        else:
            # The last rank absorbs the remainder when nproc does not divide
            # globalSize evenly.
            endIndex = self.globalSize - 1
        self.localSize = endIndex - startIndex + 1
        # End ranks have one neighbor (one ghost cell); interior ranks two.
        if (rank == 0 or rank == nproc - 1):
            self.ghostSize = self.localSize + 1
        else:
            self.ghostSize = self.localSize + 2
        self.ghostVector = np.zeros(self.ghostSize)
        return self.ghostVector
    def getGlobalSize(self):
        """Total number of (non-ghost) entries across all ranks."""
        return self.globalSize
    def getLocalSize(self):
        """Number of owned (non-ghost) entries on this rank."""
        return self.localSize
    def getGhostSize(self):
        """Length of the local array including ghost cells."""
        return self.ghostSize
    def exchangeGhostValues(self, comm):
        """Fill this rank's ghost cells with the neighbors' boundary values.

        Each rank posts a non-blocking send of its own boundary entry, then
        blocks on the matching receive, then waits on its send request.
        No-op when running on a single process.
        """
        nproc = comm.Get_size()
        rank = comm.Get_rank()
        if (nproc == 1):
            pass
        else:
            left = rank - 1
            right = rank + 1
            if (rank > 0):
                # Send my first owned value left; receive left neighbor's
                # last owned value into my left ghost cell.
                req = comm.isend(self.ghostVector[1], dest = left)
                self.ghostVector[0] = comm.recv(source = left)
                MPI.Request.Wait(req)
            if (rank < nproc - 1):
                # Send my last owned value right; receive right neighbor's
                # first owned value into my right ghost cell.
                req = comm.isend(self.ghostVector[-2], dest = right)
                self.ghostVector[-1] = comm.recv(source = right)
                MPI.Request.Wait(req)
    def dotProduct(self, comm):
        """Global dot product of the vector with itself.

        Each rank dots its owned slice (ghost cells excluded on multi-rank
        runs) and the partial products are summed with allreduce, so every
        rank receives the same scalar.
        """
        nproc = comm.Get_size()
        rank = comm.Get_rank()
        if (nproc == 1):
            return np.dot(self.ghostVector, self.ghostVector)
        else:
            # Strip the ghost cells this rank actually has.
            if (rank == 0):
                localProduct = np.dot(self.ghostVector[:-1], self.ghostVector[:-1])
            elif (rank == nproc - 1):
                localProduct = np.dot(self.ghostVector[1:], self.ghostVector[1:])
            else:
                localProduct = np.dot(self.ghostVector[1:-1], self.ghostVector[1:-1])
            globalProduct = comm.allreduce(localProduct, MPI.SUM)
            return globalProduct
    def gather(self, comm):
        """Collect every rank's local array (ghosts included) onto rank 0.

        On rank 0, self.totalGhostVector becomes the concatenation of all
        ranks' ghost vectors; on every other rank it is set to None.
        """
        nproc = comm.Get_size()
        rank = comm.Get_rank()
        for i in range(1, nproc):
            if (rank == i):
                # Distinct tag per sender so receives can be matched by rank.
                comm.send(self.ghostVector, dest = 0, tag = 100 + i)
        if (rank == 0):
            ghostList = []
            ghostList.append(self.ghostVector)
            for i in range(1, nproc):
                temp = comm.recv(source = i, tag = 100 + i)
                ghostList.append(temp)
            self.totalGhostVector = np.concatenate(ghostList)
        else:
            self.totalGhostVector = None
    def gatherAll(self, comm):
        """gather() onto rank 0, then broadcast the result to every rank."""
        self.gather(comm)
        self.totalGhostVector = comm.bcast(self.totalGhostVector, root = 0)
def main():
    """Demo driver: build a 20-entry ghost vector on MPI.COMM_WORLD,
    perturb one local entry, gather the full vector on every rank, and
    print each rank's copy. Returns 0 on success."""
    ghostVector = GhostVector()
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    u = ghostVector.createGhostVector(comm, 20)
    u[1] = -100
    ghostVector.gatherAll(comm)
    # print() function: the original Python 2 print statement is a
    # SyntaxError under Python 3.
    print("rank = ", rank, " ", ghostVector.totalGhostVector)
    return 0
if __name__ == "__main__":
    import sys
    # Run under mpiexec; propagate main()'s return code (0) as the
    # process exit status.
    sys.exit(main())