C $Header: /u/gcmpack/MITgcm/pkg/flt/exch_recv_get_vec.F,v 1.4 2009/01/07 23:17:23 jmc Exp $
C $Name: $

#include "CPP_EEOPTIONS.h"
#undef DBUG_EXCH_VEC
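C--   (switch the line above to #define to enable the DBUG_EXCH_VEC
C--    diagnostic prints written to errorMessageUnit)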

C--   Contents
C--   o EXCH_RL_RECV_GET_VEC_X
C--   o EXCH_RL_RECV_GET_VEC_Y

C---+----1----+----2----+----3----+----4----+----5----+----6----+----7-|--+----|
CBOP 0
C !ROUTINE: EXCH_RL_RECV_GET_VEC_X

C !INTERFACE:
      SUBROUTINE EXCH_RL_RECV_GET_VEC_X(
     U                        arrayE, arrayW,
     I                        myd1, myThid )
C !DESCRIPTION:
C     *==========================================================*
C     | SUBROUTINE EXCH_RL_RECV_GET_VEC_X
C     | o "Receive" or "Get" X edges for RL array.
C     *==========================================================*
C     | Routine that invokes actual message passing receive
C     | of data to update buffer in X direction
C     *==========================================================*

C !USES:
      IMPLICIT NONE

C     == Global variables ==
#include "SIZE.h"
#include "EEPARAMS.h"
#include "EESUPPORT.h"
#include "EXCH.h"

C !INPUT/OUTPUT PARAMETERS:
C     arrayE       :: buffer array to collect Eastern Neighbour values
C     arrayW       :: buffer array to collect Western Neighbour values
C     myd1         :: size (first dimension) of the buffer arrays
C     myThid       :: my Thread Id. number
      INTEGER myd1
      _RL arrayE(myd1, nSx, nSy), arrayW(myd1, nSx, nSy)
      INTEGER myThid
CEOP

C !LOCAL VARIABLES:
C     bi, bj        :: tile indices
C     biW, bjW      :: West tile indices
C     biE, bjE      :: East tile indices
C     theProc       :: \
C     theTag        ::  \
C     theType       ::  / Variables used in message building
C     theSize       :: /
C     westCommMode  :: variables holding type of communication
C     eastCommMode  ::  a particular tile face uses.
      INTEGER bi, bj
c     INTEGER biW, bjW, biE, bjE
      INTEGER westCommMode
      INTEGER eastCommMode
      INTEGER spinCount
      INTEGER ioUnit
#ifdef ALLOW_USE_MPI
      INTEGER theProc, theTag, theType, theSize
      INTEGER mpiStatus(MPI_STATUS_SIZE,4), mpiRc
#endif

C--   Under a "put" scenario we
C--     i. set completion signal for buffer we put into.
C--    ii. wait for completion signal indicating data has been put in
C--        our buffer.
C--   Under a messaging mode we "receive" the message.
C--   Under a "get" scenario <= not implemented, we
C--     i. Check that the data is ready.
C--    ii. Read the data.
C--   iii. Set data read flag + memory sync.
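C--   Tile edges in COMM_MSG mode are filled below with a blocking
C--   MPI_Recv; the wait section at the end of the routine handles the
C--   "put" handshake (completion flags and outstanding MPI requests).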

      ioUnit = errorMessageUnit

      DO bj=myByLo(myThid),myByHi(myThid)
       DO bi=myBxLo(myThid),myBxHi(myThid)
        westCommMode = _tileCommModeW(bi,bj)
        eastCommMode = _tileCommModeE(bi,bj)
#ifdef DBUG_EXCH_VEC
        write(ioUnit,'(A,5I6)') 'RECV_X,0 :',myProcId,bi,bj
#endif
c       biE = _tileBiE(bi,bj)
c       bjE = _tileBjE(bi,bj)
c       biW = _tileBiW(bi,bj)
c       bjW = _tileBjW(bi,bj)
        IF ( westCommMode .EQ. COMM_MSG ) THEN
#ifdef ALLOW_USE_MPI
#ifndef ALWAYS_USE_MPI
         IF ( usingMPI ) THEN
#endif
          theProc = tilePidW(bi,bj)
          theTag  = _tileTagRecvW(bi,bj)
          theType = _MPI_TYPE_RL
          theSize = myd1
#ifdef DBUG_EXCH_VEC
          write(ioUnit,'(A,5I5,I8)') 'qq2xW: ',myProcId,bi,bj,
     &         theProc,theTag,theSize
#endif
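C         Blocking receive of myd1 elements from the West-neighbour
C         process directly into this tile's West edge buffer.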
          CALL MPI_Recv( arrayW(1,bi,bj), theSize, theType,
     &                   theProc, theTag, MPI_COMM_MODEL,
     &                   mpiStatus, mpiRc )
c         if (theProc .eq. 0 .or. theProc .eq. 2) then
c          if (arrayW(1,bi,bj) .ne. 0.) then
c           write(errormessageunit,*) 'qq2y: ',myProcId,
c    &         theProc,theTag,theSize,(arrayW(i,bi,bj),i=1,32)
c          else
c           write(errormessageunit,*) 'qq2n: ',myProcId,
c    &         theProc,theTag,theSize,(arrayW(i,bi,bj),i=1,32)
c          endif
c         endif
#ifndef ALWAYS_USE_MPI
         ENDIF
#endif
#endif /* ALLOW_USE_MPI */
        ENDIF
#ifdef DBUG_EXCH_VEC
        write(ioUnit,'(A,5I6)') 'RECV_X,1 :',myProcId,bi,bj
#endif
        IF ( eastCommMode .EQ. COMM_MSG ) THEN
#ifdef ALLOW_USE_MPI
#ifndef ALWAYS_USE_MPI
         IF ( usingMPI ) THEN
#endif
          theProc = tilePidE(bi,bj)
          theTag  = _tileTagRecvE(bi,bj)
          theType = _MPI_TYPE_RL
          theSize = myd1
#ifdef DBUG_EXCH_VEC
          write(ioUnit,'(A,5I5,I8)') 'qq2xE: ',myProcId,bi,bj,
     &         theProc,theTag,theSize
#endif
          CALL MPI_Recv( arrayE(1,bi,bj), theSize, theType,
     &                   theProc, theTag, MPI_COMM_MODEL,
     &                   mpiStatus, mpiRc )
#ifndef ALWAYS_USE_MPI
         ENDIF
#endif
#endif /* ALLOW_USE_MPI */
        ENDIF
#ifdef DBUG_EXCH_VEC
        write(ioUnit,'(A,5I6)') 'RECV_X,2 :',myProcId,bi,bj
#endif
       ENDDO
      ENDDO
#ifdef DBUG_EXCH_VEC
      write(ioUnit,'(A,5I6,I12)') 'RECV_X:',myProcId
#endif

C--   Wait for buffers I am going to read to be ready.
      IF ( exchUsesBarrier ) THEN
C    o On some machines ( T90 ) use system barrier rather than spinning.
       CALL BARRIER( myThid )
      ELSE
C      o Spin waiting for completion flag. This avoids a global-lock
C        i.e. we only lock waiting for data that we need.
       DO bj=myByLo(myThid),myByHi(myThid)
        DO bi=myBxLo(myThid),myBxHi(myThid)
         spinCount = 0
         westCommMode = _tileCommModeW(bi,bj)
         eastCommMode = _tileCommModeE(bi,bj)
#ifdef DBUG_EXCH_VEC
         write(ioUnit,'(A,5I6,I12)') 'spin:', myProcId,bi,bj,
     &         westRecvAck(1,bi,bj), eastRecvAck(1,bi,bj), spinCount
#endif
   10    CONTINUE
          CALL FOOL_THE_COMPILER
          spinCount = spinCount+1
#ifdef DBUG_EXCH_VEC
          write(ioUnit,'(A,5I6,I12)') 'spin:', myProcId,bi,bj,
     &         westRecvAck(1,bi,bj), eastRecvAck(1,bi,bj), spinCount
          IF ( myThid.EQ.1 .AND. spinCount.GT. _EXCH_SPIN_LIMIT ) THEN
           STOP ' S/R EXCH_RECV_GET_X: spinCount > _EXCH_SPIN_LIMIT'
          ENDIF
#endif
         IF ( westRecvAck(1,bi,bj) .EQ. 0 ) GOTO 10
         IF ( eastRecvAck(1,bi,bj) .EQ. 0 ) GOTO 10
C        Clear outstanding requests
         westRecvAck(1,bi,bj) = 0
         eastRecvAck(1,bi,bj) = 0

         IF ( exchNReqsX(1,bi,bj) .GT. 0 ) THEN
#ifdef ALLOW_USE_MPI
#ifndef ALWAYS_USE_MPI
          IF ( usingMPI ) THEN
#endif
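C          Wait for any MPI requests still outstanding for this tile.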
          CALL MPI_Waitall( exchNReqsX(1,bi,bj), exchReqIdX(1,1,bi,bj),
     &                      mpiStatus, mpiRC )
#ifndef ALWAYS_USE_MPI
          ENDIF
#endif
#endif /* ALLOW_USE_MPI */
         ENDIF
C        Clear outstanding requests counter
         exchNReqsX(1,bi,bj) = 0
C        Update statistics
        ENDDO
       ENDDO
      ENDIF

      RETURN
      END

C---+----1----+----2----+----3----+----4----+----5----+----6----+----7-|--+----|
CBOP 0
C !ROUTINE: EXCH_RL_RECV_GET_VEC_Y

C !INTERFACE:
      SUBROUTINE EXCH_RL_RECV_GET_VEC_Y(
     U                        arrayN, arrayS,
     I                        myd1, myThid )
C !DESCRIPTION:
C     *==========================================================*
C     | SUBROUTINE EXCH_RL_RECV_GET_VEC_Y
C     | o "Receive" or "Get" Y edges for RL array.
C     *==========================================================*
C     | Routine that invokes actual message passing receive
C     | of data to update buffer in Y direction
C     *==========================================================*

C !USES:
      IMPLICIT NONE

C     == Global variables ==
#include "SIZE.h"
#include "EEPARAMS.h"
#include "EESUPPORT.h"
#include "EXCH.h"

C !INPUT/OUTPUT PARAMETERS:
C     arrayN       :: buffer array to collect Northern Neighbour values
C     arrayS       :: buffer array to collect Southern Neighbour values
C     myd1         :: size (first dimension) of the buffer arrays
C     myThid       :: my Thread Id. number
      INTEGER myd1
      _RL arrayN(myd1, nSx, nSy), arrayS(myd1, nSx, nSy)
      INTEGER myThid
CEOP

C !LOCAL VARIABLES:
C     bi, bj        :: tile indices
C     biS, bjS      :: South tile indices
C     biN, bjN      :: North tile indices
C     theProc       :: \
C     theTag        ::  \
C     theType       ::  / Variables used in message building
C     theSize       :: /
C     southCommMode :: variables holding type of communication
C     northCommMode ::  a particular tile face uses.
      INTEGER bi, bj
c     INTEGER biS, bjS, biN, bjN
      INTEGER southCommMode
      INTEGER northCommMode
      INTEGER spinCount
      INTEGER ioUnit
#ifdef ALLOW_USE_MPI
      INTEGER theProc, theTag, theType, theSize
      INTEGER mpiStatus(MPI_STATUS_SIZE,4), mpiRc
#endif

C--   Under a "put" scenario we
C--     i. set completion signal for buffer we put into.
C--    ii. wait for completion signal indicating data has been put in
C--        our buffer.
C--   Under a messaging mode we "receive" the message.
C--   Under a "get" scenario <= not implemented, we
C--     i. Check that the data is ready.
C--    ii. Read the data.
C--   iii. Set data read flag + memory sync.
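C--   The code below mirrors EXCH_RL_RECV_GET_VEC_X, applied to the
C--   South/North tile edges instead of West/East.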

      ioUnit = errorMessageUnit

      DO bj=myByLo(myThid),myByHi(myThid)
       DO bi=myBxLo(myThid),myBxHi(myThid)
        southCommMode = _tileCommModeS(bi,bj)
        northCommMode = _tileCommModeN(bi,bj)
#ifdef DBUG_EXCH_VEC
        write(ioUnit,'(A,5I6)') 'RECV_Y,0 :',myProcId,bi,bj
#endif
c       biN = _tileBiN(bi,bj)
c       bjN = _tileBjN(bi,bj)
c       biS = _tileBiS(bi,bj)
c       bjS = _tileBjS(bi,bj)
        IF ( southCommMode .EQ. COMM_MSG ) THEN
#ifdef ALLOW_USE_MPI
#ifndef ALWAYS_USE_MPI
         IF ( usingMPI ) THEN
#endif
          theProc = tilePidS(bi,bj)
          theTag  = _tileTagRecvS(bi,bj)
          theType = _MPI_TYPE_RL
          theSize = myd1
          CALL MPI_Recv( arrayS(1,bi,bj), theSize, theType,
     &                   theProc, theTag, MPI_COMM_MODEL,
     &                   mpiStatus, mpiRc )
#ifndef ALWAYS_USE_MPI
         ENDIF
#endif
#endif /* ALLOW_USE_MPI */
        ENDIF
#ifdef DBUG_EXCH_VEC
        write(ioUnit,'(A,5I6)') 'RECV_Y,1 :',myProcId,bi,bj
#endif
        IF ( northCommMode .EQ. COMM_MSG ) THEN
#ifdef ALLOW_USE_MPI
#ifndef ALWAYS_USE_MPI
         IF ( usingMPI ) THEN
#endif
          theProc = tilePidN(bi,bj)
          theTag  = _tileTagRecvN(bi,bj)
          theType = _MPI_TYPE_RL
          theSize = myd1
          CALL MPI_Recv( arrayN(1,bi,bj), theSize, theType,
     &                   theProc, theTag, MPI_COMM_MODEL,
     &                   mpiStatus, mpiRc )
#ifndef ALWAYS_USE_MPI
         ENDIF
#endif
#endif /* ALLOW_USE_MPI */
        ENDIF
#ifdef DBUG_EXCH_VEC
        write(ioUnit,'(A,5I6)') 'RECV_Y,2 :',myProcId,bi,bj
#endif
       ENDDO
      ENDDO
#ifdef DBUG_EXCH_VEC
      write(ioUnit,'(A,5I6,I12)') 'RECV_Y:',myProcId
#endif

C--   Wait for buffers I am going to read to be ready.
      IF ( exchUsesBarrier ) THEN
C    o On some machines ( T90 ) use system barrier rather than spinning.
       CALL BARRIER( myThid )
      ELSE
C      o Spin waiting for completion flag. This avoids a global-lock
C        i.e. we only lock waiting for data that we need.
       DO bj=myByLo(myThid),myByHi(myThid)
        DO bi=myBxLo(myThid),myBxHi(myThid)
         spinCount = 0
         southCommMode = _tileCommModeS(bi,bj)
         northCommMode = _tileCommModeN(bi,bj)
#ifdef DBUG_EXCH_VEC
         write(ioUnit,'(A,5I6,I12)') 'spin:', myProcId,bi,bj,
     &         southRecvAck(1,bi,bj), northRecvAck(1,bi,bj), spinCount
#endif
   10    CONTINUE
          CALL FOOL_THE_COMPILER
          spinCount = spinCount+1
#ifdef DBUG_EXCH_VEC
          write(ioUnit,'(A,5I6,I12)') 'spin:', myProcId,bi,bj,
     &         southRecvAck(1,bi,bj), northRecvAck(1,bi,bj), spinCount
          IF ( myThid.EQ.1 .AND. spinCount.GT. _EXCH_SPIN_LIMIT ) THEN
           STOP ' S/R EXCH_RECV_GET_Y: spinCount > _EXCH_SPIN_LIMIT'
          ENDIF
#endif
         IF ( southRecvAck(1,bi,bj) .EQ. 0 ) GOTO 10
         IF ( northRecvAck(1,bi,bj) .EQ. 0 ) GOTO 10
C        Clear outstanding requests
         southRecvAck(1,bi,bj) = 0
         northRecvAck(1,bi,bj) = 0

         IF ( exchNReqsY(1,bi,bj) .GT. 0 ) THEN
#ifdef ALLOW_USE_MPI
#ifndef ALWAYS_USE_MPI
          IF ( usingMPI ) THEN
#endif
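C          Wait for any MPI requests still outstanding for this tile.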
          CALL MPI_Waitall( exchNReqsY(1,bi,bj), exchReqIdY(1,1,bi,bj),
     &                      mpiStatus, mpiRC )
#ifndef ALWAYS_USE_MPI
          ENDIF
#endif
#endif /* ALLOW_USE_MPI */
         ENDIF
C        Clear outstanding requests counter
         exchNReqsY(1,bi,bj) = 0
C        Update statistics
        ENDDO
       ENDDO
      ENDIF

      RETURN
      END