Contents of MITgcm/eesupp/src/ini_procs.F
Revision 1.27 - Wed May 6 20:33:08 2009 UTC (15 years, 1 month ago) by jmc
Branch: MAIN
CVS Tags: checkpoint61n, checkpoint61o, checkpoint61p, checkpoint61q,
          checkpoint61r, checkpoint61s, checkpoint61t, checkpoint61u,
          checkpoint61v, checkpoint61w, checkpoint61x, checkpoint61y
Changes since 1.26: +2 -8 lines
Log message: go back to cvs version 1.24 (but keep new format)

C $Header: /u/gcmpack/MITgcm/eesupp/src/ini_procs.F,v 1.26 2009/05/06 02:44:45 dfer Exp $
C $Name:  $
#include "CPP_EEOPTIONS.h"
CBOP

C     !ROUTINE: INI_PROCS

C     !INTERFACE:
      SUBROUTINE INI_PROCS
      IMPLICIT NONE

C     !DESCRIPTION:
C     *==========================================================*
C     | SUBROUTINE INI\_PROCS
C     | o Initialise multiple concurrent processes environment.
C     *==========================================================*
C     | Under MPI this routine calls various MPI service routines
C     | that map the model grid to MPI processes. The information
C     | is then stored in a common block for later use.
C     | Note: This routine can also be compiled with CPP
C     | directives set so that no multi-processing is initialised.
C     | This is OK and should work fine.
C     *==========================================================*

C     !USES:
C     === Global data ===
#include "SIZE.h"
#include "EEPARAMS.h"
#include "EESUPPORT.h"
#ifdef ALLOW_USE_MPI
C     !FUNCTIONS:

C     !LOCAL VARIABLES:
C     === Local variables ===
C     msgBuffer      :: IO buffer
C     myThid         :: Dummy thread id
C     mpiRC          :: Error code reporting variable used
C                       with MPI.
C     mpiGridSpec    :: No. of processes in X and Y.
C     mpiPeriodicity :: Flag indicating XY periodicity to MPI.
C     arrElSize      :: Size of an array element in bytes used
C                       to define MPI datatypes for communication
C                       operations.
C     arrElSep       :: Separation in units of array elements between
C                       blocks to be communicated.
C     elCount        :: No. of blocks that are associated with MPI
C                       datatype.
C     elLen          :: Length of an MPI datatype in terms of a
C                       preexisting datatype.
C     elStride       :: Distance between starting locations of elements
C                       in an MPI datatype - can be in bytes or in
C                       datatype units.
      CHARACTER*(MAX_LEN_MBUF) msgBuffer
      INTEGER mpiRC
      INTEGER mpiGridSpec(2)
      INTEGER mpiPeriodicity(2)
      INTEGER mpiLProcNam
      CHARACTER*(MPI_MAX_PROCESSOR_NAME) mpiProcNam
      INTEGER arrElSize
      INTEGER arrElSep
      INTEGER elCount
      INTEGER elLen
      INTEGER elStride
      INTEGER npe,itemp(2),ierr,istatus(MPI_STATUS_SIZE)
      INTEGER mpiBufSize,mpiRequest
#endif /* ALLOW_USE_MPI */
      INTEGER myThid
CEOP

C--   Default values set to single processor case
C     pidW, pidE, pidN and pidS are the MPI process ids of the
C     neighbor processes. A process can be its own neighbor!
      myThid      = 1
      myPid       = 1
      nProcs      = 1
      myPx        = 1
      myPy        = 1
      myXGlobalLo = 1
      myYGlobalLo = 1
      pidW        = 1
      pidE        = 1
      pidN        = 1
      pidS        = 1
c     errorMessageUnit    = 0
c     standardMessageUnit = 6

#ifdef ALLOW_USE_MPI
C--
C--   MPI style full multiple-process initialisation
C--   ==============================================
#ifndef ALWAYS_USE_MPI
      IF ( usingMPI ) THEN
#endif

C--   Arrange MPI processes on a cartesian grid
C     Set variables indicating which MPI process is to the north,
C     south, east, west, south-west, south-east, north-west
C     and north-east of me e.g.
C
C     Plan view of model domain centered on process ME
C     ================================================
C
C                :      :      :      :
C                :      :      :      :
C                :      :      :      :
C           .....----------------------.....
C                |      |      |      |
C                |  NW  |  N   |  NE  |
C                |      |      |      |
C           .....----------------------.....
C                |      |      |      |
C                |  W   |  ME  |  E   |
C                |      |      |      |
C           .....----------------------.....
C                |      |      |      |
C                |  SW  |  S   |  SE  |
C                |      |      |      |
C           .....----------------------.....
C                :      :      :      :
C        Y       :      :      :      :
C        /\      :      :      :      :
C        |
C        |
C        |----> X
C
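C     [Illustrative note, not in the original source] MPI cartesian
C     topologies use row-major ordering, so with dims = (nPx,nPy) the
C     Y coordinate varies fastest: rank = pX*nPy + pY. For example,
C     with nPx=2, nPy=3 (and assuming the implementation does not
C     renumber ranks, which reorder=_mpiTRUE_ below would permit):
C        rank 0 -> (pX=0,pY=0)    rank 3 -> (pX=1,pY=0)
C        rank 1 -> (pX=0,pY=1)    rank 4 -> (pX=1,pY=1)
C        rank 2 -> (pX=0,pY=2)    rank 5 -> (pX=1,pY=2)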
C--   Set default MPI communicator to XY processor grid
      myThid = 1
      mpiGridSpec(1) = nPx
      mpiGridSpec(2) = nPy
C     Could be periodic in X and/or Y - set at run time or compile time!
      mpiPeriodicity(1) = _mpiTRUE_
      mpiPeriodicity(2) = _mpiTRUE_
#ifdef CAN_PREVENT_X_PERIODICITY
#ifndef ALWAYS_PREVENT_X_PERIODICITY
      IF ( notUsingXPeriodicity ) THEN
#endif
       mpiPeriodicity(1) = _mpiFALSE_
#ifndef ALWAYS_PREVENT_X_PERIODICITY
      ENDIF
#endif
#endif /* CAN_PREVENT_X_PERIODICITY */
#ifdef CAN_PREVENT_Y_PERIODICITY
#ifndef ALWAYS_PREVENT_Y_PERIODICITY
      IF ( notUsingYPeriodicity ) THEN
#endif
       mpiPeriodicity(2) = _mpiFALSE_
#ifndef ALWAYS_PREVENT_Y_PERIODICITY
      ENDIF
#endif
#endif /* CAN_PREVENT_Y_PERIODICITY */
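C     [Illustrative note, not in the original source] Periodicity is
C     on by default. Compiling with CAN_PREVENT_X_PERIODICITY makes
C     the run-time flag notUsingXPeriodicity effective; additionally
C     defining ALWAYS_PREVENT_X_PERIODICITY drops the IF test and
C     forces the X direction to be non-periodic (likewise for Y).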

      CALL MPI_CART_CREATE(
     I     MPI_COMM_MODEL,2,mpiGridSpec,mpiPeriodicity,_mpiTRUE_,
     O     mpiComm, mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_CART_CREATE return code',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF

C--   Get my location on the grid
      CALL MPI_CART_COORDS( mpiComm, mpiMyId, 2, mpiGridSpec, mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_CART_COORDS return code',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      myPid = mpiMyId
      mpiPx = mpiGridSpec(1)
      mpiPy = mpiGridSpec(2)
      mpiXGlobalLo = 1 + sNx*nSx*(mpiPx)
      mpiYGlobalLo = 1 + sNy*nSy*(mpiPy)
      myXGlobalLo = mpiXGlobalLo
      myYGlobalLo = mpiYGlobalLo

C--   To speed-up mpi gather and scatter routines, myXGlobalLo
C     and myYGlobalLo from each process are transferred to
C     a common block array. This allows process 0 to know
C     the location of the domains controlled by each process.
      DO npe = 0, numberOfProcs-1
       itemp(1) = myXGlobalLo
       itemp(2) = myYGlobalLo
       CALL MPI_BCAST(itemp, 2, MPI_INTEGER, npe,
     &      MPI_COMM_MODEL, ierr)
       mpi_myXGlobalLo(npe+1) = itemp(1)
       mpi_myYGlobalLo(npe+1) = itemp(2)
      ENDDO
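C     [Illustrative note, not in the original source] The loop of
C     broadcasts above acts as an all-gather of the two integers from
C     every process. A minimal sketch of the equivalent single
C     collective, assuming a hypothetical receive buffer
C     itempAll(2,nPx*nPy) were declared, would be:
c     CALL MPI_ALLGATHER( itemp,    2, MPI_INTEGER,
c    &                    itempAll, 2, MPI_INTEGER,
c    &                    MPI_COMM_MODEL, ierr )
C     The one-broadcast-per-process form is kept here; it is simple
C     and runs only once at start-up.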

      myPx = mpiPx+1
      myPy = mpiPy+1
C--   Get MPI id for neighboring procs.
      mpiGridSpec(1) = mpiPx-1
      IF ( mpiPeriodicity(1) .EQ. _mpiTRUE_
     &     .AND. mpiGridSpec(1) .LT. 0 )
     &     mpiGridSpec(1) = nPx-1
      mpiGridSpec(2) = mpiPy
      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidW , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_CART_RANK (pidW) return code',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidW = mpiPidW
      mpiGridSpec(1) = mpiPx+1
      IF ( mpiPeriodicity(1) .EQ. _mpiTRUE_
     &     .AND. mpiGridSpec(1) .GT. nPx-1 )
     &     mpiGridSpec(1) = 0
      mpiGridSpec(2) = mpiPy
      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidE , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_CART_RANK (pidE) return code',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidE = mpiPidE
      mpiGridSpec(1) = mpiPx
      mpiGridSpec(2) = mpiPy-1
      IF ( mpiPeriodicity(2) .EQ. _mpiTRUE_
     &     .AND. mpiGridSpec(2) .LT. 0 )
     &     mpiGridSpec(2) = nPy - 1
      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidS , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_CART_RANK (pidS) return code',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidS = mpiPidS
      mpiGridSpec(1) = mpiPx
      mpiGridSpec(2) = mpiPy+1
      IF ( mpiPeriodicity(2) .EQ. _mpiTRUE_
     &     .AND. mpiGridSpec(2) .GT. nPy-1 )
     &     mpiGridSpec(2) = 0
      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidN , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_CART_RANK (pidN) return code',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidN = mpiPidN

C--   Print summary of processor mapping on standard output
      CALL MPI_GET_PROCESSOR_NAME( mpiProcNam, mpiLProcNam, mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_GET_PROCESSOR_NAME return code',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      WRITE(msgBuffer,'(A)')
     &  '======= Starting MPI parallel Run ========='
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_BOTH , myThid)
      WRITE(msgBuffer,'(A,I3,A,A)') ' My Processor Name (len:',
     &  mpiLProcNam, ' ) = ', mpiProcNam(1:mpiLProcNam)
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I3,A,I3,A,I3,A,I3,A)') ' Located at (',
     &  mpiPx,',',mpiPy,
     &  ') on processor grid (0:',nPx-1,',0:',nPy-1,')'
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4,A,I4,A,I4,A,I4,A)') ' Origin at (',
     &  mpiXGlobalLo,',',mpiYGlobalLo,
     &  ') on global grid (1:',nPx*sNx*nSx,',1:',nPy*sNy*nSy,')'
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &  ' North neighbor = processor ', mpiPidN
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &  ' South neighbor = processor ', mpiPidS
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &  ' East neighbor = processor ', mpiPidE
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &  ' West neighbor = processor ', mpiPidW
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
C
C--   Create MPI types for transfer of array edges.
C--   Four and eight byte primitive (one block only) datatypes.
C--   These are common to all threads in the process.
C     Notes:
C     ======
C     1. The datatypes MPI_REAL4 and MPI_REAL8 are usually predefined.
C        If they are not defined, code must be added to create them -
C        the MPI standard leaves it optional whether they exist.
C     2. Per-thread datatypes that handle all the edges for a thread
C        are defined based on the type defined here.
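C     [Illustrative note, not in the original source] For the xFace
C     case below, MPI_TYPE_VECTOR(elCount,elLen,elStride,oldType,..)
C     selects elCount = sNy+2*OLy blocks of elLen = OLx elements whose
C     starts are elStride = sNx+2*OLx elements apart (a TYPE_VECTOR
C     stride is counted in elements of oldType), i.e. an OLx-wide
C     column of the (sNx+2*OLx)-by-(sNy+2*OLy) tile:
C
C         <--- sNx+2*OLx --->
C         XX...............     X = elements in the datatype
C         XX...............     (one row per block; OLx=2 shown
C         XX...............      for illustration)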
C--
C--   xFace datatypes (east<-->west messages)
C--
C     xFace (y=constant) for XY arrays with real*4 declaration.
      arrElSep = (sNx+OLx*2)
      elCount  = sNy+OLy*2
      elLen    = OLx
      elStride = arrElSep
#if (defined (TARGET_SGI) || defined (TARGET_AIX) || defined(TARGET_LAM))
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL,
     &     mpiTypeXFaceBlock_xy_r4, mpiRC)
#else
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL4,
     &     mpiTypeXFaceBlock_xy_r4, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_VECTOR (mpiTypeXFaceBlock_xy_r4)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xy_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xy_r4)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C     xFace (y=constant) for XY arrays with real*8 declaration.
#if (defined (TARGET_SGI) || defined (TARGET_AIX) || defined(TARGET_LAM))
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_DOUBLE_PRECISION,
     &     mpiTypeXFaceBlock_xy_r8, mpiRC)
#else
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL8,
     &     mpiTypeXFaceBlock_xy_r8, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_VECTOR (mpiTypeXFaceBlock_xy_r8)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xy_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xy_r8)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C     xFace (y=constant) for XYZ arrays with real*4 declaration.
      arrElSize = 4
      arrElSep  = (sNx+OLx*2)*(sNy+OLy*2)
      elCount   = Nr
      elLen     = 1
      elStride  = arrElSize*arrElSep
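C     [Illustrative note, not in the original source] MPI_TYPE_HVECTOR
C     counts its stride in bytes rather than in elements of the old
C     datatype, hence the arrElSize factor above: each of the Nr
C     blocks is one whole xy-face type, one model level apart in
C     memory.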
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &     mpiTypeXFaceBlock_xy_r4,
     &     mpiTypeXFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeXFaceBlock_xyz_r4)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xyz_r4)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C     xFace (y=constant) for XYZ arrays with real*8 declaration.
      arrElSize = 8
      elStride  = arrElSize*arrElSep
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &     mpiTypeXFaceBlock_xy_r8,
     &     mpiTypeXFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeXFaceBlock_xyz_r8)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xyz_r8)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
C--
C--   yFace datatypes (north<-->south messages)
C--
C     yFace (x=constant) for XY arrays with real*4 declaration
      elCount = OLy*(sNx+OLx*2)
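C     [Illustrative note, not in the original source] The north/south
C     halo region is contiguous in memory: OLy complete rows of
C     sNx+2*OLx elements each, so a plain MPI_TYPE_CONTIGUOUS of
C     OLy*(sNx+2*OLx) elements suffices and no stride is needed.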
#if (defined (TARGET_SGI) || defined (TARGET_AIX) || defined(TARGET_LAM))
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL,
     &     mpiTypeYFaceBlock_xy_r4, mpiRC)
#else
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL4,
     &     mpiTypeYFaceBlock_xy_r4, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_CONTIGUOUS (mpiTypeYFaceBlock_xy_r4)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xy_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xy_r4)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
C     yFace (x=constant) for XY arrays with real*8 declaration
#if (defined (TARGET_SGI) || defined (TARGET_AIX) || defined(TARGET_LAM))
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_DOUBLE_PRECISION,
     &     mpiTypeYFaceBlock_xy_r8, mpiRC)
#else
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL8,
     &     mpiTypeYFaceBlock_xy_r8, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_CONTIGUOUS (mpiTypeYFaceBlock_xy_r8)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xy_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xy_r8)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
C     yFace (x=constant) for XYZ arrays with real*4 declaration
      arrElSize = 4
      arrElSep  = (sNx+OLx*2)*(sNy+OLy*2)
      elCount   = Nr
      elLen     = 1
      elStride  = arrElSize*arrElSep
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &     mpiTypeYFaceBlock_xy_r4,
     &     mpiTypeYFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeYFaceBlock_xyz_r4)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xyz_r4)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
C     yFace (x=constant) for XYZ arrays with real*8 declaration
      arrElSize = 8
      elStride  = arrElSize*arrElSep
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &     mpiTypeYFaceBlock_xy_r8,
     &     mpiTypeYFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeYFaceBlock_xyz_r8)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xyz_r8)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C--   Assign MPI values used in generating unique tags for messages.
      mpiTagW = 1
      mpiTagE = 2
      mpiTagS = 3
      mpiTagN = 4

C
      CALL MPI_Barrier(MPI_COMM_MODEL,mpiRC)
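C     [Illustrative note, not in the original source] The barrier
C     keeps any process from leaving initialisation before all have
C     built their communicator, neighbor ids and edge datatypes.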


C
#ifndef ALWAYS_USE_MPI
      ENDIF
#endif
#endif /* ALLOW_USE_MPI */

  999 CONTINUE

      RETURN
      END
