Contents of /MITgcm/eesupp/src/ini_procs.F



Revision 1.24
Wed Apr 16 20:46:46 2008 UTC by heimbach
Branch: MAIN
CVS Tags: checkpoint60, checkpoint61, checkpoint59q, checkpoint59r, checkpoint61f, checkpoint61e, checkpoint61g, checkpoint61d, checkpoint61b, checkpoint61c, checkpoint61a, checkpoint61l, checkpoint61j, checkpoint61k, checkpoint61h, checkpoint61i
Changes since 1.23: +9 -21 lines
Changed an unsafe MPI_SEND/MPI_RECV sequence into a safe MPI_BCAST
(previous construct was not robust; caused problems on CRAY XT3)
Modif. by ce107@mit.edu

C $Header: /u/gcmpack/MITgcm/eesupp/src/ini_procs.F,v 1.23 2005/11/05 00:51:06 jmc Exp $
C $Name:  $
#include "CPP_EEOPTIONS.h"
CBOP

C !ROUTINE: INI_PROCS

C !INTERFACE:
      SUBROUTINE INI_PROCS
      IMPLICIT NONE

C !DESCRIPTION:
C *==========================================================*
C | SUBROUTINE INI\_PROCS
C | o Initialise multiple concurrent processes environment.
C *==========================================================*
C | Under MPI this routine calls various MPI service routines
C | that map the model grid to MPI processes. The information
C | is then stored in a common block for later use.
C | Note: This routine can also be compiled with CPP
C | directives set so that no multi-processing is initialised.
C | This is OK and should work fine.
C *==========================================================*

C !USES:
C === Global data ===
#include "SIZE.h"
#include "EEPARAMS.h"
#include "EESUPPORT.h"

C !LOCAL VARIABLES:
C === Local variables ===
#ifdef ALLOW_USE_MPI
C msgBuffer      :: IO buffer
C myThid         :: Dummy thread id
C mpiRC          :: Error code reporting variable used
C                   with MPI.
C mpiGridSpec    :: No. of processes in X and Y.
C mpiPeriodicity :: Flag indicating XY periodicity to MPI.
C arrElSize      :: Size of an array element in bytes used
C                   to define MPI datatypes for communication
C                   operations.
C arrElSep       :: Separation in units of array elements between
C                   blocks to be communicated.
C elCount        :: No. of blocks that are associated with an MPI
C                   datatype.
C elLen          :: Length of an MPI datatype in terms of a
C                   preexisting datatype.
C elStride       :: Distance between starting locations of elements
C                   in an MPI datatype - can be in bytes or in
C                   datatype units.
      CHARACTER*(MAX_LEN_MBUF) msgBuffer
      INTEGER mpiRC
      INTEGER mpiGridSpec(2)
      INTEGER mpiPeriodicity(2)
      INTEGER mpiLProcNam
      CHARACTER*(MPI_MAX_PROCESSOR_NAME) mpiProcNam
      INTEGER arrElSize
      INTEGER arrElSep
      INTEGER elCount
      INTEGER elLen
      INTEGER elStride
      INTEGER npe,itemp(2),ierr,istatus(MPI_STATUS_SIZE)
      INTEGER mpiBufSize,mpiRequest
#endif /* ALLOW_USE_MPI */
      INTEGER myThid
CEOP

C--   Default values set to single processor case
C     pidW, pidE, pidN and pidS are the MPI process ids of the
C     neighbor processes. A process can be its own neighbor!
      myThid = 1
      myPid  = 1
      nProcs = 1
      myPx   = 1
      myPy   = 1
      myXGlobalLo = 1
      myYGlobalLo = 1
      pidW   = 1
      pidE   = 1
      pidN   = 1
      pidS   = 1
c     errorMessageUnit    = 0
c     standardMessageUnit = 6

#ifdef ALLOW_USE_MPI
C--
C--   MPI style full multiple-process initialisation
C--   ==============================================
#ifndef ALWAYS_USE_MPI
      IF ( usingMPI ) THEN
#endif

C--   Arrange MPI processes on a cartesian grid
C     Set variable indicating which MPI process is to the north,
C     south, east, west, south-west, south-east, north-west
C     and north-east of me e.g.
C
C     Plan view of model domain centered on process ME
C     ================================================
C
C              :      :      :      :
C              :      :      :      :
C              :      :      :      :
C        .....---------------------------.....
C              |      |      |      |
C              |  NW  |  N   |  NE  |
C              |      |      |      |
C        .....---------------------------.....
C              |      |      |      |
C              |  W   |  ME  |  E   |
C              |      |      |      |
C        .....---------------------------.....
C              |      |      |      |
C              |  SW  |  S   |  SE  |
C              |      |      |      |
C        .....---------------------------.....
C              :      :      :      :
C      Y       :      :      :      :
C     / \      :      :      :      :
C      |
C      |
C      |----> X
C
C--   Set default MPI communicator to XY processor grid
      myThid = 1
      mpiGridSpec(1) = nPx
      mpiGridSpec(2) = nPy
C     Could be periodic in X and/or Y - set at run time or compile time!
      mpiPeriodicity(1) = _mpiTRUE_
      mpiPeriodicity(2) = _mpiTRUE_
#ifdef CAN_PREVENT_X_PERIODICITY
#ifndef ALWAYS_PREVENT_X_PERIODICITY
      IF ( notUsingXPeriodicity ) THEN
#endif
       mpiPeriodicity(1) = _mpiFALSE_
#ifndef ALWAYS_PREVENT_X_PERIODICITY
      ENDIF
#endif
#endif /* CAN_PREVENT_X_PERIODICITY */
#ifdef CAN_PREVENT_Y_PERIODICITY
#ifndef ALWAYS_PREVENT_Y_PERIODICITY
      IF ( notUsingYPeriodicity ) THEN
#endif
       mpiPeriodicity(2) = _mpiFALSE_
#ifndef ALWAYS_PREVENT_Y_PERIODICITY
      ENDIF
#endif
#endif /* CAN_PREVENT_Y_PERIODICITY */

      CALL MPI_CART_CREATE(
     I     MPI_COMM_MODEL,2,mpiGridSpec,mpiPeriodicity,_mpiTRUE_,
     O     mpiComm, mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_CART_CREATE return code',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
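C     Note: the 5th argument of MPI_CART_CREATE (the reorder flag)
C     is passed as _mpiTRUE_, so the MPI library is free to renumber
C     processes when it builds the cartesian communicator; ranks in
C     mpiComm need not coincide with ranks in MPI_COMM_MODEL.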

C--   Get my location on the grid
      CALL MPI_CART_COORDS( mpiComm, mpiMyId, 2, mpiGridSpec, mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_CART_COORDS return code',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      myPid = mpiMyId
      mpiPx = mpiGridSpec(1)
      mpiPy = mpiGridSpec(2)
      mpiXGlobalLo = 1 + sNx*nSx*(mpiPx)
      mpiYGlobalLo = 1 + sNy*nSy*(mpiPy)
      myXGlobalLo = mpiXGlobalLo
      myYGlobalLo = mpiYGlobalLo
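C     Note: each process owns nSx*nSy tiles of sNx*sNy interior
C     points, so its origin follows from its cartesian coordinate;
C     e.g. with sNx=30 and nSx=2 (illustrative values only), the
C     process at mpiPx=1 starts at global index 1 + 30*2*1 = 61.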

C--   To speed-up mpi gather and scatter routines, myXGlobalLo
C     and myYGlobalLo from each process are transferred to
C     a common block array. This allows process 0 to know
C     the location of the domains controlled by each process.
      DO npe = 0, numberOfProcs-1
       itemp(1) = myXGlobalLo
       itemp(2) = myYGlobalLo
       CALL MPI_BCAST(itemp, 2, MPI_INTEGER, npe,
     &      MPI_COMM_MODEL, ierr)
       mpi_myXGlobalLo(npe+1) = itemp(1)
       mpi_myYGlobalLo(npe+1) = itemp(2)
      ENDDO
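C     Note: every process takes one turn as broadcast root, so after
C     numberOfProcs collective calls each rank holds the full table
C     of tile origins. itemp must be reloaded on every pass because
C     MPI_BCAST overwrites it on all non-root ranks.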

      myPx = mpiPx+1
      myPy = mpiPy+1
C--   Get MPI id for neighboring procs.
      mpiGridSpec(1) = mpiPx-1
      IF ( mpiPeriodicity(1) .EQ. _mpiTRUE_
     &    .AND. mpiGridSpec(1) .LT. 0 )
     &    mpiGridSpec(1) = nPx-1
      mpiGridSpec(2) = mpiPy
      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidW , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_CART_RANK (pidW) return code',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidW = mpiPidW
      mpiGridSpec(1) = mpiPx+1
      IF ( mpiPeriodicity(1) .EQ. _mpiTRUE_
     &    .AND. mpiGridSpec(1) .GT. nPx-1 )
     &    mpiGridSpec(1) = 0
      mpiGridSpec(2) = mpiPy
      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidE , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_CART_RANK (pidE) return code',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidE = mpiPidE
      mpiGridSpec(1) = mpiPx
      mpiGridSpec(2) = mpiPy-1
      IF ( mpiPeriodicity(2) .EQ. _mpiTRUE_
     &    .AND. mpiGridSpec(2) .LT. 0 )
     &    mpiGridSpec(2) = nPy - 1
      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidS , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_CART_RANK (pidS) return code',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidS = mpiPidS
      mpiGridSpec(1) = mpiPx
      mpiGridSpec(2) = mpiPy+1
      IF ( mpiPeriodicity(2) .EQ. _mpiTRUE_
     &    .AND. mpiGridSpec(2) .GT. nPy-1 )
     &    mpiGridSpec(2) = 0
      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidN , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_CART_RANK (pidN) return code',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidN = mpiPidN
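C     Note: the explicit wrap-around of the coordinates above is
C     belt-and-braces; for dimensions declared periodic at
C     MPI_CART_CREATE time, MPI_CART_RANK itself maps out-of-range
C     coordinates back onto the grid.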

C--   Print summary of processor mapping on standard output
      CALL MPI_GET_PROCESSOR_NAME( mpiProcNam, mpiLProcNam, mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_GET_PROCESSOR_NAME return code',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      WRITE(msgBuffer,'(A)')
     &  '======= Starting MPI parallel Run ========='
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_BOTH , myThid)
      WRITE(msgBuffer,'(A,A64)') ' My Processor Name = ',
     &  mpiProcNam(1:mpiLProcNam)
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I3,A,I3,A,I3,A,I3,A)') ' Located at (',
     &  mpiPx,',',mpiPy,
     &  ') on processor grid (0:',nPx-1,',0:',nPy-1,')'
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4,A,I4,A,I4,A,I4,A)') ' Origin at (',
     &  mpiXGlobalLo,',',mpiYGlobalLo,
     &  ') on global grid (1:',nPx*sNx*nSx,',1:',nPy*sNy*nSy,')'
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &  ' North neighbor = processor ', mpiPidN
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &  ' South neighbor = processor ', mpiPidS
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &  ' East  neighbor = processor ', mpiPidE
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &  ' West  neighbor = processor ', mpiPidW
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
C
C--   Create MPI types for transfer of array edges.
C--   Four and eight byte primitive (one block only) datatypes.
C--   These are common to all threads in the process.
C     Notes:
C     ======
C     1. The datatypes MPI_REAL4 and MPI_REAL8 are usually predefined.
C        If they are not defined, code must be added to create them -
C        the MPI standard leaves it optional whether they exist.
C     2. Per-thread datatypes that handle all the edges for a thread
C        are defined based on the types defined here.
C--
C--   xFace datatypes (east<-->west messages)
C--
C     xFace (y=constant) for XY arrays with real*4 declaration.
      arrElSep = (sNx+OLx*2)
      elCount  = sNy+OLy*2
      elLen    = OLx
      elStride = arrElSep
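C     Note: with these settings the vector type picks out, for each
C     of the sNy+2*OLy rows of a tile, the OLx overlap points at one
C     x-edge; consecutive blocks are one full array row
C     (sNx+2*OLx elements) apart.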
#if (defined (TARGET_SGI) || defined (TARGET_AIX) || defined(TARGET_LAM))
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL,
     &     mpiTypeXFaceBlock_xy_r4, mpiRC)
#else
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL4,
     &     mpiTypeXFaceBlock_xy_r4, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_TYPE_VECTOR (mpiTypeXFaceBlock_xy_r4)',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xy_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xy_r4)',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C     xFace (y=constant) for XY arrays with real*8 declaration.
#if (defined (TARGET_SGI) || defined (TARGET_AIX) || defined(TARGET_LAM))
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_DOUBLE_PRECISION,
     &     mpiTypeXFaceBlock_xy_r8, mpiRC)
#else
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL8,
     &     mpiTypeXFaceBlock_xy_r8, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_TYPE_VECTOR (mpiTypeXFaceBlock_xy_r8)',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xy_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xy_r8)',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C     xFace (y=constant) for XYZ arrays with real*4 declaration.
      arrElSize = 4
      arrElSep  = (sNx+OLx*2)*(sNy+OLy*2)
      elCount   = Nr
      elLen     = 1
      elStride  = arrElSize*arrElSep
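C     Note: MPI_TYPE_HVECTOR takes its stride in bytes (hence the
C     arrElSize factor); it stacks Nr copies of the 2-D edge type,
C     one per vertical level, each separated by a full XY slab.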
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &     mpiTypeXFaceBlock_xy_r4,
     &     mpiTypeXFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeXFaceBlock_xyz_r4)',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xyz_r4)',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C     xFace (y=constant) for XYZ arrays with real*8 declaration.
      arrElSize = 8
      elStride  = arrElSize*arrElSep
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &     mpiTypeXFaceBlock_xy_r8,
     &     mpiTypeXFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeXFaceBlock_xyz_r8)',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xyz_r8)',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
C--
C--   yFace datatypes (north<-->south messages)
C--
C     yFace (x=constant) for XY arrays with real*4 declaration
      elCount = OLy*(sNx+OLx*2)
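C     Note: a north/south edge is OLy complete array rows, which sit
C     next to each other in memory, so a simple MPI_TYPE_CONTIGUOUS
C     of OLy*(sNx+2*OLx) elements suffices - no stride is needed as
C     in the xFace case.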
#if (defined (TARGET_SGI) || defined (TARGET_AIX) || defined(TARGET_LAM))
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL,
     &     mpiTypeYFaceBlock_xy_r4, mpiRC)
#else
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL4,
     &     mpiTypeYFaceBlock_xy_r4, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_TYPE_CONTIGUOUS (mpiTypeYFaceBlock_xy_r4)',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xy_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xy_r4)',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
C     yFace (x=constant) for XY arrays with real*8 declaration
#if (defined (TARGET_SGI) || defined (TARGET_AIX) || defined(TARGET_LAM))
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_DOUBLE_PRECISION,
     &     mpiTypeYFaceBlock_xy_r8, mpiRC)
#else
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL8,
     &     mpiTypeYFaceBlock_xy_r8, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_TYPE_CONTIGUOUS (mpiTypeYFaceBlock_xy_r8)',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xy_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xy_r8)',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
C     yFace (x=constant) for XYZ arrays with real*4 declaration
      arrElSize = 4
      arrElSep  = (sNx+OLx*2)*(sNy+OLy*2)
      elCount   = Nr
      elLen     = 1
      elStride  = arrElSize*arrElSep
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &     mpiTypeYFaceBlock_xy_r4,
     &     mpiTypeYFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeYFaceBlock_xyz_r4)',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xyz_r4)',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
C     yFace (x=constant) for XYZ arrays with real*8 declaration
      arrElSize = 8
      elStride  = arrElSize*arrElSep
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &     mpiTypeYFaceBlock_xy_r8,
     &     mpiTypeYFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeYFaceBlock_xyz_r8)',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     & 'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xyz_r8)',
     &       mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C--   Assign MPI values used in generating unique tags for messages.
      mpiTagW = 1
      mpiTagE = 2
      mpiTagS = 3
      mpiTagN = 4
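C     Note: these direction codes feed into the unique message tags
C     built by the communication layer, so that messages travelling
C     in different directions between the same pair of processes
C     cannot be confused with one another.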

C
      CALL MPI_Barrier(MPI_COMM_MODEL,mpiRC)


C
#ifndef ALWAYS_USE_MPI
      ENDIF
#endif
#endif /* ALLOW_USE_MPI */

 999  CONTINUE

      RETURN
      END

C $Id: ini_procs.F,v 1.23 2005/11/05 00:51:06 jmc Exp $
