
Contents of /MITgcm_contrib/sannino/GRID_Refinemet/code/ini_procs.F



Revision 1.1
Thu Jul 20 21:08:14 2006 UTC by sannino
Branch: MAIN
CVS Tags: HEAD
o Adding OASIS package
o Adding grid refinement package

C $Header: /u/gcmpack/MITgcm/eesupp/src/ini_procs.F,v 1.23 2005/11/05 00:51:06 jmc Exp $
C $Name:  $
#include "CPP_EEOPTIONS.h"
CBOP

C     !ROUTINE: INI_PROCS

C     !INTERFACE:
      SUBROUTINE INI_PROCS
      IMPLICIT NONE

C     !DESCRIPTION:
C     *==========================================================*
C     | SUBROUTINE INI\_PROCS
C     | o Initialise multiple concurrent processes environment.
C     *==========================================================*
C     | Under MPI this routine calls various MPI service routines
C     | that map the model grid to MPI processes. The information
C     | is then stored in a common block for later use.
C     | Note: This routine can also be compiled with CPP
C     | directives set so that no multi-processing is initialised.
C     | This is OK and should work fine.
C     *==========================================================*

C     !USES:
C     === Global data ===
#include "SIZE.h"
#include "EEPARAMS.h"
#include "EESUPPORT.h"

C     !LOCAL VARIABLES:
C     === Local variables ===
#ifdef ALLOW_USE_MPI
C     msgBuffer      :: IO buffer
C     myThid         :: Dummy thread id
C     mpiRC          :: Error code reporting variable used
C                       with MPI.
C     mpiGridSpec    :: No. of processes in X and Y.
C     mpiPeriodicity :: Flag indicating XY periodicity to MPI.
C     arrElSize      :: Size of an array element in bytes used
C                       to define MPI datatypes for communication
C                       operations.
C     arrElSep       :: Separation in units of array elements between
C                       blocks to be communicated.
C     elCount        :: No. of blocks that are associated with MPI
C                       datatype.
C     elLen          :: Length of an MPI datatype in terms of preexisting
C                       datatype.
C     elStride       :: Distance between starting location of elements
C                       in an MPI datatype - can be bytes or datatype
C                       units.
      CHARACTER*(MAX_LEN_MBUF) msgBuffer
      INTEGER mpiRC
      INTEGER mpiGridSpec(2)
      INTEGER mpiPeriodicity(2)
      INTEGER mpiLProcNam
      CHARACTER*(MPI_MAX_PROCESSOR_NAME) mpiProcNam
      INTEGER arrElSize
      INTEGER arrElSep
      INTEGER elCount
      INTEGER elLen
      INTEGER elStride
      INTEGER npe,itemp,ierr,istatus(MPI_STATUS_SIZE)
      INTEGER mpiBufSize,mpiRequest
#endif /* ALLOW_USE_MPI */
      INTEGER myThid
CEOP

C--   Default values set to single processor case
C     pid[W-SE] are the MPI process ids of the neighbor
C     processes. A process can be its own neighbor!
      myThid = 1
      myPid  = 1
      nProcs = 1
      myPx   = 1
      myPy   = 1
      myXGlobalLo = 1
      myYGlobalLo = 1
      pidW   = 1
      pidE   = 1
      pidN   = 1
      pidS   = 1
c     errorMessageUnit    = 0
c     standardMessageUnit = 6

#ifdef ALLOW_USE_MPI
C--
C--   MPI style full multiple-process initialisation
C--   ==============================================
#ifndef ALWAYS_USE_MPI
      IF ( usingMPI ) THEN
#endif

C--   Arrange MPI processes on a cartesian grid
C     Set variables indicating which MPI process is to the north,
C     south, east, west, south-west, south-east, north-west
C     and north-east of me, e.g.
C
C     Plan view of model domain centered on process ME
C     ================================================
C
C                :     :      :     :
C                :     :      :     :
C                :     :      :     :
C           .....--------------------.....
C                |     |      |     |
C                | NW  |  N   |  NE |
C                |     |      |     |
C           .....--------------------.....
C                |     |      |     |
C                |  W  |  ME  |  E  |
C                |     |      |     |
C           .....--------------------.....
C                |     |      |     |
C                | SW  |  S   |  SE |
C                |     |      |     |
C           .....--------------------.....
C                :     :      :     :
C        Y       :     :      :     :
C       /|\      :     :      :     :
C        |
C        |
C        |----> X
C
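C     The cartesian communicator created below provides exactly this
C     neighbourhood. For illustration only (this routine does not use
C     it), the west/east pair could equivalently be obtained in one
C     call with the standard MPI topology helper:
C       CALL MPI_CART_SHIFT( mpiComm, 0, 1, mpiPidW, mpiPidE, mpiRC )
C     Here the neighbour coordinates are instead computed explicitly
C     and translated to ranks with MPI_CART_RANK (see below).
C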
C--   Set default MPI communicator to XY processor grid
      myThid = 1
      mpiGridSpec(1) = nPx
      mpiGridSpec(2) = nPy
C     Could be periodic in X and/or Y - set at run time or compile time!
      mpiPeriodicity(1) = _mpiTRUE_
      mpiPeriodicity(2) = _mpiTRUE_
#ifdef CAN_PREVENT_X_PERIODICITY
#ifndef ALWAYS_PREVENT_X_PERIODICITY
      IF ( notUsingXPeriodicity ) THEN
#endif
       mpiPeriodicity(1) = _mpiFALSE_
#ifndef ALWAYS_PREVENT_X_PERIODICITY
      ENDIF
#endif
#endif /* CAN_PREVENT_X_PERIODICITY */
#ifdef CAN_PREVENT_Y_PERIODICITY
#ifndef ALWAYS_PREVENT_Y_PERIODICITY
      IF ( notUsingYPeriodicity ) THEN
#endif
       mpiPeriodicity(2) = _mpiFALSE_
#ifndef ALWAYS_PREVENT_Y_PERIODICITY
      ENDIF
#endif
#endif /* CAN_PREVENT_Y_PERIODICITY */
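C     In summary, for each axis the CPP flags above select one of
C     three behaviours (X axis shown; Y is analogous):
C       CAN_PREVENT_X_PERIODICITY undefined    -> always periodic
C       CAN_PREVENT_X_PERIODICITY defined      -> run-time switch via
C                                                 notUsingXPeriodicity
C       ...plus ALWAYS_PREVENT_X_PERIODICITY   -> never periodic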

cgmCASPUR(
c     mpiPeriodicity(1) = _mpiFALSE_
c     mpiPeriodicity(2) = _mpiTRUE_
cgmCASPUR)


      CALL MPI_CART_CREATE(
     I     MPI_COMM_MODEL,2,mpiGridSpec,mpiPeriodicity,_mpiTRUE_,
     O     mpiComm, mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_CART_CREATE return code',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF

C--   Get my location on the grid
      CALL MPI_CART_COORDS( mpiComm, mpiMyId, 2, mpiGridSpec, mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_CART_COORDS return code',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      myPid = mpiMyId
      mpiPx = mpiGridSpec(1)
      mpiPy = mpiGridSpec(2)
      mpiXGlobalLo = 1 + sNx*nSx*(mpiPx)
      mpiYGlobalLo = 1 + sNy*nSy*(mpiPy)
      myXGlobalLo  = mpiXGlobalLo
      myYGlobalLo  = mpiYGlobalLo
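C     As a worked example (tile sizes assumed for illustration only):
C     with sNx=30 and nSx=2 each process owns 60 points in X, so the
C     process at cartesian coordinate mpiPx=1 computes
C       myXGlobalLo = 1 + 30*2*1 = 61
C     i.e. its first interior point is global index 61.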

C--   To speed-up mpi gather and scatter routines, myXGlobalLo
C     and myYGlobalLo from each process are transferred to
C     a common block array. This allows process 0 to know
C     the location of the domains controlled by each process.
      mpiBufSize=1
      mpiRequest=0
      DO npe = 0, numberOfProcs-1
       CALL MPI_ISEND (myXGlobalLo, mpiBufSize, MPI_INTEGER,
     &      npe, mpiMyId, MPI_COMM_MODEL, mpiRequest, ierr)
      ENDDO
      DO npe = 0, numberOfProcs-1
       CALL MPI_RECV (itemp, mpiBufSize, MPI_INTEGER,
     &      npe, npe, MPI_COMM_MODEL, istatus, ierr)
       mpi_myXGlobalLo(npe+1) = itemp
      ENDDO
      DO npe = 0, numberOfProcs-1
       CALL MPI_ISEND (myYGlobalLo, mpiBufSize, MPI_INTEGER,
     &      npe, mpiMyId, MPI_COMM_MODEL, mpiRequest, ierr)
      ENDDO
      DO npe = 0, numberOfProcs-1
       CALL MPI_RECV (itemp, mpiBufSize, MPI_INTEGER,
     &      npe, npe, MPI_COMM_MODEL, istatus, ierr)
       mpi_myYGlobalLo(npe+1) = itemp
      ENDDO
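C     Note: the four loops above amount to an all-to-all exchange of
C     one INTEGER per process. A more compact, equivalent form (shown
C     only as an illustration, not used here) would be the collective
C       CALL MPI_ALLGATHER( myXGlobalLo, 1, MPI_INTEGER,
C      &     mpi_myXGlobalLo, 1, MPI_INTEGER, MPI_COMM_MODEL, ierr )
C     and likewise for myYGlobalLo.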

      myPx = mpiPx+1
      myPy = mpiPy+1
C--   Get MPI id for neighboring procs.
      mpiGridSpec(1) = mpiPx-1
      IF ( mpiPeriodicity(1) .EQ. _mpiTRUE_
     &     .AND. mpiGridSpec(1) .LT. 0 )
     &     mpiGridSpec(1) = nPx-1
      mpiGridSpec(2) = mpiPy
cgmCASPUR(
      IF ( mpiPeriodicity(1) .EQ. _mpiFALSE_
     &     .AND. mpiGridSpec(1) .LT. 0 )
     &     mpiGridSpec(1) = 0
cgmCASPUR)

      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidW , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_CART_RANK (pidW) return code',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidW = mpiPidW
      mpiGridSpec(1) = mpiPx+1
      IF ( mpiPeriodicity(1) .EQ. _mpiTRUE_
     &     .AND. mpiGridSpec(1) .GT. nPx-1 )
     &     mpiGridSpec(1) = 0
      mpiGridSpec(2) = mpiPy
cgmCASPUR(
      IF ( mpiPeriodicity(1) .EQ. _mpiFALSE_
     &     .AND. mpiGridSpec(1) .GT. nPx-1 )
     &     mpiGridSpec(1) = nPx-1
cgmCASPUR)

      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidE , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_CART_RANK (pidE) return code',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidE = mpiPidE
      mpiGridSpec(1) = mpiPx
      mpiGridSpec(2) = mpiPy-1
      IF ( mpiPeriodicity(2) .EQ. _mpiTRUE_
     &     .AND. mpiGridSpec(2) .LT. 0 )
     &     mpiGridSpec(2) = nPy - 1
      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidS , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_CART_RANK (pidS) return code',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidS = mpiPidS
      mpiGridSpec(1) = mpiPx
      mpiGridSpec(2) = mpiPy+1
      IF ( mpiPeriodicity(2) .EQ. _mpiTRUE_
     &     .AND. mpiGridSpec(2) .GT. nPy-1 )
     &     mpiGridSpec(2) = 0
      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidN , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_CART_RANK (pidN) return code',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidN = mpiPidN

C--   Print summary of processor mapping on standard output
      CALL MPI_GET_PROCESSOR_NAME( mpiProcNam, mpiLProcNam, mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_GET_PROCESSOR_NAME return code',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      WRITE(msgBuffer,'(A)')
     &     '======= Starting MPI parallel Run ========='
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &     SQUEEZE_BOTH , myThid)
      WRITE(msgBuffer,'(A,A64)') ' My Processor Name = ',
     &     mpiProcNam(1:mpiLProcNam)
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &     SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I3,A,I3,A,I3,A,I3,A)') ' Located at (',
     &     mpiPx,',',mpiPy,
     &     ') on processor grid (0:',nPx-1,',0:',nPy-1,')'
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &     SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4,A,I4,A,I4,A,I4,A)') ' Origin at (',
     &     mpiXGlobalLo,',',mpiYGlobalLo,
     &     ') on global grid (1:',nPx*sNx*nSx,',1:',nPy*sNy*nSy,')'
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &     SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &     ' North neighbor = processor ', mpiPidN
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &     SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &     ' South neighbor = processor ', mpiPidS
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &     SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &     ' East neighbor = processor ', mpiPidE
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &     SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &     ' West neighbor = processor ', mpiPidW
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &     SQUEEZE_RIGHT , myThid)
C
C--   Create MPI types for transfer of array edges.
C--   Four and eight byte primitive (one block only) datatypes.
C--   These are common to all threads in the process.
C     Notes:
C     ======
C     1. The datatypes MPI_REAL4 and MPI_REAL8 are usually predefined.
C        If they are not defined, code must be added to create them -
C        the MPI standard leaves it optional whether they exist.
C     2. Per-thread datatypes that handle all the edges for a thread
C        are defined based on the type defined here.
C--
C--   xFace datatypes (east<-->west messages)
C--
C     xFace (y=constant) for XY arrays with real*4 declaration.
      arrElSep = (sNx+OLx*2)
      elCount  = sNy+OLy*2
      elLen    = OLx
      elStride = arrElSep
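C     Sketch of the vector type being defined: XY tile arrays are
C     dimensioned (1-OLx:sNx+OLx,1-OLy:sNy+OLy), so each of the
C     elCount = sNy+2*OLy rows contributes one block of elLen = OLx
C     contiguous elements, and successive blocks start
C     elStride = sNx+2*OLx elements apart (one full array row):
C
C       row j   : [OLx points]..............................
C       row j+1 : [OLx points]..............................
C                 |<--------- sNx+2*OLx elements --------->|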
#if (defined (TARGET_SGI) || defined (TARGET_AIX) || defined(TARGET_LAM))
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL,
     &     mpiTypeXFaceBlock_xy_r4, mpiRC)
#else
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL4,
     &     mpiTypeXFaceBlock_xy_r4, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_TYPE_VECTOR (mpiTypeXFaceBlock_xy_r4)',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xy_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xy_r4)',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C     xFace (y=constant) for XY arrays with real*8 declaration.
#if (defined (TARGET_SGI) || defined (TARGET_AIX) || defined(TARGET_LAM))
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_DOUBLE_PRECISION,
     &     mpiTypeXFaceBlock_xy_r8, mpiRC)
#else
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL8,
     &     mpiTypeXFaceBlock_xy_r8, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_TYPE_VECTOR (mpiTypeXFaceBlock_xy_r8)',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xy_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xy_r8)',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C     xFace (y=constant) for XYZ arrays with real*4 declaration.
      arrElSize = 4
      arrElSep  = (sNx+OLx*2)*(sNy+OLy*2)
      elCount   = Nr
      elLen     = 1
      elStride  = arrElSize*arrElSep
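C     The HVECTOR type below stacks Nr copies of the 2-D xFace type,
C     one per vertical level. Unlike MPI_TYPE_VECTOR, the stride of
C     MPI_TYPE_HVECTOR is given in BYTES, hence
C     elStride = arrElSize*arrElSep = 4*(sNx+2*OLx)*(sNy+2*OLy),
C     the size of one full XY slab of a real*4 XYZ array.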
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &     mpiTypeXFaceBlock_xy_r4,
     &     mpiTypeXFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeXFaceBlock_xyz_r4)',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xyz_r4)',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C     xFace (y=constant) for XYZ arrays with real*8 declaration.
      arrElSize = 8
      elStride  = arrElSize*arrElSep
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &     mpiTypeXFaceBlock_xy_r8,
     &     mpiTypeXFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeXFaceBlock_xyz_r8)',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xyz_r8)',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
C--
C--   yFace datatypes (north<-->south messages)
C--
C     yFace (x=constant) for XY arrays with real*4 declaration
      elCount = OLy*(sNx+OLx*2)
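C     For yFace transfers no vector type is needed: with X varying
C     fastest in Fortran array order, the OLy overlap rows to be sent
C     are OLy consecutive full rows of sNx+2*OLx elements each, i.e.
C     one contiguous run of OLy*(sNx+2*OLx) elements - hence the
C     MPI_TYPE_CONTIGUOUS calls below.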
#if (defined (TARGET_SGI) || defined (TARGET_AIX) || defined(TARGET_LAM))
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL,
     &     mpiTypeYFaceBlock_xy_r4, mpiRC)
#else
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL4,
     &     mpiTypeYFaceBlock_xy_r4, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_TYPE_CONTIGUOUS (mpiTypeYFaceBlock_xy_r4)',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xy_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xy_r4)',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
C     yFace (x=constant) for XY arrays with real*8 declaration
#if (defined (TARGET_SGI) || defined (TARGET_AIX) || defined(TARGET_LAM))
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_DOUBLE_PRECISION,
     &     mpiTypeYFaceBlock_xy_r8, mpiRC)
#else
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL8,
     &     mpiTypeYFaceBlock_xy_r8, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_TYPE_CONTIGUOUS (mpiTypeYFaceBlock_xy_r8)',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xy_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xy_r8)',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
C     yFace (x=constant) for XYZ arrays with real*4 declaration
      arrElSize = 4
      arrElSep  = (sNx+OLx*2)*(sNy+OLy*2)
      elCount   = Nr
      elLen     = 1
      elStride  = arrElSize*arrElSep
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &     mpiTypeYFaceBlock_xy_r4,
     &     mpiTypeYFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeYFaceBlock_xyz_r4)',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xyz_r4)',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
C     yFace (x=constant) for XYZ arrays with real*8 declaration
      arrElSize = 8
      elStride  = arrElSize*arrElSep
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &     mpiTypeYFaceBlock_xy_r8,
     &     mpiTypeYFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeYFaceBlock_xyz_r8)',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &      'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xyz_r8)',
     &      mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C--   Assign MPI values used in generating unique tags for messages.
      mpiTagW = 1
      mpiTagE = 2
      mpiTagS = 3
      mpiTagN = 4
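C     (These base values, held with the other mpi* globals in
C     EESUPPORT.h, are intended to be combined with tile/process
C     information by the eesupp exchange routines so that
C     simultaneous edge messages travelling in different directions
C     cannot be confused.)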

C
      CALL MPI_Barrier(MPI_COMM_MODEL,mpiRC)


C
#ifndef ALWAYS_USE_MPI
      ENDIF
#endif
#endif /* ALLOW_USE_MPI */

 999  CONTINUE

      RETURN
      END

C $Id: ini_procs.F,v 1.23 2005/11/05 00:51:06 jmc Exp $
