MITgcm/eesupp/src/ini_procs.F

Revision 1.13 - Tue Aug 21 17:23:45 2001 UTC, by adcroft
Branch: MAIN
CVS Tags: checkpoint40pre9, checkpoint40
Changes since 1.12: +23 -3 lines
Added alternate forms of MPI_REAL? for SGI's.

C $Header: /u/gcmpack/models/MITgcmUV/eesupp/src/ini_procs.F,v 1.12 2001/02/04 14:38:43 cnh Exp $
C $Name: $
#include "CPP_EEOPTIONS.h"
CStartOfInterface
      SUBROUTINE INI_PROCS
C     /==========================================================\
C     | SUBROUTINE INI_PROCS                                     |
C     | o Initialise multiple concurrent processes environment.  |
C     |==========================================================|
C     | Under MPI this routine calls various MPI service routines|
C     | that map the model grid to MPI processes. The information|
C     | is then stored in a common block for later use.          |
C     | Note: This routine can also be compiled with CPP         |
C     | directives set so that no multi-processing is            |
C     | initialised. This is OK and should work fine.            |
C     \==========================================================/
      IMPLICIT NONE

C     === Global data ===
#include "SIZE.h"
#include "EEPARAMS.h"
#include "EESUPPORT.h"
CEndOfInterface

C     === Local variables ===
#ifdef ALLOW_USE_MPI
C     msgBuffer      - IO buffer
C     myThid         - Dummy thread id
C     mpiRC          - Error code reporting variable used
C                      with MPI.
C     mpiGridSpec    - No. of processes in X and Y.
C     mpiPeriodicity - Flag indicating XY periodicity to MPI.
C     arrElSize      - Size of an array element in bytes used
C                      to define MPI datatypes for communication
C                      operations.
C     arrElSep       - Separation in units of array elements between
C                      blocks to be communicated.
C     elCount        - No. of blocks that are associated with an MPI
C                      datatype.
C     elLen          - Length of an MPI datatype in terms of a
C                      preexisting datatype.
C     elStride       - Distance between starting locations of elements
C                      in an MPI datatype - can be in bytes or in
C                      datatype units.
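C     For illustration only (tile sizes here are hypothetical; the
C     real values come from SIZE.h): with sNx=30, sNy=32, OLx=OLy=2,
C     the xFace datatype built below would use elCount=36 blocks
C     (sNy+2*OLy rows), each elLen=2 (OLx) elements long, with
C     elStride=34 (sNx+2*OLx) elements between block starts - i.e.
C     one OLx-wide strip taken from every row of the tile.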
      CHARACTER*(MAX_LEN_MBUF) msgBuffer
      INTEGER mpiRC
      INTEGER mpiGridSpec(2)
      INTEGER mpiPeriodicity(2)
      INTEGER mpiLProcNam
      CHARACTER*(MPI_MAX_PROCESSOR_NAME) mpiProcNam
      INTEGER arrElSize
      INTEGER arrElSep
      INTEGER elCount
      INTEGER elLen
      INTEGER elStride
#endif /* ALLOW_USE_MPI */
      INTEGER myThid

C--   Default values set to single processor case
C     pid[W-SE] are the MPI process ids of the neighbor
C     processes. A process can be its own neighbor!
      myThid              = 1
      myPid               = 1
      nProcs              = 1
      myPx                = 1
      myPy                = 1
      myXGlobalLo         = 1
      myYGlobalLo         = 1
      pidW                = 1
      pidE                = 1
      pidN                = 1
      pidS                = 1
      errorMessageUnit    = 0
      standardMessageUnit = 6

#ifdef ALLOW_USE_MPI
C--
C--   MPI style full multiple-process initialisation
C--   ==============================================
#ifndef ALWAYS_USE_MPI
      IF ( usingMPI ) THEN
#endif

C--    Arrange MPI processes on a cartesian grid
C      Set variable indicating which MPI process is to the north,
C      south, east, west, south-west, south-east, north-west
C      and north-east of me e.g.
C
C      Plan view of model domain centered on process ME
C      ================================================
C
C                :        :        :        :
C                :        :        :        :
C                :        :        :        :
C           .....----------------------------.....
C                |        |        |        |
C                |   NW   |   N    |   NE   |
C                |        |        |        |
C           .....----------------------------.....
C                |        |        |        |
C                |   W    |   ME   |   E    |
C                |        |        |        |
C           .....----------------------------.....
C                |        |        |        |
C                |   SW   |   S    |   SE   |
C                |        |        |        |
C           .....----------------------------.....
C                :        :        :        :
C          Y     :        :        :        :
C         / \    :        :        :        :
C          |
C          |
C          |----> X
C
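C      For example (hypothetical layout, not read from any file): with
C      nPx=3, nPy=3 and periodicity in both directions, the process at
C      grid position (0,0) has its west neighbor at (2,0) and its
C      south neighbor at (0,2); with nPx=1 or nPy=1 a process is
C      literally its own east/west or north/south neighbor.
C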
C--    Set default MPI communicator to XY processor grid
       myThid = 1
       mpiGridSpec(1) = nPx
       mpiGridSpec(2) = nPy
C      Could be periodic in X and/or Y - set at run time or compile time!
       mpiPeriodicity(1) = _mpiTRUE_
       mpiPeriodicity(2) = _mpiTRUE_
#ifdef CAN_PREVENT_X_PERIODICITY
#ifndef ALWAYS_PREVENT_X_PERIODICITY
       IF ( notUsingXPeriodicity ) THEN
#endif
        mpiPeriodicity(1) = _mpiFALSE_
#ifndef ALWAYS_PREVENT_X_PERIODICITY
       ENDIF
#endif
#endif /* CAN_PREVENT_X_PERIODICITY */
#ifdef CAN_PREVENT_Y_PERIODICITY
#ifndef ALWAYS_PREVENT_Y_PERIODICITY
       IF ( notUsingYPeriodicity ) THEN
#endif
        mpiPeriodicity(2) = _mpiFALSE_
#ifndef ALWAYS_PREVENT_Y_PERIODICITY
       ENDIF
#endif
#endif /* CAN_PREVENT_Y_PERIODICITY */

       CALL MPI_CART_CREATE(
     I  MPI_COMM_MODEL, 2, mpiGridSpec, mpiPeriodicity, _mpiTRUE_,
     O  mpiComm, mpiRC )
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_CART_CREATE return code',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
        GOTO 999
       ENDIF

C--    Get my location on the grid
       CALL MPI_CART_COORDS( mpiComm, mpiMyId, 2, mpiGridSpec, mpiRC )
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_CART_COORDS return code',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
        GOTO 999
       ENDIF
       myPid = mpiMyId
       mpiPx = mpiGridSpec(1)
       mpiPy = mpiGridSpec(2)
       mpiXGlobalLo = 1 + sNx*nSx*(mpiPx)
       mpiYGlobalLo = 1 + sNy*nSy*(mpiPy)
       myXGlobalLo  = mpiXGlobalLo
       myYGlobalLo  = mpiYGlobalLo
       myPx = mpiPx+1
       myPy = mpiPy+1
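C      For illustration (hypothetical sizes, the real ones come from
C      SIZE.h): with sNx=30 and nSx=2 each process owns 60 points in
C      X, so the process at mpiPx=1 starts at
C      myXGlobalLo = 1 + 30*2*1 = 61 on the global grid.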
C--    Get MPI id for neighboring procs.
       mpiGridSpec(1) = mpiPx-1
       IF ( mpiPeriodicity(1) .EQ. _mpiTRUE_
     &  .AND. mpiGridSpec(1) .LT. 0 )
     &  mpiGridSpec(1) = nPx-1
       mpiGridSpec(2) = mpiPy
       CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidW , mpiRC )
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_CART_RANK (pidW) return code',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
        GOTO 999
       ENDIF
       pidW = mpiPidW
       mpiGridSpec(1) = mpiPx+1
       IF ( mpiPeriodicity(1) .EQ. _mpiTRUE_
     &  .AND. mpiGridSpec(1) .GT. nPx-1 )
     &  mpiGridSpec(1) = 0
       mpiGridSpec(2) = mpiPy
       CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidE , mpiRC )
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_CART_RANK (pidE) return code',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
        GOTO 999
       ENDIF
       pidE = mpiPidE
       mpiGridSpec(1) = mpiPx
       mpiGridSpec(2) = mpiPy-1
       IF ( mpiPeriodicity(2) .EQ. _mpiTRUE_
     &  .AND. mpiGridSpec(2) .LT. 0 )
     &  mpiGridSpec(2) = nPy - 1
       CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidS , mpiRC )
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_CART_RANK (pidS) return code',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
        GOTO 999
       ENDIF
       pidS = mpiPidS
       mpiGridSpec(1) = mpiPx
       mpiGridSpec(2) = mpiPy+1
       IF ( mpiPeriodicity(2) .EQ. _mpiTRUE_
     &  .AND. mpiGridSpec(2) .GT. nPy-1 )
     &  mpiGridSpec(2) = 0
       CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidN , mpiRC )
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_CART_RANK (pidN) return code',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
        GOTO 999
       ENDIF
       pidN = mpiPidN
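C      Note: in the doubly periodic case, since mpiComm was created
C      with the same periodicity flags, the standard MPI-1 call
C      MPI_CART_SHIFT( mpiComm, 0, 1, mpiPidW, mpiPidE, mpiRC )
C      (and direction 1 for S/N) would return the same neighbor
C      ranks; the explicit coordinate arithmetic above keeps the
C      wrap-around logic visible. (Alternative shown for reference
C      only - it is not used here.)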

C--    Print summary of processor mapping on standard output
       CALL MPI_GET_PROCESSOR_NAME( mpiProcNam, mpiLProcNam, mpiRC )
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_GET_PROCESSOR_NAME return code',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
        GOTO 999
       ENDIF
       WRITE(msgBuffer,'(A)')
     &  '======= Starting MPI parallel Run ========='
       CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_BOTH , myThid)
       WRITE(msgBuffer,'(A,A64)') ' My Processor Name = ',
     &  mpiProcNam(1:mpiLProcNam)
       CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
       WRITE(msgBuffer,'(A,I3,A,I3,A,I3,A,I3,A)') ' Located at (',
     &  mpiPx,',',mpiPy,
     &  ') on processor grid (0:',nPx-1,',0:',nPy-1,')'
       CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
       WRITE(msgBuffer,'(A,I4,A,I4,A,I4,A,I4,A)') ' Origin at (',
     &  mpiXGlobalLo,',',mpiYGlobalLo,
     &  ') on global grid (1:',nPx*sNx*nSx,',1:',nPy*sNy*nSy,')'
       CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
       WRITE(msgBuffer,'(A,I4.4)')
     &  ' North neighbor = processor ', mpiPidN
       CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
       WRITE(msgBuffer,'(A,I4.4)')
     &  ' South neighbor = processor ', mpiPidS
       CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
       WRITE(msgBuffer,'(A,I4.4)')
     &  ' East neighbor = processor ', mpiPidE
       CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
       WRITE(msgBuffer,'(A,I4.4)')
     &  ' West neighbor = processor ', mpiPidW
       CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
C
C--    Create MPI types for transfer of array edges.
C--    Four and eight byte primitive (one block only) datatypes.
C--    These are common to all threads in the process.
C      Notes:
C      ======
C      1. The datatypes MPI_REAL4 and MPI_REAL8 are usually predefined.
C         If they are not defined, code must be added to create them -
C         the MPI standard leaves it optional whether they exist.
C      2. Per-thread datatypes that handle all the edges for a thread
C         are defined based on the types defined here.
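C      3. If MPI_REAL8 were missing, one minimal fallback (an
C         illustrative, untested sketch, not part of this file) is a
C         byte-based equivalent, at the cost of losing MPI's type
C         conversion on heterogeneous clusters:
C           INTEGER myReal8
C           CALL MPI_TYPE_CONTIGUOUS( 8, MPI_BYTE, myReal8, mpiRC )
C           CALL MPI_TYPE_COMMIT( myReal8, mpiRC )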
C--
C--    xFace datatypes (east<-->west messages)
C--
C      xFace (y=constant) for XY arrays with real*4 declaration.
       arrElSep = (sNx+OLx*2)
       elCount  = sNy+OLy*2
       elLen    = OLx
       elStride = arrElSep
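C      (The resulting vector type describes one OLx-wide block from
C      each of the elCount rows of a tile - a tall, thin strip suited
C      to east<-->west exchanges; which edge it covers depends on the
C      buffer start address passed at send/receive time.)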
#ifdef TARGET_SGI
       CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL,
     &      mpiTypeXFaceBlock_xy_r4, mpiRC)
#else
       CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL4,
     &      mpiTypeXFaceBlock_xy_r4, mpiRC)
#endif
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_VECTOR (mpiTypeXFaceBlock_xy_r4)',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
       ENDIF
       CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xy_r4, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xy_r4)',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
       ENDIF

C      xFace (y=constant) for XY arrays with real*8 declaration.
#ifdef TARGET_SGI
       CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,
     &      MPI_DOUBLE_PRECISION,
     &      mpiTypeXFaceBlock_xy_r8, mpiRC)
#else
       CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL8,
     &      mpiTypeXFaceBlock_xy_r8, mpiRC)
#endif
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_VECTOR (mpiTypeXFaceBlock_xy_r8)',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
       ENDIF
       CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xy_r8, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xy_r8)',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
       ENDIF

C      xFace (y=constant) for XYZ arrays with real*4 declaration.
       arrElSize = 4
       arrElSep  = (sNx+OLx*2)*(sNy+OLy*2)
       elCount   = Nr
       elLen     = 1
       elStride  = arrElSize*arrElSep
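C      (MPI_TYPE_HVECTOR takes its stride in bytes, hence the
C      arrElSize factor: each of the Nr blocks is one 2-D xFace type,
C      one per vertical level, separated by a full XY plane.)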
       CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &      mpiTypeXFaceBlock_xy_r4,
     &      mpiTypeXFaceBlock_xyz_r4, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeXFaceBlock_xyz_r4)',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
       ENDIF
       CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xyz_r4, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xyz_r4)',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
       ENDIF

C      xFace (y=constant) for XYZ arrays with real*8 declaration.
       arrElSize = 8
       elStride  = arrElSize*arrElSep
       CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &      mpiTypeXFaceBlock_xy_r8,
     &      mpiTypeXFaceBlock_xyz_r8, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeXFaceBlock_xyz_r8)',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
       ENDIF
       CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xyz_r8, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xyz_r8)',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
       ENDIF
C--
C--    yFace datatypes (north<-->south messages)
C--
C      yFace (x=constant) for XY arrays with real*4 declaration
       elCount = OLy*(sNx+OLx*2)
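C      (Unlike the xFace strips, a yFace edge - OLy complete rows,
C      overlap columns included - is contiguous in memory, so a single
C      MPI_TYPE_CONTIGUOUS block of elCount elements describes it.)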
#ifdef TARGET_SGI
       CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL,
     &      mpiTypeYFaceBlock_xy_r4, mpiRC)
#else
       CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL4,
     &      mpiTypeYFaceBlock_xy_r4, mpiRC)
#endif
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_CONTIGUOUS (mpiTypeYFaceBlock_xy_r4)',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
       ENDIF
       CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xy_r4, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xy_r4)',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
       ENDIF
C      yFace (x=constant) for XY arrays with real*8 declaration
#ifdef TARGET_SGI
       CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_DOUBLE_PRECISION,
     &      mpiTypeYFaceBlock_xy_r8, mpiRC)
#else
       CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL8,
     &      mpiTypeYFaceBlock_xy_r8, mpiRC)
#endif
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_CONTIGUOUS (mpiTypeYFaceBlock_xy_r8)',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
       ENDIF
       CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xy_r8, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xy_r8)',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
       ENDIF
C      yFace (x=constant) for XYZ arrays with real*4 declaration
       arrElSize = 4
       arrElSep  = (sNx+OLx*2)*(sNy+OLy*2)
       elCount   = Nr
       elLen     = 1
       elStride  = arrElSize*arrElSep
       CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &      mpiTypeYFaceBlock_xy_r4,
     &      mpiTypeYFaceBlock_xyz_r4, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeYFaceBlock_xyz_r4)',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
       ENDIF
       CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xyz_r4, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xyz_r4)',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
       ENDIF
C      yFace (x=constant) for XYZ arrays with real*8 declaration
       arrElSize = 8
       elStride  = arrElSize*arrElSep
       CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &      mpiTypeYFaceBlock_xy_r8,
     &      mpiTypeYFaceBlock_xyz_r8, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeYFaceBlock_xyz_r8)',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
       ENDIF
       CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xyz_r8, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xyz_r8)',
     &  mpiRC
        CALL PRINT_ERROR( msgBuffer , myThid)
       ENDIF

C--    Assign MPI values used in generating unique tags for messages.
       mpiTagW = 1
       mpiTagE = 2
       mpiTagS = 3
       mpiTagN = 4
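C      (These direction codes are presumably combined, in the exchange
C      routines elsewhere, with process/tile numbers so that concurrent
C      N/S and E/W messages carry distinct MPI tags and cannot be
C      mismatched.)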

C
       CALL MPI_Barrier(MPI_COMM_MODEL,mpiRC)

C
#ifndef ALWAYS_USE_MPI
      ENDIF
#endif
#endif /* ALLOW_USE_MPI */

  999 CONTINUE

      RETURN
      END

C $Id: ini_procs.F,v 1.12 2001/02/04 14:38:43 cnh Exp $
