Revision 1.19
Tue Apr 6 00:25:56 2004 UTC (20 years, 2 months ago) by dimitri
Branch: MAIN
Changes since 1.18: +4 -8 lines
o extending useSingleCpuIO option to work with new exch2 I/O format
  - old-style, missing-tile I/O is still accessible by defining CPP
    option MISSING_TILE_IO in pkg/mdsio/MDSIO_OPTIONS.h

C $Header: /u/gcmpack/MITgcm/eesupp/src/ini_procs.F,v 1.18 2004/03/27 03:51:51 edhill Exp $
C $Name: $
#include "CPP_EEOPTIONS.h"
CBOP

C     !ROUTINE: INI_PROCS

C     !INTERFACE:
      SUBROUTINE INI_PROCS
      IMPLICIT NONE

C     !DESCRIPTION:
C     *==========================================================*
C     | SUBROUTINE INI\_PROCS
C     | o Initialise multiple concurrent processes environment.
C     *==========================================================*
C     | Under MPI this routine calls various MPI service routines
C     | that map the model grid to MPI processes. The information
C     | is then stored in a common block for later use.
C     | Note: This routine can also be compiled with CPP
C     | directives set so that no multi-processing is initialised.
C     | This is OK and should work fine.
C     *==========================================================*

C     !USES:
C     === Global data ===
#include "SIZE.h"
#include "EEPARAMS.h"
#include "EESUPPORT.h"

C     !LOCAL VARIABLES:
C     === Local variables ===
#ifdef ALLOW_USE_MPI
C     msgBuffer      :: I/O buffer
C     myThid         :: Dummy thread id
C     mpiRC          :: Error code reporting variable used
C                       with MPI.
C     mpiGridSpec    :: No. of processes in X and Y.
C     mpiPeriodicity :: Flag indicating XY periodicity to MPI.
C     arrElSize      :: Size of an array element in bytes, used
C                       to define MPI datatypes for communication
C                       operations.
C     arrElSep       :: Separation in units of array elements between
C                       blocks to be communicated.
C     elCount        :: No. of blocks that are associated with an MPI
C                       datatype.
C     elLen          :: Length of an MPI datatype in terms of a
C                       preexisting datatype.
C     elStride       :: Distance between starting locations of elements
C                       in an MPI datatype - can be in bytes or in
C                       datatype units.
      CHARACTER*(MAX_LEN_MBUF) msgBuffer
      INTEGER mpiRC
      INTEGER mpiGridSpec(2)
      INTEGER mpiPeriodicity(2)
      INTEGER mpiLProcNam
      CHARACTER*(MPI_MAX_PROCESSOR_NAME) mpiProcNam
      INTEGER arrElSize
      INTEGER arrElSep
      INTEGER elCount
      INTEGER elLen
      INTEGER elStride

C--   Variables needed for MPI gather/scatter routines.
      COMMON /GlobalLo/ mpi_myXGlobalLo, mpi_myYGlobalLo
      INTEGER mpi_myXGlobalLo(nPx*nPy)
      INTEGER mpi_myYGlobalLo(nPx*nPy)
      INTEGER npe,itemp,ierr,istatus(MPI_STATUS_SIZE)
      INTEGER mpiBufSize,mpiRequest

#endif /* ALLOW_USE_MPI */
      INTEGER myThid
CEOP

C--   Default values set to single processor case
C     pidW, pidE, pidN and pidS are the MPI process ids of the
C     neighbor processes. A process can be its own neighbor!
      myThid      = 1
      myPid       = 1
      nProcs      = 1
      myPx        = 1
      myPy        = 1
      myXGlobalLo = 1
      myYGlobalLo = 1
      pidW        = 1
      pidE        = 1
      pidN        = 1
      pidS        = 1
c     errorMessageUnit    = 0
c     standardMessageUnit = 6

#ifdef ALLOW_USE_MPI
C--
C--   MPI style full multiple-process initialisation
C--   ==============================================
#ifndef ALWAYS_USE_MPI
      IF ( usingMPI ) THEN
#endif

C--   Arrange MPI processes on a cartesian grid
C     Set variable indicating which MPI process is to the north,
C     south, east, west, south-west, south-east, north-west
C     and north-east of me e.g.
C
C     Plan view of model domain centered on process ME
C     ================================================
C
C              :       :       :       :
C              :       :       :       :
C              :       :       :       :
C        .....---------------------------.....
C              |       |       |       |
C              |  NW   |   N   |  NE   |
C              |       |       |       |
C        .....---------------------------.....
C              |       |       |       |
C              |   W   |  ME   |   E   |
C              |       |       |       |
C        .....---------------------------.....
C              |       |       |       |
C              |  SW   |   S   |  SE   |
C              |       |       |       |
C        .....---------------------------.....
C              :       :       :       :
C         Y    :       :       :       :
C        /|\   :       :       :       :
C         |
C         |
C         |----> X
C
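C     For orientation (not used by the code): MPI numbers the ranks
C     of a cartesian communicator in row-major order, so with
C     dims = (nPx,nPy) a process at coordinates (Px,Py) normally has
C     rank Px*nPy + Py.  E.g. for a hypothetical nPx=2, nPy=3 layout:
C        rank 0 -> (Px,Py) = (0,0)     rank 3 -> (1,0)
C        rank 1 -> (Px,Py) = (0,1)     rank 4 -> (1,1)
C        rank 2 -> (Px,Py) = (0,2)     rank 5 -> (1,2)
C     (With reorder=.TRUE. below, the implementation is free to
C     renumber, which is why MPI_CART_COORDS is queried explicitly.)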
C--   Set default MPI communicator to XY processor grid
      myThid = 1
      mpiGridSpec(1) = nPx
      mpiGridSpec(2) = nPy
C     Could be periodic in X and/or Y - set at run time or compile time!
      mpiPeriodicity(1) = _mpiTRUE_
      mpiPeriodicity(2) = _mpiTRUE_
#ifdef CAN_PREVENT_X_PERIODICITY
#ifndef ALWAYS_PREVENT_X_PERIODICITY
      IF ( notUsingXPeriodicity ) THEN
#endif
       mpiPeriodicity(1) = _mpiFALSE_
#ifndef ALWAYS_PREVENT_X_PERIODICITY
      ENDIF
#endif
#endif /* CAN_PREVENT_X_PERIODICITY */
#ifdef CAN_PREVENT_Y_PERIODICITY
#ifndef ALWAYS_PREVENT_Y_PERIODICITY
      IF ( notUsingYPeriodicity ) THEN
#endif
       mpiPeriodicity(2) = _mpiFALSE_
#ifndef ALWAYS_PREVENT_Y_PERIODICITY
      ENDIF
#endif
#endif /* CAN_PREVENT_Y_PERIODICITY */

      CALL MPI_CART_CREATE(
     I     MPI_COMM_MODEL, 2, mpiGridSpec, mpiPeriodicity, _mpiTRUE_,
     O     mpiComm, mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_CART_CREATE return code',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF

C--   Get my location on the grid
      CALL MPI_CART_COORDS( mpiComm, mpiMyId, 2, mpiGridSpec, mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_CART_COORDS return code',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      myPid = mpiMyId
      mpiPx = mpiGridSpec(1)
      mpiPy = mpiGridSpec(2)
      mpiXGlobalLo = 1 + sNx*nSx*(mpiPx)
      mpiYGlobalLo = 1 + sNy*nSy*(mpiPy)
      myXGlobalLo = mpiXGlobalLo
      myYGlobalLo = mpiYGlobalLo
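C     For illustration (hypothetical tile sizes): with sNx=30, nSx=2
C     and process coordinate mpiPx=1, each process owns sNx*nSx=60
C     points in X, so mpiXGlobalLo = 1 + 60*1 = 61, i.e. this process
C     holds global X indices 61..120.  The Y origin follows the same
C     rule with sNy, nSy and mpiPy.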

C--   To speed up the MPI gather and scatter routines, myXGlobalLo
C     and myYGlobalLo from each process are transferred to
C     a common block array. This allows process 0 to know
C     the location of the domains controlled by each process.
      mpiBufSize=1
      mpiRequest=0
      DO npe = 0, numberOfProcs-1
       CALL MPI_ISEND (myXGlobalLo, mpiBufSize, MPI_INTEGER,
     &      npe, mpiMyId, MPI_COMM_MODEL, mpiRequest, ierr)
      ENDDO
      DO npe = 0, numberOfProcs-1
       CALL MPI_RECV (itemp, mpiBufSize, MPI_INTEGER,
     &      npe, npe, MPI_COMM_MODEL, istatus, ierr)
       mpi_myXGlobalLo(npe+1) = itemp
      ENDDO
      DO npe = 0, numberOfProcs-1
       CALL MPI_ISEND (myYGlobalLo, mpiBufSize, MPI_INTEGER,
     &      npe, mpiMyId, MPI_COMM_MODEL, mpiRequest, ierr)
      ENDDO
      DO npe = 0, numberOfProcs-1
       CALL MPI_RECV (itemp, mpiBufSize, MPI_INTEGER,
     &      npe, npe, MPI_COMM_MODEL, istatus, ierr)
       mpi_myYGlobalLo(npe+1) = itemp
      ENDDO
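C     Note: each process posts one non-blocking send per peer, tagged
C     with the sender's rank, then receives one value from every peer
C     (matching on tag npe), so mpi_myXGlobalLo/mpi_myYGlobalLo end up
C     replicated on every process.  A single collective would express
C     the same exchange; a minimal sketch (not what the code does):
C       CALL MPI_ALLGATHER( myXGlobalLo, 1, MPI_INTEGER,
C      &     mpi_myXGlobalLo, 1, MPI_INTEGER, MPI_COMM_MODEL, ierr )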

      myPx = mpiPx+1
      myPy = mpiPy+1
C--   Get MPI id for neighboring procs.
      mpiGridSpec(1) = mpiPx-1
      IF ( mpiPeriodicity(1) .EQ. _mpiTRUE_
     &     .AND. mpiGridSpec(1) .LT. 0 )
     &  mpiGridSpec(1) = nPx-1
      mpiGridSpec(2) = mpiPy
      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidW , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_CART_RANK (pidW) return code',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidW = mpiPidW
      mpiGridSpec(1) = mpiPx+1
      IF ( mpiPeriodicity(1) .EQ. _mpiTRUE_
     &     .AND. mpiGridSpec(1) .GT. nPx-1 )
     &  mpiGridSpec(1) = 0
      mpiGridSpec(2) = mpiPy
      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidE , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_CART_RANK (pidE) return code',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidE = mpiPidE
      mpiGridSpec(1) = mpiPx
      mpiGridSpec(2) = mpiPy-1
      IF ( mpiPeriodicity(2) .EQ. _mpiTRUE_
     &     .AND. mpiGridSpec(2) .LT. 0 )
     &  mpiGridSpec(2) = nPy - 1
      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidS , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_CART_RANK (pidS) return code',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidS = mpiPidS
      mpiGridSpec(1) = mpiPx
      mpiGridSpec(2) = mpiPy+1
      IF ( mpiPeriodicity(2) .EQ. _mpiTRUE_
     &     .AND. mpiGridSpec(2) .GT. nPy-1 )
     &  mpiGridSpec(2) = 0
      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidN , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_CART_RANK (pidN) return code',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidN = mpiPidN
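C     Worked example (hypothetical layout): with nPx=3, nPy=2 and
C     periodicity on in both directions, the process at (mpiPx,mpiPy)
C     = (0,0) wraps its west neighbor to (nPx-1,0) = (2,0) and its
C     south neighbor to (0,nPy-1) = (0,1); MPI_CART_RANK then turns
C     those coordinates back into the ranks stored in pidW and pidS.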

C--   Print summary of processor mapping on standard output
      CALL MPI_GET_PROCESSOR_NAME( mpiProcNam, mpiLProcNam, mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_GET_PROCESSOR_NAME return code',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      WRITE(msgBuffer,'(A)')
     &  '======= Starting MPI parallel Run ========='
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_BOTH , myThid)
      WRITE(msgBuffer,'(A,A64)') ' My Processor Name = ',
     &  mpiProcNam(1:mpiLProcNam)
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I3,A,I3,A,I3,A,I3,A)') ' Located at (',
     &  mpiPx,',',mpiPy,
     &  ') on processor grid (0:',nPx-1,',0:',nPy-1,')'
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4,A,I4,A,I4,A,I4,A)') ' Origin at (',
     &  mpiXGlobalLo,',',mpiYGlobalLo,
     &  ') on global grid (1:',nPx*sNx*nSx,',1:',nPy*sNy*nSy,')'
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &  ' North neighbor = processor ', mpiPidN
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &  ' South neighbor = processor ', mpiPidS
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &  ' East neighbor = processor ', mpiPidE
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &  ' West neighbor = processor ', mpiPidW
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
C
C--   Create MPI types for transfer of array edges.
C--   Four and eight byte primitive (one block only) datatypes.
C--   These are common to all threads in the process.
C     Notes:
C     ======
C     1. The datatypes MPI_REAL4 and MPI_REAL8 are usually predefined.
C        If they are not defined, code must be added to create them -
C        the MPI standard leaves it optional whether they exist.
C     2. Per-thread datatypes that handle all the edges for a thread
C        are defined based on the types defined here.
C--
C--   xFace datatypes (east<-->west messages)
C--
C     xFace (y=constant) for XY arrays with real*4 declaration.
      arrElSep = (sNx+OLx*2)
      elCount  = sNy+OLy*2
      elLen    = OLx
      elStride = arrElSep
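C     For illustration (hypothetical sizes): with sNx=30, OLx=3,
C     sNy=32, OLy=3, an XY tile array is dimensioned (36,38).  The
C     MPI_TYPE_VECTOR call below then describes elCount=38 blocks,
C     each of elLen=OLx=3 contiguous words, with consecutive blocks
C     elStride=36 elements apart - i.e. one overlap-wide column strip
C     of the tile, which is exactly an east or west edge.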
#ifdef TARGET_SGI
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL,
     &     mpiTypeXFaceBlock_xy_r4, mpiRC)
#else
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL4,
     &     mpiTypeXFaceBlock_xy_r4, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_VECTOR (mpiTypeXFaceBlock_xy_r4)',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xy_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xy_r4)',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C     xFace (y=constant) for XY arrays with real*8 declaration.
#ifdef TARGET_SGI
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_DOUBLE_PRECISION,
     &     mpiTypeXFaceBlock_xy_r8, mpiRC)
#else
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL8,
     &     mpiTypeXFaceBlock_xy_r8, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_VECTOR (mpiTypeXFaceBlock_xy_r8)',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xy_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xy_r8)',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C     xFace (y=constant) for XYZ arrays with real*4 declaration.
      arrElSize = 4
      arrElSep  = (sNx+OLx*2)*(sNy+OLy*2)
      elCount   = Nr
      elLen     = 1
      elStride  = arrElSize*arrElSep
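C     For illustration: MPI_TYPE_HVECTOR strides in *bytes*.  With the
C     hypothetical sizes above, arrElSep = 36*38 = 1368 elements per
C     horizontal level, so the r4 XYZ type places Nr copies of the XY
C     edge type 4*1368 = 5472 bytes apart - one copy per model level.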
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &     mpiTypeXFaceBlock_xy_r4,
     &     mpiTypeXFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeXFaceBlock_xyz_r4)',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xyz_r4)',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C     xFace (y=constant) for XYZ arrays with real*8 declaration.
      arrElSize = 8
      elStride  = arrElSize*arrElSep
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &     mpiTypeXFaceBlock_xy_r8,
     &     mpiTypeXFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeXFaceBlock_xyz_r8)',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xyz_r8)',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
C--
C--   yFace datatypes (north<-->south messages)
C--
C     yFace (x=constant) for XY arrays with real*4 declaration.
      elCount = OLy*(sNx+OLx*2)
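C     Unlike the x edges, a north or south edge strip is OLy complete
C     rows and therefore contiguous in memory, so a plain contiguous
C     type suffices.  With the hypothetical sizes above this is
C     elCount = 3*36 = 108 consecutive elements.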
#ifdef TARGET_SGI
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL,
     &     mpiTypeYFaceBlock_xy_r4, mpiRC)
#else
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL4,
     &     mpiTypeYFaceBlock_xy_r4, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_CONTIGUOUS (mpiTypeYFaceBlock_xy_r4)',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xy_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xy_r4)',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
C     yFace (x=constant) for XY arrays with real*8 declaration.
#ifdef TARGET_SGI
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_DOUBLE_PRECISION,
     &     mpiTypeYFaceBlock_xy_r8, mpiRC)
#else
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL8,
     &     mpiTypeYFaceBlock_xy_r8, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_CONTIGUOUS (mpiTypeYFaceBlock_xy_r8)',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xy_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xy_r8)',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
C     yFace (x=constant) for XYZ arrays with real*4 declaration.
      arrElSize = 4
      arrElSep  = (sNx+OLx*2)*(sNy+OLy*2)
      elCount   = Nr
      elLen     = 1
      elStride  = arrElSize*arrElSep
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &     mpiTypeYFaceBlock_xy_r4,
     &     mpiTypeYFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeYFaceBlock_xyz_r4)',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xyz_r4)',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
C     yFace (x=constant) for XYZ arrays with real*8 declaration.
      arrElSize = 8
      elStride  = arrElSize*arrElSep
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &     mpiTypeYFaceBlock_xy_r8,
     &     mpiTypeYFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeYFaceBlock_xyz_r8)',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xyz_r8)',
     &   mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C--   Assign MPI values used in generating unique tags for messages.
      mpiTagW = 1
      mpiTagE = 2
      mpiTagS = 3
      mpiTagN = 4
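C     A minimal sketch of how a committed edge type and a tag combine
C     in an exchange (illustrative only; the real sends live in the
C     exch routines, and "phi" is a hypothetical tile array):
C       CALL MPI_SEND( phi(sNx-OLx+1,1-OLy), 1,
C      &     mpiTypeXFaceBlock_xy_r8, pidE, mpiTagE,
C      &     MPI_COMM_MODEL, ierr )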

C
      CALL MPI_Barrier(MPI_COMM_MODEL,mpiRC)

C
#ifndef ALWAYS_USE_MPI
      ENDIF
#endif
#endif /* ALLOW_USE_MPI */

  999 CONTINUE

      RETURN
      END

C $Id: ini_procs.F,v 1.18 2004/03/27 03:51:51 edhill Exp $