MITgcm/eesupp/src/ini_procs.F

Revision 1.15
Tue Feb 18 05:33:53 2003 UTC by dimitri
Branch: MAIN
CVS Tags: checkpoint50c_pre, checkpoint48i_post, checkpoint50, checkpoint50b_pre, checkpoint48f_post, checkpoint48h_post, checkpoint50a_post, checkpoint49, checkpoint48g_post, checkpoint50b_post
Changes since 1.14: +33 -2 lines
Merging from release1_p12:
o Modifications for using pkg/exf with pkg/seaice
  - improved description of the various forcing configurations
  - added basic radiation bulk formulae to pkg/exf
  - units/sign fix for evap computation in exf_getffields.F
  - updated verification/global_with_exf/results/output.txt
o Added pkg/sbo for computing IERS Special Bureau for the Oceans
  (SBO) core products, including oceanic mass, center-of-mass,
  angular momenta, and bottom pressure (see pkg/sbo/README.sbo).
o Lower bound for viscosity/diffusivity in pkg/kpp/kpp_routines.F
  to avoid negative values in shallow regions.
  - updated verification/natl_box/results/output.txt
  - updated verification/lab_sea/results/output.txt
o MPI gather, scatter: eesupp/src/gather_2d.F and scatter_2d.F
o Added useSingleCpuIO option (see PARAMS.h).
o Updated useSingleCpuIO option in mdsio_writefield.F to
  work with multi-field files, e.g., for single-file pickup.
o pkg/seaice:
  - bug fix in growth.F: QNET for no shortwave case
  - added HeffFile for specifying initial sea-ice thickness
  - changed SEAICE_EXTERNAL_FLUXES wind stress implementation
o Added missing /* */ to CPP comments in pkg/seaice, pkg/exf,
  kpp_transport_t.F, forward_step.F, and the_main_loop.F
o pkg/seaice:
  - adjoint-friendly modifications
  - added a SEAICE_WRITE_PICKUP at end of the_model_main.F

C $Header: /u/gcmpack/MITgcm/eesupp/src/ini_procs.F,v 1.14.4.1 2003/02/05 07:12:59 dimitri Exp $
C $Name: $
#include "CPP_EEOPTIONS.h"
CBOP

C !ROUTINE: INI_PROCS

C !INTERFACE:
      SUBROUTINE INI_PROCS
      IMPLICIT NONE

C !DESCRIPTION:
C *==========================================================*
C | SUBROUTINE INI_PROCS
C | o Initialise multiple concurrent processes environment.
C *==========================================================*
C | Under MPI this routine calls various MPI service routines
C | that map the model grid to MPI processes. The information
C | is then stored in a common block for later use.
C | Note: This routine can also be compiled with CPP
C | directives set so that no multi-processing is initialised.
C | This is OK and should work fine.
C *==========================================================*

C !USES:
C     === Global data ===
#include "SIZE.h"
#include "EEPARAMS.h"
#include "EESUPPORT.h"

C !LOCAL VARIABLES:
C     === Local variables ===
#ifdef ALLOW_USE_MPI
C     msgBuffer      :: IO buffer
C     myThid         :: Dummy thread id
C     mpiRC          :: Error code reporting variable used with MPI
C     mpiGridSpec    :: No. of processes in X and Y
C     mpiPeriodicity :: Flag indicating XY periodicity to MPI
C     arrElSize      :: Size of an array element in bytes, used to
C                       define MPI datatypes for communication
C                       operations
C     arrElSep       :: Separation, in units of array elements,
C                       between blocks to be communicated
C     elCount        :: No. of blocks associated with an MPI datatype
C     elLen          :: Length of an MPI datatype in terms of a
C                       preexisting datatype
C     elStride       :: Distance between starting locations of
C                       elements in an MPI datatype - can be in
C                       bytes or in datatype units
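C     Note: the two MPI type constructors used below take the stride
C     in different units - MPI_TYPE_VECTOR counts it in elements of
C     the old datatype, while MPI_TYPE_HVECTOR counts it in bytes.
C     Hence elStride is set to arrElSep for the 2-D edge types but
C     to arrElSize*arrElSep for the 3-D ones.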
      CHARACTER*(MAX_LEN_MBUF) msgBuffer
      INTEGER mpiRC
      INTEGER mpiGridSpec(2)
      INTEGER mpiPeriodicity(2)
      INTEGER mpiLProcNam
      CHARACTER*(MPI_MAX_PROCESSOR_NAME) mpiProcNam
      INTEGER arrElSize
      INTEGER arrElSep
      INTEGER elCount
      INTEGER elLen
      INTEGER elStride

C--   Variables needed for the mpi gather and scatter routines.
      COMMON /GlobalLo/ mpi_myXGlobalLo, mpi_myYGlobalLo
      INTEGER mpi_myXGlobalLo(nPx*nPy)
      INTEGER mpi_myYGlobalLo(nPx*nPy)
      INTEGER npe,itemp,ierr,istatus(MPI_STATUS_SIZE)

#endif /* ALLOW_USE_MPI */
      INTEGER myThid
CEOP

C--   Default values set to single processor case
C     pidW, pidE, pidN and pidS are the MPI process ids of the
C     neighbor processes. A process can be its own neighbor!
      myThid      = 1
      myPid       = 1
      nProcs      = 1
      myPx        = 1
      myPy        = 1
      myXGlobalLo = 1
      myYGlobalLo = 1
      pidW        = 1
      pidE        = 1
      pidN        = 1
      pidS        = 1
      errorMessageUnit    = 0
      standardMessageUnit = 6
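C     Note: units 0 and 6 are used here as the conventional Fortran
C     pre-connected error and output units; this is the common
C     convention on most platforms, though not mandated by the
C     Fortran 77 standard.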

#ifdef ALLOW_USE_MPI
C--
C--   MPI style full multiple-process initialisation
C--   ==============================================
#ifndef ALWAYS_USE_MPI
      IF ( usingMPI ) THEN
#endif

C--   Arrange MPI processes on a cartesian grid
C     Set variables indicating which MPI process is to the north,
C     south, east, west, south-west, south-east, north-west
C     and north-east of me e.g.
C
C     Plan view of model domain centered on process ME
C     ================================================
C
C        :     :     :     :
C        :     :     :     :
C        :     :     :     :
C     .....------------------------------.....
C        |     |     |     |
C        |  NW |  N  |  NE |
C        |     |     |     |
C     .....------------------------------.....
C        |     |     |     |
C        |  W  |  ME |  E  |
C        |     |     |     |
C     .....------------------------------.....
C        |     |     |     |
C        |  SW |  S  |  SE |
C        |     |     |     |
C     .....------------------------------.....
C        :     :     :     :
C      Y :     :     :     :
C     /|\      :     :     :
C      |
C      |
C      |----> X
C
C--   Set default MPI communicator to XY processor grid
      myThid = 1
      mpiGridSpec(1) = nPx
      mpiGridSpec(2) = nPy
C     Could be periodic in X and/or Y - set at run time or compile time!
      mpiPeriodicity(1) = _mpiTRUE_
      mpiPeriodicity(2) = _mpiTRUE_
#ifdef CAN_PREVENT_X_PERIODICITY
#ifndef ALWAYS_PREVENT_X_PERIODICITY
      IF ( notUsingXPeriodicity ) THEN
#endif
       mpiPeriodicity(1) = _mpiFALSE_
#ifndef ALWAYS_PREVENT_X_PERIODICITY
      ENDIF
#endif
#endif /* CAN_PREVENT_X_PERIODICITY */
#ifdef CAN_PREVENT_Y_PERIODICITY
#ifndef ALWAYS_PREVENT_Y_PERIODICITY
      IF ( notUsingYPeriodicity ) THEN
#endif
       mpiPeriodicity(2) = _mpiFALSE_
#ifndef ALWAYS_PREVENT_Y_PERIODICITY
      ENDIF
#endif
#endif /* CAN_PREVENT_Y_PERIODICITY */
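
C--   Create a cartesian communicator describing the nPx x nPy
C     process grid. Note: the reorder argument below is _mpiTRUE_,
C     which allows the MPI library to renumber ranks in the new
C     communicator to better match the machine topology.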
      CALL MPI_CART_CREATE(
     I  MPI_COMM_MODEL,2,mpiGridSpec,mpiPeriodicity,_mpiTRUE_,
     O  mpiComm, mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_CART_CREATE return code',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF

C--   Get my location on the grid
      CALL MPI_CART_COORDS( mpiComm, mpiMyId, 2, mpiGridSpec, mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_CART_COORDS return code',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      myPid = mpiMyId
      mpiPx = mpiGridSpec(1)
      mpiPy = mpiGridSpec(2)
      mpiXGlobalLo = 1 + sNx*nSx*(mpiPx)
      mpiYGlobalLo = 1 + sNy*nSy*(mpiPy)
      myXGlobalLo = mpiXGlobalLo
      myYGlobalLo = mpiYGlobalLo
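C     (Illustrative numbers only: with sNx=30, nSx=2 and mpiPx=1,
C     the process owns global X indices 61..120, and myXGlobalLo
C     works out as 1 + 30*2*1 = 61.)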

C--   To speed up the mpi gather and scatter routines, myXGlobalLo
C     and myYGlobalLo from each process are transferred to a common
C     block array. This allows process 0 to know the location of
C     the domains controlled by each process.
      DO npe = 0, numberOfProcs-1
       CALL MPI_SEND (myXGlobalLo, 1, MPI_INTEGER,
     &  npe, mpiMyId, MPI_COMM_MODEL, ierr)
      ENDDO
      DO npe = 0, numberOfProcs-1
       CALL MPI_RECV (itemp, 1, MPI_INTEGER,
     &  npe, npe, MPI_COMM_MODEL, istatus, ierr)
       mpi_myXGlobalLo(npe+1) = itemp
      ENDDO
      DO npe = 0, numberOfProcs-1
       CALL MPI_SEND (myYGlobalLo, 1, MPI_INTEGER,
     &  npe, mpiMyId, MPI_COMM_MODEL, ierr)
      ENDDO
      DO npe = 0, numberOfProcs-1
       CALL MPI_RECV (itemp, 1, MPI_INTEGER,
     &  npe, npe, MPI_COMM_MODEL, istatus, ierr)
       mpi_myYGlobalLo(npe+1) = itemp
      ENDDO
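
C     Note: the paired send/receive loops above amount to an
C     all-gather of one integer per process. A minimal sketch of
C     an equivalent collective (not the code path used here):
C       CALL MPI_ALLGATHER( myXGlobalLo, 1, MPI_INTEGER,
C      &                    mpi_myXGlobalLo, 1, MPI_INTEGER,
C      &                    MPI_COMM_MODEL, ierr )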

      myPx = mpiPx+1
      myPy = mpiPy+1
C--   Get MPI ids for neighboring processes.
      mpiGridSpec(1) = mpiPx-1
      IF ( mpiPeriodicity(1) .EQ. _mpiTRUE_
     &    .AND. mpiGridSpec(1) .LT. 0 )
     &  mpiGridSpec(1) = nPx-1
      mpiGridSpec(2) = mpiPy
      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidW , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_CART_RANK (pidW) return code',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidW = mpiPidW
      mpiGridSpec(1) = mpiPx+1
      IF ( mpiPeriodicity(1) .EQ. _mpiTRUE_
     &    .AND. mpiGridSpec(1) .GT. nPx-1 )
     &  mpiGridSpec(1) = 0
      mpiGridSpec(2) = mpiPy
      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidE , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_CART_RANK (pidE) return code',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidE = mpiPidE
      mpiGridSpec(1) = mpiPx
      mpiGridSpec(2) = mpiPy-1
      IF ( mpiPeriodicity(2) .EQ. _mpiTRUE_
     &    .AND. mpiGridSpec(2) .LT. 0 )
     &  mpiGridSpec(2) = nPy - 1
      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidS , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_CART_RANK (pidS) return code',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidS = mpiPidS
      mpiGridSpec(1) = mpiPx
      mpiGridSpec(2) = mpiPy+1
      IF ( mpiPeriodicity(2) .EQ. _mpiTRUE_
     &    .AND. mpiGridSpec(2) .GT. nPy-1 )
     &  mpiGridSpec(2) = 0
      CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidN , mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_CART_RANK (pidN) return code',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      pidN = mpiPidN
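
C     Note: the explicit wrap-around arithmetic above reproduces,
C     for the periodic case, what MPI_CART_SHIFT provides directly
C     on a periodic cartesian communicator; a sketch of the
C     equivalent call for the west/east pair (not used here):
C       CALL MPI_CART_SHIFT( mpiComm, 0, 1, mpiPidW, mpiPidE, mpiRC )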

C--   Print summary of processor mapping on standard output
      CALL MPI_GET_PROCESSOR_NAME( mpiProcNam, mpiLProcNam, mpiRC )
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_GET_PROCESSOR_NAME return code',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
       GOTO 999
      ENDIF
      WRITE(msgBuffer,'(A)')
     &  '======= Starting MPI parallel Run ========='
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_BOTH , myThid)
      WRITE(msgBuffer,'(A,A64)') ' My Processor Name = ',
     &  mpiProcNam(1:mpiLProcNam)
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I3,A,I3,A,I3,A,I3,A)') ' Located at (',
     &  mpiPx,',',mpiPy,
     &  ') on processor grid (0:',nPx-1,',0:',nPy-1,')'
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4,A,I4,A,I4,A,I4,A)') ' Origin at (',
     &  mpiXGlobalLo,',',mpiYGlobalLo,
     &  ') on global grid (1:',nPx*sNx*nSx,',1:',nPy*sNy*nSy,')'
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &  ' North neighbor = processor ', mpiPidN
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &  ' South neighbor = processor ', mpiPidS
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &  ' East neighbor = processor ', mpiPidE
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
      WRITE(msgBuffer,'(A,I4.4)')
     &  ' West neighbor = processor ', mpiPidW
      CALL PRINT_MESSAGE( msgBuffer, standardMessageUnit,
     &  SQUEEZE_RIGHT , myThid)
C
C--   Create MPI types for transfer of array edges.
C--   Four and eight byte primitive (one block only) datatypes.
C--   These are common to all threads in the process.
C     Notes:
C     ======
C     1. The datatypes MPI_REAL4 and MPI_REAL8 are usually
C        predefined. If they are not defined, code must be added to
C        create them - the MPI standard leaves it optional whether
C        they exist.
C     2. Per-thread datatypes that handle all the edges for a
C        thread are defined based on the types defined here.
C--
C--   xFace datatypes (east<-->west messages)
C--
C     xFace (y=constant) for XY arrays with real*4 declaration.
      arrElSep = (sNx+OLx*2)
      elCount  = sNy+OLy*2
      elLen    = OLx
      elStride = arrElSep
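C     The west/east edge of an XY array is a strip OLx points wide
C     by (sNy+2*OLy) rows tall. With Fortran ordering (X index
C     fastest) each row contributes one block of OLx contiguous
C     elements, and consecutive blocks are one full array row
C     (sNx+2*OLx elements) apart - hence the count, blocklength
C     and stride set above.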
#ifdef TARGET_SGI
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL,
     &  mpiTypeXFaceBlock_xy_r4, mpiRC)
#else
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL4,
     &  mpiTypeXFaceBlock_xy_r4, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_VECTOR (mpiTypeXFaceBlock_xy_r4)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xy_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xy_r4)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C     xFace (y=constant) for XY arrays with real*8 declaration.
#ifdef TARGET_SGI
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_DOUBLE_PRECISION,
     &  mpiTypeXFaceBlock_xy_r8, mpiRC)
#else
      CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL8,
     &  mpiTypeXFaceBlock_xy_r8, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_VECTOR (mpiTypeXFaceBlock_xy_r8)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xy_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xy_r8)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C     xFace (y=constant) for XYZ arrays with real*4 declaration.
      arrElSize = 4
      arrElSep  = (sNx+OLx*2)*(sNy+OLy*2)
      elCount   = Nr
      elLen     = 1
      elStride  = arrElSize*arrElSep
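C     Each of the Nr vertical levels contributes one copy of the
C     2-D edge type; consecutive levels are one horizontal plane
C     apart, i.e. (sNx+2*OLx)*(sNy+2*OLy) elements of arrElSize
C     bytes each. MPI_TYPE_HVECTOR takes this stride in bytes.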
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &  mpiTypeXFaceBlock_xy_r4,
     &  mpiTypeXFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeXFaceBlock_xyz_r4)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xyz_r4)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C     xFace (y=constant) for XYZ arrays with real*8 declaration.
      arrElSize = 8
      elStride  = arrElSize*arrElSep
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &  mpiTypeXFaceBlock_xy_r8,
     &  mpiTypeXFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeXFaceBlock_xyz_r8)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xyz_r8)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
C--
C--   yFace datatypes (north<-->south messages)
C--
C     yFace (x=constant) for XY arrays with real*4 declaration
      elCount = OLy*(sNx+OLx*2)
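C     The north/south edge of an XY array consists of OLy complete
C     rows and is therefore contiguous in memory, so a contiguous
C     type of OLy*(sNx+2*OLx) elements suffices - no stride needed.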
#ifdef TARGET_SGI
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL,
     &  mpiTypeYFaceBlock_xy_r4, mpiRC)
#else
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL4,
     &  mpiTypeYFaceBlock_xy_r4, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_CONTIGUOUS (mpiTypeYFaceBlock_xy_r4)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xy_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xy_r4)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C     yFace (x=constant) for XY arrays with real*8 declaration
#ifdef TARGET_SGI
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_DOUBLE_PRECISION,
     &  mpiTypeYFaceBlock_xy_r8, mpiRC)
#else
      CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL8,
     &  mpiTypeYFaceBlock_xy_r8, mpiRC)
#endif
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_CONTIGUOUS (mpiTypeYFaceBlock_xy_r8)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xy_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xy_r8)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C     yFace (x=constant) for XYZ arrays with real*4 declaration
      arrElSize = 4
      arrElSep  = (sNx+OLx*2)*(sNy+OLy*2)
      elCount   = Nr
      elLen     = 1
      elStride  = arrElSize*arrElSep
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &  mpiTypeYFaceBlock_xy_r4,
     &  mpiTypeYFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeYFaceBlock_xyz_r4)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xyz_r4, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xyz_r4)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C     yFace (x=constant) for XYZ arrays with real*8 declaration
      arrElSize = 8
      elStride  = arrElSize*arrElSep
      CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &  mpiTypeYFaceBlock_xy_r8,
     &  mpiTypeYFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeYFaceBlock_xyz_r8)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF
      CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xyz_r8, mpiRC)
      IF ( mpiRC .NE. MPI_SUCCESS ) THEN
       eeBootError = .TRUE.
       WRITE(msgBuffer,'(A,I5)')
     &  'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xyz_r8)',
     &  mpiRC
       CALL PRINT_ERROR( msgBuffer , myThid)
      ENDIF

C--   Assign MPI values used in generating unique tags for messages.
      mpiTagW = 1
      mpiTagE = 2
      mpiTagS = 3
      mpiTagN = 4

C
      CALL MPI_Barrier(MPI_COMM_MODEL,mpiRC)

C
#ifndef ALWAYS_USE_MPI
      ENDIF
#endif
#endif /* ALLOW_USE_MPI */

  999 CONTINUE

      RETURN
      END

C $Id: ini_procs.F,v 1.14.4.1 2003/02/05 07:12:59 dimitri Exp $
