C $Header: /u/gcmpack/MITgcm/eesupp/src/scatter_2d_wh_rx.template,v 1.1 2010/09/23 05:32:17 gforget Exp $
C $Name:  $

#include "PACKAGES_CONFIG.h"
#include "CPP_EEOPTIONS.h"

CBOP
C     !ROUTINE: SCATTER_2D_WH_RX
C     !INTERFACE:
      SUBROUTINE SCATTER_2D_WH_RX(
     I                  gloBuff,
     O                  procBuff,
     I                  myThid )
C     !DESCRIPTION:
C     Scatter the elements, including halos, of a global 2-D array
C     from MPI process 0 to all processes.
C     Note: done by the master thread; barrier calls may be needed
C     before and after this S/R call.
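C     A possible call sequence, for illustration only (xy_glob and
C     xy_loc stand for caller-provided buffers sized like gloBuff and
C     procBuff; they are not part of this routine):
C       CALL BARRIER( myThid )
C       CALL SCATTER_2D_WH_RX( xy_glob, xy_loc, myThid )
C       CALL BARRIER( myThid )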

C     !USES:
      IMPLICIT NONE
#include "SIZE.h"
#include "EEPARAMS.h"
#include "EESUPPORT.h"

C     !INPUT/OUTPUT PARAMETERS:
C     gloBuff  ( _RX )    :: full-domain 2D IO-buffer array   (Input)
C     procBuff ( _RX )    :: proc-domain 2D IO-buffer array   (Output)
C     myThid   (integer)  :: my Thread Id number

C     sNxWh    :: x tile size with halo included
C     sNyWh    :: y tile size with halo included
C     procNyWh :: processor sum of sNyWh
C     gloNyWh  :: global sum of sNyWh
      INTEGER sNxWh
      INTEGER sNyWh
      INTEGER procNyWh
      INTEGER gloNyWh
      PARAMETER ( sNxWh = sNx+2*Olx )
      PARAMETER ( sNyWh = sNy+2*Oly )
      PARAMETER ( procNyWh = sNyWh*nSy*nSx )
      PARAMETER ( gloNyWh = procNyWh*nPy*nPx )
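C     Note: gloBuff stacks the per-process slabs along its second
C     index: rows procNyWh*(np-1)+1 to procNyWh*np hold the data of
C     MPI rank np-1 (cf. the index jj below).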

      _RX     gloBuff(sNxWh,gloNyWh)
      _RX     procBuff(sNxWh,procNyWh)
      INTEGER myThid
CEOP

C     !LOCAL VARIABLES:
      INTEGER i,j
#ifdef ALLOW_USE_MPI
      INTEGER jj, np0, np
      _RX     temp(sNxWh,gloNyWh)
      INTEGER istatus(MPI_STATUS_SIZE), ierr
      INTEGER lbuff, isource, itag
#endif /* ALLOW_USE_MPI */

      _BEGIN_MASTER( myThid )

#ifdef ALLOW_USE_MPI

      lbuff = sNxWh*procNyWh
      isource = 0
      itag  = 0
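C     lbuff is the number of _RX elements in one process slab;
C     each transfer is a single message with tag 0.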

      IF( mpiMyId .EQ. 0 ) THEN

C--   Process 0 loops over all other processes
        DO np = 2, numberOfProcs
         np0 = np - 1
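C        (np0 = 0-based MPI rank of the destination process)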

C--   Process 0 extracts the local array of process np0 from the global buffer
         DO j=1,procNyWh
          DO i=1,sNxWh
            jj=j+procNyWh*(np-1)
            temp(i,j) = gloBuff(i,jj)
          ENDDO
         ENDDO

C--   Process 0 sends the local array to process np0
         CALL MPI_SEND (temp, lbuff, _MPI_TYPE_RX,
     &        np0, itag, MPI_COMM_MODEL, ierr)

C-    end loop on np
        ENDDO

      ELSE

C--   All processes except 0 receive their local array from process 0
        CALL MPI_RECV (procBuff, lbuff, _MPI_TYPE_RX,
     &       isource, itag, MPI_COMM_MODEL, istatus, ierr)
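C     The message is received straight into procBuff, so no
C     unpacking loop is needed on the receiving side.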

      ENDIF

#endif /* ALLOW_USE_MPI */

      IF( myProcId .EQ. 0 ) THEN
C--   Process 0 fills in its local data

        DO j=1,procNyWh
         DO i=1,sNxWh
          procBuff(i,j) = gloBuff(i,j)
         ENDDO
        ENDDO
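
C     (the commented-out loop below would reset gloBuff to zero
C      after the scatter; it is left inactive)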
c       DO j=1,gloNyWh
c        DO i=1,sNxWh
c         gloBuff(i,j) = 0.
c        ENDDO
c       ENDDO

C-    end if myProcId = 0
      ENDIF

      _END_MASTER( myThid )

      RETURN
      END

C---+----1----+----2----+----3----+----4----+----5----+----6----+----7-|--+----|