      subroutine kfilt_new(A,z,H,ns,no,varmes,nrsamp)

c*** A is the matrix of ensemble forecasts, z is the observation,
c*** H is the observation operator, ns is the size of the model
c*** state, no is the number of observation locations, varmes is
c*** the observational uncertainty, and nrsamp is the ensemble size.
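
c*** Sketch of the analysis step carried out below (in effect a stochastic,
c*** perturbed-observation ensemble Kalman filter update; equation numbers
c*** (6) and (8) in the comments refer to the accompanying text):
c***   A'  = A - Abar                               (ensemble anomalies)
c***   K   = (1/(nrsamp-1)) * (H A') (A')^T         ("B" of eqn (6))
c***   RR  = R + (1/(nrsamp-1)) * (H A') (H A')^T   (cf. eqn (8))
c***   for each member j:
c***     A(:,j) = A(:,j) + K^T * RR^-1 * (z + eps_j - H A(:,j)),
c*** with eps_j a random draw of variance varmes.  The solves against RR
c*** are done with the LINPACK pair spoco/sposl.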

      include 'mpif.h'
#include "parallel.h"

      integer ns, no, nrsamp, MASTER, FROM_MASTER, FROM_WORKER
      parameter (MASTER = 0)
      parameter (FROM_MASTER = 1)
      parameter (FROM_WORKER = 2)

      integer taskid, ierr, numtasks, numworkers, snumworkers
      integer source, dest, mtype, num, avenum, extra, offset
      integer ijim
      integer status(MPI_STATUS_SIZE)

      real, intent(inout) :: A(ns,nrsamp)
      real, intent(in)    :: z(no)

      integer i,j,m,mm,info,idum,ibawk
      real rcond
      logical zlz,svd,lud
      real H(no,ns)
      real vave(ns), vvar(ns), varmes(no)
      real temp

      real, allocatable :: K(:,:)
      real, allocatable :: RR(:,:)
      real, allocatable :: RRR(:,:)
      real, allocatable :: w(:)
      real, allocatable :: tmp(:,:)
      real, allocatable :: d(:)
      real, allocatable :: pd(:,:)
      real, allocatable :: transA(:,:)
      real, allocatable :: transtmp(:,:)
      real, allocatable :: tmp2(:,:)

      zlz=.true.
      svd=.false.
      lud=.false.

      allocate (K(no,ns), RR(no,no), RRR(no,no))
      allocate (transA(nrsamp,ns), transtmp(nrsamp,no))
      allocate (w(ns), tmp(no,nrsamp))
      allocate (d(no), pd(1,ns), tmp2(1,ns))

      call MPI_COMM_RANK(MPI_COMM_WORLD,taskid,ierr)
      call MPI_COMM_SIZE(MPI_COMM_WORLD,numtasks,ierr)

      numworkers = numtasks-1
      temp=sqrt(float(numworkers))
      snumworkers=int(temp)
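
c*** Note: rank 0 is used purely as the master, so numworkers = numtasks-1;
c*** the member distribution further down divides by numworkers, so the
c*** routine appears to assume at least two MPI tasks when pcorrect is set.
c*** (temp and snumworkers are computed here but not used again below.)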

      if(taskid.eq.MASTER)then

c*** initialise the observational error covariance matrix (the projector
c*** from model space to observation space, H, is supplied as an argument)
      RR=0.0

c*** define obs error covariance matrix as a diagonal matrix
      do m=1,no
         RR(m,m)=varmes(m)
      enddo

c*** calculate ensemble mean
      call ranmean2(A,w,ns,nrsamp)
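c*** (ranmean2 is assumed to return in w the mean, over the nrsamp members,
c*** of each of the ns state variables)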

c*** remove ensemble mean from the ensemble members
      do i=1,nrsamp
         A(:,i)=A(:,i)-w(:)
      enddo

      endif ! MASTER if

c*** tmp takes on the product of the observation projector and
c*** the ensemble anomalies.
#ifdef pmatmul
      if(taskid.eq.MASTER) write(6,*) 'first pmm'
      call pmm(H,A,tmp,no,ns,nrsamp)
#else
      if(taskid.eq.MASTER)then
         write(6,*) 'first matmul'
         tmp=matmul(H,A)
         write(6,*) 'completed first matmul'
      endif ! MASTER if
#endif
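
c*** (pmm is assumed to be the parallel matrix-multiply helper with the
c*** interface pmm(X,Y,Z,m,k,n), computing Z(m,n) = X(m,k)*Y(k,n) across
c*** the tasks; after this block tmp holds H*(A-ABAR), of size no x nrsamp)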

c*** K takes on the value of H(A-ABAR)(A-ABAR)^T
#ifdef pmatmul
      if(taskid.eq.MASTER)then
         transA=transpose(A)
      endif ! MASTER if

      if(taskid.eq.MASTER) write(6,*) 'second pmm'
      call pmm(tmp,transA,K,no,nrsamp,ns)
#else
      if(taskid.eq.MASTER)then
         write(6,*) 'second matmul'
         K=matmul(tmp,transpose(A))
         write(6,*) 'completed second matmul'
      endif ! MASTER if
#endif

      if(taskid.eq.MASTER)then

c*** K is what is called B in eqn (6)
      K=(1.0/(float(nrsamp)-1.0))*K
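c*** (K is no x ns; with the 1/(nrsamp-1) scaling it equals H*Pf, i.e. the
c*** ensemble forecast error covariance left-multiplied by the observation
c*** operator)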

c*** restore A by adding back ensemble mean value.
      do i=1,nrsamp
         A(:,i)=A(:,i)+w(:)
      enddo

c*** RR is the obs error cov matrix; the forecast error covariance
c*** projected into observation space is added to it, making it the
c*** first half of the right hand side of equation (8).
c*** if one was to wipe out spurious long distance correlations, it would
c*** happen here.
#ifdef pmatmul
      transtmp=transpose(tmp)

      endif ! MASTER if

      if(taskid.eq.MASTER) write(6,*) 'third pmm'
      call pmm(tmp,transtmp,RRR,no,nrsamp,no)

      if(taskid.eq.MASTER)then
         RR=RR+(1./(float(nrsamp)-1.))*RRR
      endif ! MASTER if
#else
      write(6,*) 'third matmul'
      RR=RR+(1./(float(nrsamp)-1.))*matmul(tmp,transpose(tmp))
      write(6,*) 'completed third matmul'
      endif ! MASTER if
#endif
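
c*** (on the master, RR now holds R + H*Pf*H^T; this is the matrix that is
c*** Cholesky-factorised next and solved against once per ensemble member)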

      if(taskid.eq.MASTER)then

c*** Cholesky-factorise RR; the factor is used below (sposl) to solve for
c*** the coefficients beta for each ensemble member
      call spoco(RR,no,no,rcond,w,info)
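c*** (spoco is the LINPACK Cholesky factoriser / condition estimator: RR is
c*** overwritten by its factor for use by sposl; rcond and info are not
c*** checked here, and w is reused as the length-no work array, which relies
c*** on ns >= no since w was allocated with size ns)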

#ifdef pcorrect

      endif ! MASTER if

      if(taskid.eq.MASTER) then

      write(6,*) 'updating ensemble members'

c*** figure out how many ens members go to each processor and look for extras
      avenum = nrsamp/numworkers
      extra = mod(nrsamp, numworkers)
      offset = 1
      mtype = FROM_MASTER
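
c*** (for example, nrsamp=100 members over numworkers=8 tasks gives
c*** avenum=12 and extra=4, so the first 4 workers update 13 members each
c*** and the remaining 4 update 12 each)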

c*** loop over the worker nodes
      do dest=1, numworkers

c*** give the first 'extra' workers one additional member each, until the
c*** leftover columns have all been accounted for
         if (dest .le. extra) then
            num = avenum + 1
         else
            num = avenum
         endif

c*** send each worker its offset and number of columns, together with
c*** z, varmes, RR, H, K and its block of columns of A
         call MPI_SEND(offset, 1, MPI_INTEGER, dest, mtype,
     >                 MPI_COMM_WORLD, ierr)
         call MPI_SEND(num, 1, MPI_INTEGER, dest, mtype,
     >                 MPI_COMM_WORLD, ierr)
         call MPI_SEND(z, no, MPI_REAL, dest, mtype,
     >                 MPI_COMM_WORLD, ierr)
         call MPI_SEND(varmes, no, MPI_REAL, dest,
     >                 mtype, MPI_COMM_WORLD, ierr)
         call MPI_SEND(RR, no*no, MPI_REAL, dest,
     >                 mtype, MPI_COMM_WORLD, ierr)
         call MPI_SEND(H, no*ns, MPI_REAL, dest,
     >                 mtype, MPI_COMM_WORLD, ierr)
         call MPI_SEND(K, no*ns, MPI_REAL, dest,
     >                 mtype, MPI_COMM_WORLD, ierr)
         call MPI_SEND(A(1,offset), num*ns, MPI_REAL,
     >                 dest, mtype, MPI_COMM_WORLD, ierr)

c*** increment the offset
         offset = offset + num

      enddo

c*** receive results from worker tasks
      mtype = FROM_WORKER
      do i=1, numworkers
         source = i
         call MPI_RECV(offset, 1, MPI_INTEGER, source,
     >                 mtype, MPI_COMM_WORLD, status, ierr)
         call MPI_RECV(num, 1, MPI_INTEGER, source,
     >                 mtype, MPI_COMM_WORLD, status, ierr)
         call MPI_RECV(A(1,offset), num*ns, MPI_REAL, source,
     >                 mtype, MPI_COMM_WORLD, status, ierr)
      enddo

      endif ! end of MASTER bits
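
c*** (each worker echoes back its offset and num, so the master can copy the
c*** updated members straight into the correct columns of A)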

      if(taskid.gt.MASTER)then

c*** receive matrix data from master task
      mtype = FROM_MASTER
      call MPI_RECV(offset, 1, MPI_INTEGER, MASTER,
     >              mtype, MPI_COMM_WORLD, status, ierr)
      call MPI_RECV(num, 1, MPI_INTEGER, MASTER,
     >              mtype, MPI_COMM_WORLD, status, ierr)
      call MPI_RECV(z, no, MPI_REAL, MASTER, mtype,
     >              MPI_COMM_WORLD, status, ierr)
      call MPI_RECV(varmes, no, MPI_REAL, MASTER, mtype,
     >              MPI_COMM_WORLD, status, ierr)
      call MPI_RECV(RR, no*no, MPI_REAL, MASTER, mtype,
     >              MPI_COMM_WORLD, status, ierr)
      call MPI_RECV(H, no*ns, MPI_REAL, MASTER, mtype,
     >              MPI_COMM_WORLD, status, ierr)
      call MPI_RECV(K, no*ns, MPI_REAL, MASTER, mtype,
     >              MPI_COMM_WORLD, status, ierr)
      call MPI_RECV(A, num*ns, MPI_REAL, MASTER, mtype,
     >              MPI_COMM_WORLD, status, ierr)
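
c*** (each worker holds its num assigned members in columns 1..num of its
c*** local copy of A; RR arrives already Cholesky-factorised by spoco above)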

      do j=1,num
         call random2(d,no)
         d=z+sqrt(varmes)*d-matmul(H,A(:,j))
         call sposl(RR,no,no,d)
         A(:,j)=A(:,j)+matmul(d,K)
      enddo
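
c*** (random2 is assumed to fill d with independent N(0,1) draws, so
c*** z + sqrt(varmes)*d is a perturbed observation; sposl then solves
c*** RR*beta = d in place, so d becomes beta, and matmul(d,K), i.e.
c*** K^T*beta, maps the result back to a state-space increment for member j)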

c*** send results back to master task
      mtype = FROM_WORKER
      call MPI_SEND(offset, 1, MPI_INTEGER, MASTER, mtype,
     >              MPI_COMM_WORLD, ierr)
      call MPI_SEND(num, 1, MPI_INTEGER, MASTER, mtype,
     >              MPI_COMM_WORLD, ierr)
      call MPI_SEND(A, num*ns, MPI_REAL, MASTER, mtype,
     >              MPI_COMM_WORLD, ierr)

      endif ! end of SLAVE bits

#else
      write(6,*) 'in correction bit'
c*** correct each member of the ensemble
      do j=1,nrsamp

         call random2(d,no)
         d=z+sqrt(varmes)*d-matmul(H,A(:,j))
         call sposl(RR,no,no,d)
         A(:,j)=A(:,j)+matmul(d,K)

      enddo !A has now been corrected by the data.

      write(6,*) 'end of correction bit'

      endif ! MASTER if

#endif

      deallocate (K, RR, RRR)
      deallocate (transA, transtmp)
      deallocate (w, tmp)
      deallocate (d, pd, tmp2)

      return
      end
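
c*** A minimal, hypothetical driver sketching how kfilt_new might be
c*** called.  The array sizes, the identity observation operator, the
c*** observation-error variances and the routine get_ensemble below are
c*** illustrative assumptions only; they are not part of the original code.
c
c      program kfilt_driver
c      include 'mpif.h'
c      integer, parameter :: ns=100, no=10, nrsamp=50
c      real A(ns,nrsamp), z(no), H(no,ns), varmes(no)
c      integer ierr, m
c      call MPI_INIT(ierr)
c      H=0.0
c      do m=1,no
c         H(m,m)=1.0              ! observe the first no state variables
c         varmes(m)=0.01          ! observation error variance
c      enddo
c      call get_ensemble(A,z)     ! hypothetical: fill forecasts and obs
c      call kfilt_new(A,z,H,ns,no,varmes,nrsamp)
c      call MPI_FINALIZE(ierr)
c      end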