#include "dims.h"
!
      module mpi_module
!
! Handle message-passing and related operations in a
! distributed-memory system, e.g., AIX.
!
      implicit none
#ifdef MPI
#include "mpif.h"
      integer ::
     |  irstat(MPI_STATUS_SIZE)      ! mpi receive status
#endif
#include "params.h"
#include "fgcom.h"
!
      integer ::
     |  ntask,     ! number of mpi tasks
     |  ntlats,    ! number of latitudes for this task (lat1-lat0+1)
     |  mytid      ! task id of current mpi process
      integer,parameter :: mxtasks=zjmx
      integer ::
     |  lats_need(4,0:mxtasks-1),    ! bndry lats each task needs
     |  lats_have(zjmx,0:mxtasks-1)  ! lats each task has
!
! msg id's for dynamo dependency fields sigma1,sigma2,z,h,u,v,w:
      integer :: msg_dynv(7)=(/1,2,3,4,5,6,7/)
!
! If not an mpi job, this module contains no subroutines.
!
#ifdef MPI
      contains
!-----------------------------------------------------------------------
      subroutine mp_init
!
! Initialize mpi, get ntask and mytid. Set lat0,lat1 for each task.
! This is called from main tgcm.F *before* input.
!
! Local:
      integer :: ier,n,j,i,jj
!
      call mpi_init(ier)
      if (ier /= 0) write(6,"('>>> WARNING: error from mpi_init: ier=',
     |  i4)") ier
      call mpi_comm_size(MPI_COMM_WORLD,ntask,ier)
      if (ier /= 0) write(6,"('>>> WARNING: error from mpi_comm_size:',
     |  ' ier=',i4)") ier
      call mpi_comm_rank(MPI_COMM_WORLD,mytid,ier)
      if (ier /= 0) write(6,"('>>> WARNING: error from mpi_comm_rank:',
     |  ' ier=',i4)") ier
      write(6,"('mp_init: ntask=',i3,' mytid=',i3)") ntask,mytid
!
      lats_have(:,:) = ispval
      lats_need(:,:) = ispval
!
! Set first and last latitude indices lat0,lat1 for parallel j loops.
! Define lats_need and lats_have for *all* tasks, then define lat0,
! lat1, and ntlats for the current task.
!
      do n=0,ntask-1
        call distribute_1d(1,zjmx,ntask,n,lat0,lat1)
        ntlats = lat1-lat0+1
        jj = 0
        do j=lat0,lat1
          jj = jj+1
          lats_have(jj,n) = j
        enddo
!
! Define boundary lats for each task:
! (this will include j -1, 0, zjmx+1, zjmx+2)
        lats_need(1,n) = lat0-2
        lats_need(2,n) = lat0-1
        lats_need(3,n) = lat1+1
        lats_need(4,n) = lat1+2
!
        write(6,"(/,'mp_init: for task n=',i2,' ntask=',i2,' lat0,1=',
     |    2i3,' ntlats=',i3,' lats_have=',/,(12i4))")
     |    n,ntask,lat0,lat1,ntlats,(lats_have(i,n),i=1,ntlats)
        write(6,"('lats_need=',4i4)") lats_need(:,n)
      enddo
!
! Define lat0,lat1,ntlats for current task (for rest of program):
      call distribute_1d(1,zjmx,ntask,mytid,lat0,lat1)
      ntlats = lat1-lat0+1
!
      write(6,"(/,'mp_init: for mytid=',i2,' lats_have=',/,(12i4))")
     |  mytid,(lats_have(i,mytid),i=1,ntlats)
      write(6,"(9x,'lats_need=',4i4)") lats_need(:,mytid)
      end subroutine mp_init
!-----------------------------------------------------------------------
      subroutine mp_sndfg(lat,ixt,idest)
!
! Send fg(:,:,lat,ixt) to task idest.
!
!#include "fgcom.h"
!
! Args:
      integer,intent(in) ::
     |  lat,    ! latitude index to send
     |  ixt,    ! time index to send
     |  idest   ! destination task
!
! Local:
      integer :: ier,ireqsend,msgtag
!
      msgtag = mytid+idest+lat
      call mpi_isend(fg(1,1,lat,ixt),zimxp*zfldx,MPI_REAL8,idest,
     |  msgtag,MPI_COMM_WORLD,ireqsend,ier)
      if (ier==0) then
!       write(6,"('mp_sndfg: lat=',i2,' ixt=',i2,' idest=',i2)")
!    |    lat,ixt,idest
      else
        write(6,"(/,'>>> Error return from mp_sndfg: ier=',i4,
     |    ' lat=',i2,' ixt=',i2,' idest=',i2,/)") ier,lat,ixt,idest
        stop 'mp_sndfg'
      endif
      end subroutine mp_sndfg
!-----------------------------------------------------------------------
      subroutine mp_rcvfg(lat,ixt)
!
! Receive a lat slice, defining fg(:,:,lat,ixt)
!
!#include "fgcom.h"
!
! Args:
      integer,intent(in) ::
     |  lat,    ! latitude index to receive
     |  ixt     ! time index to receive
!
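! Note: the message tag built below must match the tag built by the
! sending task in mp_sndfg. The sender uses mytid+idest+lat (its own id
! plus the receiver's id plus the latitude) and this receiver uses
! mytid+isrc+lat, which is the same sum, so the tags agree. E.g., with
! illustrative values, task 1 sending lat 12 to task 0 gives tag
! 1+0+12 = 13 on the sender and 0+1+12 = 13 here.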
! Local:
      integer :: isrc,ier,ierwait,ireqrecv,msgtag
!
! Get source task id:
      isrc = itasklat(lat)
!
      msgtag = mytid+isrc+lat
      call mpi_irecv(fg(1,1,lat,ixt),zimxp*zfldx,MPI_REAL8,isrc,
     |  msgtag,MPI_COMM_WORLD,ireqrecv,ier)
      call mpi_wait(ireqrecv,irstat,ierwait)
      if (ierwait /= 0) write(6,"('>>> mp_rcvfg: error',
     |  ' from mpi_wait, receiving lat ',i3,' from task ',
     |  i3,': ierwait=',i3,' ireqrecv=',i3,' msgtag=',i4)")
     |  lat,isrc,ierwait,ireqrecv,msgtag
      if (ier==0) then
!       write(6,"('mp_rcvfg: lat=',i2,' ixt=',i2,' mytid=',i2)")
!    |    lat,ixt,mytid
      else
        write(6,"(/,'>>> Error return from mp_rcvfg: ier=',i4,
     |    ' lat=',i2,' ixt=',i2,/)") ier,lat,ixt
        stop 'mp_rcvfg'
      endif
      end subroutine mp_rcvfg
!-----------------------------------------------------------------------
      subroutine mp_sndfsech(ifld,lat,idest)
!
! Send fsech(ifld)%data(:,lat,:) to task idest (2d nlon x nlev).
! (this is called by slaves, who send to master)
!
      use fields_module,only: fsech ! type(field)
!
! Args:
      integer,intent(in) ::
     |  ifld,   ! field index to send
     |  lat,    ! latitude index to send
     |  idest   ! destination task
!
! Local:
      integer :: ier,ireqsend,msgtag
      real :: tmp(zimxp,zkmxp),fmin,fmax
!
      msgtag = mytid+idest+lat+ifld
      tmp(:,:) = fsech(ifld)%data(:,lat,:)
!     call fminmax(tmp,zimxp*zkmxp,fmin,fmax)
!     write(6,"('mp_sndfsech: ifld=',i2,' lat=',i2,' tmp fmin,max=',
!    |  2e12.4,' msgtag=',i4)") ifld,lat,fmin,fmax,msgtag
      call mpi_send(tmp,zimxp*zkmxp,MPI_REAL8,idest,msgtag,
     |  MPI_COMM_WORLD,ier)
      if (ier==0) then
!       write(6,"('mp_sndfsech: ifld=',i3,' lat=',i2,' idest=',i2)")
!    |    ifld,lat,idest
      else
        write(6,"(/,'>>> Error return from mp_sndfsech: ier=',i4,
     |    ' ifld=',i3,' lat=',i2,' idest=',i2,/)") ier,ifld,lat,idest
        stop 'mp_sndfsech'
      endif
      end subroutine mp_sndfsech
!-----------------------------------------------------------------------
      subroutine mp_rcvfsech(ifld,lat)
!
! Receive a lat slice, defining fsech(ifld)%data(:,lat,:)
! (this is called by master task to get slave slices before
!  writing fsech to secondary history)
!
      use fields_module,only: fsech ! type(field)
!
! Args:
      integer,intent(in) ::
     |  ifld,   ! field index to receive
     |  lat     ! latitude index to receive
!
! Local:
      integer :: isrc,ier,ierwait,ireqrecv,msgtag
      real :: tmp(zimxp,zkmxp),fmin,fmax
!
! Get source task id:
      isrc = itasklat(lat)
!
      msgtag = mytid+isrc+lat+ifld
      call mpi_recv(tmp,zimxp*zkmxp,MPI_REAL8,isrc,msgtag,
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier==0) then
!       write(6,"('mp_rcvfsech: ifld=',i3,' lat=',i2,' mytid=',i2)")
!    |    ifld,lat,mytid
      else
        write(6,"(/,'>>> Error return from mp_rcvfsech: ier=',i4,
     |    ' ifld=',i3,' lat=',i2,/)") ier,ifld,lat
        stop 'mp_rcvfsech'
      endif
!
! Update fsech:
!     call fminmax(tmp,zimxp*zkmxp,fmin,fmax)
!     write(6,"('mp_rcvfsech: ifld=',i2,' lat=',i2,' isrc=',i2,
!    |  ' tmp fmin,max=',2e12.4,' msgtag=',i4)") ifld,lat,isrc,fmin,
!    |  fmax,msgtag
      fsech(ifld)%data(:,lat,:) = tmp(:,:)
      end subroutine mp_rcvfsech
!-----------------------------------------------------------------------
      subroutine mp_snddyn(lat,idest)
!
! Send fields needed for dynamo to task idest.
! These were defined in main latitude loop by lamdas, and are
! sent to the master by the slaves after the main latitude loop
! and before the dynamo.
!
#include "dynamo.h"
!
! Args:
      integer,intent(in) ::
     |  lat,    ! latitude index to send
     |  idest   ! destination task
!
! Local:
      real :: ftmp(zimxp1,zkmx),ftmpkp(zimxp1,zkmxp)
      integer :: ier
!
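! The seven fields below are sent with blocking mpi_send calls, in a
! fixed order, using the module-level tags msg_dynv(1:7). mp_rcvdyn on
! the master posts matching mpi_recv calls with the same tags, so the
! field order (sigma1,sigma2,z,h,u,v,w) must be kept consistent between
! the two routines if either is changed.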
! SIGMA1 (Hall conductivity)
      ftmp(:,:) = sigma1(:,lat,:)
      call mpi_send(ftmp,zimxp1*zkmx,MPI_REAL8,idest,msg_dynv(1),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for SIGMA1: ier=',i3,' lat=',i3,' idest=',i2)") ier,lat,idest
!
! SIGMA2 (Pedersen conductivity)
      ftmp(:,:) = sigma2(:,lat,:)
      call mpi_send(ftmp,zimxp1*zkmx,MPI_REAL8,idest,msg_dynv(2),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for SIGMA2: ier=',i3,' lat=',i3,' idest=',i2)")
     |  ier,lat,idest
!
! Z (geopotential)
      ftmpkp(:,:) = z(:,lat,:)
      call mpi_send(ftmpkp,zimxp1*zkmxp,MPI_REAL8,idest,msg_dynv(3),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for Z: ier=',i3,' lat=',i3,' idest=',i2)") ier,lat,idest
!
! H (scale height)
      ftmp(:,:) = h(:,lat,:)
      call mpi_send(ftmp,zimxp1*zkmx,MPI_REAL8,idest,msg_dynv(4),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for H: ier=',i3,' lat=',i3,' idest=',i2)") ier,lat,idest
!
! U (eastward velocity)
      ftmp(:,:) = u(:,lat,:)
      call mpi_send(ftmp,zimxp1*zkmx,MPI_REAL8,idest,msg_dynv(5),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for U: ier=',i3,' lat=',i3,' idest=',i2)") ier,lat,idest
!
! V (northward velocity)
      ftmp(:,:) = v(:,lat,:)
      call mpi_send(ftmp,zimxp1*zkmx,MPI_REAL8,idest,msg_dynv(6),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for V: ier=',i3,' lat=',i3,' idest=',i2)") ier,lat,idest
!
! W (vertical velocity)
      ftmp(:,:) = w(:,lat,:)
      call mpi_send(ftmp,zimxp1*zkmx,MPI_REAL8,idest,msg_dynv(7),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for W: ier=',i3,' lat=',i3,' idest=',i2)") ier,lat,idest
!
!     write(6,"('mp_snddyn: mytid ',i2,' sent lat ',i2,' to idest ',
!    |  i2)") mytid,lat,idest
      end subroutine mp_snddyn
!-----------------------------------------------------------------------
      subroutine mp_rcvdyn(lat)
!
! Receive fields needed for dynamo.
! These were defined in main latitude loop by lamdas, and are
! received by the master from the slaves before the master
! calls the dynamo.
!
#include "dynamo.h"
!
! Args:
      integer,intent(in) ::
     |  lat     ! latitude index to receive
!
! Local:
      real :: ftmp(zimxp1,zkmx),ftmpkp(zimxp1,zkmxp)
      integer :: isrc,ier
!
! Get source task id:
      isrc = itasklat(lat)
!
! SIGMA1 (Hall conductivity)
      call mpi_recv(ftmp,zimxp1*zkmx,MPI_REAL8,isrc,msg_dynv(1),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for SIGMA1: ier=',i3,' lat=',i3)") ier,lat
      sigma1(:,lat,:) = ftmp(:,:)
!
! SIGMA2 (Pedersen conductivity)
      call mpi_recv(ftmp,zimxp1*zkmx,MPI_REAL8,isrc,msg_dynv(2),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for SIGMA2: ier=',i3,' lat=',i3)") ier,lat
      sigma2(:,lat,:) = ftmp(:,:)
!
! Z (geopotential)
      call mpi_recv(ftmpkp,zimxp1*zkmxp,MPI_REAL8,isrc,msg_dynv(3),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for Z: ier=',i3,' lat=',i3)") ier,lat
      z(:,lat,:) = ftmpkp(:,:)
!
! H (scale height)
      call mpi_recv(ftmp,zimxp1*zkmx,MPI_REAL8,isrc,msg_dynv(4),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for H: ier=',i3,' lat=',i3)") ier,lat
      h(:,lat,:) = ftmp(:,:)
!
! U (eastward velocity)
      call mpi_recv(ftmp,zimxp1*zkmx,MPI_REAL8,isrc,msg_dynv(5),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for U: ier=',i3,' lat=',i3)") ier,lat
      u(:,lat,:) = ftmp(:,:)
!
! V (northward velocity)
      call mpi_recv(ftmp,zimxp1*zkmx,MPI_REAL8,isrc,msg_dynv(6),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for V: ier=',i3,' lat=',i3)") ier,lat
      v(:,lat,:) = ftmp(:,:)
!
! W (vertical velocity)
      call mpi_recv(ftmp,zimxp1*zkmx,MPI_REAL8,isrc,msg_dynv(7),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for W: ier=',i3,' lat=',i3)") ier,lat
      w(:,lat,:) = ftmp(:,:)
!
!     write(6,"('mp_rcvdyn: mytid ',i2,' received lat ',i2)") mytid,lat
      end subroutine mp_rcvdyn
!-----------------------------------------------------------------------
      subroutine mp_updatephi
!
! Dynamo has been calculated by master. Now send dynamo output
! field PHIM3D (mag coords) to the slaves:
!
#include "dynphi.h"
!
! Local:
      integer :: n,msgtag,ier
!
      if (mytid==0) then         ! master send to slaves
        do n=1,ntask-1
          msgtag = 99+n
          call mpi_send(phim3d,imaxmp*jmaxm*(zkmxp+3),MPI_REAL8,n,
     |      msgtag,MPI_COMM_WORLD,ier)
          if (ier /= 0) then
            write(6,"('>>> mp_updatephi: error sending',
     |        ' phim3d from master to task ',i3,' msgtag=',i3)")
     |        n,msgtag
          else
!           write(6,"('mp_updatephi: master sent phim3d to task',
!    |        i3)") n
          endif
        enddo
      else                       ! slave receive from master
        msgtag = 99+mytid
        call mpi_recv(phim3d,imaxmp*jmaxm*(zkmxp+3),MPI_REAL8,0,
     |    msgtag,MPI_COMM_WORLD,irstat,ier)
        if (ier /= 0) then
          write(6,"('>>> mp_updatephi: error receiving',
     |      ' phim3d from master at task mytid=',i3,' msgtag=',i3)")
     |      mytid,msgtag
        else
!         write(6,"('mp_updatephi: task ',i3,' received phim3d ',
!    |      'from master.')") mytid
        endif
      endif
      end subroutine mp_updatephi
!-----------------------------------------------------------------------
      subroutine mp_close
      integer :: ier
      call mpi_finalize(ier)
      if (ier /= 0) then
        write(6,"(/,'>>> WARNING: error from mp_close: ier=',i3)") ier
      endif
      end subroutine mp_close
!-----------------------------------------------------------------------
      integer function itasklat(lat)
!
! Return task id that is responsible for latitude index lat.
! (must be called after mp_init, which distributes latitudes to tasks)
!
! Args:
      integer,intent(in) :: lat
!
      itasklat = (lat*ntask-1)/zjmx
      if (itasklat < 0 .or. itasklat > ntask-1)
     |  write(6,"('>>> itasklat error: lat=',i2,' zjmx=',i2,
     |  ' ntask=',i2,' itasklat=',i4)") lat,zjmx,ntask,itasklat
      end function itasklat
!-----------------------------------------------------------------------
      subroutine distribute_1d(n1,n2,nprocs,myrank,istart,iend)
!
! Distribute work across a 1d vector(n1->n2) to nprocs.
! Return start and end indices for proc myrank.
! Any remainder is spread one index at a time over the lowest ranks,
! e.g. (illustrative values) n1=1, n2=37, nprocs=4 gives lenproc=9,
! iremain=1, so myrank 0 gets indices 1->10 and myranks 1,2,3 get
! 11->19, 20->28, 29->37.
!
! Args:
      integer,intent(in) :: n1,n2,nprocs,myrank
      integer,intent(out) :: istart,iend
!
! Local:
      integer :: lenproc,iremain,n
!
      n = n2-n1+1
      lenproc = n/nprocs
      iremain = mod(n,nprocs)
      istart = n1 + myrank*lenproc + min(myrank,iremain)
      iend = istart+lenproc-1
      if (iremain > myrank) iend = iend+1
!
      end subroutine distribute_1d
!-----------------------------------------------------------------------
!     subroutine mp_distlat(lat0,lat1,mylats,bndlats)
!
! Distribute latitude indices across tasks.
! This is called after distribute_1d, which sets lat0,lat1.
!
!#include "fgcom.h"
!
! Local:
!     integer :: j,jj,n,i
!     integer :: ier
!
! Define lats_have for *all* tasks:
!     do n=0,ntask-1
!       jj = 0
!       do j=lat0,lat1
!         jj = jj+1
!         lats_have(jj,n) = j
!       enddo
!
! Define boundary lats for each task:
! (this will include j -1, 0, zjmx+1, zjmx+2)
!       lats_need(1,n) = lat0-2
!       lats_need(2,n) = lat0-1
!       lats_need(3,n) = lat1+1
!       lats_need(4,n) = lat1+2
!
!       write(6,"(/,'mp_distlat: n=',i2,' ntask=',i2,' lat0,1=',
!    |    2i3,' lats_have=',/,(12i4))")
!    |    n,ntask,lat0,lat1,(lats_have(i,n),i=1,ntlats)
!       write(6,"('lats_need=',4i4)") lats_need(:,n)
!
!     enddo
!     end subroutine mp_distlat
!-----------------------------------------------------------------------
      subroutine mp_fgtomaster
!
! Master must collect all latitudes (except its own) into
! its fg-array prior to writing a history:
!
!#include "fgcom.h"
!
! Local:
      integer :: jx
!
      if (mytid==0) then          ! master: receive from slaves
        do jx=lat1+1,zjmx
          call mp_rcvfg(jx,ixtimec)
        enddo
      else                        ! slaves: send to master
        do jx=lat0,lat1
          call mp_sndfg(jx,ixtimec,0)
        enddo
      endif
!     write(6,"('mp_fgtomaster complete')")
      end subroutine mp_fgtomaster
!-----------------------------------------------------------------------
      subroutine mp_sechtomaster
!
! Master must collect all latitudes (except its own) into its
! fsech%data arrays for diagnostics, prior to writing a secondary
! history. Any prognostics requested on secondary histories are
! already available in the fg-array (see mp_fgtomaster).
!
      use fields_module,only: fsech ! type(field)
!#include "fgcom.h"
!
! Local:
      integer :: jx,i
      real :: fmin,fmax
!
!     write(6,"('enter mp_sechtomaster: mytid=',i3,
!    |  ' lat0,1=',2i3)") mytid,lat0,lat1
!
      if (mytid==0) then          ! master: receive from slaves
        do jx=lat1+1,zjmx
          fields_loop1: do i=1,mxfsech
            if (.not.associated(fsech(i)%data).or.
     |        fsech(i)%prognostic) cycle fields_loop1
            call mp_rcvfsech(i,jx)
!           call fminmax(fsech(i)%data(:,jx,:),zimxp*zkmxp,fmin,fmax)
!           write(6,"('mp_sechtomaster: jx=',i3,' i=',i2,
!    |        ' master receive ',a,' fmin,max=',2e12.4)")
!    |        jx,i,fsech(i)%name,fmin,fmax
          enddo fields_loop1
        enddo
      else                        ! slaves: send to master
        do jx=lat0,lat1
          fields_loop2: do i=1,mxfsech
            if (.not.associated(fsech(i)%data).or.
     |        fsech(i)%prognostic) cycle fields_loop2
!           call fminmax(fsech(i)%data(:,jx,:),zimxp*zkmxp,fmin,fmax)
            call mp_sndfsech(i,jx,0)
!           write(6,"('mp_sechtomaster: jx=',i3,' i=',i2,
!    |        ' slave send ',a,' fmin,max=',2e12.4)")
!    |        jx,i,fsech(i)%name,fmin,fmax
          enddo fields_loop2
        enddo
      endif
      end subroutine mp_sechtomaster
!-----------------------------------------------------------------------
      subroutine mp_bndlats(ixt)
!
! Exchange boundary latitudes in fg(:,:,:,ixt): each task sends the
! latitudes it has to any task whose lats_need includes them, then
! receives the boundary latitudes it needs from the tasks that have
! them (lats_have and lats_need are set by mp_init).
!
!#include "fgcom.h"
!
! Args:
      integer,intent(in) :: ixt
!
! Local:
      integer :: n,i,j,icount,itask,jneed,msgtag,ireqsend,ireqrecv,
     |  ierwait,ier
      integer,external :: ixfind
!
!     write(6,"(/,'mp_bndlats:')")
!     do n=0,ntask-1
!       write(6,"(/,'task n=',i2,' ntask=',i2,' lats_have=',/,
!    |    (12i4))") n,ntask,(lats_have(i,n),i=1,ntlats)
!       write(6,"('lats_need=',4i4)") lats_need(:,n)
!     enddo
!     write(6,"(' ')")
!
! Send any lats I have to tasks that need them:
!
      do j=lat0,lat1              ! loop through my latitudes
        do n=0,ntask-1            ! loop through other tasks
          if (n /= mytid) then    ! not me
            itask = ixfind(lats_need(:,n),4,j,icount)
            if (itask > 0) then   ! task n needs latitude j
!
! Send lat j to task n:
!
              msgtag = mytid+n+j
              call mpi_isend(fg(1,1,j,ixt),zimxp*zfldx,MPI_REAL8,
     |          n,msgtag,MPI_COMM_WORLD,ireqsend,ier)
              if (ier /= 0) then
                write(6,"('>>> mp_bndlats: error sending lat ',i3,
     |            ' to task ',i3,' ireqsend=',i3)") j,n,ireqsend
              else
!               write(6,"('mp_bndlats: sent lat ',i3,' to task ',
!    |            i3,' msgtag=',i4)") j,n,msgtag
              endif
            endif
          endif
        enddo
      enddo
!
! Receive boundary lats I need from other tasks:
!
      do j=1,4                    ! loop through lats I need
        jneed = lats_need(j,mytid)
        if (jneed >= 1 .and. jneed <= zjmx) then ! not -1,0,jmx+1,jmx+2
          do n=0,ntask-1          ! loop through other tasks
            itask = ixfind(lats_have(:,n),zjmx,jneed,icount)
            if (itask > 0) then   ! task n has lat which I need
!
! Receive lat jneed from task n:
!
              msgtag = mytid+n+jneed
              call mpi_irecv(fg(1,1,jneed,ixt),zimxp*zfldx,
     |          MPI_REAL8,n,msgtag,MPI_COMM_WORLD,ireqrecv,ier)
              call mpi_wait(ireqrecv,irstat,ierwait)
              if (ierwait /= 0) write(6,"('>>> mp_bndlats: error',
     |          ' from mpi_wait, receiving lat ',i3,' from task ',
     |          i3,': ierwait=',i3,' ireqrecv=',i3,' msgtag=',i4)")
     |          jneed,n,ierwait,ireqrecv,msgtag
              if (ier /= 0) then
                write(6,"('>>> mp_bndlats: error receiving lat ',
     |            i3,' from task ',i3,' ireqrecv=',i3)")
     |            jneed,n,ireqrecv
              else
!               write(6,"('mp_bndlats: received lat ',i3,' from ',
!    |            'task ',i3,' msgtag=',i4)") jneed,n,msgtag
              endif
              goto 100
            endif
          enddo
          write(6,"('>>> WARNING mp_bndlats: could not find a task',
     |      ' with lat ',i3,', which I need.')") jneed
 100      continue
        endif
      enddo
      end subroutine mp_bndlats
#endif
      end module mpi_module