#include "dims.h" ! module mpi_module ! ! Handle message-passing and related operations in ! distributed memory system, e.g., AIX. ! implicit none #ifdef MPI #include "mpif.h" integer :: | irstat(MPI_STATUS_SIZE) ! mpi receive status #endif #include "params.h" ! integer :: | ntask, ! number of mpi tasks | njptask, ! number of latitudes per task | mytid ! task id of current mpi process integer,parameter :: mxtasks=36 integer :: | lats_need(4,0:mxtasks-1), ! bndry lats each task needs | lats_have(zjmx,0:mxtasks-1) ! lats each task has ! ! msg id's for dynamo dependency fields sigma1,sigma2,z,h,u,v,w integer :: msg_dynv(18)=(/1,2,3,4,5,6,7,8,9,10,11,12, | 13,14,15,16,17,18/) ! ! If not an mpi job, this module contains no subroutines. ! #ifdef MPI contains !----------------------------------------------------------------------- subroutine mp_init ! ! Initialize mpi, get ntask and mytid. Set lat0,lat1 for each task. ! This is called from main tgcm.F *before* input. ! ! Local: integer :: ier,n,j,i ! call mpi_init(ier) if (ier /= 0) write(6,"('>>> WARNING: error from mpi_init: ier=', | i4)") ier call mpi_comm_size(MPI_COMM_WORLD,ntask,ier) if (ier /= 0) write(6,"('>>> WARNING: error from mpi_comm_size:', | ' ier=',i4)") ier call mpi_comm_rank(MPI_COMM_WORLD,mytid,ier) if (ier /= 0) write(6,"('>>> WARNING: error from mpi_comm_rank:', | ' ier=',i4)") ier write(6,"('mp_init: ntask=',i3,' mytid=',i3)") ntask,mytid ! lats_have(:,:) = ispval lats_need(:,:) = ispval ! ! Set first and last latitude indices lat0,lat1 for parallel j loops. ! mp_distlat also defines lat_sneed and lats_have for *all* tasks, ! and njptask (number of lats per task, zjmx/ntask) ! call mp_distlat ! write(6,"(/,'mp_init: lats_have=',/,(12i4))") | (lats_have(i,mytid),i=1,njptask) write(6,"(9x,'lats_need=',4i4)") lats_need(:,mytid) end subroutine mp_init !----------------------------------------------------------------------- subroutine mp_sndfg(lat,ixt,idest) ! ! Send fg(:,:,lat,ixt) to task idest. ! #include "fgcom.h" ! ! Args: integer,intent(in) :: | lat, ! latitude index to send | ixt, ! time index to receive | idest ! destination task ! ! Local: integer :: ier,ireqsend,msgtag ! msgtag = mytid+idest+lat call mpi_isend(fg(1,1,lat,ixt),zimxp*zfldx,MPI_REAL8,idest, | msgtag,MPI_COMM_WORLD,ireqsend,ier) if (ier==0) then ! write(6,"('mp_sndfg: lat=',i2,' ixt=',i2,' idest=',i2)") ! | lat,ixt,idest else write(6,"(/,'>>> Error return from mp_sndfg: ier=',i4, | ' lat=',i2,' ixt=',i2,' idest=',i2,/)") ier,lat,ixt,idest stop 'mp_sndfg' endif end subroutine mp_sndfg !----------------------------------------------------------------------- subroutine mp_rcvfg(lat,ixt) ! ! Receive a lat slice, defining fg(:,:,lat,ixt) ! #include "fgcom.h" ! ! Args: integer,intent(in) :: | lat, ! latitude index to receive | ixt ! time index to receive ! ! Local: integer :: isrc,ier,ierwait,ireqrecv,msgtag ! ! Get source task id: isrc = itasklat(lat) ! msgtag = mytid+isrc+lat call mpi_irecv(fg(1,1,lat,ixt),zimxp*zfldx,MPI_REAL8,isrc, | msgtag,MPI_COMM_WORLD,ireqrecv,ier) call mpi_wait(ireqrecv,irstat,ierwait) if (ierwait /= 0) write(6,"('>>> mp_rcvfg: error', | ' from mpi_wait, receiving lat ',i3,' from task ', | i3,': ierwait=',i3,' ireqrecv=',i3,' msgtag=',i4)") | lat,isrc,ierwait,ireqrecv,msgtag if (ier==0) then ! write(6,"('mp_rcvfg: lat=',i2,' ixt=',i2,' mytid=',i2)") ! 
      subroutine mp_sndfsech(ifld,lat,idest)
!
! Send fsech(ifld)%data(:,lat,:) to task idest (2d nlon x nlev).
! (this is called by slaves, who send to master)
!
      use fields_module,only: fsech ! type(field)
!
! Args:
      integer,intent(in) ::
     |  ifld,   ! field index to send
     |  lat,    ! latitude index to send
     |  idest   ! destination task
!
! Local:
      integer :: ier,ireqsend,msgtag
      real :: tmp(zimxp,zkmxp),fmin,fmax
!
      msgtag = mytid+idest+lat+ifld
      tmp(:,:) = fsech(ifld)%data(:,lat,:)
!     call fminmax(tmp,zimxp*zkmxp,fmin,fmax)
!     write(6,"('mp_sndfsech: ifld=',i2,' lat=',i2,' tmp fmin,max=',
!    |  2e12.4,' msgtag=',i4)") ifld,lat,fmin,fmax,msgtag
      call mpi_send(tmp,zimxp*zkmxp,MPI_REAL8,idest,msgtag,
     |  MPI_COMM_WORLD,ier)
      if (ier==0) then
!       write(6,"('mp_sndfsech: ifld=',i3,' lat=',i2,' idest=',i2)")
!    |    ifld,lat,idest
      else
        write(6,"(/,'>>> Error return from mp_sndfsech: ier=',i4,
     |    ' ifld=',i3,' lat=',i2,' idest=',i2,/)") ier,ifld,lat,idest
        stop 'mp_sndfsech'
      endif
      end subroutine mp_sndfsech
!-----------------------------------------------------------------------
      subroutine mp_rcvfsech(ifld,lat)
!
! Receive a lat slice, defining fsech(ifld)%data(:,lat,:).
! (this is called by master task to get slave slices before
!  writing fsech to secondary history)
!
      use fields_module,only: fsech ! type(field)
!
! Args:
      integer,intent(in) ::
     |  ifld,   ! field index to receive
     |  lat     ! latitude index to receive
!
! Local:
      integer :: isrc,ier,ierwait,ireqrecv,msgtag
      real :: tmp(zimxp,zkmxp),fmin,fmax
!
! Get source task id:
      isrc = itasklat(lat)
!
      msgtag = mytid+isrc+lat+ifld
      call mpi_recv(tmp,zimxp*zkmxp,MPI_REAL8,isrc,msgtag,
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier==0) then
!       write(6,"('mp_rcvfsech: ifld=',i3,' lat=',i2,' mytid=',i2)")
!    |    ifld,lat,mytid
      else
        write(6,"(/,'>>> Error return from mp_rcvfsech: ier=',i4,
     |    ' ifld=',i3,' lat=',i2,/)") ier,ifld,lat
        stop 'mp_rcvfsech'
      endif
!
! Update fsech:
!     call fminmax(tmp,zimxp*zkmxp,fmin,fmax)
!     write(6,"('mp_rcvfsech: ifld=',i2,' lat=',i2,' isrc=',i2,
!    |  ' tmp fmin,max=',2e12.4,' msgtag=',i4)") ifld,lat,isrc,fmin,
!    |  fmax,msgtag
      fsech(ifld)%data(:,lat,:) = tmp(:,:)
      end subroutine mp_rcvfsech
!-----------------------------------------------------------------------
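!
! Note on tags: msgtag = mytid+idest+lat+ifld is not unique per message;
! e.g. (ifld=2,lat=5) and (ifld=5,lat=2) between the same task pair give
! the same tag. Matching presumably stays correct because MPI delivers
! equal-tag messages between a given task pair in posting order, and
! mp_sechtomaster (below) issues sends and receives in the same
! (lat,ifld) order on both sides.
!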
      subroutine mp_snddyn(lat,idest)
!
! Send fields needed for dynamo to task idest.
! These were defined in main latitude loop by lamdas, and are
! sent to the master by the slaves after the main latitude loop
! and before the dynamo.
!
#include "dynamo.h"
#include "extrafields.h"
!
! Args:
      integer,intent(in) ::
     |  lat,    ! latitude index to send
     |  idest   ! destination task
!
! Local:
      real :: ftmp(zimxp1,zkmx),ftmpkp(zimxp1,zkmxp),ftmp1(zimxp1)
      integer :: ier
!
! SIGMA1 (Hall conductivity):
      ftmp(:,:) = sigma1(:,lat,:)
      call mpi_send(ftmp,zimxp1*zkmx,MPI_REAL8,idest,msg_dynv(1),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for SIGMA1: ier=',i3,' lat=',i3,' idest=',i2)") ier,lat,idest
!
! SIGMA2 (Pedersen conductivity):
      ftmp(:,:) = sigma2(:,lat,:)
      call mpi_send(ftmp,zimxp1*zkmx,MPI_REAL8,idest,msg_dynv(2),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for SIGMA2: ier=',i3,' lat=',i3,' idest=',i2)")
     |  ier,lat,idest
!
! Z (geopotential):
      ftmpkp(:,:) = z(:,lat,:)
      call mpi_send(ftmpkp,zimxp1*zkmxp,MPI_REAL8,idest,msg_dynv(3),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for Z: ier=',i3,' lat=',i3,' idest=',i2)") ier,lat,idest
!
! H (scale height):
      ftmp(:,:) = h(:,lat,:)
      call mpi_send(ftmp,zimxp1*zkmx,MPI_REAL8,idest,msg_dynv(4),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for H: ier=',i3,' lat=',i3,' idest=',i2)") ier,lat,idest
!
! U (eastward velocity):
      ftmp(:,:) = u(:,lat,:)
      call mpi_send(ftmp,zimxp1*zkmx,MPI_REAL8,idest,msg_dynv(5),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for U: ier=',i3,' lat=',i3,' idest=',i2)") ier,lat,idest
!
! V (northward velocity):
      ftmp(:,:) = v(:,lat,:)
      call mpi_send(ftmp,zimxp1*zkmx,MPI_REAL8,idest,msg_dynv(6),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for V: ier=',i3,' lat=',i3,' idest=',i2)") ier,lat,idest
!
! W (vertical velocity):
      ftmp(:,:) = w(:,lat,:)
      call mpi_send(ftmp,zimxp1*zkmx,MPI_REAL8,idest,msg_dynv(7),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for W: ier=',i3,' lat=',i3,' idest=',i2)") ier,lat,idest
!
! famiev_sec (amie meridional current component with height):
      ftmpkp(:,:) = famiev_sec(:,lat,:)
      call mpi_send(ftmpkp,zimxp1*zkmxp,MPI_REAL8,idest,msg_dynv(8),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for famiev_sec: ier=',i3,' lat=',i3,' idest=',i2)")
     |  ier,lat,idest
!
! fwindv_sec (wind meridional current component with height):
      ftmpkp(:,:) = fwindv_sec(:,lat,:)
      call mpi_send(ftmpkp,zimxp1*zkmxp,MPI_REAL8,idest,msg_dynv(9),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for fwindv_sec: ier=',i3,' lat=',i3,' idest=',i2)")
     |  ier,lat,idest
!
! famiev (amie meridional current component):
      ftmp1(:) = famiev(:,lat)
      call mpi_send(ftmp1,zimxp1,MPI_REAL8,idest,msg_dynv(10),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for famiev: ier=',i3,' lat=',i3,' idest=',i2)")
     |  ier,lat,idest
!
! fwindv (wind meridional current component):
      ftmp1(:) = fwindv(:,lat)
      call mpi_send(ftmp1,zimxp1,MPI_REAL8,idest,msg_dynv(11),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for fwindv: ier=',i3,' lat=',i3,' idest=',i2)")
     |  ier,lat,idest
!
! qamie_sec (amie Joule heating):
      ftmpkp(:,:) = qamie_sec(:,lat,:)
      call mpi_send(ftmpkp,zimxp1*zkmxp,MPI_REAL8,idest,msg_dynv(12),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for qamie_sec: ier=',i3,' lat=',i3,' idest=',i2)")
     |  ier,lat,idest
!
! qwind_sec (wind Joule heating):
      ftmpkp(:,:) = qwind_sec(:,lat,:)
      call mpi_send(ftmpkp,zimxp1*zkmxp,MPI_REAL8,idest,msg_dynv(13),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for qwind_sec: ier=',i3,' lat=',i3,' idest=',i2)")
     |  ier,lat,idest
!
! work_sec (mechanical work):
      ftmpkp(:,:) = work_sec(:,lat,:)
      call mpi_send(ftmpkp,zimxp1*zkmxp,MPI_REAL8,idest,msg_dynv(14),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for work_sec: ier=',i3,' lat=',i3,' idest=',i2)")
     |  ier,lat,idest
!
! wtot_sec (total mechanical power):
      ftmpkp(:,:) = wtot_sec(:,lat,:)
      call mpi_send(ftmpkp,zimxp1*zkmxp,MPI_REAL8,idest,msg_dynv(15),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for wtot_sec: ier=',i3,' lat=',i3,' idest=',i2)")
     |  ier,lat,idest
!
! tec_sec (TEC):
      ftmpkp(:,:) = tec_sec(:,lat,:)
      call mpi_send(ftmpkp,zimxp1*zkmxp,MPI_REAL8,idest,msg_dynv(16),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for tec_sec: ier=',i3,' lat=',i3,' idest=',i2)")
     |  ier,lat,idest
!
! sigp (height-integrated Pedersen conductivity):
      ftmp1(:) = sigp(:,lat)
      call mpi_send(ftmp1,zimxp1,MPI_REAL8,idest,msg_dynv(17),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for sigp: ier=',i3,' lat=',i3,' idest=',i2)")
     |  ier,lat,idest
!
! sigh (height-integrated Hall conductivity):
      ftmp1(:) = sigh(:,lat)
      call mpi_send(ftmp1,zimxp1,MPI_REAL8,idest,msg_dynv(18),
     |  MPI_COMM_WORLD,ier)
      if (ier /= 0) write(6,"('>>> mp_snddyn: error from mpi_send',
     |  ' for sigh: ier=',i3,' lat=',i3,' idest=',i2)")
     |  ier,lat,idest
!
!     write(6,"('mp_snddyn: mytid ',i2,' sent lat ',i2,' to idest ',
!    |  i2)") mytid,lat,idest
      end subroutine mp_snddyn
!-----------------------------------------------------------------------
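!
! For reference, the fixed msg_dynv(1:18) tag/field correspondence used
! by mp_snddyn above and mp_rcvdyn below (both sides must agree):
!    1=sigma1      2=sigma2      3=z           4=h          5=u
!    6=v           7=w           8=famiev_sec  9=fwindv_sec
!   10=famiev     11=fwindv     12=qamie_sec  13=qwind_sec
!   14=work_sec   15=wtot_sec   16=tec_sec    17=sigp      18=sigh
!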
      subroutine mp_rcvdyn(lat)
!
! Receive fields needed for dynamo.
! These were defined in main latitude loop by lamdas, and are
! received by the master from the slaves before the master
! calls the dynamo.
!
#include "dynamo.h"
#include "extrafields.h"
!
! Args:
      integer,intent(in) ::
     |  lat ! latitude index to receive
!
! Local:
      real :: ftmp(zimxp1,zkmx),ftmpkp(zimxp1,zkmxp),ftmp1(zimxp1)
      integer :: isrc,ier
!
! Get source task id:
      isrc = itasklat(lat)
!
! SIGMA1 (Hall conductivity):
      call mpi_recv(ftmp,zimxp1*zkmx,MPI_REAL8,isrc,msg_dynv(1),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for SIGMA1: ier=',i3,' lat=',i3)") ier,lat
      sigma1(:,lat,:) = ftmp(:,:)
!
! SIGMA2 (Pedersen conductivity):
      call mpi_recv(ftmp,zimxp1*zkmx,MPI_REAL8,isrc,msg_dynv(2),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for SIGMA2: ier=',i3,' lat=',i3)") ier,lat
      sigma2(:,lat,:) = ftmp(:,:)
!
! Z (geopotential):
      call mpi_recv(ftmpkp,zimxp1*zkmxp,MPI_REAL8,isrc,msg_dynv(3),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for Z: ier=',i3,' lat=',i3)") ier,lat
      z(:,lat,:) = ftmpkp(:,:)
!
! H (scale height):
      call mpi_recv(ftmp,zimxp1*zkmx,MPI_REAL8,isrc,msg_dynv(4),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for H: ier=',i3,' lat=',i3)") ier,lat
      h(:,lat,:) = ftmp(:,:)
!
! U (eastward velocity):
      call mpi_recv(ftmp,zimxp1*zkmx,MPI_REAL8,isrc,msg_dynv(5),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for U: ier=',i3,' lat=',i3)") ier,lat
      u(:,lat,:) = ftmp(:,:)
!
! V (northward velocity):
      call mpi_recv(ftmp,zimxp1*zkmx,MPI_REAL8,isrc,msg_dynv(6),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for V: ier=',i3,' lat=',i3)") ier,lat
      v(:,lat,:) = ftmp(:,:)
!
! W (vertical velocity):
      call mpi_recv(ftmp,zimxp1*zkmx,MPI_REAL8,isrc,msg_dynv(7),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for W: ier=',i3,' lat=',i3)") ier,lat
      w(:,lat,:) = ftmp(:,:)
!
! famiev_sec (amie meridional current component with height):
      call mpi_recv(ftmpkp,zimxp1*zkmxp,MPI_REAL8,isrc,msg_dynv(8),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for famiev_sec: ier=',i3,' lat=',i3)") ier,lat
      famiev_sec(:,lat,:) = ftmpkp(:,:)
!
! fwindv_sec (wind meridional current component with height):
      call mpi_recv(ftmpkp,zimxp1*zkmxp,MPI_REAL8,isrc,msg_dynv(9),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for fwindv_sec: ier=',i3,' lat=',i3)") ier,lat
      fwindv_sec(:,lat,:) = ftmpkp(:,:)
!
! famiev (amie meridional current component):
      call mpi_recv(ftmp1,zimxp1,MPI_REAL8,isrc,msg_dynv(10),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for famiev: ier=',i3,' lat=',i3)") ier,lat
      famiev(:,lat) = ftmp1(:)
!     write(6,"('mp_rcvdyn: lat=',i3,' famiev=',/,(6e12.4))")
!    |  lat,famiev(:,lat)
!
! fwindv (wind meridional current component):
      call mpi_recv(ftmp1,zimxp1,MPI_REAL8,isrc,msg_dynv(11),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for fwindv: ier=',i3,' lat=',i3)") ier,lat
      fwindv(:,lat) = ftmp1(:)
!
! qamie_sec (amie Joule heating):
      call mpi_recv(ftmpkp,zimxp1*zkmxp,MPI_REAL8,isrc,msg_dynv(12),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for qamie_sec: ier=',i3,' lat=',i3)") ier,lat
      qamie_sec(:,lat,:) = ftmpkp(:,:)
!
! qwind_sec (wind Joule heating):
      call mpi_recv(ftmpkp,zimxp1*zkmxp,MPI_REAL8,isrc,msg_dynv(13),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for qwind_sec: ier=',i3,' lat=',i3)") ier,lat
      qwind_sec(:,lat,:) = ftmpkp(:,:)
!
! work_sec (mechanical work):
      call mpi_recv(ftmpkp,zimxp1*zkmxp,MPI_REAL8,isrc,msg_dynv(14),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for work_sec: ier=',i3,' lat=',i3)") ier,lat
      work_sec(:,lat,:) = ftmpkp(:,:)
!
! wtot_sec (total mechanical power):
      call mpi_recv(ftmpkp,zimxp1*zkmxp,MPI_REAL8,isrc,msg_dynv(15),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for wtot_sec: ier=',i3,' lat=',i3)") ier,lat
      wtot_sec(:,lat,:) = ftmpkp(:,:)
!
! tec_sec (TEC):
      call mpi_recv(ftmpkp,zimxp1*zkmxp,MPI_REAL8,isrc,msg_dynv(16),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for tec_sec: ier=',i3,' lat=',i3)") ier,lat
      tec_sec(:,lat,:) = ftmpkp(:,:)
!
! sigp (height-integrated Pedersen conductivity):
      call mpi_recv(ftmp1,zimxp1,MPI_REAL8,isrc,msg_dynv(17),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for sigp: ier=',i3,' lat=',i3)") ier,lat
      sigp(:,lat) = ftmp1(:)
!
! sigh (height-integrated Hall conductivity):
      call mpi_recv(ftmp1,zimxp1,MPI_REAL8,isrc,msg_dynv(18),
     |  MPI_COMM_WORLD,irstat,ier)
      if (ier /= 0) write(6,"('>>> mp_rcvdyn: error from mpi_recv',
     |  ' for sigh: ier=',i3,' lat=',i3)") ier,lat
      sigh(:,lat) = ftmp1(:)
!
!     write(6,"('mp_rcvdyn: mytid ',i2,' received lat ',i2)") mytid,lat
      end subroutine mp_rcvdyn
!-----------------------------------------------------------------------
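!
! Usage sketch (hypothetical caller; the actual call site is in the
! time loop, not shown here): each slave sends its lats to the master,
! and the master receives every slave latitude before calling the
! dynamo, mirroring the pattern in mp_fgtomaster below:
!
!     if (mytid==0) then        ! master: receive slave lats
!       do j=lat1+1,zjmx
!         call mp_rcvdyn(j)
!       enddo
!     else                      ! slave: send my lats to master
!       do j=lat0,lat1
!         call mp_snddyn(j,0)
!       enddo
!     endif
!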
      subroutine mp_updatephi
!
! Dynamo has been calculated by master. Now send dynamo output
! field PHIM3D (mag coords) to the slaves:
!
#include "dynphi.h"
!
! Local:
      integer :: n,msgtag,ier
!
      if (mytid==0) then ! master send to slaves
        do n=1,ntask-1
          msgtag = 99+n
          call mpi_send(phim3d,imaxmp*jmaxm*(zkmxp+3),MPI_REAL8,n,
     |      msgtag,MPI_COMM_WORLD,ier)
          if (ier /= 0) then
            write(6,"('>>> mp_updatephi: error sending',
     |        ' phim3d from master to task ',i3,' msgtag=',i3)")
     |        n,msgtag
          else
!           write(6,"('mp_updatephi: master sent phim3d to task',
!    |        i3)") n
          endif
        enddo
      else ! slave receive from master
        msgtag = 99+mytid
        call mpi_recv(phim3d,imaxmp*jmaxm*(zkmxp+3),MPI_REAL8,0,
     |    msgtag,MPI_COMM_WORLD,irstat,ier)
        if (ier /= 0) then
          write(6,"('>>> mp_updatephi: error receiving',
     |      ' phim3d from master at task mytid=',i3,' msgtag=',i3)")
     |      mytid,msgtag
        else
!         write(6,"('mp_updatephi: task ',i3,' received phim3d ',
!    |      'from master.')") mytid
        endif
      endif
      end subroutine mp_updatephi
!-----------------------------------------------------------------------
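!
! The send/recv loop above implements a broadcast of phim3d from the
! master. A minimal sketch of the equivalent collective (an
! alternative, not what this routine does) would be a single call
! made by every task, root included:
!
!     call mpi_bcast(phim3d,imaxmp*jmaxm*(zkmxp+3),MPI_REAL8,0,
!    |  MPI_COMM_WORLD,ier)
!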
      subroutine mp_close
      integer :: ier
      call mpi_finalize(ier)
      if (ier /= 0) then
        write(6,"(/,'>>> WARNING: error from mp_close: ier=',i3)") ier
      endif
      end subroutine mp_close
!-----------------------------------------------------------------------
      integer function itasklat(lat)
!
! Return task id that is responsible for latitude index lat.
! (must be called after mp_distlat)
!
! Args:
      integer,intent(in) :: lat
!
      itasklat = (lat*ntask-1)/zjmx
      if (itasklat < 0 .or. itasklat > ntask-1)
     |  write(6,"('>>> itasklat error: lat=',i2,' zjmx=',i2,
     |  ' ntask=',i2,' itasklat=',i4)") lat,zjmx,ntask,itasklat
      end function itasklat
!-----------------------------------------------------------------------
      subroutine mp_distlat
!
! Distribute latitude indices across tasks.
! ntask must be defined, and must divide evenly into the number
! of latitudes. This sets lat0,lat1 (fgcom.h) for the main
! latitude loop in advnce.f.
!
#include "fgcom.h"
!
! Local:
      integer :: j,jj,n,i,ilat0,ilat1
      integer :: ier
!
! Number of tasks must divide evenly into number of latitudes:
      if (mod(zjmx,ntask) /= 0) then
        write(6,"(/,'>>> mp_distlat: mod(zjmx,ntask)=',i2,
     |    ' (must be 0)')") mod(zjmx,ntask)
        write(6,"(' zjmx=',i3,' ntask=',i3,/)") zjmx,ntask
        stop 'distlat'
      endif
!
! njptask = number of latitudes per task:
! (mod(zjmx,ntask) is forced to 0 above)
      njptask = zjmx/ntask
!
! Define lats_have for *all* tasks:
      do n=0,ntask-1
        ilat0 = n * njptask + 1
        ilat1 = ilat0 + njptask - 1
        jj = 0
        do j=ilat0,ilat1
          jj = jj+1
          lats_have(jj,n) = j
        enddo
!
! Define boundary lats for each task:
! (this will include j = -1, 0, zjmx+1, zjmx+2)
        lats_need(1,n) = ilat0-2
        lats_need(2,n) = ilat0-1
        lats_need(3,n) = ilat1+1
        lats_need(4,n) = ilat1+2
!
!       write(6,"(/,'mp_distlat: n=',i2,' ntask=',i2,' lat0,1=',
!    |    2i3,' lats_have=',/,(12i4))")
!    |    n,ntask,ilat0,ilat1,(lats_have(i,n),i=1,njptask)
!       write(6,"('lats_need=',4i4)") lats_need(:,n)
!
      enddo
!
! Set first and last latitude indices for *my* task for use
! in advnce (lat0 and lat1 are in common in fgcom.h):
!
      lat0 = mytid * njptask + 1
      lat1 = lat0 + njptask - 1
!
      end subroutine mp_distlat
!-----------------------------------------------------------------------
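!
! Worked example of the decomposition (hypothetical sizes): with
! zjmx=36 and ntask=4, njptask=9. Task 1 then has lats_have = 10..18
! (lat0=10, lat1=18) and lats_need = 8,9,19,20; task 0 has lats 1..9
! and lats_need = -1,0,10,11. The inverse map is itasklat above, e.g.
! itasklat(14) = (14*4-1)/36 = 1.
!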
      subroutine mp_fgtomaster
!
! Master must collect all latitudes (except its own) into
! its fg-array prior to writing a history:
!
#include "fgcom.h"
!
! Local:
      integer :: jx
!
      if (mytid==0) then ! master: receive from slaves
        do jx=lat1+1,zjmx
          call mp_rcvfg(jx,ixtimec)
        enddo
      else ! slaves: send to master
        do jx=lat0,lat1
          call mp_sndfg(jx,ixtimec,0)
        enddo
      endif
!     write(6,"('mp_fgtomaster complete')")
      end subroutine mp_fgtomaster
!-----------------------------------------------------------------------
      subroutine mp_sechtomaster
!
! Master must collect all latitudes (except its own) into its
! fsech%data arrays for diagnostics, prior to writing a secondary
! history. Any prognostics requested on secondary histories are
! already available in the fg-array (see mp_fgtomaster).
!
      use fields_module,only: fsech ! type(field)
#include "fgcom.h"
!
! Local:
      integer :: jx,i
      real :: fmin,fmax
!
!     write(6,"('enter mp_sechtomaster: mytid=',i3,
!    |  ' lat0,1=',2i3)") mytid,lat0,lat1
!
      if (mytid==0) then ! master: receive from slaves
        do jx=lat1+1,zjmx
          fields_loop1: do i=1,mxfsech
            if (.not.associated(fsech(i)%data).or.
     |        fsech(i)%prognostic) cycle fields_loop1
            call mp_rcvfsech(i,jx)
!           call fminmax(fsech(i)%data(:,jx,:),zimxp*zkmxp,fmin,fmax)
!           write(6,"('mp_sechtomaster: jx=',i3,' i=',i2,
!    |        ' master receive ',a,' fmin,max=',2e12.4)")
!    |        jx,i,fsech(i)%name,fmin,fmax
          enddo fields_loop1
        enddo
      else ! slaves: send to master
        do jx=lat0,lat1
          fields_loop2: do i=1,mxfsech
            if (.not.associated(fsech(i)%data).or.
     |        fsech(i)%prognostic) cycle fields_loop2
!           call fminmax(fsech(i)%data(:,jx,:),zimxp*zkmxp,fmin,fmax)
            call mp_sndfsech(i,jx,0)
!           write(6,"('mp_sechtomaster: jx=',i3,' i=',i2,
!    |        ' slave send ',a,' fmin,max=',2e12.4)")
!    |        jx,i,fsech(i)%name,fmin,fmax
          enddo fields_loop2
        enddo
      endif
      end subroutine mp_sechtomaster
!-----------------------------------------------------------------------
      subroutine mp_bndlats(ixt)
!
! Exchange boundary (halo) latitudes between tasks: send any of my
! lats that other tasks need, then receive the lats_need(:,mytid)
! boundary lats from the tasks that have them.
!
#include "fgcom.h"
!
! Args:
      integer,intent(in) :: ixt
!
! Local:
      integer :: n,i,j,icount,itask,jneed,msgtag,ireqsend,ireqrecv,
     |  ierwait,ier
      integer,external :: ixfind
!
!     write(6,"(/,'mp_bndlats:')")
!     do n=0,ntask-1
!       write(6,"(/,'task n=',i2,' ntask=',i2,' lats_have=',/,
!    |    (12i4))") n,ntask,(lats_have(i,n),i=1,njptask)
!       write(6,"('lats_need=',4i4)") lats_need(:,n)
!     enddo
!     write(6,"(' ')")
!
! Send any lats I have to tasks that need them:
!
      do j=lat0,lat1           ! loop through my latitudes
        do n=0,ntask-1         ! loop through other tasks
          if (n /= mytid) then ! not me
            itask = ixfind(lats_need(:,n),4,j,icount)
            if (itask > 0) then ! task n needs latitude j
!
! Send lat j to task n:
!
              msgtag = mytid+n+j
              call mpi_isend(fg(1,1,j,ixt),zimxp*zfldx,MPI_REAL8,
     |          n,msgtag,MPI_COMM_WORLD,ireqsend,ier)
              if (ier /= 0) then
                write(6,"('>>> mp_bndlats: error sending lat ',i3,
     |            ' to task ',i3,' ireqsend=',i3)") j,n,ireqsend
              else
!               write(6,"('mp_bndlats: sent lat ',i3,' to task ',
!    |            i3,' msgtag=',i4)") j,n,msgtag
              endif
            endif
          endif
        enddo
      enddo
!
! Receive boundary lats I need from other tasks:
!
      do j=1,4 ! loop through lats I need
        jneed = lats_need(j,mytid)
        if (jneed >= 1 .and. jneed <= zjmx) then ! not -1,0,zjmx+1,zjmx+2
          do n=0,ntask-1 ! loop through other tasks
            itask = ixfind(lats_have(:,n),zjmx,jneed,icount)
            if (itask > 0) then ! task n has lat which I need
!
! Receive lat jneed from task n:
!
              msgtag = mytid+n+jneed
              call mpi_irecv(fg(1,1,jneed,ixt),zimxp*zfldx,
     |          MPI_REAL8,n,msgtag,MPI_COMM_WORLD,ireqrecv,ier)
              call mpi_wait(ireqrecv,irstat,ierwait)
              if (ierwait /= 0) write(6,"('>>> mp_bndlats: error',
     |          ' from mpi_wait, receiving lat ',i3,' from task ',
     |          i3,': ierwait=',i3,' ireqrecv=',i3,' msgtag=',i4)")
     |          jneed,n,ierwait,ireqrecv,msgtag
              if (ier /= 0) then
                write(6,"('>>> mp_bndlats: error receiving lat ',
     |            i3,' from task ',i3,' ireqrecv=',i3)")
     |            jneed,n,ireqrecv
              else
!               write(6,"('mp_bndlats: received lat ',i3,' from ',
!    |            'task ',i3,' msgtag=',i4)") jneed,n,msgtag
              endif
              goto 100
            endif
          enddo
          write(6,"('>>> WARNING mp_bndlats: could not find a task',
     |      ' with lat ',i3,', which I need.')") jneed
 100      continue
        endif
      enddo
      end subroutine mp_bndlats
#endif
      end module mpi_module