MOAB: Mesh Oriented datABase  (version 5.4.1)
imoab_coupler_fortran.F90
!      This program shows how to run an imoab_coupler type test in Fortran 90.
!      The atm and ocn files are read from the repository, the meshes are migrated to the coupler PEs,
!      the intersection is carried out, followed by weight generation, tag migration and projection,
!      and migration back to the ocean PEs; the result is compared against baseline1.txt.
!      Do not change the source/target files or the layout; the test only exercises the iMOAB
!      calls, which need to give the same results as the equivalent C++ test (imoab_coupler)
!      between the atm SE and ocn FV meshes.

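!      Helper: abort the run with a message whenever an MPI or iMOAB call returns a
!      nonzero error code.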
SUBROUTINE errorout(ierr, message)
   integer ierr
   character(len=*) message
   if (ierr .ne. 0) then
      print *, message
      call exit(1)
   end if
   return
end subroutine errorout
!
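! MOABConfig.h provides MOAB_MESH_DIR and the MOAB_HAVE_* feature macros used below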
#include "moab/MOABConfig.h"

#ifndef MOAB_MESH_DIR
#error Specify MOAB_MESH_DIR path
#endif

program imoab_coupler_fortran

   use iso_c_binding
   use iMOAB

#include "mpif.h"
#include "moab/MOABConfig.h"
   !implicit none
   integer :: m ! number of command-line arguments; if less than 1, exit
   integer :: global_comm
   integer :: rankInGlobalComm, numProcesses
   integer :: ierr
   integer :: my_id, num_procs
   integer :: jgroup   ! group for global comm
   character(:), allocatable :: atmFileName
   character(:), allocatable :: ocnFileName
   character(:), allocatable :: baselineFileName
   character(:), allocatable :: readopts, fileWriteOptions
   character(len=128) :: appname
   character(:), allocatable :: weights_identifier1
   character(:), allocatable :: disc_methods1, disc_methods2, dof_tag_names1, dof_tag_names2
   integer :: disc_orders1, disc_orders2
   character(:), allocatable :: atmocn_map_file_name, intx_from_file_identifier

   ! all groups and comms are on the same number of processes, for simplicity
   integer :: atmGroup, atmComm, ocnGroup, ocnComm, cplGroup, cplComm
   integer :: atmCouComm, ocnCouComm

   integer :: cmpatm, cplatm, cmpocn, cplocn, atmocnid ! global IDs
   integer :: cmpAtmPID, cplAtmPID ! iMOAB app ids
   integer :: cmpOcnPID, cplOcnPID ! iMOAB app ids
   integer :: cplAtmOcnPID ! intx pid
   integer :: nghlay, partScheme, context_id
   integer :: fNoBubble, fMonotoneTypeID, fVolumetric, fNoConserve, fValidate, fInverseDistanceMap

   integer, dimension(2) :: tagIndex
   integer, dimension(2) :: tagTypes ! { DENSE_DOUBLE, DENSE_DOUBLE }
   integer :: atmCompNDoFs ! = disc_orders1 * disc_orders1
   integer :: ocnCompNDoFs ! = 1 (FV)
   character(:), allocatable :: bottomFields, bottomProjectedFields
   integer, dimension(3) :: nverts, nelem, nblocks, nsbc, ndbc
   double precision, allocatable :: vals(:) ! to set the double values to 0
   integer :: i ! for loops
   integer :: storLeng, eetype ! for tag defs
   character(:), allocatable :: concat_fieldname, concat_fieldnameT, outputFileOcn
   integer :: tagIndexIn2 ! not really needed
   integer :: dummyCpl, dummyRC, dummyType

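   ! external ids for the component and coupler instances of each app; the values are
   ! arbitrary, but each registered application must use a distinct id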
   cmpatm = 5
   cplatm = 6
   cmpocn = 17
   cplocn = 18
   atmocnid = 618

   call mpi_init(ierr)
   call errorout(ierr, 'mpi_init')
   call mpi_comm_rank(MPI_COMM_WORLD, my_id, ierr)
   call errorout(ierr, 'fail to get MPI rank')

   call mpi_comm_size(MPI_COMM_WORLD, num_procs, ierr)
   call errorout(ierr, 'fail to get MPI size')
   call mpi_comm_dup(MPI_COMM_WORLD, global_comm, ierr)
   call errorout(ierr, 'fail to get global comm duplicate')

   call MPI_Comm_group(global_comm, jgroup, ierr)  !  all processes in jgroup
   call errorout(ierr, 'fail to get joint group')
   ! readopts( "PARALLEL=READ_PART;PARTITION=PARALLEL_PARTITION;PARALLEL_RESOLVE_SHARED_ENTS" )
   ! readoptsLnd( "PARALLEL=READ_PART;PARTITION=PARALLEL_PARTITION" )
   atmFileName = &
     MOAB_MESH_DIR &
     //'unittest/wholeATM_T.h5m'//C_NULL_CHAR
   ocnFileName = &
     MOAB_MESH_DIR &
     //'unittest/recMeshOcn.h5m'//C_NULL_CHAR
   baselineFileName = &
     MOAB_MESH_DIR &
     //'unittest/baseline1.txt'//C_NULL_CHAR

   ! all comms span the whole world, for simplicity
   atmComm = MPI_COMM_NULL
   ocnComm = MPI_COMM_NULL
   cplComm = MPI_COMM_NULL
   atmCouComm = MPI_COMM_NULL
   ocnCouComm = MPI_COMM_NULL
   call mpi_comm_dup(global_comm, atmComm, ierr)
   call mpi_comm_dup(global_comm, ocnComm, ierr)
   call mpi_comm_dup(global_comm, cplComm, ierr)
   call mpi_comm_dup(global_comm, atmCouComm, ierr)
   call mpi_comm_dup(global_comm, ocnCouComm, ierr)

   ! create the groups corresponding to each (duplicated) communicator
   call MPI_Comm_group(atmComm, atmGroup, ierr)
   call MPI_Comm_group(ocnComm, ocnGroup, ierr)
   call MPI_Comm_group(cplComm, cplGroup, ierr)

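   ! read options: each task reads its own part (READ_PART), using the PARALLEL_PARTITION
   ! tag to assign parts to tasks, then resolves entities shared across task boundaries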
   readopts = 'PARALLEL=READ_PART;PARTITION=PARALLEL_PARTITION;PARALLEL_RESOLVE_SHARED_ENTS'//C_NULL_CHAR
   nghlay = 0 ! no ghost layers
#ifdef MOAB_HAVE_ZOLTAN
   partScheme = 2  ! RCB with zoltan
#else
   partScheme = 0  ! Trivial partitioner
#endif

   if (my_id .eq. 0) then
      print *, ' number of tasks: ', num_procs
      print *, ' Atm file: ', atmFileName
      print *, ' Ocn file: ', ocnFileName
      print *, ' baseline file: ', baselineFileName
      print *, ' using partitioner: ', partScheme
   end if

   ierr = iMOAB_Initialize()
   appname = 'ATM'//C_NULL_CHAR
   ierr = iMOAB_RegisterApplication(appname, atmComm, cmpatm, cmpAtmPID)
   appname = 'ATMX'//C_NULL_CHAR
   ierr = iMOAB_RegisterApplication(appname, cplComm, cplatm, cplAtmPID)
   appname = 'OCN'//C_NULL_CHAR
   ierr = iMOAB_RegisterApplication(appname, ocnComm, cmpocn, cmpOcnPID)
   appname = 'OCNX'//C_NULL_CHAR
   ierr = iMOAB_RegisterApplication(appname, cplComm, cplocn, cplOcnPID)

   appname = 'ATMOCN'//C_NULL_CHAR
   ierr = iMOAB_RegisterApplication(appname, cplComm, atmocnid, cplAtmOcnPID)

   ! read atm and migrate
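   ! migration pattern: the component (sender) ships its mesh over the joint communicator
   ! with iMOAB_SendMesh, the coupler (receiver) gets it with iMOAB_ReceiveMesh, and the
   ! sender then releases its buffers with iMOAB_FreeSenderBuffers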
   if (atmComm .NE. MPI_COMM_NULL) then
      ierr = iMOAB_LoadMesh(cmpAtmPID, atmFileName, readopts, nghlay)
      call errorout(ierr, 'fail to load atm')
      ierr = iMOAB_SendMesh(cmpAtmPID, atmCouComm, cplGroup, cplatm, partScheme)
      call errorout(ierr, 'fail to send atm')
   end if
   if (cplComm .NE. MPI_COMM_NULL) then
      ierr = iMOAB_ReceiveMesh(cplAtmPID, atmCouComm, atmGroup, cmpatm)
      call errorout(ierr, 'fail to receive atm')
   end if

   ! we can now free the sender buffers
   if (atmComm .NE. MPI_COMM_NULL) then
      context_id = cplatm
      ierr = iMOAB_FreeSenderBuffers(cmpAtmPID, context_id)
      call errorout(ierr, 'fail to free atm buffers')
   end if

   ! read ocn and migrate
   if (ocnComm .NE. MPI_COMM_NULL) then
      ierr = iMOAB_LoadMesh(cmpOcnPID, ocnFileName, readopts, nghlay)
      call errorout(ierr, 'fail to load ocn')
      ierr = iMOAB_SendMesh(cmpOcnPID, ocnCouComm, cplGroup, cplocn, partScheme)
      call errorout(ierr, 'fail to send ocn')
   end if
   if (cplComm .NE. MPI_COMM_NULL) then
      ierr = iMOAB_ReceiveMesh(cplOcnPID, ocnCouComm, ocnGroup, cmpocn)
      call errorout(ierr, 'fail to receive ocn')
   end if

   ! we can now free the sender buffers
   if (ocnComm .NE. MPI_COMM_NULL) then
      context_id = cplocn
      ierr = iMOAB_FreeSenderBuffers(cmpOcnPID, context_id)
      call errorout(ierr, 'fail to free ocn buffers')
   end if

   if (cplComm .NE. MPI_COMM_NULL) then
      ierr = iMOAB_ComputeMeshIntersectionOnSphere(cplAtmPID, cplOcnPID, cplAtmOcnPID)
      ! the coverage mesh was computed here, for cplAtmPID, the atm on coupler pes:
      ! basically, the atm was redistributed according to the target (ocean) partition, to
      ! "cover" the ocean partitions; one could check that the intersection is valid and
      ! write an h5m intersection file
      call errorout(ierr, 'cannot compute intersection')
   end if

   if (atmCouComm .NE. MPI_COMM_NULL) then
      ! the new graph will be used for sending data from the atm component to the coverage mesh;
      ! it involves the initial atm app (cmpAtmPID) and the atm mesh migrated to the coupler
      ! pes (cplAtmPID); the results are in cplAtmOcnPID, the intx mesh, and the remapper also
      ! has some info about the coverage mesh
      ! after this, the sending of tags from atm pes to coupler pes will use the new par comm
      ! graph, which has more precise info about what to send for the ocean cover; every time, we
      ! will use the element global id, which should uniquely identify the element
      ierr = iMOAB_CoverageGraph(atmCouComm, cmpAtmPID, cplAtmPID, cplAtmOcnPID, cmpatm, cplatm, cplocn) ! it happens over the joint communicator
      call errorout(ierr, 'cannot recompute direct coverage graph for ocean')
   end if

   weights_identifier1 = 'scalar'//C_NULL_CHAR
   disc_methods1 = 'cgll'//C_NULL_CHAR
   disc_methods2 = 'fv'//C_NULL_CHAR
   disc_orders1 = 4
   disc_orders2 = 1
   dof_tag_names1 = 'GLOBAL_DOFS'//C_NULL_CHAR
   dof_tag_names2 = 'GLOBAL_ID'//C_NULL_CHAR
   ! fMonotoneTypeID = 0, fVolumetric = 0, fValidate = 1, fNoConserve = 0, fNoBubble = 1
   fNoBubble = 1
   fMonotoneTypeID = 0
   fVolumetric = 0
   fNoConserve = 0
   fValidate = 1
   fInverseDistanceMap = 0

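   ! compute remapping weights on the coupler PEs: the source is the atm spectral-element
   ! mesh (cgll, order 4, GLOBAL_DOFS dofs), the target is the ocn finite-volume mesh
   ! (fv, order 1, GLOBAL_ID dofs); the f* flags select the bubble, monotonicity,
   ! volumetric, conservation and validation options of the weight generator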
   if (cplComm .NE. MPI_COMM_NULL) then

      ierr = iMOAB_ComputeScalarProjectionWeights( &
             cplAtmOcnPID, weights_identifier1, disc_methods1, disc_orders1, &
             disc_methods2, disc_orders2, fNoBubble, fMonotoneTypeID, fVolumetric, fInverseDistanceMap, fNoConserve, &
             fValidate, dof_tag_names1, dof_tag_names2)
      call errorout(ierr, 'cannot compute scalar projection weights')

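      ! when NetCDF support is available, also exercise map-file I/O: write the computed
      ! weights to disk, then read them back under a separate identifier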
#ifdef MOAB_HAVE_NETCDF
      atmocn_map_file_name = 'atm_ocn_map_f.nc'//C_NULL_CHAR
      ierr = iMOAB_WriteMappingWeightsToFile(cplAtmOcnPID, weights_identifier1, atmocn_map_file_name)
      call errorout(ierr, 'failed to write map file to disk')
      intx_from_file_identifier = 'map-from-file'//C_NULL_CHAR
      dummyCpl = -1
      dummyRC = -1
      dummyType = 0
      ierr = iMOAB_LoadMappingWeightsFromFile(cplAtmOcnPID, dummyCpl, dummyRC, dummyType, &
         intx_from_file_identifier, atmocn_map_file_name)
      call errorout(ierr, 'failed to load map file from disk')
#endif
   end if

   ! define the tags to transfer; tag type 1 corresponds to DENSE_DOUBLE
   tagTypes(1) = 1 ! DENSE_DOUBLE (the named constant resolves to 0 here, so use the value 1 directly)
   tagTypes(2) = 1 ! DENSE_DOUBLE
   atmCompNDoFs = disc_orders1*disc_orders1 ! GLL points per spectral element
   ocnCompNDoFs = 1 ! FV: one dof per cell

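   ! colon-separated lists define several tags in one iMOAB_DefineTagStorage call: three
   ! source fields on the atm coupler mesh and the three corresponding projected fields
   ! on the ocn coupler mesh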
   bottomFields = 'a2oTbot:a2oUbot:a2oVbot'//C_NULL_CHAR
   bottomProjectedFields = 'a2oTbot_proj:a2oUbot_proj:a2oVbot_proj'//C_NULL_CHAR

   if (cplComm .NE. MPI_COMM_NULL) then
      ierr = iMOAB_DefineTagStorage(cplAtmPID, bottomFields, tagTypes(1), atmCompNDoFs, tagIndex(1))
      call errorout(ierr, 'failed to define the field tags a2oTbot:a2oUbot:a2oVbot')
      ierr = iMOAB_DefineTagStorage(cplOcnPID, bottomProjectedFields, tagTypes(2), ocnCompNDoFs, tagIndex(2))
      call errorout(ierr, 'failed to define the field tags a2oTbot_proj:a2oUbot_proj:a2oVbot_proj')
   end if

   ! zero out the tags, to check that we are actually sending the needed data
   if (cplAtmPID .ge. 0) then

      !  Each process in the communicator will have access to a local mesh instance, which
      !  will contain the original cells in the local partition and ghost entities. The
      !  numbers of vertices, primary cells, visible blocks, sideset and nodeset boundary
      !  conditions are returned in size-3 arrays, for local, ghost and total counts.

      ierr = iMOAB_GetMeshInfo(cplAtmPID, nverts, nelem, nblocks, nsbc, ndbc)
      call errorout(ierr, 'failed to get num primary elems')
      storLeng = nelem(3)*atmCompNDoFs*3 ! 3 tags
      allocate (vals(storLeng))
      eetype = 1 ! double type

      vals(:) = 0.

      ! set the tag values to 0.0
      ierr = iMOAB_SetDoubleTagStorage(cplAtmPID, bottomFields, storLeng, eetype, vals)
      call errorout(ierr, 'cannot set tag values to zero')

   end if

   ! Define the field variables to project
   concat_fieldname = 'a2oTbot:a2oUbot:a2oVbot'//C_NULL_CHAR
   concat_fieldnameT = 'a2oTbot_proj:a2oUbot_proj:a2oVbot_proj'//C_NULL_CHAR

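   ! transfer the source tags from the atm component to the atm instance on the coupler;
   ! the context argument cplocn selects the coverage graph computed by iMOAB_CoverageGraph
   ! above, so only the data needed for the ocean cover is communicated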
   if (atmComm .NE. MPI_COMM_NULL) then

      ! As always, use nonblocking sends
      ! this is for projection to ocean:
      ierr = iMOAB_SendElementTag(cmpAtmPID, concat_fieldname, atmCouComm, cplocn)
      call errorout(ierr, 'cannot send tag values')

   end if

   if (cplComm .NE. MPI_COMM_NULL) then
      ! receive on the atm instance on coupler pes, which was redistributed according to coverage
      ierr = iMOAB_ReceiveElementTag(cplAtmPID, concat_fieldname, atmCouComm, cplocn)
      call errorout(ierr, 'cannot receive tag values')
   end if

   ! we can now free the sender buffers
   if (atmComm .NE. MPI_COMM_NULL) then

      ierr = iMOAB_FreeSenderBuffers(cmpAtmPID, cplocn) ! context is for ocean
      call errorout(ierr, 'cannot free buffers used to resend atm tag towards the coverage mesh')

   end if
   if (cplComm .ne. MPI_COMM_NULL) then

      outputFileOcn = "AtmOnCplF.h5m"//C_NULL_CHAR
      fileWriteOptions = 'PARALLEL=WRITE_PART'//C_NULL_CHAR
      ierr = iMOAB_WriteMesh(cplAtmPID, outputFileOcn, fileWriteOptions)
      call errorout(ierr, 'could not write AtmOnCplF.h5m to disk')

   end if
   if (cplComm .ne. MPI_COMM_NULL) then

      ! We have the remapping weights now. Let us apply the weights onto the tag we defined
      ! on the source mesh and get the projection on the target mesh
      ierr = iMOAB_ApplyScalarProjectionWeights(cplAtmOcnPID, weights_identifier1, concat_fieldname, &
                                                concat_fieldnameT)
      call errorout(ierr, 'failed to apply projection weights')

      outputFileOcn = "OcnOnCplF.h5m"//C_NULL_CHAR
      fileWriteOptions = 'PARALLEL=WRITE_PART'//C_NULL_CHAR
      ierr = iMOAB_WriteMesh(cplOcnPID, outputFileOcn, fileWriteOptions)
      call errorout(ierr, 'could not write OcnOnCplF.h5m to disk')

   end if
   ! send the projected tag back to the ocean pes, with send/receive tag
   ! first make sure the tags are defined, otherwise they cannot be received
   if (ocnComm .ne. MPI_COMM_NULL) then

      ierr = iMOAB_DefineTagStorage(cmpOcnPID, bottomProjectedFields, tagTypes(2), ocnCompNDoFs, tagIndexIn2)
      call errorout(ierr, 'failed to define the field tag a2oTbot_proj for receiving back the projection on the ocn pes')

   end if

   !  send the tag to the ocean pes, from the ocean mesh on the coupler pes
   !  from couComm, using the common joint comm ocn_coupler
   !  as always, use nonblocking sends
   !  original graph (context is -1)
   if (cplComm .ne. MPI_COMM_NULL) then
      context_id = cmpocn
      ierr = iMOAB_SendElementTag(cplOcnPID, concat_fieldnameT, ocnCouComm, context_id)
      call errorout(ierr, 'cannot send tag values back to ocean pes')
   end if

   if (ocnComm .ne. MPI_COMM_NULL) then
      context_id = cplocn
      ierr = iMOAB_ReceiveElementTag(cmpOcnPID, concat_fieldnameT, ocnCouComm, context_id)
      call errorout(ierr, 'cannot receive tag values from ocean mesh on coupler pes')
   end if

   if (cplComm .ne. MPI_COMM_NULL) then
      context_id = cmpocn
      ierr = iMOAB_FreeSenderBuffers(cplOcnPID, context_id)
      call errorout(ierr, 'cannot free sender buffers on coupler')
   end if

   if (ocnComm .ne. MPI_COMM_NULL) then

      outputFileOcn = "OcnWithProjF.h5m"//C_NULL_CHAR
      fileWriteOptions = 'PARALLEL=WRITE_PART'//C_NULL_CHAR
      if (my_id .eq. 0) then
         print *, ' Writing ocean mesh file with projected solution to disk: ', outputFileOcn
      end if
      ierr = iMOAB_WriteMesh(cmpOcnPID, outputFileOcn, fileWriteOptions)
      call errorout(ierr, 'could not write OcnWithProjF.h5m to disk')

   end if

   ! free up resources
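   ! deregister the iMOAB applications in the reverse order of their registration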
   ierr = iMOAB_DeregisterApplication(cplAtmOcnPID)
   call errorout(ierr, 'could not de-register the atm-ocn intersection app')

   ierr = iMOAB_DeregisterApplication(cplOcnPID)
   call errorout(ierr, 'could not de-register the OCN app on the coupler')
   ierr = iMOAB_DeregisterApplication(cmpOcnPID)
   call errorout(ierr, 'could not de-register the OCN component')
   ierr = iMOAB_DeregisterApplication(cplAtmPID)
   call errorout(ierr, 'could not de-register the ATM app on the coupler')
   ierr = iMOAB_DeregisterApplication(cmpAtmPID)
   call errorout(ierr, 'could not de-register the ATM component')

   ! Free all MPI datastructures
   call MPI_Comm_free(atmComm, ierr)
   call MPI_Group_free(atmGroup, ierr)
   call MPI_Comm_free(ocnComm, ierr)
   call MPI_Group_free(ocnGroup, ierr)
   call MPI_Comm_free(cplComm, ierr)
   call MPI_Group_free(cplGroup, ierr)

   call MPI_Comm_free(atmCouComm, ierr)
   call MPI_Comm_free(ocnCouComm, ierr)
   call MPI_Comm_free(global_comm, ierr)
   call MPI_Group_free(jgroup, ierr)
   call MPI_Finalize(ierr)

end program imoab_coupler_fortran