imoab_apg2_ol_coupler.cpp
/**
 * imoab_apg2_ol_coupler.cpp
 *
 * This imoab_apg2_ol_coupler test simulates coupling between 3 component meshes,
 * loaded from 3 files (atm phys + atm pg2, ocean, and land).
 * Atm and ocn are migrated to the coupler PEs, where their intersection is computed;
 * projection weights are then generated, so that a field from the phys atm can be
 * transferred to the ocn. Currently, the phys atm sends some data to be projected to the ocean.
 * The same is done for land, except that land is also intersected with the atm pg2 mesh;
 * land is defined by the domain mesh.
 *
 * First, atm and ocn are intersected, and comm graph 1 between atm phys and atm_cx is
 * recomputed for the ocn intx; the same is repeated for land. The atm/lnd coupling will
 * use similar FV-FV maps; maybe later we will identify some equivalent to bilinear maps.
 */
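
// High-level flow of this example (a summary of the steps carried out below):
//   1) create MPI groups/communicators for atm, ocn, lnd, coupler, and the joint comms
//   2) register iMOAB applications for each component and its coupler-side copy
//   3) load component meshes and migrate atm/ocn/lnd to the coupler PEs
//   4) compute intersections (atm-ocn, ocn-atm, atm-lnd, lnd-atm) on the coupler
//   5) recompute coverage/comm graphs so component PEs know what to send
//   6) generate FV-FV projection weights with TempestRemap
//   7) send tags, apply weights, and return the projected tags to the components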

#include "moab/Core.hpp"
#ifndef MOAB_HAVE_MPI
#error imoab coupler test requires MPI configuration
#endif

// MPI includes
#include "moab_mpi.h"
#include "moab/ParallelComm.hpp"
#include "MBParallelConventions.h"

#include "moab/iMOAB.h"
#include "TestUtil.hpp"
#include "moab/CpuTimer.hpp"
#include "moab/ProgOptions.hpp"
#include <iostream>
#include <sstream>

#include "imoab_coupler_utils.hpp"

using namespace moab;

// #define VERBOSE

#ifndef MOAB_HAVE_TEMPESTREMAP
#error The climate coupler test example requires MOAB configuration with TempestRemap
#endif

#define ENABLE_ATMOCN_COUPLING
// for land coupling with phys atm, we do not have to "compute" intersection
// it is enough to know that the ids are the same, we can project from atm to land that way, using a computed graph
#define ENABLE_ATMLND_COUPLING

int main( int argc, char* argv[] )
{
    int ierr;
    int rankInGlobalComm, numProcesses;
    MPI_Group jgroup;
    std::string readopts( "PARALLEL=READ_PART;PARTITION=PARALLEL_PARTITION;PARALLEL_RESOLVE_SHARED_ENTS" );
    std::string readoptsPhysAtm( "PARALLEL=READ_PART;PARTITION=PARALLEL_PARTITION" );

    /*if (argc < 2)
      return 0; // no test; we will force naming the land domain file*/
    // Timer data
    moab::CpuTimer timer;
    double timer_ops;
    std::string opName;

    int repartitioner_scheme = 0;
#ifdef MOAB_HAVE_ZOLTAN
    repartitioner_scheme = 2;  // use the geometry partitioner in that case
#endif

    MPI_Init( &argc, &argv );
    MPI_Comm_rank( MPI_COMM_WORLD, &rankInGlobalComm );
    MPI_Comm_size( MPI_COMM_WORLD, &numProcesses );

    MPI_Comm_group( MPI_COMM_WORLD, &jgroup );  // all processes in jgroup

    std::string atmFilename =
        "../../sandbox/MeshFiles/e3sm/ne4pg2_o240/ne4pg2_p8.h5m";  // we should use only mesh from here
    std::string atmPhysMesh =
        "../../sandbox/MeshFiles/e3sm/ne4pg2_o240/AtmPhys_pg2.h5m";  // it has some data associated to vertices, T_ph,
                                                                     // u_ph, v_ph
    // we will eventually project that data to ocean mesh, after intx atm/ocn

    // on a regular case,  5 ATM, 6 CPLATM (ATMX), 17 OCN     , 18 CPLOCN (OCNX)  ;
    // intx atm/ocn is not in e3sm yet, give a number
    //   6 * 100 + 18 = 618 : atmocnid
    //  18 * 100 + 6  = 1806 : ocnatmid on coupler pes!
    // 9 LND, 10 CPLLND
    //   6 * 100 + 10 = 610  atmlndid:
    //   10 * 100 + 6 = 1006  lndatmid: on coupler pes
    // cmpatm is for atm on atm pes
    // cmpocn is for ocean, on ocean pe
    // cplatm is for atm on coupler pes
    // cplocn is for ocean on coupler pes
    // atmocnid is for intx atm / ocn on coupler pes
    //
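    // The convention above, restated for clarity (illustration only; the test keeps the
    // hard-coded literals below): an intersection app id is built from the two
    // coupler-side component ids as srcCplId * 100 + tgtCplId, e.g.
    //   cplatm(6)  * 100 + cplocn(18) = 618  -> atmocnid
    //   cplocn(18) * 100 + cplatm(6)  = 1806 -> ocnatmid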
    int rankInAtmComm = -1;
    int cmpatm = 5, cplatm = 6;  // component ids are unique over all pes, and established in advance;
    int cmpPhysAtm = 105;        // different from atm spectral ?
#ifdef ENABLE_ATMOCN_COUPLING
    std::string ocnFilename = TestDir + "unittest/recMeshOcn.h5m";
    int rankInOcnComm       = -1;
    int cmpocn = 17, cplocn = 18, atmocnid = 618,
        ocnatmid = 1806;  // component ids are unique over all pes, and established in advance;
#endif
#ifdef ENABLE_ATMLND_COUPLING
    std::string lndFilename = "../../sandbox/MeshFiles/e3sm/ne4pg2_o240/land_p8.h5m";
    int rankInLndComm       = -1;
    int cpllnd = 10, cmplnd = 9, atmlndid = 610,
        lndatmid = 1006;  // component ids are unique over all pes, and established in advance;
#endif

    int rankInCouComm = -1;

    int nghlay = 0;  // number of ghost layers for loading the file
    std::vector< int > groupTasks;
    int startG1 = 0, startG2 = 0, endG1 = numProcesses - 1, endG2 = numProcesses - 1, startG3 = startG1, endG3 = endG1;
    int startG4 = startG1, endG4 = endG1;  // these are for coupler layout
    int context_id = -1;                   // used now for freeing buffers

    // default: load atm on 2 proc, ocean on 2, land on 2; migrate to 2 procs, then compute intx
    // later, we need to compute weight matrix with tempestremap

    ProgOptions opts;
    opts.addOpt< std::string >( "atmosphere,t", "atm mesh filename (source)", &atmFilename );
#ifdef ENABLE_ATMOCN_COUPLING
    opts.addOpt< std::string >( "ocean,m", "ocean mesh filename (target)", &ocnFilename );
#endif

#ifdef ENABLE_ATMLND_COUPLING
    opts.addOpt< std::string >( "land,l", "land mesh filename (target)", &lndFilename );
#endif

    opts.addOpt< std::string >( "physgrid,q", "physics grid file", &atmPhysMesh );

    opts.addOpt< int >( "startAtm,a", "start task for atmosphere layout", &startG1 );
    opts.addOpt< int >( "endAtm,b", "end task for atmosphere layout", &endG1 );
#ifdef ENABLE_ATMOCN_COUPLING
    opts.addOpt< int >( "startOcn,c", "start task for ocean layout", &startG2 );
    opts.addOpt< int >( "endOcn,d", "end task for ocean layout", &endG2 );
#endif

#ifdef ENABLE_ATMLND_COUPLING
    opts.addOpt< int >( "startLnd,e", "start task for land layout", &startG3 );
    opts.addOpt< int >( "endLnd,f", "end task for land layout", &endG3 );
#endif

    opts.addOpt< int >( "startCoupler,g", "start task for coupler layout", &startG4 );
    opts.addOpt< int >( "endCoupler,j", "end task for coupler layout", &endG4 );

    opts.addOpt< int >( "partitioning,p", "partitioning option for migration", &repartitioner_scheme );

    opts.parseCommandLine( argc, argv );

    char fileWriteOptions[] = "PARALLEL=WRITE_PART";

    if( !rankInGlobalComm )
    {
        std::cout << " atm file: " << atmFilename << "\n   on tasks : " << startG1 << ":" << endG1 <<
#ifdef ENABLE_ATMOCN_COUPLING
            "\n ocn file: " << ocnFilename << "\n     on tasks : " << startG2 << ":" << endG2 <<
#endif
#ifdef ENABLE_ATMLND_COUPLING
            "\n land file: " << lndFilename << "\n     on tasks : " << startG3 << ":" << endG3 <<
#endif

            "\n atm phys file: " << atmPhysMesh << "\n     on tasks : " << startG1 << ":" << endG1 <<

            "\n  partitioning (0 trivial, 1 graph, 2 geometry) " << repartitioner_scheme << "\n  ";
    }
    // load files on 3 different communicators/groups:
    // the first group has task 0, the second group tasks 0 and 1;
    // the coupler will be on joint tasks, in a third group (0 and 1, again)
    MPI_Group atmPEGroup;
    MPI_Comm atmComm;
    ierr = create_group_and_comm( startG1, endG1, jgroup, &atmPEGroup, &atmComm );
    CHECKIERR( ierr, "Cannot create atm MPI group and communicator " )

#ifdef ENABLE_ATMOCN_COUPLING
    MPI_Group ocnPEGroup;
    MPI_Comm ocnComm;
    ierr = create_group_and_comm( startG2, endG2, jgroup, &ocnPEGroup, &ocnComm );
    CHECKIERR( ierr, "Cannot create ocn MPI group and communicator " )
#endif

#ifdef ENABLE_ATMLND_COUPLING
    MPI_Group lndPEGroup;
    MPI_Comm lndComm;
    ierr = create_group_and_comm( startG3, endG3, jgroup, &lndPEGroup, &lndComm );
    CHECKIERR( ierr, "Cannot create lnd MPI group and communicator " )
#endif

    // we will always have a coupler
    MPI_Group couPEGroup;
    MPI_Comm couComm;
    ierr = create_group_and_comm( startG4, endG4, jgroup, &couPEGroup, &couComm );
    CHECKIERR( ierr, "Cannot create cpl MPI group and communicator " )

    // now, create the joint communicators atm_coupler, ocn_coupler, lnd_coupler
    // for each, we will have to create the group first, then the communicator

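    // For reference, a minimal sketch of what such a helper typically does (the actual
    // implementation lives in imoab_coupler_utils.hpp):
    //   MPI_Group_union( compPEGroup, couPEGroup, &joinGroup );
    //   MPI_Comm_create_group( MPI_COMM_WORLD, joinGroup, tag, &joinComm );
    // i.e., union the two groups, then build a communicator restricted to that union.
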
    // atm_coupler
    MPI_Group joinAtmCouGroup;
    MPI_Comm atmCouComm;
    ierr = create_joint_comm_group( atmPEGroup, couPEGroup, &joinAtmCouGroup, &atmCouComm );
    CHECKIERR( ierr, "Cannot create joint atm cou communicator" )

#ifdef ENABLE_ATMOCN_COUPLING
    // ocn_coupler
    MPI_Group joinOcnCouGroup;
    MPI_Comm ocnCouComm;
    ierr = create_joint_comm_group( ocnPEGroup, couPEGroup, &joinOcnCouGroup, &ocnCouComm );
    CHECKIERR( ierr, "Cannot create joint ocn cou communicator" )
#endif

#ifdef ENABLE_ATMLND_COUPLING
    // lnd_coupler
    MPI_Group joinLndCouGroup;
    MPI_Comm lndCouComm;
    ierr = create_joint_comm_group( lndPEGroup, couPEGroup, &joinLndCouGroup, &lndCouComm );
    CHECKIERR( ierr, "Cannot create joint lnd cou communicator" )
#endif

    ierr = iMOAB_Initialize( argc, argv );  // nothing from argc/argv is actually needed yet
    CHECKIERR( ierr, "Cannot initialize iMOAB" )

    int cmpAtmAppID       = -1;
    iMOAB_AppID cmpAtmPID = &cmpAtmAppID;  // atm
    int cplAtmAppID       = -1;            // -1 means it is not initialized
    iMOAB_AppID cplAtmPID = &cplAtmAppID;  // atm on coupler PEs
#ifdef ENABLE_ATMOCN_COUPLING
    int cmpOcnAppID       = -1;
    iMOAB_AppID cmpOcnPID = &cmpOcnAppID;                            // ocn
    int cplOcnAppID = -1, cplAtmOcnAppID = -1, cplOcnAtmAppID = -1;  // -1 means it is not initialized
    iMOAB_AppID cplOcnPID    = &cplOcnAppID;                         // ocn on coupler PEs
    iMOAB_AppID cplAtmOcnPID = &cplAtmOcnAppID;                      // intx atm - ocn on coupler PEs
    iMOAB_AppID cplOcnAtmPID = &cplOcnAtmAppID;                      // intx ocn - atm on coupler PEs

#endif

#ifdef ENABLE_ATMLND_COUPLING
    int cmpLndAppID       = -1;
    iMOAB_AppID cmpLndPID = &cmpLndAppID;                            // lnd
    int cplLndAppID = -1, cplAtmLndAppID = -1, cplLndAtmAppID = -1;  // -1 means it is not initialized
    iMOAB_AppID cplLndPID    = &cplLndAppID;                         // land on coupler PEs
    iMOAB_AppID cplAtmLndPID = &cplAtmLndAppID;  // intx atm - lnd on coupler PEs will be similar to ocn atm intx

    iMOAB_AppID cplLndAtmPID = &cplLndAtmAppID;
#endif

    int cmpPhysAtmID        = -1;
    iMOAB_AppID cmpPhAtmPID = &cmpPhysAtmID;  // phys atm; we do not need to move it to cpl!

    if( couComm != MPI_COMM_NULL )
    {
        MPI_Comm_rank( couComm, &rankInCouComm );
        // Register all the applications on the coupler PEs
        ierr = iMOAB_RegisterApplication( "ATMX", &couComm, &cplatm, cplAtmPID );  // atm on coupler pes
        CHECKIERR( ierr, "Cannot register ATM over coupler PEs" )
#ifdef ENABLE_ATMOCN_COUPLING
        ierr = iMOAB_RegisterApplication( "OCNX", &couComm, &cplocn, cplOcnPID );  // ocn on coupler pes
        CHECKIERR( ierr, "Cannot register OCN over coupler PEs" )
#endif

#ifdef ENABLE_ATMLND_COUPLING
        ierr = iMOAB_RegisterApplication( "LNDX", &couComm, &cpllnd, cplLndPID );  // lnd on coupler pes
        CHECKIERR( ierr, "Cannot register LND over coupler PEs" )
#endif
    }

    if( atmComm != MPI_COMM_NULL )
    {
        MPI_Comm_rank( atmComm, &rankInAtmComm );
        ierr = iMOAB_RegisterApplication( "ATM1", &atmComm, &cmpatm, cmpAtmPID );
        CHECKIERR( ierr, "Cannot register ATM App" )
    }

#ifdef ENABLE_ATMOCN_COUPLING
    if( ocnComm != MPI_COMM_NULL )
    {
        MPI_Comm_rank( ocnComm, &rankInOcnComm );
        ierr = iMOAB_RegisterApplication( "OCN1", &ocnComm, &cmpocn, cmpOcnPID );
        CHECKIERR( ierr, "Cannot register OCN App" )
    }
#endif

    // atm
    ierr =
        setup_component_coupler_meshes( cmpAtmPID, cmpatm, cplAtmPID, cplatm, &atmComm, &atmPEGroup, &couComm,
                                        &couPEGroup, &atmCouComm, atmFilename, readopts, nghlay, repartitioner_scheme );
    CHECKIERR( ierr, "Cannot load and migrate atm mesh" )

    MPI_Barrier( MPI_COMM_WORLD );

#ifdef ENABLE_ATMOCN_COUPLING
    // ocean
    ierr =
        setup_component_coupler_meshes( cmpOcnPID, cmpocn, cplOcnPID, cplocn, &ocnComm, &ocnPEGroup, &couComm,
                                        &couPEGroup, &ocnCouComm, ocnFilename, readopts, nghlay, repartitioner_scheme );
    CHECKIERR( ierr, "Cannot load and migrate ocn mesh" )

    if( couComm != MPI_COMM_NULL )
    {
        char outputFileTgt3[] = "recvOcn3.h5m";
        PUSH_TIMER( "Write migrated OCN mesh on coupler PEs" )
        ierr = iMOAB_WriteMesh( cplOcnPID, outputFileTgt3, fileWriteOptions );
        CHECKIERR( ierr, "cannot write ocn mesh after receiving" )
        POP_TIMER( couComm, rankInCouComm )
    }
#endif  // #ifdef ENABLE_ATMOCN_COUPLING

    MPI_Barrier( MPI_COMM_WORLD );
    // load phys atm mesh, with some data on it already
    if( atmComm != MPI_COMM_NULL )
    {

        ierr = iMOAB_RegisterApplication( "PhysAtm", &atmComm, &cmpPhysAtm, cmpPhAtmPID );
        CHECKIERR( ierr, "Cannot register Phys Atm App " )

        // load the next component mesh
        PUSH_TIMER( "Load Phys Atm  mesh" )
        ierr = iMOAB_LoadMesh( cmpPhAtmPID, atmPhysMesh.c_str(), readoptsPhysAtm.c_str(), &nghlay );
        CHECKIERR( ierr, "Cannot load Atm Phys  mesh on atm pes" )
        POP_TIMER( atmComm, rankInAtmComm )

        int nverts[3], nelem[3];
        ierr = iMOAB_GetMeshInfo( cmpPhAtmPID, nverts, nelem, 0, 0, 0 );
        CHECKIERR( ierr, "failed to get mesh info" );
        printf( "Phys Atm Component Mesh: %d vertices and %d elements\n", nverts[0], nelem[0] );
    }

    MPI_Barrier( MPI_COMM_WORLD );

#ifdef ENABLE_ATMLND_COUPLING
    // land
    if( lndComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_RegisterApplication( "LND1", &lndComm, &cmplnd, cmpLndPID );
        CHECKIERR( ierr, "Cannot register LND App " )
    }
    ierr = setup_component_coupler_meshes( cmpLndPID, cmplnd, cplLndPID, cpllnd, &lndComm, &lndPEGroup, &couComm,
                                           &couPEGroup, &lndCouComm, lndFilename, readoptsPhysAtm, nghlay,
                                           repartitioner_scheme );
    CHECKIERR( ierr, "Cannot load and migrate lnd mesh" )

    if( couComm != MPI_COMM_NULL )
    {  // write only for n==1 case
        char outputFileLnd[] = "recvLnd.h5m";
        ierr                 = iMOAB_WriteMesh( cplLndPID, outputFileLnd, fileWriteOptions );
        CHECKIERR( ierr, "cannot write lnd mesh after receiving" )
    }

#endif  // #ifdef ENABLE_ATMLND_COUPLING

#ifdef ENABLE_ATMOCN_COUPLING
    if( couComm != MPI_COMM_NULL )
    {
        // now compute intersection between OCNx and ATMx on coupler PEs
        ierr = iMOAB_RegisterApplication( "ATMOCN", &couComm, &atmocnid, cplAtmOcnPID );
        CHECKIERR( ierr, "Cannot register atm_ocn intx over coupler pes " )

        ierr = iMOAB_RegisterApplication( "OCNATM", &couComm, &ocnatmid, cplOcnAtmPID );
        CHECKIERR( ierr, "Cannot register ocn_atm intx over coupler pes " )
    }
#endif

#ifdef ENABLE_ATMLND_COUPLING
    if( couComm != MPI_COMM_NULL )
    {
        // now compute intersection between LNDx and ATMx on coupler PEs
        ierr = iMOAB_RegisterApplication( "ATMLND", &couComm, &atmlndid, cplAtmLndPID );
        CHECKIERR( ierr, "Cannot register atm_lnd intx over coupler pes " )
        // now compute intersection between ATMx and LNDx on coupler PEs
        ierr = iMOAB_RegisterApplication( "LNDATM", &couComm, &lndatmid, cplLndAtmPID );
        CHECKIERR( ierr, "Cannot register lnd_atm intx over coupler pes " )
    }
#endif

    const char* weights_identifiers[2] = { "scalar", "scalar-pc" };
    int disc_orders[3]                 = { 4, 1, 1 };
    const char* disc_methods[3]        = { "cgll", "fv", "pcloud" };
    const char* dof_tag_names[3]       = { "GLOBAL_DOFS", "GLOBAL_ID", "GLOBAL_ID" };
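    // Index mapping for the arrays above (as used in the calls below): entry 0 is the
    // spectral "cgll" discretization of order 4 (keyed on GLOBAL_DOFS), entry 1 is
    // first-order finite volume "fv" (keyed on GLOBAL_ID), and entry 2 is the point
    // cloud "pcloud" (also keyed on GLOBAL_ID); this example only uses the fv entry.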
#ifdef ENABLE_ATMOCN_COUPLING
    if( couComm != MPI_COMM_NULL )
    {
        PUSH_TIMER( "Compute ATM-OCN mesh intersection" )
        ierr = iMOAB_ComputeMeshIntersectionOnSphere(
            cplAtmPID, cplOcnPID, cplAtmOcnPID );  // coverage mesh was computed here, for cplAtmPID, atm on coupler pes
        // basically, atm was redistributed according to target (ocean) partition, to "cover" the ocean partitions
        // check if intx valid, write some h5m intx file
        CHECKIERR( ierr, "cannot compute intersection" )
        POP_TIMER( couComm, rankInCouComm )

        PUSH_TIMER( "Compute OCN-ATM mesh intersection" )
        ierr =
            iMOAB_ComputeMeshIntersectionOnSphere( cplOcnPID, cplAtmPID, cplOcnAtmPID );  // coverage mesh was computed
        CHECKIERR( ierr, "cannot compute intersection" )
        POP_TIMER( couComm, rankInCouComm )
    }

    if( atmCouComm != MPI_COMM_NULL )
    {
        // the new graph will be for sending data from atm comp to coverage mesh;
        // it involves the initial atm app, cmpAtmPID, and the migrated atm mesh on coupler pes, cplAtmPID
        // results are in cplAtmOcnPID, intx mesh; remapper also has some info about coverage mesh
        // after this, the sending of tags from atm pes to coupler pes will use the new par comm graph, that has more
        // precise info about what to send for the ocean cover; every time, we will
        // use the element global id, which should uniquely identify the element
        PUSH_TIMER( "Compute OCN coverage graph for ATM mesh" )
        ierr = iMOAB_CoverageGraph( &atmCouComm, cmpAtmPID, cplAtmPID, cplAtmOcnPID, &cmpatm, &cplatm,
                                    &cplocn );  // it happens over joint communicator
        CHECKIERR( ierr, "cannot recompute direct coverage graph for ocean" )
        POP_TIMER( atmCouComm, rankInAtmComm )  // hijack this rank
    }
    if( ocnCouComm != MPI_COMM_NULL )
    {
        // now for the second intersection, ocn-atm; will be sending data from ocean to atm
        // Can we reuse the intx atm-ocn? Not sure yet; we will compute everything again :(
        // the new graph will be for sending data from ocn comp to coverage mesh over atm;
        // it involves the initial ocn app, cmpOcnPID, and the migrated ocn mesh on coupler pes, cplOcnPID
        // results are in cplOcnAtmPID, intx mesh; remapper also has some info about coverage mesh
        // after this, the sending of tags from ocn pes to coupler pes will use the new par comm graph, that has more
        // precise info about what to send for the atm cover; every time, we will
        // use the element global id, which should uniquely identify the element
        PUSH_TIMER( "Compute ATM coverage graph for OCN mesh" )
        ierr = iMOAB_CoverageGraph( &ocnCouComm, cmpOcnPID, cplOcnPID, cplOcnAtmPID, &cmpocn, &cplocn,
                                    &cplatm );  // it happens over joint communicator, ocean + coupler
        CHECKIERR( ierr, "cannot recompute direct coverage graph for atm" )
        POP_TIMER( ocnCouComm, rankInOcnComm )  // hijack this rank
    }

    // need to compute graph between phys atm and atm/ocn intx coverage
    if( atmCouComm != MPI_COMM_NULL )
    {
        int typeA = 2;  // point cloud, phys mesh
        int typeB = 3;  // cells of atmosphere, dof based; maybe need another type for ParCommGraph graphtype ?
        ierr = iMOAB_ComputeCommGraph( cmpPhAtmPID, cplAtmOcnPID, &atmCouComm, &atmPEGroup, &couPEGroup, &typeA, &typeB,
                                       &cmpPhysAtm, &atmocnid );
        CHECKIERR( ierr, "cannot compute graph between phys grid on atm and intx between FV atm and ocn" )
    }

    // also
    // need to compute graph between ocn/atm intx and phys atm mesh
    /*if( atmCouComm != MPI_COMM_NULL )
    {
        int typeA = 3;  // cells of atmosphere, dof based; maybe need another type for ParCommGraph graphtype ?
        int typeB = 2;  // point cloud, phys mesh
        ierr = iMOAB_ComputeCommGraph( cplOcnAtmPID, cmpPhAtmPID, &atmCouComm, &couPEGroup, &atmPEGroup, &typeA, &typeB,
                                       &ocnatmid, &cmpatm );
    }*/
    // need to compute graph between atm on coupler and phys atm mesh on component
    if( atmCouComm != MPI_COMM_NULL )
    {
        int typeA = 2;  // point cloud, phys mesh
        int typeB = 3;  // cells of atmosphere, dof based; need another type for ParCommGraph graphtype ?
        ierr = iMOAB_ComputeCommGraph( cmpPhAtmPID, cplAtmPID, &atmCouComm, &atmPEGroup, &couPEGroup, &typeA, &typeB,
                                       &cmpPhysAtm, &cplatm );
        CHECKIERR( ierr, "cannot compute graph between phys grid on atm and FV atm on coupler" )
    }
#endif

#ifdef ENABLE_ATMLND_COUPLING
    if( couComm != MPI_COMM_NULL )
    {
        PUSH_TIMER( "Compute ATM-LND mesh intersection" )
        ierr = iMOAB_ComputeMeshIntersectionOnSphere( cplAtmPID, cplLndPID, cplAtmLndPID );
        CHECKIERR( ierr, "failed to compute atm - land intx for mapping" );
        POP_TIMER( couComm, rankInCouComm )

        PUSH_TIMER( "Compute LND-ATM mesh intersection" )
        ierr =
            iMOAB_ComputeMeshIntersectionOnSphere( cplLndPID, cplAtmPID, cplLndAtmPID );  // coverage mesh was computed
        CHECKIERR( ierr, "cannot compute intersection" )
        POP_TIMER( couComm, rankInCouComm )
    }
    if( atmCouComm != MPI_COMM_NULL )
    {
        // the new graph will be for sending data from atm comp to coverage mesh for land mesh;
        // it involves the initial atm app, cmpAtmPID, and the migrated atm mesh on coupler pes, cplAtmPID
        // results are in cplAtmLndPID, intx mesh; remapper also has some info about coverage mesh
        // after this, the sending of tags from atm pes to coupler pes will use the new par comm graph, that has more
        // precise info about what to send (specifically for land cover); every time,
        // we will use the element global id, which should uniquely identify the element
        PUSH_TIMER( "Compute LND coverage graph for ATM mesh" )
        ierr = iMOAB_CoverageGraph( &atmCouComm, cmpAtmPID, cplAtmPID, cplAtmLndPID, &cmpatm, &cplatm,
                                    &cpllnd );  // it happens over joint communicator
        CHECKIERR( ierr, "cannot recompute direct coverage graph for land" )
        POP_TIMER( atmCouComm, rankInAtmComm )  // hijack this rank
    }

    // we will compute comm graph between atm phys and land, directly;
    // on the new TriGrid workflow, we do need intersection between atm and land;
    if( atmCouComm != MPI_COMM_NULL )
    {
        int typeA = 2;  // point cloud
        int typeB = 3;  // type 3 for land on coupler, based on global ids for land cells ?
        ierr = iMOAB_ComputeCommGraph( cmpPhAtmPID, cplAtmLndPID, &atmCouComm, &atmPEGroup, &couPEGroup, &typeA, &typeB,
                                       &cmpPhysAtm, &atmlndid );
        CHECKIERR( ierr, "cannot compute comm graph between atm and atm/lnd intersection" )
    }

    // for reverse direction, lnd - atm intx:
    if( lndCouComm != MPI_COMM_NULL )
    {
        // now for the second intersection, lnd-atm; will be sending data from lnd to atm
        // Can we reuse the intx atm-lnd? Not sure yet; we will compute everything again :(
        // the new graph will be for sending data from lnd comp to coverage mesh over atm;
        // it involves the initial lnd app, cmpLndPID, and the migrated lnd mesh on coupler pes, cplLndPID
        // results are in cplLndAtmPID, intx mesh; remapper also has some info about coverage mesh
        // after this, the sending of tags from lnd pes to coupler pes will use the new par comm graph, that has more
        // precise info about what to send for the atm cover; every time, we will
        // use the element global id, which should uniquely identify the element
        PUSH_TIMER( "Compute ATM coverage graph for LND mesh" )
        ierr = iMOAB_CoverageGraph( &lndCouComm, cmpLndPID, cplLndPID, cplLndAtmPID, &cmplnd, &cpllnd,
                                    &cplatm );  // it happens over joint communicator, land + coupler
        CHECKIERR( ierr, "cannot recompute direct coverage graph for atm for intx with land" )
        POP_TIMER( lndCouComm, rankInLndComm )
    }
#endif
    MPI_Barrier( MPI_COMM_WORLD );

    int fMonotoneTypeID = 0, fVolumetric = 0, fValidate = 1, fNoConserve = 0, fNoBubble = 1, fInverseDistanceMap = 0;
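    // A rough guide to these TempestRemap options (to the best of our understanding):
    // fValidate checks the generated map, fNoConserve skips conservation enforcement,
    // fMonotoneTypeID selects a monotonicity constraint (0 = none), fVolumetric and
    // fInverseDistanceMap select alternative map types, and fNoBubble drops the bubble
    // correction used with cgll discretizations; here only validation is enabled.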

#ifdef ENABLE_ATMOCN_COUPLING
#ifdef VERBOSE
    if( couComm != MPI_COMM_NULL )
    {
        char serialWriteOptions[] = "";  // for writing in serial
        std::stringstream outf;
        outf << "intxAtmOcn_" << rankInCouComm << ".h5m";
        std::string intxfile = outf.str();  // write in serial the intx file, for debugging
        ierr                 = iMOAB_WriteMesh( cplAtmOcnPID, intxfile.c_str(), serialWriteOptions );
        CHECKIERR( ierr, "cannot write intx file result" )
    }
#endif

    if( couComm != MPI_COMM_NULL )
    {
        PUSH_TIMER( "Compute the projection weights with TempestRemap" )
        ierr = iMOAB_ComputeScalarProjectionWeights( cplAtmOcnPID, weights_identifiers[0], disc_methods[1],
                                                     &disc_orders[1],                   // fv
                                                     disc_methods[1], &disc_orders[1],  // fv
                                                     &fNoBubble, &fMonotoneTypeID, &fVolumetric, &fInverseDistanceMap,
                                                     &fNoConserve, &fValidate, dof_tag_names[1], dof_tag_names[1] );
        CHECKIERR( ierr, "cannot compute scalar projection weights" )
        POP_TIMER( couComm, rankInCouComm )
    }

    // now compute weight maps for ocn to atm mapping;
    if( couComm != MPI_COMM_NULL )
    {
        PUSH_TIMER( "Compute the projection weights with TempestRemap for ocn - atm map" )
        ierr = iMOAB_ComputeScalarProjectionWeights( cplOcnAtmPID, weights_identifiers[0], disc_methods[1],
                                                     &disc_orders[1],                   // fv
                                                     disc_methods[1], &disc_orders[1],  // fv
                                                     &fNoBubble, &fMonotoneTypeID, &fVolumetric, &fInverseDistanceMap,
                                                     &fNoConserve, &fValidate, dof_tag_names[1], dof_tag_names[1] );
        CHECKIERR( ierr, "cannot compute scalar projection weights" )
        POP_TIMER( couComm, rankInCouComm )
    }

#endif

    MPI_Barrier( MPI_COMM_WORLD );

    // we do not need to compute weights ? just send tags between dofs ?

#ifdef ENABLE_ATMLND_COUPLING
    if( couComm != MPI_COMM_NULL )
    {
        // Compute the weights to project the solution from ATM component to LND component
        PUSH_TIMER( "Compute ATM-LND remapping weights" )
        ierr = iMOAB_ComputeScalarProjectionWeights( cplAtmLndPID, weights_identifiers[0], disc_methods[1],
                                                     &disc_orders[1], disc_methods[1], &disc_orders[1], &fNoBubble,
                                                     &fMonotoneTypeID, &fVolumetric, &fInverseDistanceMap, &fNoConserve,
                                                     &fValidate, dof_tag_names[1], dof_tag_names[1] );
        CHECKIERR( ierr, "failed to compute remapping projection weights for ATM-LND scalar non-conservative field" );
        POP_TIMER( couComm, rankInCouComm )

        // Compute the weights to project the solution from LND component to ATM component
        PUSH_TIMER( "Compute LND-ATM remapping weights" )
        ierr = iMOAB_ComputeScalarProjectionWeights( cplLndAtmPID, weights_identifiers[0], disc_methods[1],
                                                     &disc_orders[1], disc_methods[1], &disc_orders[1], &fNoBubble,
                                                     &fMonotoneTypeID, &fVolumetric, &fInverseDistanceMap, &fNoConserve,
                                                     &fValidate, dof_tag_names[1], dof_tag_names[1] );
        CHECKIERR( ierr, "failed to compute remapping projection weights for LND-ATM scalar non-conservative field" );
        POP_TIMER( couComm, rankInCouComm )
    }
#endif

    int tagIndex[2];
    int tagTypes[2]  = { DENSE_DOUBLE, DENSE_DOUBLE };
    int atmCompNDoFs = 1 /* FV disc_orders[0]*disc_orders[0] */, ocnCompNDoFs = 1 /*FV*/;

    const char* bottomFields = "T_ph:u_ph:v_ph";  // same as on phys atm mesh

    const char* bottomProjectedFields = "T_proj:u_proj:v_proj";

    // coming from ocn to atm, project back from T_proj
    const char* bottomFields2 = "T2_ph:u2_ph:v2_ph";  // same as on phys atm mesh

    // coming from lnd to atm, project back from T_proj
    const char* bottomFields3 = "T3_ph:u3_ph:v3_ph";  // same as on phys atm mesh

    // tags on phys grid atm mesh
    const char* bottomPhProjectedFields = "Tph_proj:uph_proj:vph_proj";

    // tags on phys grid atm mesh
    const char* bottomPhLndProjectedFields =
        "TphL_proj:uphL_proj:vphL_proj";  // L and Lnd signify the fields projected from land

    if( couComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomFields, &tagTypes[0], &atmCompNDoFs, &tagIndex[0] );
        CHECKIERR( ierr, "failed to define the field tags T_ph, u_ph, v_ph " );
#ifdef ENABLE_ATMOCN_COUPLING

        ierr = iMOAB_DefineTagStorage( cplOcnPID, bottomProjectedFields, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1] );
        CHECKIERR( ierr, "failed to define the field tags T_proj, u_proj, v_proj " );
#endif

#ifdef ENABLE_ATMLND_COUPLING
        // need to define tag storage for land; will use the same T_proj, u_proj, v_proj name, because it will be
        // used to send between point clouds !
        // use the same ndof and same size as ocnCompNDoFs (1) !!
        ierr = iMOAB_DefineTagStorage( cplLndPID, bottomProjectedFields, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1] );
        CHECKIERR( ierr, "failed to define the field tags T_proj, u_proj, v_proj" );
#endif
    }
    // need to make sure that the coverage mesh (created during the intx method) receives the tags
    // that need to be projected to the target; so far, the coverage mesh has only the ids and global
    // dofs, and the migrate method may need changes to accommodate any GLL tag.
    // Now send a tag from the original atmosphere (cmpAtmPID) towards the migrated coverage mesh
    // (cplAtmPID), using the new coverage graph communicator.

    // zero out the tag first, to check that we are actually sending needed data
    {
        if( cplAtmAppID >= 0 )
        {
            int nverts[3], nelem[3], nblocks[3], nsbc[3], ndbc[3];
            /*
             * Each process in the communicator will have access to a local mesh instance, which will contain the
             * original cells in the local partition and ghost entities. Number of vertices, primary cells, visible
             * blocks, number of sidesets and nodesets boundary conditions will be returned in size 3 arrays,
             * for local, ghost and total numbers.
             */
            ierr = iMOAB_GetMeshInfo( cplAtmPID, nverts, nelem, nblocks, nsbc, ndbc );
            CHECKIERR( ierr, "failed to get num primary elems" );
            int numAllElem = nelem[2];
            std::vector< double > vals;
            int storLeng = atmCompNDoFs * numAllElem * 3;  // 3 tags
            vals.resize( storLeng );
            for( int k = 0; k < storLeng; k++ )
                vals[k] = 0.;
            int eetype = 1;
            ierr       = iMOAB_SetDoubleTagStorage( cplAtmPID, bottomFields, &storLeng, &eetype, &vals[0] );
            CHECKIERR( ierr, "cannot zero out the tag" )
            // set the tag to 0
        }
    }

#ifdef ENABLE_ATMOCN_COUPLING
    PUSH_TIMER( "Send/receive data from atm component to coupler in ocn context" )
    if( atmComm != MPI_COMM_NULL )
    {
        // as always, use nonblocking sends
        // this is for projection to ocean:
        ierr = iMOAB_SendElementTag( cmpPhAtmPID, "T_ph:u_ph:v_ph", &atmCouComm, &atmocnid );
        CHECKIERR( ierr, "cannot send tag values" )
    }
    if( couComm != MPI_COMM_NULL )
    {
        // receive on atm on coupler pes, that was redistributed according to coverage
        ierr = iMOAB_ReceiveElementTag( cplAtmOcnPID, "T_ph:u_ph:v_ph", &atmCouComm, &cmpPhysAtm );
        CHECKIERR( ierr, "cannot receive tag values on intx atm ocn" )
    }
    POP_TIMER( MPI_COMM_WORLD, rankInGlobalComm )

    // we can now free the sender buffers
    if( atmComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_FreeSenderBuffers( cmpPhAtmPID, &atmocnid );  // context is for ocean
        CHECKIERR( ierr, "cannot free buffers used to resend atm tag towards the coverage mesh" )
    }
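    // This send/receive/free sequence is the transfer pattern used throughout this test:
    // the sender posts nonblocking sends keyed on a context id (the receiver app id, or an
    // intx/graph id), the receiver posts the matching receive with the sender's id as its
    // context, and only then does the sender free its buffers for that same context.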
    /*
    #ifdef VERBOSE
      if (couComm != MPI_COMM_NULL) {
        char outputFileRecvd[] = "recvAtmCoupOcn.h5m";
        ierr = iMOAB_WriteMesh(cplAtmPID, outputFileRecvd, fileWriteOptions );
      }
    #endif
    */

    if( couComm != MPI_COMM_NULL )
    {
        const char* concat_fieldname  = "T_ph:u_ph:v_ph";
        const char* concat_fieldnameT = "T_proj:u_proj:v_proj";

        /* We have the remapping weights now. Let us apply the weights onto the tag we defined
           on the source mesh and get the projection on the target mesh */
        PUSH_TIMER( "Apply Scalar projection weights" )
        ierr = iMOAB_ApplyScalarProjectionWeights( cplAtmOcnPID, weights_identifiers[0], concat_fieldname,
                                                   concat_fieldnameT );
        CHECKIERR( ierr, "failed to compute projection weight application" );
        POP_TIMER( couComm, rankInCouComm )

        char outputFileTgt[] = "fOcnOnCpl3.h5m";
        ierr                 = iMOAB_WriteMesh( cplOcnPID, outputFileTgt, fileWriteOptions );
        CHECKIERR( ierr, "failed to write fOcnOnCpl3.h5m " );
    }
    // send the projected tag back to ocean pes, with send/receive tag
    if( ocnComm != MPI_COMM_NULL )
    {
        int tagIndexIn2;
        ierr = iMOAB_DefineTagStorage( cmpOcnPID, bottomProjectedFields, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2 );
        CHECKIERR( ierr,
                   "failed to define the field tag for receiving back the tag T_proj, u_proj, v_proj on ocn pes" );
    }
    // send the tag to ocean pes, from ocean mesh on coupler pes
    //   from couComm, using common joint comm ocn_coupler
    // as always, use nonblocking sends
    // original graph (context is -1)
    if( couComm != MPI_COMM_NULL )
    {
        context_id = cmpocn;
        ierr       = iMOAB_SendElementTag( cplOcnPID, "T_proj:u_proj:v_proj", &ocnCouComm, &context_id );
        CHECKIERR( ierr, "cannot send tag values back to ocean pes" )
    }

    // receive on component 2, ocean
    if( ocnComm != MPI_COMM_NULL )
    {
        context_id = cplocn;
        ierr       = iMOAB_ReceiveElementTag( cmpOcnPID, "T_proj:u_proj:v_proj", &ocnCouComm, &context_id );
        CHECKIERR( ierr, "cannot receive tag values from ocean mesh on coupler pes" )
    }

    MPI_Barrier( MPI_COMM_WORLD );

    if( couComm != MPI_COMM_NULL )
    {
        context_id = cmpocn;
        ierr       = iMOAB_FreeSenderBuffers( cplOcnPID, &context_id );
        CHECKIERR( ierr, "cannot free buffers related to send tag" )
    }
    if( ocnComm != MPI_COMM_NULL )
    {
        char outputFileOcn[] = "OcnWithProj3.h5m";
        ierr                 = iMOAB_WriteMesh( cmpOcnPID, outputFileOcn, fileWriteOptions );
        CHECKIERR( ierr, "cannot write OcnWithProj3.h5m" )
    }
#endif

    MPI_Barrier( MPI_COMM_WORLD );

#ifdef ENABLE_ATMLND_COUPLING
    // start land projection:

    // we will use the comm graph computed earlier with:
    //  ierr = iMOAB_ComputeCommGraph( cmpPhAtmPID, cplAtmLndPID, &atmCouComm, &atmPEGroup, &couPEGroup,
    //     &typeA, &typeB, &cmpPhysAtm, &atmlndid );

    PUSH_TIMER( "Send/receive data from phys comp atm to coupler land, using computed graph" )
    if( atmComm != MPI_COMM_NULL )
    {

        // as always, use nonblocking sends
        // this is for projection to land:
        ierr = iMOAB_SendElementTag( cmpPhAtmPID, "T_ph:u_ph:v_ph", &atmCouComm, &atmlndid );
        CHECKIERR( ierr, "cannot send tag values towards cpl on land" )
    }
    if( couComm != MPI_COMM_NULL )
    {
        // receive on lnd on coupler pes
        ierr = iMOAB_ReceiveElementTag( cplAtmLndPID, "T_ph:u_ph:v_ph", &atmCouComm, &cmpPhysAtm );
        CHECKIERR( ierr, "cannot receive tag values on land on coupler, for atm coupling" )
    }
    POP_TIMER( MPI_COMM_WORLD, rankInGlobalComm )

    // we can now free the sender buffers
    if( atmComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_FreeSenderBuffers( cmpPhAtmPID, &atmlndid );
        CHECKIERR( ierr, "cannot free buffers used to resend atm tag towards the land on coupler" )
    }

    if( couComm != MPI_COMM_NULL )
    {
        const char* concat_fieldname  = "T_ph:u_ph:v_ph";
        const char* concat_fieldnameT = "T_proj:u_proj:v_proj";

        /* We have the remapping weights now. Let us apply the weights onto the tag we defined
           on the source mesh and get the projection on the target mesh */
        PUSH_TIMER( "Apply Scalar projection weights" )
        ierr = iMOAB_ApplyScalarProjectionWeights( cplAtmLndPID, weights_identifiers[0], concat_fieldname,
                                                   concat_fieldnameT );
        CHECKIERR( ierr, "failed to compute projection weight application" );
        POP_TIMER( couComm, rankInCouComm )

        char outputFileTgt[] = "fLndOnCpl3.h5m";
        ierr                 = iMOAB_WriteMesh( cplLndPID, outputFileTgt, fileWriteOptions );
        CHECKIERR( ierr, "cannot write land on coupler" )
    }

    // end land proj
    // send the tags back to land pes, from land mesh on coupler pes
    // send from cplLndPID to cmpLndPID, using common joint comm
    // as always, use nonblocking sends
    // original graph
    // int context_id = -1;
    // the land might not have these tags yet; it should be a different name for land
    // in e3sm we do have different names
    if( lndComm != MPI_COMM_NULL )
    {
        int tagIndexIn2;
        ierr = iMOAB_DefineTagStorage( cmpLndPID, bottomProjectedFields, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2 );
        CHECKIERR( ierr,
                   "failed to define the field tag for receiving back the tag T_proj, u_proj, v_proj on lnd pes" );
    }
    if( couComm != MPI_COMM_NULL )
    {
        context_id = cmplnd;
        ierr       = iMOAB_SendElementTag( cplLndPID, "T_proj:u_proj:v_proj", &lndCouComm, &context_id );
        CHECKIERR( ierr, "cannot send tag values back to land pes" )
    }
    // receive on component 3, land
    if( lndComm != MPI_COMM_NULL )
    {
        context_id = cpllnd;
        ierr       = iMOAB_ReceiveElementTag( cmpLndPID, "T_proj:u_proj:v_proj", &lndCouComm, &context_id );
        CHECKIERR( ierr, "cannot receive tag values from land mesh on coupler pes" )
    }

    MPI_Barrier( MPI_COMM_WORLD );
    if( couComm != MPI_COMM_NULL )
    {
        context_id = cmplnd;
        ierr       = iMOAB_FreeSenderBuffers( cplLndPID, &context_id );
        CHECKIERR( ierr, "cannot free buffers related to sending tags from coupler to land pes" )
    }
    if( lndComm != MPI_COMM_NULL )
    {
        char outputFileLnd[] = "LndWithProj3.h5m";
        ierr                 = iMOAB_WriteMesh( cmpLndPID, outputFileLnd, fileWriteOptions );
        CHECKIERR( ierr, "cannot write land file with projection" )
    }

    // we have received on the ocean component some data projected from atm, via the coupler;
    // the path was T_ph (atm phys) to atm on the FV mesh, where we projected it, then sent it back to the ocean comp.
    // Now go in the reverse direction, for ocn - atm coupling:
#ifdef ENABLE_ATMOCN_COUPLING
    PUSH_TIMER( "Send/receive data from ocn component to coupler in atm context" )
    if( ocnComm != MPI_COMM_NULL )
    {
        // as always, use nonblocking sends
        // this is for projection to atm:
        ierr = iMOAB_SendElementTag( cmpOcnPID, "T_proj:u_proj:v_proj", &ocnCouComm, &cplatm );
        CHECKIERR( ierr, "cannot send tag values T_proj, etc towards ocn coupler" )
    }
    if( couComm != MPI_COMM_NULL )
    {
        // receive on ocn on coupler pes, that was redistributed according to coverage
        ierr = iMOAB_ReceiveElementTag( cplOcnPID, "T_proj:u_proj:v_proj", &ocnCouComm, &cplatm );
        CHECKIERR( ierr, "cannot receive tag values" )
    }
    POP_TIMER( MPI_COMM_WORLD, rankInGlobalComm )

    // we can now free the sender buffers
    if( ocnComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_FreeSenderBuffers( cmpOcnPID, &cplatm );  // context is for atm
        CHECKIERR( ierr, "cannot free buffers used to send ocn tag towards the coverage mesh for atm" )
    }
    //#ifdef VERBOSE
    if( couComm != MPI_COMM_NULL )
    {
        // write only for n==1 case
        char outputFileRecvd[] = "recvOcnCpl.h5m";
        ierr                   = iMOAB_WriteMesh( cplOcnPID, outputFileRecvd, fileWriteOptions );
        CHECKIERR( ierr, "could not write recvOcnCpl.h5m to disk" )
    }
    //#endif

    if( couComm != MPI_COMM_NULL )
    {
        /* We have the remapping weights now. Let us apply the weights onto the tag we defined
           on the source mesh and get the projection on the target mesh */

        const char* concat_fieldname  = "T_proj:u_proj:v_proj";  // this is now the source tag
        const char* concat_fieldnameT = "T2_ph:u2_ph:v2_ph";     // the projected tag, on the atm coupler mesh
        // make sure the new tags exist on atm coupler mesh;
        ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomFields2, &tagTypes[0], &atmCompNDoFs, &tagIndex[0] );
        CHECKIERR( ierr, "failed to define the field tags T2_ph, u2_ph, v2_ph" );

        PUSH_TIMER( "Apply Scalar projection weights" )
        ierr = iMOAB_ApplyScalarProjectionWeights( cplOcnAtmPID, weights_identifiers[0], concat_fieldname,
                                                   concat_fieldnameT );
        CHECKIERR( ierr, "failed to compute projection weight application" );
        POP_TIMER( couComm, rankInCouComm )

        char outputFileTgt[] = "fAtm2OnCpl2.h5m";
        ierr                 = iMOAB_WriteMesh( cplAtmPID, outputFileTgt, fileWriteOptions );
        CHECKIERR( ierr, "could not write fAtm2OnCpl2.h5m to disk" )
    }

    // send the projected tag back to atm pes, with send/receive partial par graph computed
    //  from intx ocn/atm towards atm physics mesh !
    if( atmComm != MPI_COMM_NULL )
    {
        int tagIndexIn2;
        ierr =
            iMOAB_DefineTagStorage( cmpPhAtmPID, bottomPhProjectedFields, &tagTypes[1], &atmCompNDoFs, &tagIndexIn2 );
        CHECKIERR( ierr, "failed to define the field tag for receiving back the tag "
                         "Tph_proj, etc on atm pes" );
    }
    // send the tag to atm pes, from atm pg2 mesh on coupler pes
    //   from couComm, using common joint comm atm_coupler, and computed graph, phys-grid -> atm FV on cpl, in reverse
    // as always, use nonblocking sends
    // graph context comes from commgraph ?

    //      ierr = iMOAB_ComputeCommGraph(  cmpPhAtmPID, cplAtmPID, &atmCouComm, &atmPEGroup, &couPEGroup, &typeA,
    //      &typeB,
    //  &cmpatm, &cplatm );

    if( couComm != MPI_COMM_NULL )
    {
        context_id = cmpPhysAtm;
        ierr       = iMOAB_SendElementTag( cplAtmPID, "T2_ph:u2_ph:v2_ph", &atmCouComm, &context_id );
        CHECKIERR( ierr, "cannot send tag values back to atm pes" )
    }

    // receive on component atm phys mesh
    if( atmComm != MPI_COMM_NULL )
    {
        context_id = cplatm;
        ierr       = iMOAB_ReceiveElementTag( cmpPhAtmPID, "Tph_proj:uph_proj:vph_proj", &atmCouComm, &context_id );
        CHECKIERR( ierr, "cannot receive tag values from atm pg2 mesh on coupler pes" )
    }

    MPI_Barrier( MPI_COMM_WORLD );

    if( couComm != MPI_COMM_NULL )
    {
        context_id = cmpatm;
        ierr       = iMOAB_FreeSenderBuffers( cplAtmPID, &context_id );
        CHECKIERR( ierr, "cannot free buffers for sending T2_ph from cpl to phys atm" )
    }
    if( atmComm != MPI_COMM_NULL )  // write only for n==1 case
    {
        char outputFileAtmPh[] = "AtmPhysProj.h5m";
        ierr                   = iMOAB_WriteMesh( cmpPhAtmPID, outputFileAtmPh, fileWriteOptions );
        CHECKIERR( ierr, "could not write AtmPhysProj.h5m to disk" )
    }
#endif
    // lnd - atm coupling; go in the reverse direction now, from lnd to atm, using the lnd - atm intx and weights:
    // send back the T_proj, etc., from the land comp to the land coupler
#ifdef ENABLE_ATMLND_COUPLING
    PUSH_TIMER( "Send/receive data from lnd component to coupler in atm context" )
    if( lndComm != MPI_COMM_NULL )
    {
        // as always, use nonblocking sends
        ierr = iMOAB_SendElementTag( cmpLndPID, "T_proj:u_proj:v_proj", &lndCouComm, &cplatm );
        CHECKIERR( ierr, "cannot send tag values T_proj, etc towards lnd coupler" )
    }
    if( couComm != MPI_COMM_NULL )
    {
        // receive on lnd on coupler pes, that was redistributed according to coverage
        ierr = iMOAB_ReceiveElementTag( cplLndPID, "T_proj:u_proj:v_proj", &lndCouComm, &cplatm );
        CHECKIERR( ierr, "cannot receive tag values on land" )
    }
    POP_TIMER( MPI_COMM_WORLD, rankInGlobalComm )

    // we can now free the sender buffers
    if( lndComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_FreeSenderBuffers( cmpLndPID, &cplatm );  // context is for atm
        CHECKIERR( ierr, "cannot free buffers used to send lnd tag towards the coverage mesh for atm" )
    }
    //#ifdef VERBOSE
    if( couComm != MPI_COMM_NULL )
    {

        char outputFileRecvd[] = "recvLndCpl.h5m";
        ierr                   = iMOAB_WriteMesh( cplLndPID, outputFileRecvd, fileWriteOptions );
        CHECKIERR( ierr, "could not write recvLndCpl.h5m to disk" )
    }
    //#endif

    if( couComm != MPI_COMM_NULL )
    {
        PUSH_TIMER( "Apply Scalar projection weights for lnd - atm coupling" )
        const char* concat_fieldname  = "T_proj:u_proj:v_proj";  // this is now the source tag, on land cpl
        const char* concat_fieldnameT = "T3_ph:u3_ph:v3_ph";     // the projected tag, on the atm coupler mesh
        // make sure the new tags exist on atm coupler mesh;
        ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomFields3, &tagTypes[0], &atmCompNDoFs, &tagIndex[0] );
        CHECKIERR( ierr, "failed to define the field tags T3_ph, u3_ph, v3_ph" );

        ierr = iMOAB_ApplyScalarProjectionWeights( cplLndAtmPID, weights_identifiers[0], concat_fieldname,
                                                   concat_fieldnameT );
        CHECKIERR( ierr, "failed to compute projection weight application from lnd to atm " );
        POP_TIMER( couComm, rankInCouComm )

        char outputFileTgt[] = "fAtm3OnCpl.h5m";  // this is for T3_ph, etc
        ierr                 = iMOAB_WriteMesh( cplAtmPID, outputFileTgt, fileWriteOptions );
        CHECKIERR( ierr, "could not write fAtm3OnCpl.h5m to disk" )
    }

    // send the projected tag back to atm pes, with send/receive partial par graph computed
    //  from intx lnd/atm towards atm physics mesh !
    if( atmComm != MPI_COMM_NULL )
    {
        int tagIndexIn2;
        ierr = iMOAB_DefineTagStorage( cmpPhAtmPID, bottomPhLndProjectedFields, &tagTypes[1], &atmCompNDoFs,
                                       &tagIndexIn2 );
        CHECKIERR( ierr, "failed to define the field tag for receiving back the tag "
                         "TphL_proj, etc on atm pes" );
    }
    // send the tag to atm pes, from atm pg2 mesh on coupler pes
    //   from couComm, using common joint comm atm_coupler, and computed graph, phys-grid -> atm FV on cpl, in reverse
    // as always, use nonblocking sends
    // graph context comes from commgraph ?

    //      ierr = iMOAB_ComputeCommGraph(  cmpPhAtmPID, cplAtmPID, &atmCouComm, &atmPEGroup, &couPEGroup, &typeA,
    //      &typeB,
    //  &cmpatm, &cplatm );

    if( couComm != MPI_COMM_NULL )
    {
        context_id = cmpPhysAtm;
        ierr       = iMOAB_SendElementTag( cplAtmPID, "T3_ph:u3_ph:v3_ph", &atmCouComm, &context_id );
        CHECKIERR( ierr, "cannot send tag values back to atm pes" )
    }

    // receive on component atm phys mesh
    if( atmComm != MPI_COMM_NULL )
    {
        context_id = cplatm;
        ierr       = iMOAB_ReceiveElementTag( cmpPhAtmPID, "TphL_proj:uphL_proj:vphL_proj", &atmCouComm, &context_id );
        CHECKIERR( ierr, "cannot receive tag values from atm pg2 mesh on coupler pes" )
    }

    MPI_Barrier( MPI_COMM_WORLD );

    if( couComm != MPI_COMM_NULL )
    {
        context_id = cmpatm;
        ierr       = iMOAB_FreeSenderBuffers( cplAtmPID, &context_id );
        CHECKIERR( ierr, "cannot free buffers used for sending back atm tags " )
    }
    if( atmComm != MPI_COMM_NULL )  // write only for n==1 case
    {
        char outputFileAtmPh[] = "AtmPhysProj3.h5m";
        ierr                   = iMOAB_WriteMesh( cmpPhAtmPID, outputFileAtmPh, fileWriteOptions );
        CHECKIERR( ierr, "could not write AtmPhysProj3.h5m to disk" )
    }
#endif
    // we could deregister cplLndAtmPID
    if( couComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_DeregisterApplication( cplLndAtmPID );
        CHECKIERR( ierr, "cannot deregister app intx LA" )
    }

    // we could deregister cplAtmLndPID
    if( couComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_DeregisterApplication( cplAtmLndPID );
        CHECKIERR( ierr, "cannot deregister app intx AL" )
    }
#endif  // ENABLE_ATMLND_COUPLING

#ifdef ENABLE_ATMOCN_COUPLING
    if( couComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_DeregisterApplication( cplOcnAtmPID );
        CHECKIERR( ierr, "cannot deregister app intx OA" )
    }
    if( couComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_DeregisterApplication( cplAtmOcnPID );
        CHECKIERR( ierr, "cannot deregister app intx AO" )
    }
#endif

#ifdef ENABLE_ATMLND_COUPLING
    if( lndComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_DeregisterApplication( cmpLndPID );
        CHECKIERR( ierr, "cannot deregister app LND1" )
    }
#endif  // ENABLE_ATMLND_COUPLING
    if( atmComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_DeregisterApplication( cmpPhAtmPID );
        CHECKIERR( ierr, "cannot deregister app PhysAtm " )
    }
#ifdef ENABLE_ATMOCN_COUPLING
    if( ocnComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_DeregisterApplication( cmpOcnPID );
        CHECKIERR( ierr, "cannot deregister app OCN1" )
    }
#endif  // ENABLE_ATMOCN_COUPLING

    if( atmComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_DeregisterApplication( cmpAtmPID );
        CHECKIERR( ierr, "cannot deregister app ATM1" )
    }

#ifdef ENABLE_ATMLND_COUPLING
    if( couComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_DeregisterApplication( cplLndPID );
        CHECKIERR( ierr, "cannot deregister app LNDX" )
    }
#endif  // ENABLE_ATMLND_COUPLING

#ifdef ENABLE_ATMOCN_COUPLING
    if( couComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_DeregisterApplication( cplOcnPID );
        CHECKIERR( ierr, "cannot deregister app OCNX" )
    }
#endif  // ENABLE_ATMOCN_COUPLING

    if( couComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_DeregisterApplication( cplAtmPID );
        CHECKIERR( ierr, "cannot deregister app ATMX" )
    }

    ierr = iMOAB_Finalize();
    CHECKIERR( ierr, "did not finalize iMOAB" )

    // free atm coupler group and comm
    if( MPI_COMM_NULL != atmCouComm ) MPI_Comm_free( &atmCouComm );
    MPI_Group_free( &joinAtmCouGroup );
    if( MPI_COMM_NULL != atmComm ) MPI_Comm_free( &atmComm );

#ifdef ENABLE_ATMOCN_COUPLING
    if( MPI_COMM_NULL != ocnComm ) MPI_Comm_free( &ocnComm );
    // free ocn - coupler group and comm
    if( MPI_COMM_NULL != ocnCouComm ) MPI_Comm_free( &ocnCouComm );
    MPI_Group_free( &joinOcnCouGroup );
#endif

#ifdef ENABLE_ATMLND_COUPLING
    if( MPI_COMM_NULL != lndComm ) MPI_Comm_free( &lndComm );
    // free land - coupler group and comm
    if( MPI_COMM_NULL != lndCouComm ) MPI_Comm_free( &lndCouComm );
    MPI_Group_free( &joinLndCouGroup );
#endif

    if( MPI_COMM_NULL != couComm ) MPI_Comm_free( &couComm );

    MPI_Group_free( &atmPEGroup );
#ifdef ENABLE_ATMOCN_COUPLING
    MPI_Group_free( &ocnPEGroup );
#endif
#ifdef ENABLE_ATMLND_COUPLING
    MPI_Group_free( &lndPEGroup );
#endif
    MPI_Group_free( &couPEGroup );
    MPI_Group_free( &jgroup );

    MPI_Finalize();

    return 0;
}