MOAB: Mesh Oriented datABase  (version 5.3.1)
imoab_coupler.cpp
Go to the documentation of this file.
00001 /*
00002  * This imoab_coupler test will simulate coupling between 3 components
00003  * 3 meshes will be loaded from 3 files (atm, ocean, lnd), and they will be migrated to
00004  * all processors (coupler pes); then, intx will be performed between migrated meshes
00005  * and weights will be generated, such that a field from one component will be transferred to
00006  * the other component
00007  * currently, the atm will send some data to be projected to ocean and land components
00008  *
00009  * first, intersect atm and ocn, and recompute comm graph 1 between atm and atm_cx, for ocn intx
00010  * second, intersect atm and lnd, and recompute comm graph 2 between atm and atm_cx for lnd intx
00011 
00012  */
00013 
00014 #include "moab/Core.hpp"
00015 #ifndef MOAB_HAVE_MPI
00016 #error mbtempest tool requires MPI configuration
00017 #endif
00018 
00019 // MPI includes
00020 #include "moab_mpi.h"
00021 #include "moab/ParallelComm.hpp"
00022 #include "MBParallelConventions.h"
00023 
00024 #include "moab/iMOAB.h"
00025 #include "TestUtil.hpp"
00026 #include "moab/CpuTimer.hpp"
00027 #include "moab/ProgOptions.hpp"
00028 #include <iostream>
00029 #include <sstream>
00030 
00031 #include "imoab_coupler_utils.hpp"
00032 
00033 using namespace moab;
00034 
00035 //#define GRAPH_INFO
00036 
00037 #ifndef MOAB_HAVE_TEMPESTREMAP
00038 #error The climate coupler test example requires MOAB configuration with TempestRemap
00039 #endif
00040 
00041 #define ENABLE_ATMOCN_COUPLING
00042 #define ENABLE_ATMLND_COUPLING
00043 
00044 #if( !defined( ENABLE_ATMOCN_COUPLING ) && !defined( ENABLE_ATMLND_COUPLING ) )
00045 #error Enable either OCN (ENABLE_ATMOCN_COUPLING) and/or LND (ENABLE_ATMLND_COUPLING) for coupling
00046 #endif
00047 
00048 int main( int argc, char* argv[] )
00049 {
00050     int ierr;
00051     int rankInGlobalComm, numProcesses;
00052     MPI_Group jgroup;
00053     std::string readopts( "PARALLEL=READ_PART;PARTITION=PARALLEL_PARTITION;PARALLEL_RESOLVE_SHARED_ENTS" );
00054     std::string readoptsLnd( "PARALLEL=READ_PART;PARTITION=PARALLEL_PARTITION" );
00055 
00056     // Timer data
00057     moab::CpuTimer timer;
00058     double timer_ops;
00059     std::string opName;
00060 
00061     int repartitioner_scheme = 0;
00062 #ifdef MOAB_HAVE_ZOLTAN
00063     repartitioner_scheme = 2;  // use the graph partitioner in that case
00064 #endif
00065 
00066     MPI_Init( &argc, &argv );
00067     MPI_Comm_rank( MPI_COMM_WORLD, &rankInGlobalComm );
00068     MPI_Comm_size( MPI_COMM_WORLD, &numProcesses );
00069 
00070     MPI_Comm_group( MPI_COMM_WORLD, &jgroup );  // all processes in jgroup
00071 
00072     std::string atmFilename = TestDir + "unittest/wholeATM_T.h5m";
00073     // on a regular case,  5 ATM, 6 CPLATM (ATMX), 17 OCN     , 18 CPLOCN (OCNX)  ;
00074     // intx atm/ocn is not in e3sm yet, give a number
00075     //   6 * 100+ 18 = 618 : atmocnid
00076     // 9 LND, 10 CPLLND
00077     //   6 * 100 + 10 = 610  atmlndid:
00078     // cmpatm is for atm on atm pes
00079     // cmpocn is for ocean, on ocean pe
00080     // cplatm is for atm on coupler pes
00081     // cplocn is for ocean on coupler pes
00082     // atmocnid is for intx atm / ocn on coupler pes
00083     //
00084     int rankInAtmComm = -1;
00085     int cmpatm        = 5,
00086         cplatm        = 6;  // component ids are unique over all pes, and established in advance;
00087 #ifdef ENABLE_ATMOCN_COUPLING
00088     std::string ocnFilename = TestDir + "unittest/recMeshOcn.h5m";
00089     std::string baseline    = TestDir + "unittest/baseline1.txt";
00090     int rankInOcnComm       = -1;
00091     int cmpocn = 17, cplocn = 18,
00092         atmocnid = 618;  // component ids are unique over all pes, and established in advance;
00093 #endif
00094 
00095 #ifdef ENABLE_ATMLND_COUPLING
00096     std::string lndFilename = TestDir + "unittest/wholeLnd.h5m";
00097     int cpllnd = 10, cmplnd = 9,
00098         atmlndid = 610;  // component ids are unique over all pes, and established in advance;
00099 #endif
00100 
00101     int rankInCouComm = -1;
00102 
00103     int nghlay = 0;  // number of ghost layers for loading the file
00104     std::vector< int > groupTasks;
00105     int startG1 = 0, startG2 = 0, endG1 = numProcesses - 1, endG2 = numProcesses - 1, startG3 = startG1,
00106         endG3   = endG1;                   // Support launch of imoab_coupler test on any combo of 2*x processes
00107     int startG4 = startG1, endG4 = endG1;  // these are for coupler layout
00108     int context_id = -1;                   // used now for freeing buffers
00109 
00110     // default: load atm on 2 proc, ocean on 2, land on 2; migrate to 2 procs, then compute intx
00111     // later, we need to compute weight matrix with tempestremap
00112 
00113     ProgOptions opts;
00114     opts.addOpt< std::string >( "atmosphere,t", "atm mesh filename (source)", &atmFilename );
00115 #ifdef ENABLE_ATMOCN_COUPLING
00116     opts.addOpt< std::string >( "ocean,m", "ocean mesh filename (target)", &ocnFilename );
00117 #endif
00118 #ifdef ENABLE_ATMLND_COUPLING
00119     opts.addOpt< std::string >( "land,l", "land mesh filename (target)", &lndFilename );
00120 #endif
00121     opts.addOpt< int >( "startAtm,a", "start task for atmosphere layout", &startG1 );
00122     opts.addOpt< int >( "endAtm,b", "end task for atmosphere layout", &endG1 );
00123 #ifdef ENABLE_ATMOCN_COUPLING
00124     opts.addOpt< int >( "startOcn,c", "start task for ocean layout", &startG2 );
00125     opts.addOpt< int >( "endOcn,d", "end task for ocean layout", &endG2 );
00126 #endif
00127 #ifdef ENABLE_ATMLND_COUPLING
00128     opts.addOpt< int >( "startLnd,e", "start task for land layout", &startG3 );
00129     opts.addOpt< int >( "endLnd,f", "end task for land layout", &endG3 );
00130 #endif
00131 
00132     opts.addOpt< int >( "startCoupler,g", "start task for coupler layout", &startG4 );
00133     opts.addOpt< int >( "endCoupler,j", "end task for coupler layout", &endG4 );
00134 
00135     opts.addOpt< int >( "partitioning,p", "partitioning option for migration", &repartitioner_scheme );
00136 
00137     int n = 1;  // number of send/receive / project / send back cycles
00138     opts.addOpt< int >( "iterations,n", "number of iterations for coupler", &n );
00139 
00140     bool no_regression_test = false;
00141     opts.addOpt< void >( "no_regression,r", "do not do regression test against baseline 1", &no_regression_test );
00142     opts.parseCommandLine( argc, argv );
00143 
00144     char fileWriteOptions[] = "PARALLEL=WRITE_PART";
00145 
00146     if( !rankInGlobalComm )
00147     {
00148         std::cout << " atm file: " << atmFilename << "\n   on tasks : " << startG1 << ":" << endG1 <<
00149 #ifdef ENABLE_ATMOCN_COUPLING
00150             "\n ocn file: " << ocnFilename << "\n     on tasks : " << startG2 << ":" << endG2 <<
00151 #endif
00152 #ifdef ENABLE_ATMLND_COUPLING
00153             "\n lnd file: " << lndFilename << "\n     on tasks : " << startG3 << ":" << endG3 <<
00154 #endif
00155             "\n  partitioning (0 trivial, 1 graph, 2 geometry) " << repartitioner_scheme << "\n  ";
00156     }
00157 
00158     // load files on 3 different communicators, groups
00159     // first group has task 0, second group tasks 0 and 1
00160     // coupler will be on joint tasks, will be on a third group (0 and 1, again)
00163     MPI_Group atmPEGroup;
00164     MPI_Comm atmComm;
00165     ierr = create_group_and_comm( startG1, endG1, jgroup, &atmPEGroup, &atmComm );
00166     CHECKIERR( ierr, "Cannot create atm MPI group and communicator " )
00167 
00168 #ifdef ENABLE_ATMOCN_COUPLING
00169     MPI_Group ocnPEGroup;
00170     MPI_Comm ocnComm;
00171     ierr = create_group_and_comm( startG2, endG2, jgroup, &ocnPEGroup, &ocnComm );
00172     CHECKIERR( ierr, "Cannot create ocn MPI group and communicator " )
00173 #endif
00174 
00175 #ifdef ENABLE_ATMLND_COUPLING
00176     MPI_Group lndPEGroup;
00177     MPI_Comm lndComm;
00178     ierr = create_group_and_comm( startG3, endG3, jgroup, &lndPEGroup, &lndComm );
00179     CHECKIERR( ierr, "Cannot create lnd MPI group and communicator " )
00180 #endif
00181 
00182     // we will always have a coupler
00183     MPI_Group couPEGroup;
00184     MPI_Comm couComm;
00185     ierr = create_group_and_comm( startG4, endG4, jgroup, &couPEGroup, &couComm );
00186     CHECKIERR( ierr, "Cannot create cpl MPI group and communicator " )
00187 
00188     // atm_coupler
00189     MPI_Group joinAtmCouGroup;
00190     MPI_Comm atmCouComm;
00191     ierr = create_joint_comm_group( atmPEGroup, couPEGroup, &joinAtmCouGroup, &atmCouComm );
00192     CHECKIERR( ierr, "Cannot create joint atm cou communicator" )
00193 
00194 #ifdef ENABLE_ATMOCN_COUPLING
00195     // ocn_coupler
00196     MPI_Group joinOcnCouGroup;
00197     MPI_Comm ocnCouComm;
00198     ierr = create_joint_comm_group( ocnPEGroup, couPEGroup, &joinOcnCouGroup, &ocnCouComm );
00199     CHECKIERR( ierr, "Cannot create joint ocn cou communicator" )
00200 #endif
00201 
00202 #ifdef ENABLE_ATMLND_COUPLING
00203     // lnd_coupler
00204     MPI_Group joinLndCouGroup;
00205     MPI_Comm lndCouComm;
00206     ierr = create_joint_comm_group( lndPEGroup, couPEGroup, &joinLndCouGroup, &lndCouComm );
00207     CHECKIERR( ierr, "Cannot create joint ocn cou communicator" )
00208 #endif
00209 
00210     ierr = iMOAB_Initialize( argc, argv );  // not really needed anything from argc, argv, yet; maybe we should
00211     CHECKIERR( ierr, "Cannot initialize iMOAB" )
00212 
00213     int cmpAtmAppID       = -1;
00214     iMOAB_AppID cmpAtmPID = &cmpAtmAppID;  // atm
00215     int cplAtmAppID       = -1;            // -1 means it is not initialized
00216     iMOAB_AppID cplAtmPID = &cplAtmAppID;  // atm on coupler PEs
00217 #ifdef ENABLE_ATMOCN_COUPLING
00218     int cmpOcnAppID       = -1;
00219     iMOAB_AppID cmpOcnPID = &cmpOcnAppID;        // ocn
00220     int cplOcnAppID = -1, cplAtmOcnAppID = -1;   // -1 means it is not initialized
00221     iMOAB_AppID cplOcnPID    = &cplOcnAppID;     // ocn on coupler PEs
00222     iMOAB_AppID cplAtmOcnPID = &cplAtmOcnAppID;  // intx atm -ocn on coupler PEs
00223 #endif
00224 
00225 #ifdef ENABLE_ATMLND_COUPLING
00226     int cmpLndAppID       = -1;
00227     iMOAB_AppID cmpLndPID = &cmpLndAppID;        // lnd
00228     int cplLndAppID = -1, cplAtmLndAppID = -1;   // -1 means it is not initialized
00229     iMOAB_AppID cplLndPID    = &cplLndAppID;     // land on coupler PEs
00230     iMOAB_AppID cplAtmLndPID = &cplAtmLndAppID;  // intx atm - lnd on coupler PEs
00231 #endif
00232 
00233     if( couComm != MPI_COMM_NULL )
00234     {
00235         MPI_Comm_rank( couComm, &rankInCouComm );
00236         // Register all the applications on the coupler PEs
00237         ierr = iMOAB_RegisterApplication( "ATMX", &couComm, &cplatm,
00238                                           cplAtmPID );  // atm on coupler pes
00239         CHECKIERR( ierr, "Cannot register ATM over coupler PEs" )
00240 #ifdef ENABLE_ATMOCN_COUPLING
00241         ierr = iMOAB_RegisterApplication( "OCNX", &couComm, &cplocn,
00242                                           cplOcnPID );  // ocn on coupler pes
00243         CHECKIERR( ierr, "Cannot register OCN over coupler PEs" )
00244 #endif
00245 #ifdef ENABLE_ATMLND_COUPLING
00246         ierr = iMOAB_RegisterApplication( "LNDX", &couComm, &cpllnd,
00247                                           cplLndPID );  // lnd on coupler pes
00248         CHECKIERR( ierr, "Cannot register LND over coupler PEs" )
00249 #endif
00250     }
00251 
00252     if( atmComm != MPI_COMM_NULL )
00253     {
00254         MPI_Comm_rank( atmComm, &rankInAtmComm );
00255         ierr = iMOAB_RegisterApplication( "ATM1", &atmComm, &cmpatm, cmpAtmPID );
00256         CHECKIERR( ierr, "Cannot register ATM App" )
00257     }
00258 
00259 #ifdef ENABLE_ATMOCN_COUPLING
00260     if( ocnComm != MPI_COMM_NULL )
00261     {
00262         MPI_Comm_rank( ocnComm, &rankInOcnComm );
00263         ierr = iMOAB_RegisterApplication( "OCN1", &ocnComm, &cmpocn, cmpOcnPID );
00264         CHECKIERR( ierr, "Cannot register OCN App" )
00265     }
00266 #endif
00267 
00268     // atm
00269     ierr =
00270         setup_component_coupler_meshes( cmpAtmPID, cmpatm, cplAtmPID, cplatm, &atmComm, &atmPEGroup, &couComm,
00271                                         &couPEGroup, &atmCouComm, atmFilename, readopts, nghlay, repartitioner_scheme );
00272     CHECKIERR( ierr, "Cannot load and migrate atm mesh" )
00273 #ifdef VERBOSE
00274     if( couComm != MPI_COMM_NULL && 1 == n )
00275     {  // write only for n==1 case
00276         char outputFileTgt3[] = "recvAtm.h5m";
00277         ierr                  = iMOAB_WriteMesh( cplAtmPID, outputFileTgt3, fileWriteOptions, strlen( outputFileTgt3 ),
00278                                 strlen( fileWriteOptions ) );
00279         CHECKIERR( ierr, "cannot write atm mesh after receiving" )
00280     }
00281 #endif
00282 #ifdef GRAPH_INFO
00283     if( atmComm != MPI_COMM_NULL )
00284     {
00285 
00286         int is_sender = 1;
00287         int context   = -1;
00288         iMOAB_DumpCommGraph( cmpAtmPID, &context, &is_sender, "AtmMigS" );
00289     }
00290     if( couComm != MPI_COMM_NULL )
00291     {
00292         int is_sender = 0;
00293         int context   = -1;
00294         iMOAB_DumpCommGraph( cplAtmPID, &context, &is_sender, "AtmMigR" );
00295     }
00296 #endif
00297     MPI_Barrier( MPI_COMM_WORLD );
00298 
00299 #ifdef ENABLE_ATMOCN_COUPLING
00300     // ocean
00301     ierr =
00302         setup_component_coupler_meshes( cmpOcnPID, cmpocn, cplOcnPID, cplocn, &ocnComm, &ocnPEGroup, &couComm,
00303                                         &couPEGroup, &ocnCouComm, ocnFilename, readopts, nghlay, repartitioner_scheme );
00304     CHECKIERR( ierr, "Cannot load and migrate ocn mesh" )
00305 
00306     MPI_Barrier( MPI_COMM_WORLD );
00307 
00308 #ifdef VERBOSE
00309     if( couComm != MPI_COMM_NULL && 1 == n )
00310     {  // write only for n==1 case
00311         char outputFileTgt3[] = "recvOcn.h5m";
00312         ierr                  = iMOAB_WriteMesh( cplOcnPID, outputFileTgt3, fileWriteOptions );
00313         CHECKIERR( ierr, "cannot write ocn mesh after receiving" )
00314     }
00315 #endif
00316 #endif  // #ifdef ENABLE_ATMOCN_COUPLING
00317 
00318 #ifdef ENABLE_ATMLND_COUPLING
00319     // land
00320     if( lndComm != MPI_COMM_NULL )
00321     {
00322         ierr = iMOAB_RegisterApplication( "LND1", &lndComm, &cmplnd, cmpLndPID );
00323         CHECKIERR( ierr, "Cannot register LND App " )
00324     }
00325     ierr = setup_component_coupler_meshes( cmpLndPID, cmplnd, cplLndPID, cpllnd, &lndComm, &lndPEGroup, &couComm,
00326                                            &couPEGroup, &lndCouComm, lndFilename, readoptsLnd, nghlay,
00327                                            repartitioner_scheme );
00328 
00329     if( couComm != MPI_COMM_NULL && 1 == n )
00330     {  // write only for n==1 case
00331         char outputFileLnd[] = "recvLnd.h5m";
00332         ierr                 = iMOAB_WriteMesh( cplLndPID, outputFileLnd, fileWriteOptions );
00333         CHECKIERR( ierr, "cannot write lnd mesh after receiving" )
00334     }
00335 
00336 #endif  // #ifdef ENABLE_ATMLND_COUPLING
00337 
00338     MPI_Barrier( MPI_COMM_WORLD );
00339 
00340 #ifdef ENABLE_ATMOCN_COUPLING
00341     if( couComm != MPI_COMM_NULL )
00342     {
00343         // now compute intersection between OCNx and ATMx on coupler PEs
00344         ierr = iMOAB_RegisterApplication( "ATMOCN", &couComm, &atmocnid, cplAtmOcnPID );
00345         CHECKIERR( ierr, "Cannot register ocn_atm intx over coupler pes " )
00346     }
00347 #endif
00348 #ifdef ENABLE_ATMLND_COUPLING
00349     if( couComm != MPI_COMM_NULL )
00350     {
00351         // now compute intersection between LNDx and ATMx on coupler PEs
00352         ierr = iMOAB_RegisterApplication( "ATMLND", &couComm, &atmlndid, cplAtmLndPID );
00353         CHECKIERR( ierr, "Cannot register ocn_atm intx over coupler pes " )
00354     }
00355 #endif
00356 
00357     int disc_orders[3]                       = { 4, 1, 1 };
00358     const std::string weights_identifiers[2] = { "scalar", "scalar-pc" };
00359     const std::string disc_methods[3]        = { "cgll", "fv", "pcloud" };
00360     const std::string dof_tag_names[3]       = { "GLOBAL_DOFS", "GLOBAL_ID", "GLOBAL_ID" };
00361 #ifdef ENABLE_ATMOCN_COUPLING
00362     if( couComm != MPI_COMM_NULL )
00363     {
00364         PUSH_TIMER( "Compute ATM-OCN mesh intersection" )
00365         ierr = iMOAB_ComputeMeshIntersectionOnSphere(
00366             cplAtmPID, cplOcnPID,
00367             cplAtmOcnPID );  // coverage mesh was computed here, for cplAtmPID, atm on coupler pes
00368         // basically, atm was redistributed according to target (ocean) partition, to "cover" the
00369         // ocean partitions check if intx valid, write some h5m intx file
00370         CHECKIERR( ierr, "cannot compute intersection" )
00371         POP_TIMER( couComm, rankInCouComm )
00372 #ifdef VERBOSE
00373         char prefix[] = "intx_atmocn";
00374         ierr          = iMOAB_WriteLocalMesh( cplAtmOcnPID, prefix, strlen( prefix ) );
00375         CHECKIERR( ierr, "failed to write local intx mesh" );
00376 #endif
00377     }
00378 
00379     if( atmCouComm != MPI_COMM_NULL )
00380     {
00381 
00382         // the new graph will be for sending data from atm comp to coverage mesh;
00383         // it involves initial atm app; cmpAtmPID; also migrate atm mesh on coupler pes, cplAtmPID
00384         // results are in cplAtmOcnPID, intx mesh; remapper also has some info about coverage mesh
00385         // after this, the sending of tags from atm pes to coupler pes will use the new par comm
00386         // graph, that has more precise info about what to send for ocean cover ; every time, we
00387         // will
00388         //  use the element global id, which should uniquely identify the element
00389         PUSH_TIMER( "Compute OCN coverage graph for ATM mesh" )
00390         ierr = iMOAB_CoverageGraph( &atmCouComm, cmpAtmPID, cplAtmPID, cplAtmOcnPID, &cmpatm, &cplatm,
00391                                     &cplocn );  // it happens over joint communicator
00392         CHECKIERR( ierr, "cannot recompute direct coverage graph for ocean" )
00393         POP_TIMER( atmCouComm, rankInAtmComm )  // hijack this rank
00394     }
00395 #endif
00396 
00397 #ifdef ENABLE_ATMLND_COUPLING
00398     if( couComm != MPI_COMM_NULL )
00399     {
00400         PUSH_TIMER( "Compute ATM-LND mesh intersection" )
00401         ierr = iMOAB_ComputePointDoFIntersection( cplAtmPID, cplLndPID, cplAtmLndPID );
00402         CHECKIERR( ierr, "failed to compute point-cloud mapping" );
00403         POP_TIMER( couComm, rankInCouComm )
00404     }
00405     if( atmCouComm != MPI_COMM_NULL )
00406     {
00407         // the new graph will be for sending data from atm comp to coverage mesh for land mesh;
00408         // it involves initial atm app; cmpAtmPID; also migrate atm mesh on coupler pes, cplAtmPID
00409         // results are in cplAtmLndPID, intx mesh; remapper also has some info about coverage mesh
00410         // after this, the sending of tags from atm pes to coupler pes will use the new par comm
00411         // graph, that has more precise info about what to send (specifically for land cover); every
00412         // time,
00413         /// we will use the element global id, which should uniquely identify the element
00414         PUSH_TIMER( "Compute LND coverage graph for ATM mesh" )
00415         ierr = iMOAB_CoverageGraph( &atmCouComm, cmpAtmPID, cplAtmPID, cplAtmLndPID, &cmpatm, &cplatm,
00416                                     &cpllnd );  // it happens over joint communicator
00417         CHECKIERR( ierr, "cannot recompute direct coverage graph for land" )
00418         POP_TIMER( atmCouComm, rankInAtmComm )  // hijack this rank
00419     }
00420 #endif
00421 
00422     MPI_Barrier( MPI_COMM_WORLD );
00423 
00424     int fMonotoneTypeID = 0, fVolumetric = 0, fValidate = 1, fNoConserve = 0, fNoBubble = 1;
00425 
00426 #ifdef ENABLE_ATMOCN_COUPLING
00427 #ifdef VERBOSE
00428     if( couComm != MPI_COMM_NULL && 1 == n )
00429     {                                    // write only for n==1 case
00430         char serialWriteOptions[] = "";  // for writing in serial
00431         std::stringstream outf;
00432         outf << "intxAtmOcn_" << rankInCouComm << ".h5m";
00433         std::string intxfile = outf.str();  // write in serial the intx file, for debugging
00434         ierr                 = iMOAB_WriteMesh( cplAtmOcnPID, intxfile.c_str(), serialWriteOptions );
00435         CHECKIERR( ierr, "cannot write intx file result" )
00436     }
00437 #endif
00438 
00439     if( couComm != MPI_COMM_NULL )
00440     {
00441         PUSH_TIMER( "Compute the projection weights with TempestRemap" )
00442         ierr =
00443             iMOAB_ComputeScalarProjectionWeights( cplAtmOcnPID, weights_identifiers[0].c_str(), disc_methods[0].c_str(),
00444                                                   &disc_orders[0], disc_methods[1].c_str(), &disc_orders[1], &fNoBubble,
00445                                                   &fMonotoneTypeID, &fVolumetric, &fNoConserve, &fValidate,
00446                                                   dof_tag_names[0].c_str(), dof_tag_names[1].c_str() );
00447         CHECKIERR( ierr, "cannot compute scalar projection weights" )
00448         POP_TIMER( couComm, rankInCouComm )
00449 
00450         // Let us now write the map file to disk and then read it back to test the I/O API in iMOAB
00451 #ifdef MOAB_HAVE_NETCDF
00452         {
00453             const std::string atmocn_map_file_name = "atm_ocn_map.nc";
00454             ierr = iMOAB_WriteMappingWeightsToFile( cplAtmOcnPID, weights_identifiers[0].c_str(),
00455                                                     atmocn_map_file_name.c_str() );
00456             CHECKIERR( ierr, "failed to write map file to disk" );
00457 
00458             const std::string intx_from_file_identifier = "map-from-file";
00459             ierr = iMOAB_LoadMappingWeightsFromFile( cplAtmOcnPID, intx_from_file_identifier.c_str(),
00460                                                      atmocn_map_file_name.c_str() );
00461             CHECKIERR( ierr, "failed to load map file from disk" );
00462         }
00463 #endif
00464     }
00465 
00466 #endif
00467 
00468     MPI_Barrier( MPI_COMM_WORLD );
00469 
00470 #ifdef ENABLE_ATMLND_COUPLING
00471     if( couComm != MPI_COMM_NULL )
00472     {
00473         /* Compute the weights to project the solution from ATM component to LND component */
00474         PUSH_TIMER( "Compute ATM-LND remapping weights" )
00475         ierr =
00476             iMOAB_ComputeScalarProjectionWeights( cplAtmLndPID, weights_identifiers[1].c_str(), disc_methods[0].c_str(),
00477                                                   &disc_orders[0], disc_methods[2].c_str(), &disc_orders[2], &fNoBubble,
00478                                                   &fMonotoneTypeID, &fVolumetric, &fNoConserve, &fValidate,
00479                                                   dof_tag_names[0].c_str(), dof_tag_names[2].c_str() );
00480         CHECKIERR( ierr, "failed to compute remapping projection weights for ATM-LND scalar "
00481                          "non-conservative field" );
00482         POP_TIMER( couComm, rankInCouComm )
00483 
00484         // Let us now write the map file to disk and then read it back to test the I/O API in iMOAB
00485         // VSM: TODO: This does not work since the LND model is a point cloud and we do not initialize
00486         // data correctly in TempestOnlineMap::WriteParallelWeightsToFile routine.
00487         // {
00488         //     const char* atmlnd_map_file_name = "atm_lnd_map.nc";
00489         //     ierr = iMOAB_WriteMappingWeightsToFile( cplAtmLndPID, weights_identifiers[1], atmlnd_map_file_name );
00490         //     CHECKIERR( ierr, "failed to write map file to disk" );
00491 
00492         //     const char* intx_from_file_identifier = "map-from-file";
00493         //     ierr = iMOAB_LoadMappingWeightsFromFile( cplAtmLndPID, intx_from_file_identifier, atmlnd_map_file_name,
00494         //                                              NULL, NULL, NULL,
00495         //                                             );
00496         //     CHECKIERR( ierr, "failed to load map file from disk" );
00497         // }
00498     }
00499 #endif
00500 
00501     int tagIndex[2];
00502     int tagTypes[2]  = { DENSE_DOUBLE, DENSE_DOUBLE };
00503     int atmCompNDoFs = disc_orders[0] * disc_orders[0], ocnCompNDoFs = 1 /*FV*/;
00504 
00505     const char* bottomTempField          = "a2oTbot";
00506     const char* bottomTempProjectedField = "a2oTbot_proj";
00507     // Define more fields
00508     const char* bottomUVelField          = "a2oUbot";
00509     const char* bottomUVelProjectedField = "a2oUbot_proj";
00510     const char* bottomVVelField          = "a2oVbot";
00511     const char* bottomVVelProjectedField = "a2oVbot_proj";
00512 
00513     if( couComm != MPI_COMM_NULL )
00514     {
00515         ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomTempField, &tagTypes[0], &atmCompNDoFs, &tagIndex[0] );
00516         CHECKIERR( ierr, "failed to define the field tag a2oTbot" );
00517 #ifdef ENABLE_ATMOCN_COUPLING
00518 
00519         ierr = iMOAB_DefineTagStorage( cplOcnPID, bottomTempProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1] );
00520         CHECKIERR( ierr, "failed to define the field tag a2oTbot_proj" );
00521 #endif
00522 
00523         ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomUVelField, &tagTypes[0], &atmCompNDoFs, &tagIndex[0] );
00524         CHECKIERR( ierr, "failed to define the field tag a2oUbot" );
00525 #ifdef ENABLE_ATMOCN_COUPLING
00526 
00527         ierr = iMOAB_DefineTagStorage( cplOcnPID, bottomUVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1] );
00528         CHECKIERR( ierr, "failed to define the field tag a2oUbot_proj" );
00529 #endif
00530 
00531         ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomVVelField, &tagTypes[0], &atmCompNDoFs, &tagIndex[0] );
00532         CHECKIERR( ierr, "failed to define the field tag a2oUbot" );
00533 #ifdef ENABLE_ATMOCN_COUPLING
00534         ierr = iMOAB_DefineTagStorage( cplOcnPID, bottomVVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1] );
00535         CHECKIERR( ierr, "failed to define the field tag a2oUbot_proj" );
00536 #endif
00537     }
00538 
00539     // need to make sure that the coverage mesh (created during intx method) received the tag that
00540     // need to be projected to target so far, the coverage mesh has only the ids and global dofs;
00541     // need to change the migrate method to accommodate any GLL tag
00542     // now send a tag from original atmosphere (cmpAtmPID) towards migrated coverage mesh
00543     // (cplAtmPID), using the new coverage graph communicator
00544 
00545     // make the tag 0, to check we are actually sending needed data
00546     {
00547         if( cplAtmAppID >= 0 )
00548         {
00549             int nverts[3], nelem[3], nblocks[3], nsbc[3], ndbc[3];
00550             /*
00551              * Each process in the communicator will have access to a local mesh instance, which
00552              * will contain the original cells in the local partition and ghost entities. Number of
00553              * vertices, primary cells, visible blocks, number of sidesets and nodesets boundary
00554              * conditions will be returned in numProcesses 3 arrays, for local, ghost and total
00555              * numbers.
00556              */
00557             ierr = iMOAB_GetMeshInfo( cplAtmPID, nverts, nelem, nblocks, nsbc, ndbc );
00558             CHECKIERR( ierr, "failed to get num primary elems" );
00559             int numAllElem = nelem[2];
00560             std::vector< double > vals;
00561             int storLeng = atmCompNDoFs * numAllElem;
00562             int eetype   = 1;
00563 
00564             vals.resize( storLeng );
00565             for( int k = 0; k < storLeng; k++ )
00566                 vals[k] = 0.;
00567 
00568             ierr = iMOAB_SetDoubleTagStorage( cplAtmPID, bottomTempField, &storLeng, &eetype, &vals[0] );
00569             CHECKIERR( ierr, "cannot make tag nul" )
00570             ierr = iMOAB_SetDoubleTagStorage( cplAtmPID, bottomUVelField, &storLeng, &eetype, &vals[0] );
00571             CHECKIERR( ierr, "cannot make tag nul" )
00572             ierr = iMOAB_SetDoubleTagStorage( cplAtmPID, bottomVVelField, &storLeng, &eetype, &vals[0] );
00573             CHECKIERR( ierr, "cannot make tag nul" )
00574             // set the tag to 0
00575         }
00576     }
00577 
00578     const char* concat_fieldname  = "a2oTbot;a2oUbot;a2oVbot;";
00579     const char* concat_fieldnameT = "a2oTbot_proj;a2oUbot_proj;a2oVbot_proj;";
00580 
00581     // start a virtual loop for number of iterations
00582     for( int iters = 0; iters < n; iters++ )
00583     {
00584 #ifdef ENABLE_ATMOCN_COUPLING
00585         PUSH_TIMER( "Send/receive data from atm component to coupler in ocn context" )
00586         if( atmComm != MPI_COMM_NULL )
00587         {
00588             // as always, use nonblocking sends
00589             // this is for projection to ocean:
00590             ierr = iMOAB_SendElementTag( cmpAtmPID, "a2oTbot;a2oUbot;a2oVbot;", &atmCouComm, &cplocn );
00591             CHECKIERR( ierr, "cannot send tag values" )
00592 #ifdef GRAPH_INFO
00593             int is_sender = 1;
00594             int context   = cplocn;
00595             iMOAB_DumpCommGraph( cmpAtmPID, &context, &is_sender, "AtmCovOcnS" );
00596 #endif
00597         }
00598         if( couComm != MPI_COMM_NULL )
00599         {
00600             // receive on atm on coupler pes, that was redistributed according to coverage
00601             ierr = iMOAB_ReceiveElementTag( cplAtmPID, "a2oTbot;a2oUbot;a2oVbot;", &atmCouComm, &cplocn );
00602             CHECKIERR( ierr, "cannot receive tag values" )
00603 #ifdef GRAPH_INFO
00604             int is_sender = 0;
00605             int context   = cplocn;  // the same context, cplocn
00606             iMOAB_DumpCommGraph( cmpAtmPID, &context, &is_sender, "AtmCovOcnR" );
00607 #endif
00608         }
00609         POP_TIMER( MPI_COMM_WORLD, rankInGlobalComm )
00610 
00611         // we can now free the sender buffers (the nonblocking sends above have been matched)
00612         if( atmComm != MPI_COMM_NULL )
00613         {
00614             ierr = iMOAB_FreeSenderBuffers( cmpAtmPID, &cplocn );  // context is for ocean
00615             CHECKIERR( ierr, "cannot free buffers used to resend atm tag towards the coverage mesh" )
00616         }
00617 #ifdef VERBOSE
00618         if( couComm != MPI_COMM_NULL && 1 == n )
00619         {
00620             // write only for n==1 case
00621             char outputFileRecvd[] = "recvAtmCoupOcn.h5m";
00622             ierr                   = iMOAB_WriteMesh( cplAtmPID, outputFileRecvd, fileWriteOptions );
00623             CHECKIERR( ierr, "could not write recvAtmCoupOcn.h5m to disk" )
00624         }
00625 #endif
00626 
00627         if( couComm != MPI_COMM_NULL )
00628         {
00629             /* We have the remapping weights now. Let us apply the weights onto the tag we defined
00630                on the source mesh and get the projection on the target mesh */
00631             PUSH_TIMER( "Apply Scalar projection weights" )
00632             ierr = iMOAB_ApplyScalarProjectionWeights( cplAtmOcnPID, weights_identifiers[0].c_str(), concat_fieldname,
00633                                                        concat_fieldnameT );
00634             CHECKIERR( ierr, "failed to compute projection weight application" );
00635             POP_TIMER( couComm, rankInCouComm )
00636             if( 1 == n )  // write only for n==1 case
00637             {
00638                 char outputFileTgt[] = "fOcnOnCpl.h5m";
00639                 ierr                 = iMOAB_WriteMesh( cplOcnPID, outputFileTgt, fileWriteOptions );
00640                 CHECKIERR( ierr, "could not write fOcnOnCpl.h5m to disk" )
00641             }
00642         }
00643 
00644         // send the projected tag back to ocean pes, with send/receive tag
00645         if( ocnComm != MPI_COMM_NULL )
00646         {
00647             int tagIndexIn2;  // index is overwritten per tag; only the define side effect matters here
00648             ierr = iMOAB_DefineTagStorage( cmpOcnPID, bottomTempProjectedField, &tagTypes[1], &ocnCompNDoFs,
00649                                            &tagIndexIn2 );
00650             CHECKIERR( ierr, "failed to define the field tag for receiving back the tag "
00651                              "a2oTbot_proj on ocn pes" );
00652             ierr = iMOAB_DefineTagStorage( cmpOcnPID, bottomUVelProjectedField, &tagTypes[1], &ocnCompNDoFs,
00653                                            &tagIndexIn2 );
00654             CHECKIERR( ierr, "failed to define the field tag for receiving back the tag "
00655                              "a2oUbot_proj on ocn pes" );
00656             ierr = iMOAB_DefineTagStorage( cmpOcnPID, bottomVVelProjectedField, &tagTypes[1], &ocnCompNDoFs,
00657                                            &tagIndexIn2 );
00658             CHECKIERR( ierr, "failed to define the field tag for receiving back the tag "
00659                              "a2oVbot_proj on ocn pes" );
00660         }
00661         // send the tag to ocean pes, from ocean mesh on coupler pes
00662         //   from couComm, using common joint comm ocn_coupler
00663         // as always, use nonblocking sends
00664         // original graph (context is -1)
00665         if( couComm != MPI_COMM_NULL )
00666         {
00667             // need to use ocean comp id for context
00668             context_id = cmpocn;  // id for ocean on comp
00669             ierr =
00670                 iMOAB_SendElementTag( cplOcnPID, "a2oTbot_proj;a2oUbot_proj;a2oVbot_proj;", &ocnCouComm, &context_id );
00671             CHECKIERR( ierr, "cannot send tag values back to ocean pes" )
00672         }
00673 
00674         // receive on component 2, ocean; note the context is the REMOTE side's id (coupler)
00675         if( ocnComm != MPI_COMM_NULL )
00676         {
00677             context_id = cplocn;  // id for ocean on coupler
00678             ierr       = iMOAB_ReceiveElementTag( cmpOcnPID, "a2oTbot_proj;a2oUbot_proj;a2oVbot_proj;", &ocnCouComm,
00679                                             &context_id );
00680             CHECKIERR( ierr, "cannot receive tag values from ocean mesh on coupler pes" )
00681         }
00682 
00683         MPI_Barrier( MPI_COMM_WORLD );
00684 
00685         if( couComm != MPI_COMM_NULL )
00686         {
00687             context_id = cmpocn;
00688             ierr       = iMOAB_FreeSenderBuffers( cplOcnPID, &context_id );
00689             CHECKIERR( ierr, "cannot free send/receive buffers for OCN context" )
00690         }
00691         if( ocnComm != MPI_COMM_NULL && 1 == n )  // write only for n==1 case
00692         {
00693             char outputFileOcn[] = "OcnWithProj.h5m";
00694             ierr                 = iMOAB_WriteMesh( cmpOcnPID, outputFileOcn, fileWriteOptions );
00695             CHECKIERR( ierr, "could not write OcnWithProj.h5m to disk" )
00696             // test results only for n == 1, for bottomTempProjectedField
00697             if( !no_regression_test )
00698             {
00699                 // the same as remap test
00700                 // get temp field on ocean, from conservative, the global ids, and dump to the baseline file
00701                 // first get GlobalIds from ocn, and fields:
00702                 int nverts[3], nelem[3];
00703                 ierr = iMOAB_GetMeshInfo( cmpOcnPID, nverts, nelem, 0, 0, 0 );
00704                 CHECKIERR( ierr, "failed to get ocn mesh info" );
00705                 std::vector< int > gidElems;
00706                 gidElems.resize( nelem[2] );  // nelem[2] is used below as the element count to query
00707                 std::vector< double > tempElems;
00708                 tempElems.resize( nelem[2] );
00709                 // get global id storage
00710                 const std::string GidStr = "GLOBAL_ID";  // hard-coded MOAB global-id tag name
00711                 int tag_type = DENSE_INTEGER, ncomp = 1, tagInd = 0;
00712                 ierr = iMOAB_DefineTagStorage( cmpOcnPID, GidStr.c_str(), &tag_type, &ncomp, &tagInd );
00713                 CHECKIERR( ierr, "failed to define global id tag" );
00714 
00715                 int ent_type = 1;  // 1 selects element-based tag storage (0 would be vertex-based)
00716                 ierr         = iMOAB_GetIntTagStorage( cmpOcnPID, GidStr.c_str(), &nelem[2], &ent_type, &gidElems[0] );
00717                 CHECKIERR( ierr, "failed to get global ids" );
00718                 ierr = iMOAB_GetDoubleTagStorage( cmpOcnPID, bottomTempProjectedField, &nelem[2], &ent_type,
00719                                                   &tempElems[0] );
00720                 CHECKIERR( ierr, "failed to get temperature field" );
00721                 int err_code = 1;  // set to 0 by check_baseline_file on success (see test below)
00722                 check_baseline_file( baseline, gidElems, tempElems, 1.e-9, err_code );
00723                 if( 0 == err_code )
00724                     std::cout << " passed baseline test atm2ocn on ocean task " << rankInOcnComm << "\n";
00725             }
00726         }
00727 #endif
00728 
00729 #ifdef ENABLE_ATMLND_COUPLING
00730         // start land proj: same pattern as the ocean path above, but in the land context (cpllnd)
00731         PUSH_TIMER( "Send/receive data from component atm to coupler, in land context" )
00732         if( atmComm != MPI_COMM_NULL )
00733         {
00734             // as always, use nonblocking sends
00735             // this is for projection to land:
00736             ierr = iMOAB_SendElementTag( cmpAtmPID, "a2oTbot;a2oUbot;a2oVbot;", &atmCouComm, &cpllnd );
00737             CHECKIERR( ierr, "cannot send tag values" )
00738         }
00739         if( couComm != MPI_COMM_NULL )
00740         {
00741             // receive on atm on coupler pes, that was redistributed according to coverage, for land
00742             // context
00743             ierr = iMOAB_ReceiveElementTag( cplAtmPID, "a2oTbot;a2oUbot;a2oVbot;", &atmCouComm, &cpllnd );
00744             CHECKIERR( ierr, "cannot receive tag values" )
00745         }
00746         POP_TIMER( MPI_COMM_WORLD, rankInGlobalComm )
00747 
00748         // we can now free the sender buffers
00749         if( atmComm != MPI_COMM_NULL )
00750         {
00751             ierr = iMOAB_FreeSenderBuffers( cmpAtmPID, &cpllnd );
00752             CHECKIERR( ierr, "cannot free buffers used to resend atm tag towards the coverage mesh "
00753                              "for land context" )
00754         }
00755 #ifdef VERBOSE
00756         if( couComm != MPI_COMM_NULL && 1 == n )
00757         {  // write only for n==1 case
00758             char outputFileRecvd[] = "recvAtmCoupLnd.h5m";
00759             ierr                   = iMOAB_WriteMesh( cplAtmPID, outputFileRecvd, fileWriteOptions );
00760             CHECKIERR( ierr, "could not write recvAtmCoupLnd.h5m to disk" )
00761         }
00762 #endif
00763 
00764         /* We have the remapping weights now. Let us apply the weights onto the tag we defined
00765            on the source mesh and get the projection on the target mesh */
00766         if( couComm != MPI_COMM_NULL )
00767         {
00768             PUSH_TIMER( "Apply Scalar projection weights for land" )
00769             ierr = iMOAB_ApplyScalarProjectionWeights( cplAtmLndPID, weights_identifiers[1].c_str(), concat_fieldname,
00770                                                        concat_fieldnameT );
00771             CHECKIERR( ierr, "failed to compute projection weight application" );
00772             POP_TIMER( couComm, rankInCouComm )
00773         }
00774 
00775 #ifdef VERBOSE
00776         if( couComm != MPI_COMM_NULL && 1 == n )
00777         {  // write only for n==1 case
00778             char outputFileTgtLnd[] = "fLndOnCpl.h5m";
00779             ierr                    = iMOAB_WriteMesh( cplLndPID, outputFileTgtLnd, fileWriteOptions );
00780             CHECKIERR( ierr, "could not write fLndOnCpl.h5m to disk" )
00781         }
00782 #endif
00783 
00784         // end land proj
00785         // send the tags back to land pes, from land mesh on coupler pes
00786         // send from cplLndPID to cmpLndPID, using common joint comm
00787         // as always, use nonblocking sends
00788         // original graph
00789         // int context_id = -1;
00790         // the land might not have these tags yet; it should be a different name for land
00791         // in e3sm we do have different names
00792         if( lndComm != MPI_COMM_NULL )
00793         {
00794             int tagIndexIn2;
00795             // NOTE(review): these land tag definitions reuse ocnCompNDoFs — confirm land DoF count matches ocean's
00796             ierr = iMOAB_DefineTagStorage( cmpLndPID, bottomTempProjectedField, &tagTypes[1], &ocnCompNDoFs,
00796                                            &tagIndexIn2 );
00797             CHECKIERR( ierr, "failed to define the field tag for receiving back the tag "
00798                              "a2oTbot_proj on lnd pes" );
00799             ierr = iMOAB_DefineTagStorage( cmpLndPID, bottomUVelProjectedField, &tagTypes[1], &ocnCompNDoFs,
00800                                            &tagIndexIn2 );
00801             CHECKIERR( ierr, "failed to define the field tag for receiving back the tag "
00802                              "a2oUbot_proj on lnd pes" );
00803             ierr = iMOAB_DefineTagStorage( cmpLndPID, bottomVVelProjectedField, &tagTypes[1], &ocnCompNDoFs,
00804                                            &tagIndexIn2 );
00805             CHECKIERR( ierr, "failed to define the field tag for receiving back the tag "
00806                              "a2oVbot_proj on lnd pes" );
00807         }
00808         if( couComm != MPI_COMM_NULL )
00809         {
00810             context_id = cmplnd;  // land comp id
00811             ierr =
00812                 iMOAB_SendElementTag( cplLndPID, "a2oTbot_proj;a2oUbot_proj;a2oVbot_proj;", &lndCouComm, &context_id );
00813             CHECKIERR( ierr, "cannot send tag values back to land pes" )
00814         }
00815         // receive on component 3, land; context is the remote (coupler) side's id
00816         if( lndComm != MPI_COMM_NULL )
00817         {
00818             context_id = cpllnd;  // land on coupler id
00819             ierr       = iMOAB_ReceiveElementTag( cmpLndPID, "a2oTbot_proj;a2oUbot_proj;a2oVbot_proj;", &lndCouComm,
00820                                             &context_id );
00821             CHECKIERR( ierr, "cannot receive tag values from land mesh on coupler pes" )
00822         }
00823 
00824         MPI_Barrier( MPI_COMM_WORLD );
00825         if( couComm != MPI_COMM_NULL )
00826         {
00827             context_id = cmplnd;
00828             ierr       = iMOAB_FreeSenderBuffers( cplLndPID, &context_id );
00829             CHECKIERR( ierr, "cannot free buffers used to resend land tag towards the coverage mesh "
00830                              "for atm context" )
00831         }
00832         if( lndComm != MPI_COMM_NULL && 1 == n )  // write only for n==1 case
00833         {
00834             char outputFileLnd[] = "LndWithProj.h5m";
00835             ierr                 = iMOAB_WriteMesh( cmpLndPID, outputFileLnd, fileWriteOptions );
00836             CHECKIERR( ierr, "could not write LndWithProj.h5m to disk" )
00837         }
00838 #endif  // ENABLE_ATMLND_COUPLING
00839 
00840     }  // end loop iterations n
00841 #ifdef ENABLE_ATMLND_COUPLING
00842     if( lndComm != MPI_COMM_NULL )
00843     {
00844         ierr = iMOAB_DeregisterApplication( cmpLndPID );
00845         CHECKIERR( ierr, "cannot deregister app LND1" )
00846     }
00847 #endif  // ENABLE_ATMLND_COUPLING
00848 
00849 #ifdef ENABLE_ATMOCN_COUPLING
00850     if( couComm != MPI_COMM_NULL )
00851     {
00852         ierr = iMOAB_DeregisterApplication( cplAtmOcnPID );
00853         CHECKIERR( ierr, "cannot deregister app intx AO" )
00854     }
00855     if( ocnComm != MPI_COMM_NULL )
00856     {
00857         ierr = iMOAB_DeregisterApplication( cmpOcnPID );
00858         CHECKIERR( ierr, "cannot deregister app OCN1" )
00859     }
00860 #endif  // ENABLE_ATMOCN_COUPLING
00861 
00862     if( atmComm != MPI_COMM_NULL )
00863     {
00864         ierr = iMOAB_DeregisterApplication( cmpAtmPID );
00865         CHECKIERR( ierr, "cannot deregister app ATM1" )
00866     }
00867 
00868 #ifdef ENABLE_ATMLND_COUPLING
00869     if( couComm != MPI_COMM_NULL )
00870     {
00871         ierr = iMOAB_DeregisterApplication( cplLndPID );
00872         CHECKIERR( ierr, "cannot deregister app LNDX" )
00873     }
00874 #endif  // ENABLE_ATMLND_COUPLING
00875 
00876 #ifdef ENABLE_ATMOCN_COUPLING
00877     if( couComm != MPI_COMM_NULL )
00878     {
00879         ierr = iMOAB_DeregisterApplication( cplOcnPID );
00880         CHECKIERR( ierr, "cannot deregister app OCNX" )
00881     }
00882 #endif  // ENABLE_ATMOCN_COUPLING
00883 
00884     if( couComm != MPI_COMM_NULL )
00885     {
00886         ierr = iMOAB_DeregisterApplication( cplAtmPID );
00887         CHECKIERR( ierr, "cannot deregister app ATMX" )
00888     }
00889 
00890     //#endif  // NOTE(review): stale leftover directive comment, safe to delete
00891     ierr = iMOAB_Finalize();  // must happen before MPI_Finalize below
00892     CHECKIERR( ierr, "did not finalize iMOAB" )
00893 
00894     // free atm coupler group and comm
00895     if( MPI_COMM_NULL != atmCouComm ) MPI_Comm_free( &atmCouComm );
00896     MPI_Group_free( &joinAtmCouGroup );
00897     if( MPI_COMM_NULL != atmComm ) MPI_Comm_free( &atmComm );
00898 
00899 #ifdef ENABLE_ATMOCN_COUPLING
00900     if( MPI_COMM_NULL != ocnComm ) MPI_Comm_free( &ocnComm );
00901     // free ocn - coupler group and comm
00902     if( MPI_COMM_NULL != ocnCouComm ) MPI_Comm_free( &ocnCouComm );
00903     MPI_Group_free( &joinOcnCouGroup );
00904 #endif
00905 
00906 #ifdef ENABLE_ATMLND_COUPLING
00907     if( MPI_COMM_NULL != lndComm ) MPI_Comm_free( &lndComm );
00908     // free land - coupler group and comm
00909     if( MPI_COMM_NULL != lndCouComm ) MPI_Comm_free( &lndCouComm );
00910     MPI_Group_free( &joinLndCouGroup );
00911 #endif
00912 
00913     if( MPI_COMM_NULL != couComm ) MPI_Comm_free( &couComm );
00914 
00915     MPI_Group_free( &atmPEGroup );
00916 #ifdef ENABLE_ATMOCN_COUPLING
00917     MPI_Group_free( &ocnPEGroup );
00918 #endif
00919 #ifdef ENABLE_ATMLND_COUPLING
00920     MPI_Group_free( &lndPEGroup );
00921 #endif
00922     MPI_Group_free( &couPEGroup );
00923     MPI_Group_free( &jgroup );
00924 
00925     MPI_Finalize();
00926 
00927     return 0;
00928 }
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Defines