MOAB: Mesh Oriented datABase (version 5.3.1)
imoab_apg2_ol_coupler.cpp
/**
 * imoab_apg2_ol_coupler.cpp
 *
 *  This imoab_apg2_ol_coupler test simulates coupling between 3 components.
 *  Meshes are loaded from 3 files (atm phys + atm pg2, ocean, and land);
 *  atm and ocn are migrated to the coupler pes, where the intersection between
 *  the migrated meshes is computed and weights are generated, so that a field
 *  from the phys atm can be transferred to the ocn. Currently, the phys atm
 *  sends some data to be projected onto the ocean.
 *  The land coupling is similar, except that land is also intersected with the atm pg2 mesh;
 *  land is defined by the domain mesh.
 *
 *  First, intersect atm and ocn, and recompute comm graph 1 between atm phys and atm_cx for the ocn intx;
 *  repeat the same for land. The atm/lnd coupling will use similar FV-FV maps; maybe later we will
 *  identify some equivalent to bilinear maps.
 */
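
// A possible invocation of this test, for illustration only (the option letters are the ones
// registered with ProgOptions below; the paths and task ranges are placeholders, not the
// defaults used in the code):
//
//   mpiexec -n 8 ./imoab_apg2_ol_coupler -t atm_pg2.h5m -q AtmPhys_pg2.h5m -m ocn.h5m -l lnd.h5m \
//           -a 0 -b 7 -c 0 -d 7 -e 0 -f 7 -g 0 -j 7 -p 2
//
// which runs every component and the coupler on all 8 tasks and selects the geometry
// partitioner (-p 2, available when MOAB is configured with Zoltan) for migration.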

#include "moab/Core.hpp"
#ifndef MOAB_HAVE_MPI
#error imoab coupler test requires MPI configuration
#endif

// MPI includes
#include "moab_mpi.h"
#include "moab/ParallelComm.hpp"
#include "MBParallelConventions.h"

#include "moab/iMOAB.h"
#include "TestUtil.hpp"
#include "moab/CpuTimer.hpp"
#include "moab/ProgOptions.hpp"
#include <iostream>
#include <sstream>

#include "imoab_coupler_utils.hpp"

using namespace moab;

// #define VERBOSE

#ifndef MOAB_HAVE_TEMPESTREMAP
#error The climate coupler test example requires MOAB configuration with TempestRemap
#endif

#define ENABLE_ATMOCN_COUPLING
// for land coupling with phys atm, we do not have to "compute" the intersection;
// it is enough to know that the ids are the same, so we can project from atm to land that way, using a computed graph
#define ENABLE_ATMLND_COUPLING

int main( int argc, char* argv[] )
{
    int ierr;
    int rankInGlobalComm, numProcesses;
    MPI_Group jgroup;
    std::string readopts( "PARALLEL=READ_PART;PARTITION=PARALLEL_PARTITION;PARALLEL_RESOLVE_SHARED_ENTS" );
    std::string readoptsPhysAtm( "PARALLEL=READ_PART;PARTITION=PARALLEL_PARTITION" );

    /*if (argc < 2)
      return 0; // no test; we will force naming the land domain file*/
    // Timer data
    moab::CpuTimer timer;
    double timer_ops;
    std::string opName;

    int repartitioner_scheme = 0;
#ifdef MOAB_HAVE_ZOLTAN
    repartitioner_scheme = 2;  // use the geometry partitioner in that case
#endif

    MPI_Init( &argc, &argv );
    MPI_Comm_rank( MPI_COMM_WORLD, &rankInGlobalComm );
    MPI_Comm_size( MPI_COMM_WORLD, &numProcesses );

    MPI_Comm_group( MPI_COMM_WORLD, &jgroup );  // all processes in jgroup

    std::string atmFilename =
        "../../sandbox/MeshFiles/e3sm/ne4pg2_o240/ne4pg2_p8.h5m";  // we should use only the mesh from here
    std::string atmPhysMesh =
        "../../sandbox/MeshFiles/e3sm/ne4pg2_o240/AtmPhys_pg2.h5m";  // it has some data associated to vertices: T_ph,
                                                                     // u_ph, v_ph
    // we will eventually project that data to the ocean mesh, after intx atm/ocn

    // in a regular case:  5 ATM, 6 CPLATM (ATMX), 17 OCN, 18 CPLOCN (OCNX);
    // intx atm/ocn is not in e3sm yet, so give it a number:
    //   6 * 100 + 18 = 618  : atmocnid
    //  18 * 100 + 6  = 1806 : ocnatmid, on coupler pes!
    // 9 LND, 10 CPLLND
    //   6 * 100 + 10 = 610  : atmlndid
    //  10 * 100 + 6  = 1006 : lndatmid, on coupler pes
    // cmpatm is for atm on atm pes
    // cmpocn is for ocean, on ocean pes
    // cplatm is for atm on coupler pes
    // cplocn is for ocean on coupler pes
    // atmocnid is for intx atm / ocn on coupler pes
    //
    int rankInAtmComm = -1;
    int cmpatm = 5, cplatm = 6;  // component ids are unique over all pes, and established in advance;
    int cmpPhysAtm = 105;        // different from atm spectral ?
#ifdef ENABLE_ATMOCN_COUPLING
    std::string ocnFilename = TestDir + "unittest/recMeshOcn.h5m";
    int rankInOcnComm       = -1;
    int cmpocn = 17, cplocn = 18, atmocnid = 618,
        ocnatmid = 1806;  // component ids are unique over all pes, and established in advance;
#endif
#ifdef ENABLE_ATMLND_COUPLING
    std::string lndFilename = "../../sandbox/MeshFiles/e3sm/ne4pg2_o240/land_p8.h5m";
    int rankInLndComm       = -1;
    int cpllnd = 10, cmplnd = 9, atmlndid = 610,
        lndatmid = 1006;  // component ids are unique over all pes, and established in advance;
#endif

    int rankInCouComm = -1;

    int nghlay = 0;  // number of ghost layers for loading the file
    std::vector< int > groupTasks;
    int startG1 = 0, startG2 = 0, endG1 = numProcesses - 1, endG2 = numProcesses - 1, startG3 = startG1, endG3 = endG1;
    int startG4 = startG1, endG4 = endG1;  // these are for the coupler layout
    int context_id = -1;                   // used now for freeing buffers

    // default: load atm on 2 procs, ocean on 2, land on 2; migrate to 2 procs, then compute intx
    // later, we need to compute the weight matrix with tempestremap

    ProgOptions opts;
    opts.addOpt< std::string >( "atmosphere,t", "atm mesh filename (source)", &atmFilename );
#ifdef ENABLE_ATMOCN_COUPLING
    opts.addOpt< std::string >( "ocean,m", "ocean mesh filename (target)", &ocnFilename );
#endif

#ifdef ENABLE_ATMLND_COUPLING
    opts.addOpt< std::string >( "land,l", "land mesh filename (target)", &lndFilename );
#endif

    opts.addOpt< std::string >( "physgrid,q", "physics grid file", &atmPhysMesh );

    opts.addOpt< int >( "startAtm,a", "start task for atmosphere layout", &startG1 );
    opts.addOpt< int >( "endAtm,b", "end task for atmosphere layout", &endG1 );
#ifdef ENABLE_ATMOCN_COUPLING
    opts.addOpt< int >( "startOcn,c", "start task for ocean layout", &startG2 );
    opts.addOpt< int >( "endOcn,d", "end task for ocean layout", &endG2 );
#endif

#ifdef ENABLE_ATMLND_COUPLING
    opts.addOpt< int >( "startLnd,e", "start task for land layout", &startG3 );
    opts.addOpt< int >( "endLnd,f", "end task for land layout", &endG3 );
#endif

    opts.addOpt< int >( "startCoupler,g", "start task for coupler layout", &startG4 );
    opts.addOpt< int >( "endCoupler,j", "end task for coupler layout", &endG4 );

    opts.addOpt< int >( "partitioning,p", "partitioning option for migration", &repartitioner_scheme );

    opts.parseCommandLine( argc, argv );

    char fileWriteOptions[] = "PARALLEL=WRITE_PART";

    if( !rankInGlobalComm )
    {
        std::cout << " atm file: " << atmFilename << "\n   on tasks : " << startG1 << ":" << endG1 <<
#ifdef ENABLE_ATMOCN_COUPLING
            "\n ocn file: " << ocnFilename << "\n     on tasks : " << startG2 << ":" << endG2 <<
#endif
#ifdef ENABLE_ATMLND_COUPLING
            "\n land file: " << lndFilename << "\n     on tasks : " << startG3 << ":" << endG3 <<
#endif

            "\n atm phys file: " << atmPhysMesh << "\n     on tasks : " << startG1 << ":" << endG1 <<

            "\n  partitioning (0 trivial, 1 graph, 2 geometry) " << repartitioner_scheme << "\n  ";
    }
    // load files on 3 different communicators/groups
    // the first group has task 0, the second group has tasks 0 and 1
    // the coupler will be on joint tasks, on a third group (0 and 1, again)
    MPI_Group atmPEGroup;
    MPI_Comm atmComm;
    ierr = create_group_and_comm( startG1, endG1, jgroup, &atmPEGroup, &atmComm );
    CHECKIERR( ierr, "Cannot create atm MPI group and communicator " )
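
    // create_group_and_comm is a helper from imoab_coupler_utils.hpp; a minimal sketch of what
    // such a helper is expected to do (illustrative, not necessarily the exact implementation):
    //
    //   int range[1][3] = { { start, end, 1 } };                     // tasks start..end, stride 1
    //   MPI_Group_range_incl( jgroup, 1, range, pgroup );            // subgroup of the world group
    //   MPI_Comm_create_group( MPI_COMM_WORLD, *pgroup, 0, pcomm );  // valid only on group members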

#ifdef ENABLE_ATMOCN_COUPLING
    MPI_Group ocnPEGroup;
    MPI_Comm ocnComm;
    ierr = create_group_and_comm( startG2, endG2, jgroup, &ocnPEGroup, &ocnComm );
    CHECKIERR( ierr, "Cannot create ocn MPI group and communicator " )
#endif

#ifdef ENABLE_ATMLND_COUPLING
    MPI_Group lndPEGroup;
    MPI_Comm lndComm;
    ierr = create_group_and_comm( startG3, endG3, jgroup, &lndPEGroup, &lndComm );
    CHECKIERR( ierr, "Cannot create lnd MPI group and communicator " )
#endif

    // we will always have a coupler
    MPI_Group couPEGroup;
    MPI_Comm couComm;
    ierr = create_group_and_comm( startG4, endG4, jgroup, &couPEGroup, &couComm );
    CHECKIERR( ierr, "Cannot create cpl MPI group and communicator " )

    // now, create the joint communicators atm_coupler, ocn_coupler, lnd_coupler
    // for each, we will have to create the group first, then the communicator

    // atm_coupler
    MPI_Group joinAtmCouGroup;
    MPI_Comm atmCouComm;
    ierr = create_joint_comm_group( atmPEGroup, couPEGroup, &joinAtmCouGroup, &atmCouComm );
    CHECKIERR( ierr, "Cannot create joint atm cou communicator" )
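
    // create_joint_comm_group (also from imoab_coupler_utils.hpp) presumably takes the union of
    // the two PE groups and builds a communicator over it; roughly (illustrative sketch):
    //
    //   MPI_Group_union( atmPEGroup, couPEGroup, &joinAtmCouGroup );
    //   MPI_Comm_create_group( MPI_COMM_WORLD, joinAtmCouGroup, 0, &atmCouComm );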

#ifdef ENABLE_ATMOCN_COUPLING
    // ocn_coupler
    MPI_Group joinOcnCouGroup;
    MPI_Comm ocnCouComm;
    ierr = create_joint_comm_group( ocnPEGroup, couPEGroup, &joinOcnCouGroup, &ocnCouComm );
    CHECKIERR( ierr, "Cannot create joint ocn cou communicator" )
#endif

#ifdef ENABLE_ATMLND_COUPLING
    // lnd_coupler
    MPI_Group joinLndCouGroup;
    MPI_Comm lndCouComm;
    ierr = create_joint_comm_group( lndPEGroup, couPEGroup, &joinLndCouGroup, &lndCouComm );
    CHECKIERR( ierr, "Cannot create joint lnd cou communicator" )
#endif

    ierr = iMOAB_Initialize( argc, argv );  // nothing from argc, argv is really needed yet; maybe it will be
    CHECKIERR( ierr, "Cannot initialize iMOAB" )

    int cmpAtmAppID       = -1;
    iMOAB_AppID cmpAtmPID = &cmpAtmAppID;  // atm
    int cplAtmAppID       = -1;            // -1 means it is not initialized
    iMOAB_AppID cplAtmPID = &cplAtmAppID;  // atm on coupler PEs
#ifdef ENABLE_ATMOCN_COUPLING
    int cmpOcnAppID       = -1;
    iMOAB_AppID cmpOcnPID = &cmpOcnAppID;                            // ocn
    int cplOcnAppID = -1, cplAtmOcnAppID = -1, cplOcnAtmAppID = -1;  // -1 means it is not initialized
    iMOAB_AppID cplOcnPID    = &cplOcnAppID;                         // ocn on coupler PEs
    iMOAB_AppID cplAtmOcnPID = &cplAtmOcnAppID;                      // intx atm - ocn on coupler PEs
    iMOAB_AppID cplOcnAtmPID = &cplOcnAtmAppID;                      // intx ocn - atm on coupler PEs

#endif

#ifdef ENABLE_ATMLND_COUPLING
    int cmpLndAppID       = -1;
    iMOAB_AppID cmpLndPID = &cmpLndAppID;                            // lnd
    int cplLndAppID = -1, cplAtmLndAppID = -1, cplLndAtmAppID = -1;  // -1 means it is not initialized
    iMOAB_AppID cplLndPID    = &cplLndAppID;                         // land on coupler PEs
    iMOAB_AppID cplAtmLndPID = &cplAtmLndAppID;  // intx atm - lnd on coupler PEs; will be similar to the ocn atm intx

    iMOAB_AppID cplLndAtmPID = &cplLndAtmAppID;
#endif

    int cmpPhysAtmID        = -1;
    iMOAB_AppID cmpPhAtmPID = &cmpPhysAtmID;  // phys atm; we do not need to move it to cpl!

    if( couComm != MPI_COMM_NULL )
    {
        MPI_Comm_rank( couComm, &rankInCouComm );
        // Register all the applications on the coupler PEs
        ierr = iMOAB_RegisterApplication( "ATMX", &couComm, &cplatm, cplAtmPID );  // atm on coupler pes
        CHECKIERR( ierr, "Cannot register ATM over coupler PEs" )
#ifdef ENABLE_ATMOCN_COUPLING
        ierr = iMOAB_RegisterApplication( "OCNX", &couComm, &cplocn, cplOcnPID );  // ocn on coupler pes
        CHECKIERR( ierr, "Cannot register OCN over coupler PEs" )
#endif

#ifdef ENABLE_ATMLND_COUPLING
        ierr = iMOAB_RegisterApplication( "LNDX", &couComm, &cpllnd, cplLndPID );  // lnd on coupler pes
        CHECKIERR( ierr, "Cannot register LND over coupler PEs" )
#endif
    }

    if( atmComm != MPI_COMM_NULL )
    {
        MPI_Comm_rank( atmComm, &rankInAtmComm );
        ierr = iMOAB_RegisterApplication( "ATM1", &atmComm, &cmpatm, cmpAtmPID );
        CHECKIERR( ierr, "Cannot register ATM App" )
    }

#ifdef ENABLE_ATMOCN_COUPLING
    if( ocnComm != MPI_COMM_NULL )
    {
        MPI_Comm_rank( ocnComm, &rankInOcnComm );
        ierr = iMOAB_RegisterApplication( "OCN1", &ocnComm, &cmpocn, cmpOcnPID );
        CHECKIERR( ierr, "Cannot register OCN App" )
    }
#endif

    // atm
    ierr =
        setup_component_coupler_meshes( cmpAtmPID, cmpatm, cplAtmPID, cplatm, &atmComm, &atmPEGroup, &couComm,
                                        &couPEGroup, &atmCouComm, atmFilename, readopts, nghlay, repartitioner_scheme );
    CHECKIERR( ierr, "Cannot load and migrate atm mesh" )
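
    // setup_component_coupler_meshes (from imoab_coupler_utils.hpp) bundles the recurring steps
    // used for every component: load the mesh file on the component communicator, then migrate
    // (send/receive) it over the joint communicator to the coupler PEs, repartitioned according
    // to repartitioner_scheme; this summarizes its role in this test rather than its exact
    // implementation.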

    MPI_Barrier( MPI_COMM_WORLD );

#ifdef ENABLE_ATMOCN_COUPLING
    // ocean
    ierr =
        setup_component_coupler_meshes( cmpOcnPID, cmpocn, cplOcnPID, cplocn, &ocnComm, &ocnPEGroup, &couComm,
                                        &couPEGroup, &ocnCouComm, ocnFilename, readopts, nghlay, repartitioner_scheme );
    CHECKIERR( ierr, "Cannot load and migrate ocn mesh" )

    if( couComm != MPI_COMM_NULL )
    {
        char outputFileTgt3[] = "recvOcn3.h5m";
        PUSH_TIMER( "Write migrated OCN mesh on coupler PEs" )
        ierr = iMOAB_WriteMesh( cplOcnPID, outputFileTgt3, fileWriteOptions );
        CHECKIERR( ierr, "cannot write ocn mesh after receiving" )
        POP_TIMER( couComm, rankInCouComm )
    }
#endif  // #ifdef ENABLE_ATMOCN_COUPLING

    MPI_Barrier( MPI_COMM_WORLD );
    // load the phys atm mesh, with some data on it already
    if( atmComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_RegisterApplication( "PhysAtm", &atmComm, &cmpPhysAtm, cmpPhAtmPID );
        CHECKIERR( ierr, "Cannot register Phys Atm App " )

        // load the next component mesh
        PUSH_TIMER( "Load Phys Atm mesh" )
        ierr = iMOAB_LoadMesh( cmpPhAtmPID, atmPhysMesh.c_str(), readoptsPhysAtm.c_str(), &nghlay );
        CHECKIERR( ierr, "Cannot load Atm Phys mesh on atm pes" )
        POP_TIMER( atmComm, rankInAtmComm )

        int nverts[3], nelem[3];
        ierr = iMOAB_GetMeshInfo( cmpPhAtmPID, nverts, nelem, 0, 0, 0 );
        CHECKIERR( ierr, "failed to get mesh info" );
        printf( "Phys Atm Component Mesh: %d vertices and %d elements\n", nverts[0], nelem[0] );
    }

    MPI_Barrier( MPI_COMM_WORLD );

#ifdef ENABLE_ATMLND_COUPLING
    // land
    if( lndComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_RegisterApplication( "LND1", &lndComm, &cmplnd, cmpLndPID );
        CHECKIERR( ierr, "Cannot register LND App " )
    }
    ierr = setup_component_coupler_meshes( cmpLndPID, cmplnd, cplLndPID, cpllnd, &lndComm, &lndPEGroup, &couComm,
                                           &couPEGroup, &lndCouComm, lndFilename, readoptsPhysAtm, nghlay,
                                           repartitioner_scheme );
    CHECKIERR( ierr, "Cannot load and migrate lnd mesh" )

    if( couComm != MPI_COMM_NULL )
    {  // write only for n==1 case
        char outputFileLnd[] = "recvLnd.h5m";
        ierr                 = iMOAB_WriteMesh( cplLndPID, outputFileLnd, fileWriteOptions );
        CHECKIERR( ierr, "cannot write lnd mesh after receiving" )
    }

#endif  // #ifdef ENABLE_ATMLND_COUPLING

#ifdef ENABLE_ATMOCN_COUPLING
    if( couComm != MPI_COMM_NULL )
    {
        // register the intersection applications on coupler PEs, for atm-ocn and ocn-atm
        ierr = iMOAB_RegisterApplication( "ATMOCN", &couComm, &atmocnid, cplAtmOcnPID );
        CHECKIERR( ierr, "Cannot register atm_ocn intx over coupler pes " )

        ierr = iMOAB_RegisterApplication( "OCNATM", &couComm, &ocnatmid, cplOcnAtmPID );
        CHECKIERR( ierr, "Cannot register ocn_atm intx over coupler pes " )
    }
#endif

#ifdef ENABLE_ATMLND_COUPLING
    if( couComm != MPI_COMM_NULL )
    {
        // register the intersection between ATMx and LNDx on coupler PEs
        ierr = iMOAB_RegisterApplication( "ATMLND", &couComm, &atmlndid, cplAtmLndPID );
        CHECKIERR( ierr, "Cannot register atm_lnd intx over coupler pes " )
        // and the intersection between LNDx and ATMx on coupler PEs
        ierr = iMOAB_RegisterApplication( "LNDATM", &couComm, &lndatmid, cplLndAtmPID );
        CHECKIERR( ierr, "Cannot register lnd_atm intx over coupler pes " )
    }
#endif

    const char* weights_identifiers[2] = { "scalar", "scalar-pc" };
    int disc_orders[3]                 = { 4, 1, 1 };
    const char* disc_methods[3]        = { "cgll", "fv", "pcloud" };
    const char* dof_tag_names[3]       = { "GLOBAL_DOFS", "GLOBAL_ID", "GLOBAL_ID" };
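
    // the three discretization slots pair up as: { "cgll", order 4, "GLOBAL_DOFS" } for a
    // spectral-element mesh, { "fv", order 1, "GLOBAL_ID" } for finite-volume meshes, and
    // { "pcloud", order 1, "GLOBAL_ID" } for point clouds; this pg2 test only uses the FV
    // entries (index 1) in the weight computations below.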
#ifdef ENABLE_ATMOCN_COUPLING
    if( couComm != MPI_COMM_NULL )
    {
        PUSH_TIMER( "Compute ATM-OCN mesh intersection" )
        ierr = iMOAB_ComputeMeshIntersectionOnSphere(
            cplAtmPID, cplOcnPID, cplAtmOcnPID );  // the coverage mesh was computed here, for cplAtmPID, atm on coupler pes
        // basically, atm was redistributed according to the target (ocean) partition, to "cover" the ocean partitions
        // check if intx is valid, write some h5m intx file
        CHECKIERR( ierr, "cannot compute intersection" )
        POP_TIMER( couComm, rankInCouComm )

        PUSH_TIMER( "Compute OCN-ATM mesh intersection" )
        ierr =
            iMOAB_ComputeMeshIntersectionOnSphere( cplOcnPID, cplAtmPID, cplOcnAtmPID );  // coverage mesh was computed
        CHECKIERR( ierr, "cannot compute intersection" )
        POP_TIMER( couComm, rankInCouComm )
    }

    if( atmCouComm != MPI_COMM_NULL )
    {
        // the new graph will be for sending data from the atm comp to the coverage mesh;
        // it involves the initial atm app (cmpAtmPID) and the migrated atm mesh on coupler pes (cplAtmPID);
        // results are in cplAtmOcnPID, the intx mesh; the remapper also has some info about the coverage mesh.
        // after this, the sending of tags from atm pes to coupler pes will use the new par comm graph, which has
        // more precise info about what to send for the ocean cover; every time, we will
        // use the element global id, which should uniquely identify the element
        PUSH_TIMER( "Compute OCN coverage graph for ATM mesh" )
        ierr = iMOAB_CoverageGraph( &atmCouComm, cmpAtmPID, cplAtmPID, cplAtmOcnPID, &cmpatm, &cplatm,
                                    &cplocn );  // it happens over the joint communicator
        CHECKIERR( ierr, "cannot recompute direct coverage graph for ocean" )
        POP_TIMER( atmCouComm, rankInAtmComm )  // hijack this rank
    }
    if( ocnCouComm != MPI_COMM_NULL )
    {
        // now for the second intersection, ocn-atm; we will be sending data from the ocean to the atm.
        // Can we reuse the intx atm-ocn? Not sure yet; we will compute everything again :(
        // the new graph will be for sending data from the ocn comp to the coverage mesh over the atm;
        // it involves the initial ocn app (cmpOcnPID) and the migrated ocn mesh on coupler pes (cplOcnPID);
        // results are in cplOcnAtmPID, the intx mesh; the remapper also has some info about the coverage mesh.
        // after this, the sending of tags from ocn pes to coupler pes will use the new par comm graph, which has
        // more precise info about what to send for the atm cover; every time, we will
        // use the element global id, which should uniquely identify the element
        PUSH_TIMER( "Compute ATM coverage graph for OCN mesh" )
        ierr = iMOAB_CoverageGraph( &ocnCouComm, cmpOcnPID, cplOcnPID, cplOcnAtmPID, &cmpocn, &cplocn,
                                    &cplatm );  // it happens over the joint communicator, ocean + coupler
        CHECKIERR( ierr, "cannot recompute direct coverage graph for atm" )
        POP_TIMER( ocnCouComm, rankInOcnComm )  // hijack this rank
    }

    // we need to compute the graph between the phys atm and the atm/ocn intx coverage
    if( atmCouComm != MPI_COMM_NULL )
    {
        int typeA = 2;  // point cloud, phys mesh
        int typeB = 3;  // cells of atmosphere, dof based; maybe we need another type for the ParCommGraph graphtype ?
        ierr = iMOAB_ComputeCommGraph( cmpPhAtmPID, cplAtmOcnPID, &atmCouComm, &atmPEGroup, &couPEGroup, &typeA, &typeB,
                                       &cmpatm, &atmocnid );
        CHECKIERR( ierr, "cannot compute graph between phys grid on atm and intx between FV atm and ocn" )
    }

    // also
    // we would need to compute the graph between the ocn/atm intx and the phys atm mesh
    /*if( atmCouComm != MPI_COMM_NULL )
    {
        int typeA = 3;  // cells of atmosphere, dof based; maybe need another type for ParCommGraph graphtype ?
        int typeB = 2;  // point cloud, phys mesh
        ierr = iMOAB_ComputeCommGraph( cplOcnAtmPID, cmpPhAtmPID, &atmCouComm, &couPEGroup, &atmPEGroup, &typeA, &typeB,
                                       &ocnatmid, &cmpatm );
    }*/
    // we need to compute the graph between the atm on the coupler and the phys atm mesh on the component
    if( atmCouComm != MPI_COMM_NULL )
    {
        int typeA = 2;  // point cloud, phys mesh
        int typeB = 3;  // cells of atmosphere, dof based; need another type for ParCommGraph graphtype ?
        ierr = iMOAB_ComputeCommGraph( cmpPhAtmPID, cplAtmPID, &atmCouComm, &atmPEGroup, &couPEGroup, &typeA, &typeB,
                                       &cmpatm, &cplatm );
        CHECKIERR( ierr, "cannot compute graph between phys grid on atm and FV atm on coupler" )
    }
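
    // this phys-grid -> atm-FV-on-coupler graph is reused later, in reverse, to send the
    // projected tags (T2_ph/u2_ph/v2_ph, and T3_ph/u3_ph/v3_ph for land) from the coupler
    // atm mesh back to the phys atm mesh on the component pes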
#endif

#ifdef ENABLE_ATMLND_COUPLING
    if( couComm != MPI_COMM_NULL )
    {
        PUSH_TIMER( "Compute ATM-LND mesh intersection" )
        ierr = iMOAB_ComputeMeshIntersectionOnSphere( cplAtmPID, cplLndPID, cplAtmLndPID );
        CHECKIERR( ierr, "failed to compute atm - land intx for mapping" );
        POP_TIMER( couComm, rankInCouComm )

        PUSH_TIMER( "Compute LND-ATM mesh intersection" )
        ierr =
            iMOAB_ComputeMeshIntersectionOnSphere( cplLndPID, cplAtmPID, cplLndAtmPID );  // coverage mesh was computed
        CHECKIERR( ierr, "cannot compute intersection" )
        POP_TIMER( couComm, rankInCouComm )
    }
    if( atmCouComm != MPI_COMM_NULL )
    {
        // the new graph will be for sending data from the atm comp to the coverage mesh for the land mesh;
        // it involves the initial atm app (cmpAtmPID) and the migrated atm mesh on coupler pes (cplAtmPID);
        // results are in cplAtmLndPID, the intx mesh; the remapper also has some info about the coverage mesh.
        // after this, the sending of tags from atm pes to coupler pes will use the new par comm graph, which has
        // more precise info about what to send (specifically for the land cover); every time,
        // we will use the element global id, which should uniquely identify the element
        PUSH_TIMER( "Compute LND coverage graph for ATM mesh" )
        ierr = iMOAB_CoverageGraph( &atmCouComm, cmpAtmPID, cplAtmPID, cplAtmLndPID, &cmpatm, &cplatm,
                                    &cpllnd );  // it happens over the joint communicator
        CHECKIERR( ierr, "cannot recompute direct coverage graph for land" )
        POP_TIMER( atmCouComm, rankInAtmComm )  // hijack this rank
    }

    // we will compute the comm graph between atm phys and land directly;
    // in the new TriGrid workflow, we do need the intersection between atm and land
    if( atmCouComm != MPI_COMM_NULL )
    {
        int typeA = 2;  // point cloud
        int typeB = 3;  // type 3 for land on coupler, based on global ids for land cells ?
        ierr = iMOAB_ComputeCommGraph( cmpPhAtmPID, cplAtmLndPID, &atmCouComm, &atmPEGroup, &couPEGroup, &typeA, &typeB,
                                       &cmpatm, &atmlndid );
        CHECKIERR( ierr, "cannot compute comm graph between atm and atm/lnd intersection" )
    }

    // for the reverse direction, lnd - atm intx:
    if( lndCouComm != MPI_COMM_NULL )
    {
        // now for the second intersection, lnd-atm; we will be sending data from the land to the atm.
        // Can we reuse the intx atm-lnd? Not sure yet; we will compute everything again :(
        // the new graph will be for sending data from the lnd comp to the coverage mesh over the atm;
        // it involves the initial lnd app (cmpLndPID) and the migrated lnd mesh on coupler pes (cplLndPID);
        // results are in cplLndAtmPID, the intx mesh; the remapper also has some info about the coverage mesh.
        // after this, the sending of tags from lnd pes to coupler pes will use the new par comm graph, which has
        // more precise info about what to send for the atm cover; every time, we will
        // use the element global id, which should uniquely identify the element
        PUSH_TIMER( "Compute ATM coverage graph for LND mesh" )
        ierr = iMOAB_CoverageGraph( &lndCouComm, cmpLndPID, cplLndPID, cplLndAtmPID, &cmplnd, &cpllnd,
                                    &cplatm );  // it happens over the joint communicator, land + coupler
        CHECKIERR( ierr, "cannot recompute direct coverage graph for atm for intx with land" )
        POP_TIMER( lndCouComm, rankInLndComm )
    }
#endif
    MPI_Barrier( MPI_COMM_WORLD );

    int fMonotoneTypeID = 0, fVolumetric = 0, fValidate = 1, fNoConserve = 0, fNoBubble = 1;
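
    // flags for the TempestRemap weight generation below; going by the flag names: fNoBubble=1
    // disables the bubble correction, fMonotoneTypeID=0 requests no monotonicity enforcement,
    // fVolumetric=0 skips volumetric projection, fNoConserve=0 keeps conservation enabled, and
    // fValidate=1 validates the generated map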

#ifdef ENABLE_ATMOCN_COUPLING
#ifdef VERBOSE
    if( couComm != MPI_COMM_NULL )
    {
        char serialWriteOptions[] = "";  // for writing in serial
        std::stringstream outf;
        outf << "intxAtmOcn_" << rankInCouComm << ".h5m";
        std::string intxfile = outf.str();  // write the intx file in serial, for debugging
        ierr                 = iMOAB_WriteMesh( cplAtmOcnPID, intxfile.c_str(), serialWriteOptions );
        CHECKIERR( ierr, "cannot write intx file result" )
    }
#endif

    if( couComm != MPI_COMM_NULL )
    {
        PUSH_TIMER( "Compute the projection weights with TempestRemap" )
        ierr = iMOAB_ComputeScalarProjectionWeights( cplAtmOcnPID, weights_identifiers[0], disc_methods[1],
                                                     &disc_orders[1],                   // fv
                                                     disc_methods[1], &disc_orders[1],  // fv
                                                     &fNoBubble, &fMonotoneTypeID, &fVolumetric, &fNoConserve,
                                                     &fValidate, dof_tag_names[1], dof_tag_names[1] );
        CHECKIERR( ierr, "cannot compute scalar projection weights" )
        POP_TIMER( couComm, rankInCouComm )
    }

    // now compute the weight maps for the ocn to atm mapping
    if( couComm != MPI_COMM_NULL )
    {
        PUSH_TIMER( "Compute the projection weights with TempestRemap for the ocn - atm map" )
        ierr = iMOAB_ComputeScalarProjectionWeights( cplOcnAtmPID, weights_identifiers[0], disc_methods[1],
                                                     &disc_orders[1],                   // fv
                                                     disc_methods[1], &disc_orders[1],  // fv
                                                     &fNoBubble, &fMonotoneTypeID, &fVolumetric, &fNoConserve,
                                                     &fValidate, dof_tag_names[1], dof_tag_names[1] );
        CHECKIERR( ierr, "cannot compute scalar projection weights" )
        POP_TIMER( couComm, rankInCouComm )
    }

#endif

    MPI_Barrier( MPI_COMM_WORLD );

    // do we even need to compute weights ? or can we just send tags between dofs ?

#ifdef ENABLE_ATMLND_COUPLING
    if( couComm != MPI_COMM_NULL )
    {
        // Compute the weights to project the solution from the ATM component to the LND component
        PUSH_TIMER( "Compute ATM-LND remapping weights" )
        ierr = iMOAB_ComputeScalarProjectionWeights( cplAtmLndPID, weights_identifiers[0], disc_methods[1],
                                                     &disc_orders[1], disc_methods[1], &disc_orders[1], &fNoBubble,
                                                     &fMonotoneTypeID, &fVolumetric, &fNoConserve, &fValidate,
                                                     dof_tag_names[1], dof_tag_names[1] );
        CHECKIERR( ierr, "failed to compute remapping projection weights for the ATM-LND scalar non-conservative field" );
        POP_TIMER( couComm, rankInCouComm )

        // Compute the weights to project the solution from the LND component to the ATM component
        PUSH_TIMER( "Compute LND-ATM remapping weights" )
        ierr = iMOAB_ComputeScalarProjectionWeights( cplLndAtmPID, weights_identifiers[0], disc_methods[1],
                                                     &disc_orders[1], disc_methods[1], &disc_orders[1], &fNoBubble,
                                                     &fMonotoneTypeID, &fVolumetric, &fNoConserve, &fValidate,
                                                     dof_tag_names[1], dof_tag_names[1] );
        CHECKIERR( ierr, "failed to compute remapping projection weights for the LND-ATM scalar non-conservative field" );
        POP_TIMER( couComm, rankInCouComm )
    }
#endif

    int tagIndex[2];
    int tagTypes[2]  = { DENSE_DOUBLE, DENSE_DOUBLE };
    int atmCompNDoFs = 1 /* 1 for FV; would be disc_orders[0]*disc_orders[0] for SE */, ocnCompNDoFs = 1 /*FV*/;

    const char* bottomTempField          = "T_ph";  // same as on the phys atm mesh
    const char* bottomTempProjectedField = "T_proj";
    // Define more fields
    const char* bottomUVelField          = "u_ph";
    const char* bottomUVelProjectedField = "u_proj";
    const char* bottomVVelField          = "v_ph";
    const char* bottomVVelProjectedField = "v_proj";

    // coming from ocn to atm, project back from T_proj
    const char* bottomTempField2 = "T2_ph";  // same as on the phys atm mesh
    const char* bottomUVelField2 = "u2_ph";
    const char* bottomVVelField2 = "v2_ph";

    // coming from lnd to atm, project back from T_proj
    const char* bottomTempField3 = "T3_ph";  // same as on the phys atm mesh
    const char* bottomUVelField3 = "u3_ph";
    const char* bottomVVelField3 = "v3_ph";

    // tags on the phys grid atm mesh
    const char* bottomTempPhProjectedField = "Tph_proj";
    const char* bottomUVelPhProjectedField = "uph_proj";
    const char* bottomVVelPhProjectedField = "vph_proj";

    // tags on the phys grid atm mesh
    const char* bottomTempPhLndProjectedField = "TphL_proj";  // L and Lnd signify the fields projected from land
    const char* bottomUVelPhLndProjectedField = "uphL_proj";
    const char* bottomVVelPhLndProjectedField = "vphL_proj";

    if( couComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomTempField, &tagTypes[0], &atmCompNDoFs, &tagIndex[0] );
        CHECKIERR( ierr, "failed to define the field tag T_ph" );
#ifdef ENABLE_ATMOCN_COUPLING

        ierr = iMOAB_DefineTagStorage( cplOcnPID, bottomTempProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1] );
        CHECKIERR( ierr, "failed to define the field tag T_proj" );
#endif

        ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomUVelField, &tagTypes[0], &atmCompNDoFs, &tagIndex[0] );
        CHECKIERR( ierr, "failed to define the field tag u_ph" );
#ifdef ENABLE_ATMOCN_COUPLING

        ierr = iMOAB_DefineTagStorage( cplOcnPID, bottomUVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1] );
        CHECKIERR( ierr, "failed to define the field tag u_proj" );
#endif

        ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomVVelField, &tagTypes[0], &atmCompNDoFs, &tagIndex[0] );
        CHECKIERR( ierr, "failed to define the field tag v_ph" );
#ifdef ENABLE_ATMOCN_COUPLING
        ierr = iMOAB_DefineTagStorage( cplOcnPID, bottomVVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1] );
        CHECKIERR( ierr, "failed to define the field tag v_proj" );
#endif

#ifdef ENABLE_ATMLND_COUPLING
        // we need to define tag storage for land; we will use the same T_proj, u_proj, v_proj names, because
        // they will be used to send between point clouds !
        // use the same ndof and same size as ocnCompNDoFs (1) !!
        ierr = iMOAB_DefineTagStorage( cplLndPID, bottomTempProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1] );
        CHECKIERR( ierr, "failed to define the field tag T_proj" );
        ierr = iMOAB_DefineTagStorage( cplLndPID, bottomUVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1] );
        CHECKIERR( ierr, "failed to define the field tag u_proj" );
        ierr = iMOAB_DefineTagStorage( cplLndPID, bottomVVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1] );
        CHECKIERR( ierr, "failed to define the field tag v_proj" );
#endif
    }
    // we need to make sure that the coverage mesh (created during the intx method) received the tags that
    // need to be projected to the target; so far, the coverage mesh has only the ids and global dofs, and
    // the migrate method would need changes to accommodate any GLL tag.
    // now send a tag from the original atmosphere (cmpAtmPID) towards the migrated coverage mesh
    // (cplAtmPID), using the new coverage graph communicator

    // make the tag 0, to check that we are actually sending needed data
    {
        if( cplAtmAppID >= 0 )
        {
            int nverts[3], nelem[3], nblocks[3], nsbc[3], ndbc[3];
            /*
             * Each process in the communicator will have access to a local mesh instance, which will contain the
             * original cells in the local partition and ghost entities. Numbers of vertices, primary cells, visible
             * blocks, sideset and nodeset boundary conditions will be returned in size-3 arrays, for local, ghost
             * and total numbers.
             */
            ierr = iMOAB_GetMeshInfo( cplAtmPID, nverts, nelem, nblocks, nsbc, ndbc );
            CHECKIERR( ierr, "failed to get num primary elems" );
            int numAllElem = nelem[2];
            std::vector< double > vals;
            int storLeng = atmCompNDoFs * numAllElem;
            vals.resize( storLeng );
            for( int k = 0; k < storLeng; k++ )
                vals[k] = 0.;
            int eetype = 1;
            ierr       = iMOAB_SetDoubleTagStorage( cplAtmPID, bottomTempField, &storLeng, &eetype, &vals[0] );
            CHECKIERR( ierr, "cannot make tag null" )
            ierr = iMOAB_SetDoubleTagStorage( cplAtmPID, bottomUVelField, &storLeng, &eetype, &vals[0] );
            CHECKIERR( ierr, "cannot make tag null" )
            ierr = iMOAB_SetDoubleTagStorage( cplAtmPID, bottomVVelField, &storLeng, &eetype, &vals[0] );
            CHECKIERR( ierr, "cannot make tag null" )
            // the tags are now set to 0
        }
    }

#ifdef ENABLE_ATMOCN_COUPLING
    PUSH_TIMER( "Send/receive data from atm component to coupler in ocn context" )
    if( atmComm != MPI_COMM_NULL )
    {
        // as always, use nonblocking sends
        // this is for the projection to the ocean:
        ierr = iMOAB_SendElementTag( cmpPhAtmPID, "T_ph;u_ph;v_ph;", &atmCouComm, &atmocnid );
        CHECKIERR( ierr, "cannot send tag values" )
    }
    if( couComm != MPI_COMM_NULL )
    {
        // receive on atm on coupler pes, which was redistributed according to coverage
        ierr = iMOAB_ReceiveElementTag( cplAtmOcnPID, "T_ph;u_ph;v_ph;", &atmCouComm, &cmpatm );
        CHECKIERR( ierr, "cannot receive tag values" )
    }
    POP_TIMER( MPI_COMM_WORLD, rankInGlobalComm )

    // we can now free the sender buffers
    if( atmComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_FreeSenderBuffers( cmpPhAtmPID, &atmocnid );  // the context is for the ocean
        CHECKIERR( ierr, "cannot free buffers used to resend atm tag towards the coverage mesh" )
    }
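
    // every tag transfer in this test follows the same three-phase pattern: nonblocking sends
    // are posted from the side that owns the data (iMOAB_SendElementTag), the matching receives
    // are posted on the other side (iMOAB_ReceiveElementTag), and finally the send buffers are
    // released (iMOAB_FreeSenderBuffers) with the same context id that was used for the send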
    /*
    #ifdef VERBOSE
      if (couComm != MPI_COMM_NULL) {
        char outputFileRecvd[] = "recvAtmCoupOcn.h5m";
        ierr = iMOAB_WriteMesh(cplAtmPID, outputFileRecvd, fileWriteOptions );
      }
    #endif
    */

    if( couComm != MPI_COMM_NULL )
    {
        const char* concat_fieldname  = "T_ph;u_ph;v_ph;";
        const char* concat_fieldnameT = "T_proj;u_proj;v_proj;";

        /* We have the remapping weights now. Let us apply the weights onto the tag we defined
           on the source mesh and get the projection on the target mesh */
        PUSH_TIMER( "Apply Scalar projection weights" )
        ierr = iMOAB_ApplyScalarProjectionWeights( cplAtmOcnPID, weights_identifiers[0], concat_fieldname,
                                                   concat_fieldnameT );
        CHECKIERR( ierr, "failed to compute projection weight application" );
        POP_TIMER( couComm, rankInCouComm )

        char outputFileTgt[] = "fOcnOnCpl3.h5m";
        ierr                 = iMOAB_WriteMesh( cplOcnPID, outputFileTgt, fileWriteOptions );
        CHECKIERR( ierr, "failed to write fOcnOnCpl3.h5m " );
    }
    // send the projected tag back to the ocean pes, with send/receive tag
    if( ocnComm != MPI_COMM_NULL )
    {
        int tagIndexIn2;
        ierr = iMOAB_DefineTagStorage( cmpOcnPID, bottomTempProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2 );
        CHECKIERR( ierr, "failed to define the field tag for receiving back the tag T_proj on ocn pes" );
        ierr = iMOAB_DefineTagStorage( cmpOcnPID, bottomUVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2 );
        CHECKIERR( ierr, "failed to define the field tag for receiving back the tag u_proj on ocn pes" );
        ierr = iMOAB_DefineTagStorage( cmpOcnPID, bottomVVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2 );
        CHECKIERR( ierr, "failed to define the field tag for receiving back the tag v_proj on ocn pes" );
    }
    // send the tag to the ocean pes, from the ocean mesh on coupler pes
    //   from couComm, using the common joint comm ocn_coupler
    // as always, use nonblocking sends
    // original graph (context is -1)
    if( couComm != MPI_COMM_NULL )
    {
        context_id = cmpocn;
        ierr       = iMOAB_SendElementTag( cplOcnPID, "T_proj;u_proj;v_proj;", &ocnCouComm, &context_id );
        CHECKIERR( ierr, "cannot send tag values back to ocean pes" )
    }

    // receive on component 2, the ocean
    if( ocnComm != MPI_COMM_NULL )
    {
        context_id = cplocn;
        ierr       = iMOAB_ReceiveElementTag( cmpOcnPID, "T_proj;u_proj;v_proj;", &ocnCouComm, &context_id );
        CHECKIERR( ierr, "cannot receive tag values from ocean mesh on coupler pes" )
    }

    MPI_Barrier( MPI_COMM_WORLD );

    if( couComm != MPI_COMM_NULL )
    {
        context_id = cmpocn;
        ierr       = iMOAB_FreeSenderBuffers( cplOcnPID, &context_id );
        CHECKIERR( ierr, "cannot free buffers related to send tag" )
    }
    if( ocnComm != MPI_COMM_NULL )
    {
        char outputFileOcn[] = "OcnWithProj3.h5m";
        ierr                 = iMOAB_WriteMesh( cmpOcnPID, outputFileOcn, fileWriteOptions );
        CHECKIERR( ierr, "cannot write OcnWithProj3.h5m" )
    }
#endif

    MPI_Barrier( MPI_COMM_WORLD );

#ifdef ENABLE_ATMLND_COUPLING
    // start land proj:

    // we used this to compute the graph:
    //  ierr = iMOAB_ComputeCommGraph(cmpPhAtmPID, cplAtmLndPID, &atmCouComm, &atmPEGroup, &couPEGroup,
    //     &typeA, &typeB, &cmpatm, &atmlndid);

    // end copy
    PUSH_TIMER( "Send/receive data from phys comp atm to coupler land, using computed graph" )
    if( atmComm != MPI_COMM_NULL )
    {
        // as always, use nonblocking sends
        // this is for the projection to the land:
        ierr = iMOAB_SendElementTag( cmpPhAtmPID, "T_ph;u_ph;v_ph;", &atmCouComm, &atmlndid );
        CHECKIERR( ierr, "cannot send tag values towards cpl on land" )
    }
    if( couComm != MPI_COMM_NULL )
    {
        // receive on lnd on coupler pes
        ierr = iMOAB_ReceiveElementTag( cplAtmLndPID, "T_ph;u_ph;v_ph;", &atmCouComm, &cmpatm );
        CHECKIERR( ierr, "cannot receive tag values on land on coupler, for atm coupling" )
    }
    POP_TIMER( MPI_COMM_WORLD, rankInGlobalComm )

    // we can now free the sender buffers
    if( atmComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_FreeSenderBuffers( cmpPhAtmPID, &atmlndid );
        CHECKIERR( ierr, "cannot free buffers used to resend atm tag towards the land on coupler" )
    }

    if( couComm != MPI_COMM_NULL )
    {
        const char* concat_fieldname  = "T_ph;u_ph;v_ph;";
        const char* concat_fieldnameT = "T_proj;u_proj;v_proj;";

        /* We have the remapping weights now. Let us apply the weights onto the tag we defined
           on the source mesh and get the projection on the target mesh */
        PUSH_TIMER( "Apply Scalar projection weights" )
        ierr = iMOAB_ApplyScalarProjectionWeights( cplAtmLndPID, weights_identifiers[0], concat_fieldname,
                                                   concat_fieldnameT );
        CHECKIERR( ierr, "failed to compute projection weight application" );
        POP_TIMER( couComm, rankInCouComm )

        char outputFileTgt[] = "fLndOnCpl3.h5m";
        ierr                 = iMOAB_WriteMesh( cplLndPID, outputFileTgt, fileWriteOptions );
        CHECKIERR( ierr, "cannot write land on coupler" )
    }

    // end land proj
    // send the tags back to the land pes, from the land mesh on coupler pes
    // send from cplLndPID to cmpLndPID, using the common joint comm
    // as always, use nonblocking sends
    // original graph
    // int context_id = -1;
    // the land might not have these tags yet; it should be a different name for land
    // in e3sm we do have different names
    if( lndComm != MPI_COMM_NULL )
    {
        int tagIndexIn2;
        ierr = iMOAB_DefineTagStorage( cmpLndPID, bottomTempProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2 );
        CHECKIERR( ierr, "failed to define the field tag for receiving back the tag T_proj on lnd pes" );
        ierr = iMOAB_DefineTagStorage( cmpLndPID, bottomUVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2 );
        CHECKIERR( ierr, "failed to define the field tag for receiving back the tag u_proj on lnd pes" );
        ierr = iMOAB_DefineTagStorage( cmpLndPID, bottomVVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2 );
        CHECKIERR( ierr, "failed to define the field tag for receiving back the tag v_proj on lnd pes" );
    }
    if( couComm != MPI_COMM_NULL )
    {
        context_id = cmplnd;
        ierr       = iMOAB_SendElementTag( cplLndPID, "T_proj;u_proj;v_proj;", &lndCouComm, &context_id );
        CHECKIERR( ierr, "cannot send tag values back to land pes" )
    }
    // receive on component 3, land
    if( lndComm != MPI_COMM_NULL )
    {
        context_id = cpllnd;
        ierr       = iMOAB_ReceiveElementTag( cmpLndPID, "T_proj;u_proj;v_proj;", &lndCouComm, &context_id );
        CHECKIERR( ierr, "cannot receive tag values from land mesh on coupler pes" )
    }

    MPI_Barrier( MPI_COMM_WORLD );
    if( couComm != MPI_COMM_NULL )
    {
        context_id = cmplnd;
        ierr       = iMOAB_FreeSenderBuffers( cplLndPID, &context_id );
        CHECKIERR( ierr, "cannot free buffers related to sending tags from coupler to land pes" )
    }
    if( lndComm != MPI_COMM_NULL )
    {
        char outputFileLnd[] = "LndWithProj3.h5m";
        ierr                 = iMOAB_WriteMesh( cmpLndPID, outputFileLnd, fileWriteOptions );
        CHECKIERR( ierr, "cannot write land file with projection" )
    }

    // we have received on the ocean component some data projected from the atm, on the coupler

    // the path was T_ph (atm phys) towards the atm on the FV mesh; we projected, then we sent back to the ocean comp
// start copy of the ocn atm coupling; go in the reverse direction now !!
#ifdef ENABLE_ATMOCN_COUPLING
    PUSH_TIMER( "Send/receive data from ocn component to coupler in atm context" )
    if( ocnComm != MPI_COMM_NULL )
    {
        // as always, use nonblocking sends
        // this is for the projection to the atm:
        ierr = iMOAB_SendElementTag( cmpOcnPID, "T_proj;u_proj;v_proj;", &ocnCouComm, &cplatm );
        CHECKIERR( ierr, "cannot send tag values T_proj, etc towards ocn coupler" )
    }
    if( couComm != MPI_COMM_NULL )
    {
        // receive on ocn on coupler pes, which was redistributed according to coverage
        ierr = iMOAB_ReceiveElementTag( cplOcnPID, "T_proj;u_proj;v_proj;", &ocnCouComm, &cplatm );
        CHECKIERR( ierr, "cannot receive tag values" )
    }
    POP_TIMER( MPI_COMM_WORLD, rankInGlobalComm )

    // we can now free the sender buffers
    if( ocnComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_FreeSenderBuffers( cmpOcnPID, &cplatm );  // the context is for the atm
        CHECKIERR( ierr, "cannot free buffers used to send ocn tag towards the coverage mesh for atm" )
    }
    //#ifdef VERBOSE
    if( couComm != MPI_COMM_NULL )
    {
        // write only for n==1 case
        char outputFileRecvd[] = "recvOcnCpl.h5m";
        ierr                   = iMOAB_WriteMesh( cplOcnPID, outputFileRecvd, fileWriteOptions );
        CHECKIERR( ierr, "could not write recvOcnCpl.h5m to disk" )
    }
    //#endif

    if( couComm != MPI_COMM_NULL )
    {
        /* We have the remapping weights now. Let us apply the weights onto the tag we defined
           on the source mesh and get the projection on the target mesh */
        PUSH_TIMER( "Apply Scalar projection weights" )
        const char* concat_fieldname  = "T_proj;u_proj;v_proj;";  // this is now the source tag
        const char* concat_fieldnameT = "T2_ph;u2_ph;v2_ph;";     // the projected tag
        // make sure the new tags exist on the atm coupler mesh;
        ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomTempField2, &tagTypes[0], &atmCompNDoFs, &tagIndex[0] );
        CHECKIERR( ierr, "failed to define the field tag T2_ph" );

        ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomUVelField2, &tagTypes[0], &atmCompNDoFs, &tagIndex[0] );
        CHECKIERR( ierr, "failed to define the field tag u2_ph" );

        ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomVVelField2, &tagTypes[0], &atmCompNDoFs, &tagIndex[0] );
        CHECKIERR( ierr, "failed to define the field tag v2_ph" );

        ierr = iMOAB_ApplyScalarProjectionWeights( cplOcnAtmPID, weights_identifiers[0], concat_fieldname,
                                                   concat_fieldnameT );
        CHECKIERR( ierr, "failed to compute projection weight application from ocn to atm " );
        POP_TIMER( couComm, rankInCouComm )

        char outputFileTgt[] = "fAtm2OnCpl2.h5m";
        ierr                 = iMOAB_WriteMesh( cplAtmPID, outputFileTgt, fileWriteOptions );
        CHECKIERR( ierr, "could not write fAtm2OnCpl2.h5m to disk" )
    }

    // send the projected tag back to the atm pes, with the send/receive partial par graph computed
    //  from the intx ocn/atm towards the atm physics mesh !
    if( atmComm != MPI_COMM_NULL )
    {
        int tagIndexIn2;
        ierr = iMOAB_DefineTagStorage( cmpPhAtmPID, bottomTempPhProjectedField, &tagTypes[1], &atmCompNDoFs,
                                       &tagIndexIn2 );
        CHECKIERR( ierr, "failed to define the field tag for receiving back the tag "
                         "Tph_proj on atm pes" );
        ierr = iMOAB_DefineTagStorage( cmpPhAtmPID, bottomUVelPhProjectedField, &tagTypes[1], &atmCompNDoFs,
                                       &tagIndexIn2 );
        CHECKIERR( ierr, "failed to define the field tag for receiving back the tag "
                         "uph_proj on atm pes" );
        ierr = iMOAB_DefineTagStorage( cmpPhAtmPID, bottomVVelPhProjectedField, &tagTypes[1], &atmCompNDoFs,
                                       &tagIndexIn2 );
        CHECKIERR( ierr, "failed to define the field tag for receiving back the tag "
                         "vph_proj on atm pes" );
    }
    // send the tag to the atm pes, from the atm pg2 mesh on coupler pes
    //   from couComm, using the common joint comm atm_coupler, and the computed graph, phys-grid -> atm FV on cpl,
    //   in reverse
    // as always, use nonblocking sends
    // the graph context comes from the commgraph computed earlier:

    //      ierr = iMOAB_ComputeCommGraph(  cmpPhAtmPID, cplAtmPID, &atmCouComm, &atmPEGroup, &couPEGroup, &typeA,
    //      &typeB,
    //  &cmpatm, &cplatm );

    if( couComm != MPI_COMM_NULL )
    {
        context_id = cmpatm;
        ierr       = iMOAB_SendElementTag( cplAtmPID, "T2_ph;u2_ph;v2_ph;", &atmCouComm, &context_id );
        CHECKIERR( ierr, "cannot send tag values back to atm pes" )
    }

    // receive on the component atm phys mesh
    if( atmComm != MPI_COMM_NULL )
    {
        context_id = cplatm;
        ierr       = iMOAB_ReceiveElementTag( cmpPhAtmPID, "Tph_proj;uph_proj;vph_proj;", &atmCouComm, &context_id );
        CHECKIERR( ierr, "cannot receive tag values from atm pg2 mesh on coupler pes" )
    }

    MPI_Barrier( MPI_COMM_WORLD );

    if( couComm != MPI_COMM_NULL )
    {
        context_id = cmpatm;
        ierr       = iMOAB_FreeSenderBuffers( cplAtmPID, &context_id );
        CHECKIERR( ierr, "cannot free buffers for sending T2_ph from cpl to phys atm" )
    }
    if( atmComm != MPI_COMM_NULL )  // write only for n==1 case
    {
        char outputFileAtmPh[] = "AtmPhysProj.h5m";
        ierr                   = iMOAB_WriteMesh( cmpPhAtmPID, outputFileAtmPh, fileWriteOptions );
        CHECKIERR( ierr, "could not write AtmPhysProj.h5m to disk" )
    }
#endif
    // end copy; needs more work for land
// start copy
// lnd atm coupling; go in the reverse direction now, from lnd to atm, using the lnd - atm intx and weight gen
// send back the T_proj, etc, from the lnd comp to the land coupler
#ifdef ENABLE_ATMLND_COUPLING
    PUSH_TIMER( "Send/receive data from lnd component to coupler in atm context" )
    if( lndComm != MPI_COMM_NULL )
    {
        // as always, use nonblocking sends
        ierr = iMOAB_SendElementTag( cmpLndPID, "T_proj;u_proj;v_proj;", &lndCouComm, &cplatm );
        CHECKIERR( ierr, "cannot send tag values T_proj, etc towards lnd coupler" )
    }
    if( couComm != MPI_COMM_NULL )
    {
        // receive on lnd on coupler pes, which was redistributed according to coverage
        ierr = iMOAB_ReceiveElementTag( cplLndPID, "T_proj;u_proj;v_proj;", &lndCouComm, &cplatm );
        CHECKIERR( ierr, "cannot receive tag values" )
    }
    POP_TIMER( MPI_COMM_WORLD, rankInGlobalComm )

    // we can now free the sender buffers
    if( lndComm != MPI_COMM_NULL )
    {
        ierr = iMOAB_FreeSenderBuffers( cmpLndPID, &cplatm );  // the context is for the atm
        CHECKIERR( ierr, "cannot free buffers used to send lnd tag towards the coverage mesh for atm" )
    }
    //#ifdef VERBOSE
    if( couComm != MPI_COMM_NULL )
    {
        char outputFileRecvd[] = "recvLndCpl.h5m";
        ierr                   = iMOAB_WriteMesh( cplLndPID, outputFileRecvd, fileWriteOptions );
        CHECKIERR( ierr, "could not write recvLndCpl.h5m to disk" )
    }
    //#endif
01042 
01043     if( couComm != MPI_COMM_NULL )
01044     {
01045         PUSH_TIMER( "Apply Scalar projection weights for lnd - atm coupling" )
01046         const char* concat_fieldname  = "T_proj;u_proj;v_proj;";  // this is now source tag, on land cpl
01047         const char* concat_fieldnameT = "T3_ph;u3_ph;v3_ph;";     // projected tag on
01048         // make sure the new tags exist on atm coupler mesh;
01049         ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomTempField3, &tagTypes[0], &atmCompNDoFs, &tagIndex[0] );
01050         CHECKIERR( ierr, "failed to define the field tag T3_ph" );
01051 
01052         ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomUVelField3, &tagTypes[0], &atmCompNDoFs, &tagIndex[0] );
01053         CHECKIERR( ierr, "failed to define the field tag u3_ph" );
01054 
01055         ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomVVelField3, &tagTypes[0], &atmCompNDoFs, &tagIndex[0] );
01056         CHECKIERR( ierr, "failed to define the field tag v3_ph" );
01057 
01058         ierr = iMOAB_ApplyScalarProjectionWeights( cplLndAtmPID, weights_identifiers[0], concat_fieldname,
01059                                                    concat_fieldnameT );
01060         CHECKIERR( ierr, "failed to compute projection weight application from lnd to atm " );
01061         POP_TIMER( couComm, rankInCouComm )
01062 
01063         char outputFileTgt[] = "fAtm3OnCpl.h5m";  // this is for T3_ph, etc
01064         ierr                 = iMOAB_WriteMesh( cplAtmPID, outputFileTgt, fileWriteOptions );
01065         CHECKIERR( ierr, "could not write fAtm3OnCpl.h5m to disk" )
01066     }
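    // iMOAB_ApplyScalarProjectionWeights fills existing target tags from existing source tags,
    // which is why the three iMOAB_DefineTagStorage calls above must precede it. A minimal
    // sketch of the define-then-apply pattern, with a hypothetical tag name "Q_example_proj",
    // guarded out of the build:
#if 0
    if( couComm != MPI_COMM_NULL )
    {
        int sketchTagIndex;
        // declare the target tag with the same type and component count as the source tag
        ierr = iMOAB_DefineTagStorage( cplAtmPID, "Q_example_proj", &tagTypes[0], &atmCompNDoFs, &sketchTagIndex );
        CHECKIERR( ierr, "failed to define example target tag" )
        // multiply the stored lnd -> atm map into the source tag, writing the target tag
        ierr = iMOAB_ApplyScalarProjectionWeights( cplLndAtmPID, weights_identifiers[0], "T_proj;", "Q_example_proj;" );
        CHECKIERR( ierr, "failed to apply weights onto the example tag" )
    }
#endif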
01067 
01068     // send the projected tags back to the atm pes, using the send/receive (partial) parallel
01069     // graph computed from the lnd/atm intersection, towards the atm physics mesh
01070     if( atmComm != MPI_COMM_NULL )
01071     {
01072         int tagIndexIn2;
01073         ierr = iMOAB_DefineTagStorage( cmpPhAtmPID, bottomTempPhLndProjectedField, &tagTypes[1], &atmCompNDoFs,
01074                                        &tagIndexIn2 );
01075         CHECKIERR( ierr, "failed to define the field tag for receiving back the tag "
01076                          "TphL_proj on atm pes" );
01077         ierr = iMOAB_DefineTagStorage( cmpPhAtmPID, bottomUVelPhLndProjectedField, &tagTypes[1], &atmCompNDoFs,
01078                                        &tagIndexIn2 );
01079         CHECKIERR( ierr, "failed to define the field tag for receiving back the tag "
01080                          "uphL_proj on atm pes" );
01081         ierr = iMOAB_DefineTagStorage( cmpPhAtmPID, bottomVVelPhLndProjectedField, &tagTypes[1], &atmCompNDoFs,
01082                                        &tagIndexIn2 );
01083         CHECKIERR( ierr, "failed to define the field tag for receiving back the tag "
01084                          "vphL_proj on atm pes" );
01085     }
01086     // send the tags to the atm pes, from the atm pg2 mesh on coupler pes, over the joint
01087     // atm - coupler communicator, using the computed graph phys-grid -> atm FV on cpl, in reverse
01088     // as always, use nonblocking sends
01089     // the graph context is the one established by the earlier comm graph computation:
01090 
01091     //      ierr = iMOAB_ComputeCommGraph( cmpPhAtmPID, cplAtmPID, &atmCouComm, &atmPEGroup, &couPEGroup,
01092     //                                     &typeA, &typeB, &cmpatm, &cplatm );
01093 
01094 
01095     if( couComm != MPI_COMM_NULL )
01096     {
01097         context_id = cmpatm;
01098         ierr       = iMOAB_SendElementTag( cplAtmPID, "T3_ph;u3_ph;v3_ph;", &atmCouComm, &context_id );
01099         CHECKIERR( ierr, "cannot send tag values back to atm pes" )
01100     }
01101 
01102     // receive on component atm phys mesh
01103     if( atmComm != MPI_COMM_NULL )
01104     {
01105         context_id = cplatm;
01106         ierr       = iMOAB_ReceiveElementTag( cmpPhAtmPID, "TphL_proj;uphL_proj;vphL_proj;", &atmCouComm, &context_id );
01107         CHECKIERR( ierr, "cannot receive tag values from atm pg2 mesh on coupler pes" )
01108     }
01109 
01110     MPI_Barrier( MPI_COMM_WORLD );
01111 
01112     if( couComm != MPI_COMM_NULL )
01113     {
01114         context_id = cmpatm;
01115         ierr       = iMOAB_FreeSenderBuffers( cplAtmPID, &context_id );
01116         CHECKIERR( ierr, "cannot free buffers used for sending back atm tags " )
01117     }
01118     if( atmComm != MPI_COMM_NULL )  // write only for n==1 case
01119     {
01120         char outputFileAtmPh[] = "AtmPhysProj3.h5m";
01121         ierr                   = iMOAB_WriteMesh( cmpPhAtmPID, outputFileAtmPh, fileWriteOptions );
01122         CHECKIERR( ierr, "could not write AtmPhysProj3.h5m to disk" )
01123     }
01124 #endif
01125     // deregister cplLndAtmPID, the lnd - atm intersection app
01126     if( couComm != MPI_COMM_NULL )
01127     {
01128         ierr = iMOAB_DeregisterApplication( cplLndAtmPID );
01129         CHECKIERR( ierr, "cannot deregister app intx LA" )
01130     }
01131 
01132     // deregister cplAtmLndPID, the atm - lnd intersection app
01133     if( couComm != MPI_COMM_NULL )
01134     {
01135         ierr = iMOAB_DeregisterApplication( cplAtmLndPID );
01136         CHECKIERR( ierr, "cannot deregister app intx AL" )
01137     }
01138 #endif  // ENABLE_ATMLND_COUPLING
01139 
01140 #ifdef ENABLE_ATMOCN_COUPLING
01141     if( couComm != MPI_COMM_NULL )
01142     {
01143         ierr = iMOAB_DeregisterApplication( cplOcnAtmPID );
01144         CHECKIERR( ierr, "cannot deregister app intx OA" )
01145     }
01146     if( couComm != MPI_COMM_NULL )
01147     {
01148         ierr = iMOAB_DeregisterApplication( cplAtmOcnPID );
01149         CHECKIERR( ierr, "cannot deregister app intx AO" )
01150     }
01151 #endif
01152 
01153 #ifdef ENABLE_ATMLND_COUPLING
01154     if( lndComm != MPI_COMM_NULL )
01155     {
01156         ierr = iMOAB_DeregisterApplication( cmpLndPID );
01157         CHECKIERR( ierr, "cannot deregister app LND1" )
01158     }
01159 #endif  // ENABLE_ATMLND_COUPLING
01160     if( atmComm != MPI_COMM_NULL )
01161     {
01162         ierr = iMOAB_DeregisterApplication( cmpPhAtmPID );
01163         CHECKIERR( ierr, "cannot deregister app PhysAtm " )
01164     }
01165 #ifdef ENABLE_ATMOCN_COUPLING
01166     if( ocnComm != MPI_COMM_NULL )
01167     {
01168         ierr = iMOAB_DeregisterApplication( cmpOcnPID );
01169         CHECKIERR( ierr, "cannot deregister app OCN1" )
01170     }
01171 #endif  // ENABLE_ATMOCN_COUPLING
01172 
01173     if( atmComm != MPI_COMM_NULL )
01174     {
01175         ierr = iMOAB_DeregisterApplication( cmpAtmPID );
01176         CHECKIERR( ierr, "cannot deregister app ATM1" )
01177     }
01178 
01179 #ifdef ENABLE_ATMLND_COUPLING
01180     if( couComm != MPI_COMM_NULL )
01181     {
01182         ierr = iMOAB_DeregisterApplication( cplLndPID );
01183         CHECKIERR( ierr, "cannot deregister app LNDX" )
01184     }
01185 #endif  // ENABLE_ATMLND_COUPLING
01186 
01187 #ifdef ENABLE_ATMOCN_COUPLING
01188     if( couComm != MPI_COMM_NULL )
01189     {
01190         ierr = iMOAB_DeregisterApplication( cplOcnPID );
01191         CHECKIERR( ierr, "cannot deregister app OCNX" )
01192     }
01193 #endif  // ENABLE_ATMOCN_COUPLING
01194 
01195     if( couComm != MPI_COMM_NULL )
01196     {
01197         ierr = iMOAB_DeregisterApplication( cplAtmPID );
01198         CHECKIERR( ierr, "cannot deregister app ATMX" )
01199     }
01200 
01202     ierr = iMOAB_Finalize();
01203     CHECKIERR( ierr, "did not finalize iMOAB" )
01204 
01205     // free atm coupler group and comm
01206     if( MPI_COMM_NULL != atmCouComm ) MPI_Comm_free( &atmCouComm );
01207     MPI_Group_free( &joinAtmCouGroup );
01208     if( MPI_COMM_NULL != atmComm ) MPI_Comm_free( &atmComm );
01209 
01210 #ifdef ENABLE_ATMOCN_COUPLING
01211     if( MPI_COMM_NULL != ocnComm ) MPI_Comm_free( &ocnComm );
01212     // free ocn - coupler group and comm
01213     if( MPI_COMM_NULL != ocnCouComm ) MPI_Comm_free( &ocnCouComm );
01214     MPI_Group_free( &joinOcnCouGroup );
01215 #endif
01216 
01217 #ifdef ENABLE_ATMLND_COUPLING
01218     if( MPI_COMM_NULL != lndComm ) MPI_Comm_free( &lndComm );
01219     // free land - coupler group and comm
01220     if( MPI_COMM_NULL != lndCouComm ) MPI_Comm_free( &lndCouComm );
01221     MPI_Group_free( &joinLndCouGroup );
01222 #endif
01223 
01224     if( MPI_COMM_NULL != couComm ) MPI_Comm_free( &couComm );
01225 
01226     MPI_Group_free( &atmPEGroup );
01227 #ifdef ENABLE_ATMOCN_COUPLING
01228     MPI_Group_free( &ocnPEGroup );
01229 #endif
01230 #ifdef ENABLE_ATMLND_COUPLING
01231     MPI_Group_free( &lndPEGroup );
01232 #endif
01233     MPI_Group_free( &couPEGroup );
01234     MPI_Group_free( &jgroup );
01235 
01236     MPI_Finalize();
01238 
01239     return 0;
01240 }