MOAB: Mesh Oriented datABase  (version 5.2.1)
imoab_phatm_ocn_coupler.cpp
00001 /**
00002  * imoab_phAtm_ocn_coupler.cpp
00003  *
00004  *  This imoab_phAtm_ocn_coupler test simulates coupling between 3 components:
00005  *  meshes are loaded from 3 files (atm, ocean, and phys atm), the
00006  *  atm and ocn meshes are migrated to the coupler PEs, and their intersection (intx)
00007  *  is computed there; projection weights are then generated on the intx mesh,
00008  *  so that a field from the phys atm can be transferred (projected) to the ocean.
00009  *  Currently, the phys atm sends some data that is projected to the ocean.
00010  *
00011  *  First, intersect atm and ocn, and recompute the comm graph between atm phys and atm_cx
00012  *  for the ocn intx; the same could be repeated for land.
00013  */
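// A typical small run could look like the following (the binary name and the data file paths
// are assumptions, based on this source file name and on the defaults set in main below; the
// short options map to the ProgOptions declared there):
//   mpiexec -np 2 ./imoab_phatm_ocn_coupler -t wholeATM_T.h5m -m recMeshOcn.h5m \
//           -l wholeLnd.h5m -q AtmPhys_01.h5m -p 0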
00014 
00015 #include "moab/Core.hpp"
00016 #ifndef MOAB_HAVE_MPI
00017 #error This coupler test requires MOAB to be configured with MPI
00018 #endif
00019 
00020 // MPI includes
00021 #include "moab_mpi.h"
00022 #include "moab/ParallelComm.hpp"
00023 #include "MBParallelConventions.h"
00024 
00025 #include "moab/iMOAB.h"
00026 #include "TestUtil.hpp"
00027 #include "moab/CpuTimer.hpp"
00028 #include "moab/ProgOptions.hpp"
00029 #include <iostream>
00030 #include <sstream>
00031 
00032 #include "imoab_coupler_utils.hpp"
00033 
00034 using namespace moab;
00035 
00036 //#define VERBOSE
00037 
00038 #ifndef MOAB_HAVE_TEMPESTREMAP
00039 #error The climate coupler test example requires MOAB to be configured with TempestRemap
00040 #endif
00041 
00042 #define ENABLE_ATMOCN_COUPLING
00043 // for land coupling with phys atm, we do not have to "compute" an intersection;
00044 // it is enough to know that the ids are the same, so we can project from atm to land
00045 // using a computed comm graph
00046 #define ENABLE_ATMLND_COUPLING
00047 
00048 int main( int argc, char* argv[] )
00049 {
00050     int ierr;
00051     int rankInGlobalComm, numProcesses;
00052     MPI_Group jgroup;
00053     std::string readopts( "PARALLEL=READ_PART;PARTITION=PARALLEL_PARTITION;PARALLEL_RESOLVE_SHARED_ENTS" );
00054     std::string readoptsPhysAtm( "PARALLEL=READ_PART;PARTITION=PARALLEL_PARTITION" );
00055 
00056     // Timer data
00057     moab::CpuTimer timer;
00058     double timer_ops;
00059     std::string opName;
00060 
00061     int repartitioner_scheme = 0;
00062 #ifdef MOAB_HAVE_ZOLTAN
00063     repartitioner_scheme = 2;  // use the Zoltan geometry partitioner in that case
00064 #endif
00065 
00066     MPI_Init( &argc, &argv );
00067     MPI_Comm_rank( MPI_COMM_WORLD, &rankInGlobalComm );
00068     MPI_Comm_size( MPI_COMM_WORLD, &numProcesses );
00069 
00070     MPI_Comm_group( MPI_COMM_WORLD, &jgroup );  // all processes in jgroup
00071 
00072     std::string atmFilename = TestDir + "/wholeATM_T.h5m";  // we use only the mesh from here
00073     std::string atmPhysMesh = TestDir + "/AtmPhys_01.h5m";  // it has some data associated with vertices: T_ph, u_ph, v_ph
00074     // we will eventually project that data to ocean mesh, after intx atm/ocn
00075 
00076     // on a regular case,  5 ATM, 6 CPLATM (ATMX), 17 OCN     , 18 CPLOCN (OCNX)  ;
00077     // intx atm/ocn is not in e3sm yet, give a number
00078     //   6 * 100+ 18 = 618 : atmocnid
00079     // 9 LND, 10 CPLLND
00080     //   6 * 100 + 10 = 610  atmlndid:
00081     // cmpatm is for atm on atm pes
00082     // cmpocn is for ocean, on ocean pe
00083     // cplatm is for atm on coupler pes
00084     // cplocn is for ocean on coupler pes
00085     // atmocnid is for intx atm / ocn on coupler pes
00086     //
00087 
00088     int cmpatm        = 5,
00089         cplatm        = 6;    // component ids are unique over all pes, and established in advance;
00090     int cmpPhysAtm    = 105;  // different from atm spectral ?
00091 #ifdef ENABLE_ATMOCN_COUPLING
00092     std::string ocnFilename = TestDir + "/recMeshOcn.h5m";
00093     int rankInOcnComm       = -1;
00094     int cmpocn = 17, cplocn = 18,
00095         atmocnid = 618;  // component ids are unique over all pes, and established in advance;
00096 #endif
00097 #ifdef ENABLE_ATMLND_COUPLING
00098     std::string lndFilename = TestDir + "/wholeLnd.h5m";
00099     int rankInLndComm       = -1;
00100     int cpllnd              = 10,
00101         cmplnd              = 9;  // component ids are unique over all pes, and established in advance;
00102 #endif
00103 
00104     int rankInCouComm = -1;
00105 
00106     int nghlay = 0;  // number of ghost layers for loading the file
00107 
00108     int startG1 = 0, startG2 = 0, endG1 = numProcesses - 1, endG2 = numProcesses - 1, startG3 = startG1, endG3 = endG1;
00109     int startG4 = startG1, endG4 = endG1;  // these are for coupler layout
00110     int context_id = -1;                   // used now for freeing buffers
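    // (a context of -1 refers to the original component <-> coupler comm graph; it is used
    //  further below when the projected tags are sent back to the ocean and land components)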
00111 
00112     // default: load atm on 2 proc, ocean on 2, land on 2; migrate to 2 procs, then compute intx
00113     // later, we need to compute weight matrix with tempestremap
00114 
00115     ProgOptions opts;
00116     opts.addOpt< std::string >( "atmosphere,t", "atm mesh filename (source)", &atmFilename );
00117 #ifdef ENABLE_ATMOCN_COUPLING
00118     opts.addOpt< std::string >( "ocean,m", "ocean mesh filename (target)", &ocnFilename );
00119 #endif
00120 
00121 #ifdef ENABLE_ATMLND_COUPLING
00122     opts.addOpt< std::string >( "land,l", "land mesh filename (target)", &lndFilename );
00123 #endif
00124 
00125     opts.addOpt< std::string >( "physgrid,q", "physics grid file", &atmPhysMesh );
00126 
00127     opts.addOpt< int >( "startAtm,a", "start task for atmosphere layout", &startG1 );
00128     opts.addOpt< int >( "endAtm,b", "end task for atmosphere layout", &endG1 );
00129 #ifdef ENABLE_ATMOCN_COUPLING
00130     opts.addOpt< int >( "startOcn,c", "start task for ocean layout", &startG2 );
00131     opts.addOpt< int >( "endOcn,d", "end task for ocean layout", &endG2 );
00132 #endif
00133 
00134 #ifdef ENABLE_ATMLND_COUPLING
00135     opts.addOpt< int >( "startLnd,e", "start task for land layout", &startG3 );
00136     opts.addOpt< int >( "endLnd,f", "end task for land layout", &endG3 );
00137 #endif
00138 
00139     opts.addOpt< int >( "startCoupler,g", "start task for coupler layout", &startG4 );
00140     opts.addOpt< int >( "endCoupler,j", "end task for coupler layout", &endG4 );
00141 
00142     opts.addOpt< int >( "partitioning,p", "partitioning option for migration", &repartitioner_scheme );
00143 
00144     opts.parseCommandLine( argc, argv );
00145 
00146     char fileWriteOptions[] = "PARALLEL=WRITE_PART";
00147 
00148     if( !rankInGlobalComm )
00149     {
00150         std::cout << " atm file: " << atmFilename << "\n   on tasks : " << startG1 << ":" << endG1 <<
00151 #ifdef ENABLE_ATMOCN_COUPLING
00152             "\n ocn file: " << ocnFilename << "\n     on tasks : " << startG2 << ":" << endG2 <<
00153 #endif
00154 #ifdef ENABLE_ATMLND_COUPLING
00155             "\n land file: " << lndFilename << "\n     on tasks : " << startG3 << ":" << endG3 <<
00156 #endif
00157 
00158             "\n atm phys file: " << atmPhysMesh << "\n     on tasks : " << startG1 << ":" << endG1 <<
00159 
00160             "\n  partitioning (0 trivial, 1 graph, 2 geometry) " << repartitioner_scheme << "\n  ";
00161     }
00162 
00163     // load files on 3 different communicators, groups
00164     // the first group has task 0, the second group has tasks 0 and 1
00165     // the coupler will be on joint tasks, on a third group (0 and 1, again)
00166     MPI_Group atmPEGroup;
00167     MPI_Comm atmComm;
00168     ierr = create_group_and_comm(startG1, endG1, jgroup, &atmPEGroup, &atmComm);
00169     CHECKIERR( ierr, "Cannot create atm MPI group and communicator " )
00170 
00171 #ifdef ENABLE_ATMOCN_COUPLING
00172     MPI_Group ocnPEGroup;
00173     MPI_Comm ocnComm;
00174     ierr = create_group_and_comm(startG2, endG2, jgroup, &ocnPEGroup, &ocnComm);
00175     CHECKIERR( ierr, "Cannot create ocn MPI group and communicator " )
00176 #endif
00177 
00178 #ifdef ENABLE_ATMLND_COUPLING
00179     MPI_Group lndPEGroup;
00180     MPI_Comm lndComm;
00181     ierr = create_group_and_comm(startG3, endG3, jgroup, &lndPEGroup, &lndComm);
00182     CHECKIERR( ierr, "Cannot create lnd MPI group and communicator " )
00183 #endif
00184 
00185     // we will always have a coupler
00186     MPI_Group couPEGroup;
00187     MPI_Comm couComm;
00188     ierr = create_group_and_comm(startG4, endG4, jgroup, &couPEGroup, &couComm);
00189     CHECKIERR( ierr, "Cannot create cpl MPI group and communicator " )
00190 
00191     // now, create the joint communicators atm_coupler, ocn_coupler, lnd_coupler
00192     // for each, we will have to create the group first, then the communicator
00193 
00194     // atm_coupler
00195     MPI_Group joinAtmCouGroup;
00196     MPI_Comm atmCouComm;
00197     ierr = create_joint_comm_group(atmPEGroup, couPEGroup,  &joinAtmCouGroup, &atmCouComm);
00198     CHECKIERR( ierr, "Cannot create joint atm cou communicator" )
00199 
00200 #ifdef ENABLE_ATMOCN_COUPLING
00201     // ocn_coupler
00202     MPI_Group joinOcnCouGroup;
00203     MPI_Comm ocnCouComm;
00204     ierr = create_joint_comm_group(ocnPEGroup, couPEGroup,  &joinOcnCouGroup, &ocnCouComm);
00205     CHECKIERR( ierr, "Cannot create joint ocn cou communicator" )
00206 #endif
00207 
00208 #ifdef ENABLE_ATMLND_COUPLING
00209     // lnd_coupler
00210     MPI_Group joinLndCouGroup;
00211     MPI_Comm lndCouComm;
00212     ierr = create_joint_comm_group(lndPEGroup, couPEGroup,  &joinLndCouGroup, &lndCouComm);
00213     CHECKIERR( ierr, "Cannot create joint lnd cou communicator" )
00214 #endif
00215 
00216     ierr = iMOAB_Initialize( argc, argv );  // nothing is really needed from argc, argv yet
00217     CHECKIERR( ierr, "Cannot initialize iMOAB" )
00218 
00219     int cmpAtmAppID       = -1;
00220     iMOAB_AppID cmpAtmPID = &cmpAtmAppID;  // atm
00221     int cplAtmAppID       = -1;            // -1 means it is not initialized
00222     iMOAB_AppID cplAtmPID = &cplAtmAppID;  // atm on coupler PEs
00223 
00224 #ifdef ENABLE_ATMOCN_COUPLING
00225     int cmpOcnAppID       = -1;
00226     iMOAB_AppID cmpOcnPID = &cmpOcnAppID;        // ocn
00227     int cplOcnAppID = -1, cplAtmOcnAppID = -1;   // -1 means it is not initialized
00228     iMOAB_AppID cplOcnPID    = &cplOcnAppID;     // ocn on coupler PEs
00229     iMOAB_AppID cplAtmOcnPID = &cplAtmOcnAppID;  // intx atm -ocn on coupler PEs
00230 #endif
00231 
00232 #ifdef ENABLE_ATMLND_COUPLING
00233     int cmpLndAppID       = -1;
00234     iMOAB_AppID cmpLndPID = &cmpLndAppID;  // lnd
00235     int cplLndAppID       = -1;            //, cplAtmLndAppID=-1;// -1 means it is not initialized
00236     iMOAB_AppID cplLndPID = &cplLndAppID;  // land on coupler PEs
00237     // iMOAB_AppID cplAtmLndPID=&cplAtmLndAppID; // intx atm - lnd on coupler PEs not needed anymore
00238 #endif
00239 
00240     int cmpPhysAtmID        = -1;
00241     iMOAB_AppID cmpPhAtmPID = &cmpPhysAtmID;  // phys atm; we do not need to move it to cpl!
00242 
00243     if( couComm != MPI_COMM_NULL )
00244     {
00245         MPI_Comm_rank( couComm, &rankInCouComm );
00246         // Register all the applications on the coupler PEs
00247         ierr = iMOAB_RegisterApplication( "ATMX", &couComm, &cplatm,
00248                                           cplAtmPID );  // atm on coupler pes
00249         CHECKIERR( ierr, "Cannot register ATM over coupler PEs" )
00250 #ifdef ENABLE_ATMOCN_COUPLING
00251         ierr = iMOAB_RegisterApplication( "OCNX", &couComm, &cplocn,
00252                                           cplOcnPID );  // ocn on coupler pes
00253         CHECKIERR( ierr, "Cannot register OCN over coupler PEs" )
00254 #endif
00255 
00256 #ifdef ENABLE_ATMLND_COUPLING
00257         ierr = iMOAB_RegisterApplication( "LNDX", &couComm, &cpllnd,
00258                                           cplLndPID );  // lnd on coupler pes
00259         CHECKIERR( ierr, "Cannot register LND over coupler PEs" )
00260 #endif
00261     }
00262 
00263     if( atmCouComm != MPI_COMM_NULL )
00264     {
00265         ierr = iMOAB_RegisterApplication( "ATM1", &atmComm, &cmpatm, cmpAtmPID );
00266         CHECKIERR( ierr, "Cannot register ATM App" )
00267     }
00268 
00269 #ifdef ENABLE_ATMOCN_COUPLING
00270     if( ocnComm != MPI_COMM_NULL )
00271     {
00272         MPI_Comm_rank( ocnComm, &rankInOcnComm );
00273         ierr = iMOAB_RegisterApplication( "OCN1", &ocnComm, &cmpocn, cmpOcnPID );
00274         CHECKIERR( ierr, "Cannot register OCN App" )
00275     }
00276 #endif
00277 
00278     //atm
00279     ierr = setup_component_coupler_meshes(cmpAtmPID, cmpatm, cplAtmPID, cplatm, &atmComm, &atmPEGroup, &couComm,
00280              &couPEGroup, &atmCouComm, atmFilename, readopts, nghlay, repartitioner_scheme);
00281     CHECKIERR( ierr, "Cannot load and migrate atm mesh" )
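    // (setup_component_coupler_meshes, defined in imoab_coupler_utils.hpp, is expected to load
    //  the mesh on the component PEs and then migrate/repartition it to the coupler PEs using
    //  repartitioner_scheme; the same helper is reused below for the ocean and land meshes)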
00282 
00283     MPI_Barrier( MPI_COMM_WORLD );
00284 
00285 #ifdef ENABLE_ATMOCN_COUPLING
00286     // ocean
00287     ierr = setup_component_coupler_meshes(cmpOcnPID, cmpocn, cplOcnPID, cplocn, &ocnComm,  &ocnPEGroup, &couComm,
00288              &couPEGroup, &ocnCouComm, ocnFilename, readopts, nghlay, repartitioner_scheme);
00289     CHECKIERR( ierr, "Cannot load and migrate ocn mesh" )
00290 #ifdef VERBOSE
00291     if( couComm != MPI_COMM_NULL )
00292     {
00293         char outputFileTgt3[] = "recvOcn2.h5m";
00294         ierr = iMOAB_WriteMesh( cplOcnPID, outputFileTgt3, fileWriteOptions, strlen( outputFileTgt3 ),
00295                                 strlen( fileWriteOptions ) );
00296         CHECKIERR( ierr, "cannot write ocn mesh after receiving" )
00297     }
00298 #endif
00299 
00300 #endif
00301 
00302     MPI_Barrier( MPI_COMM_WORLD );
00303 
00304     // load phys atm mesh, with some data on it already
00305     if( atmComm != MPI_COMM_NULL )
00306     {
00307         ierr = iMOAB_RegisterApplication( "PhysAtm", &atmComm, &cmpPhysAtm, cmpPhAtmPID );
00308         CHECKIERR( ierr, "Cannot register Phys Atm App " )
00309 
00310         // load the next component mesh
00311         ierr = iMOAB_LoadMesh( cmpPhAtmPID, atmPhysMesh.c_str(), readoptsPhysAtm.c_str(), &nghlay, atmPhysMesh.length(),
00312                                readoptsPhysAtm.length() );
00313         CHECKIERR( ierr, "Cannot load Atm Phys  mesh on atm pes" )
00314 
00315         int nverts[3], nelem[3];
00316         ierr = iMOAB_GetMeshInfo( cmpPhAtmPID, nverts, nelem, 0, 0, 0 );
00317         CHECKIERR( ierr, "failed to get mesh info" );
00318         printf( "Phys Atm Component Mesh: %d vertices and %d elements\n", nverts[0], nelem[0] );
00319     }
00320 
00321     MPI_Barrier( MPI_COMM_WORLD );
00322 
00323 #ifdef ENABLE_ATMLND_COUPLING
00324     // land
00325     if( lndComm != MPI_COMM_NULL )
00326     {
00327         ierr = iMOAB_RegisterApplication( "LND1", &lndComm, &cmplnd, cmpLndPID );
00328         CHECKIERR( ierr, "Cannot register LND App " )
00329     }
00330     ierr = setup_component_coupler_meshes(cmpLndPID, cmplnd, cplLndPID, cpllnd, &lndComm,  &lndPEGroup, &couComm,
00331              &couPEGroup, &lndCouComm, lndFilename, readoptsPhysAtm, nghlay, repartitioner_scheme);
    CHECKIERR( ierr, "Cannot load and migrate lnd mesh" )
00332     if( couComm != MPI_COMM_NULL )
00333     {
00334         char outputFileLnd[] = "recvLnd2.h5m";
00335 
00336         ierr = iMOAB_WriteMesh( cplLndPID, outputFileLnd, fileWriteOptions, strlen( outputFileLnd ),
00337                                 strlen( fileWriteOptions ) );
00338         CHECKIERR( ierr, "cannot write lnd mesh after receiving" )
00339     }
00340 
00341 #endif  // ENABLE_ATMLND_COUPLING
00342 
00343     MPI_Barrier( MPI_COMM_WORLD );
00344 
00345 #ifdef ENABLE_ATMOCN_COUPLING
00346     if( couComm != MPI_COMM_NULL )
00347     {
00348         // now compute intersection between OCNx and ATMx on coupler PEs
00349         ierr = iMOAB_RegisterApplication( "ATMOCN", &couComm, &atmocnid, cplAtmOcnPID );
00350         CHECKIERR( ierr, "Cannot register ocn_atm intx over coupler pes " )
00351     }
00352 #endif
00353 
00354 
00355     const char* weights_identifiers[2] = { "scalar", "scalar-pc" };
00356     int disc_orders[3]                 = { 4, 1, 1 };
00357     const char* disc_methods[3]        = { "cgll", "fv", "pcloud" };
00358     const char* dof_tag_names[3]       = { "GLOBAL_DOFS", "GLOBAL_ID", "GLOBAL_ID" };
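    // index 0 describes the atm source (spectral cgll of order 4, DoFs in GLOBAL_DOFS),
    // index 1 the ocn target (finite volume, order 1, DoFs in GLOBAL_ID); index 2 is a
    // point-cloud (pcloud) description, declared here but not used in the weight computation below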
00359 #ifdef ENABLE_ATMOCN_COUPLING
00360     if( couComm != MPI_COMM_NULL )
00361     {
00362 
00363         ierr = iMOAB_ComputeMeshIntersectionOnSphere(
00364             cplAtmPID, cplOcnPID,
00365             cplAtmOcnPID );  // coverage mesh was computed here, for cplAtmPID, atm on coupler pes
00366         // basically, atm was redistributed according to the target (ocean) partition, to "cover" the
00367         // ocean partitions; check if the intx is valid, write some h5m intx file
00368         CHECKIERR( ierr, "cannot compute intersection" )
00369 
00370     }
00371 
00372     if( atmCouComm != MPI_COMM_NULL )
00373     {
00374         // the new graph will be for sending data from atm comp to coverage mesh;
00375         // it involves initial atm app; cmpAtmPID; also migrate atm mesh on coupler pes, cplAtmPID
00376         // results are in cplAtmOcnPID, intx mesh; remapper also has some info about coverage mesh
00377         // after this, the sending of tags from atm pes to coupler pes will use the new par comm
00378         // graph, that has more precise info about what to send for the ocean cover; every time we
00379         // will use the element global id, which should uniquely identify the element
00381 
00382         ierr = iMOAB_CoverageGraph( &atmCouComm, cmpAtmPID, cplAtmPID, cplAtmOcnPID,
00383                                     &cplocn );  // it happens over joint communicator
00384         CHECKIERR( ierr, "cannot recompute direct coverage graph for ocean" )
00385 
00386     }
00387 #endif
00388     // need to compute graph between phys atm and atm/ocn intx coverage
00389     if( atmCouComm != MPI_COMM_NULL )
00390     {
00391         int typeA = 2;  // point cloud
00392         int typeB = 1;  // quads in coverage set
00393         ierr = iMOAB_ComputeCommGraph( cmpPhAtmPID, cplAtmOcnPID, &atmCouComm, &atmPEGroup, &couPEGroup, &typeA, &typeB,
00394                                        &cmpatm, &atmocnid );
        CHECKIERR( ierr, "cannot compute comm graph between phys atm and atm/ocn intx" )
00395     }
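    // this graph is later identified by the (cmpatm, atmocnid) pair: the phys atm side sends
    // T_ph;u_ph;v_ph with context atmocnid (and frees its send buffers with the same context),
    // while the coupler side receives on the atm/ocn intx app with context cmpatm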
00396 
00397 #ifdef ENABLE_ATMLND_COUPLING
00398     // we will compute comm graph between atm phys and land, directly; we do not need any
00399     // intersection later, we will send data from atm phys to land on coupler; then back to land
00400     // comp;
00401     if( atmCouComm != MPI_COMM_NULL )
00402     {
00403         int typeA = 2;  // point cloud
00404         int typeB = 2;  // point cloud for land on coupler, too
00405         ierr = iMOAB_ComputeCommGraph( cmpPhAtmPID, cplLndPID, &atmCouComm, &atmPEGroup, &couPEGroup, &typeA, &typeB,
00406                                        &cmpatm, &cpllnd );
        CHECKIERR( ierr, "cannot compute comm graph between phys atm and land on coupler" )
00407     }
00408 #endif
00409     MPI_Barrier( MPI_COMM_WORLD );
00410 
00411     int fMonotoneTypeID = 0, fVolumetric = 0, fValidate = 1, fNoConserve = 0;
00412 
00413 #ifdef ENABLE_ATMOCN_COUPLING
00414 #ifdef VERBOSE
00415     if( couComm != MPI_COMM_NULL )
00416     {
00417         char serialWriteOptions[] = "";  // for writing in serial
00418         std::stringstream outf;
00419         outf << "intxAtmOcn_" << rankInCouComm << ".h5m";
00420         std::string intxfile = outf.str();  // write in serial the intx file, for debugging
00421         ierr = iMOAB_WriteMesh( cplAtmOcnPID, (char*)intxfile.c_str(), serialWriteOptions, (int)intxfile.length(),
00422                                 strlen( serialWriteOptions ) );
00423         CHECKIERR( ierr, "cannot write intx file result" )
00424     }
00425 #endif
00426 
00427     if( couComm != MPI_COMM_NULL )
00428     {
00429         PUSH_TIMER( "Compute the projection weights with TempestRemap" )
00430         ierr = iMOAB_ComputeScalarProjectionWeights(
00431             cplAtmOcnPID, weights_identifiers[0], disc_methods[0], &disc_orders[0], disc_methods[1], &disc_orders[1],
00432             &fMonotoneTypeID, &fVolumetric, &fNoConserve, &fValidate, dof_tag_names[0], dof_tag_names[1],
00433             strlen( weights_identifiers[0] ), strlen( disc_methods[0] ), strlen( disc_methods[1] ),
00434             strlen( dof_tag_names[0] ), strlen( dof_tag_names[1] ) );
00435         CHECKIERR( ierr, "cannot compute scalar projection weights" )
00436         POP_TIMER( couComm, rankInCouComm )
00437     }
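    // (the integer flags passed above, fMonotoneTypeID, fVolumetric, fNoConserve and fValidate,
    //  presumably toggle monotonicity, volumetric remapping, non-conservation and validation of
    //  the generated map; only fValidate is set to 1 in this test)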
00438 
00439 #endif
00440 
00441     MPI_Barrier( MPI_COMM_WORLD );
00442 
00443     int tagIndex[2];
00444     int tagTypes[2]  = { DENSE_DOUBLE, DENSE_DOUBLE };
00445     int atmCompNDoFs = disc_orders[0] * disc_orders[0], ocnCompNDoFs = 1 /*FV*/;
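    // with a 4th order cgll source, each spectral element carries 4 x 4 = 16 GLL DoFs,
    // consistent with the "16" in the source tag names defined below (T16_ph, u16_ph, v16_ph)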
00446 
00447     const char* bottomTempField          = "T16_ph";
00448     const char* bottomTempProjectedField = "T_proj";
00449     // Define more fields
00450     const char* bottomUVelField          = "u16_ph";
00451     const char* bottomUVelProjectedField = "u_proj";
00452     const char* bottomVVelField          = "v16_ph";
00453     const char* bottomVVelProjectedField = "v_proj";
00454 
00455     if( couComm != MPI_COMM_NULL )
00456     {
00457         ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomTempField, &tagTypes[0], &atmCompNDoFs, &tagIndex[0],
00458                                        strlen( bottomTempField ) );
00459         CHECKIERR( ierr, "failed to define the field tag T16_ph" );
00460 #ifdef ENABLE_ATMOCN_COUPLING
00461 
00462         ierr = iMOAB_DefineTagStorage( cplOcnPID, bottomTempProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1],
00463                                        strlen( bottomTempProjectedField ) );
00464         CHECKIERR( ierr, "failed to define the field tag T_proj" );
00465 #endif
00466 
00467         ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomUVelField, &tagTypes[0], &atmCompNDoFs, &tagIndex[0],
00468                                        strlen( bottomUVelField ) );
00469         CHECKIERR( ierr, "failed to define the field tag u16_ph" );
00470 #ifdef ENABLE_ATMOCN_COUPLING
00471 
00472         ierr = iMOAB_DefineTagStorage( cplOcnPID, bottomUVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1],
00473                                        strlen( bottomUVelProjectedField ) );
00474         CHECKIERR( ierr, "failed to define the field tag u_proj" );
00475 #endif
00476 
00477         ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomVVelField, &tagTypes[0], &atmCompNDoFs, &tagIndex[0],
00478                                        strlen( bottomVVelField ) );
00479         CHECKIERR( ierr, "failed to define the field tag v16_ph" );
00480 #ifdef ENABLE_ATMOCN_COUPLING
00481         ierr = iMOAB_DefineTagStorage( cplOcnPID, bottomVVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1],
00482                                        strlen( bottomVVelProjectedField ) );
00483         CHECKIERR( ierr, "failed to define the field tag v_proj" );
00484 #endif
00485 
00486 #ifdef ENABLE_ATMLND_COUPLING
00487         // need to define tag storage for land; will use the same T_proj, u_proj, v_proj name,
00488         // because it will be used to send between point clouds ! use the same ndof and same size as
00489         // ocnCompNDoFs (1) !!
00490         ierr = iMOAB_DefineTagStorage( cplLndPID, bottomTempProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1],
00491                                        strlen( bottomTempProjectedField ) );
00492         CHECKIERR( ierr, "failed to define the field tag T_proj on coupler land" );
00493         ierr = iMOAB_DefineTagStorage( cplLndPID, bottomUVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1],
00494                                        strlen( bottomUVelProjectedField ) );
00495         CHECKIERR( ierr, "failed to define the field tag u_proj on coupler land" );
00496         ierr = iMOAB_DefineTagStorage( cplLndPID, bottomVVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1],
00497                                        strlen( bottomVVelProjectedField ) );
00498         CHECKIERR( ierr, "failed to define the field tag v_proj on coupler land" );
00499 #endif
00500     }
00501     // need to make sure that the coverage mesh (created during intx method) received the tag that
00502     // need to be projected to target so far, the coverage mesh has only the ids and global dofs;
00503     // need to change the migrate method to accommodate any GLL tag
00504     // now send a tag from original atmosphere (cmpAtmPID) towards migrated coverage mesh
00505     // (cplAtmPID), using the new coverage graph communicator
00506 
00507     // make the tag 0, to check we are actually sending needed data
00508     {
00509         if( cplAtmAppID >= 0 )
00510         {
00511             int nverts[3], nelem[3], nblocks[3], nsbc[3], ndbc[3];
00512             /*
00513              * Each process in the communicator will have access to a local mesh instance, which
00514              * will contain the original cells in the local partition and ghost entities. Number of
00515              * vertices, primary cells, visible blocks, number of sidesets and nodesets boundary
00516              * conditions will be returned in size-3 arrays, for local, ghost and total
00517              * numbers.
00518              */
00519             ierr = iMOAB_GetMeshInfo( cplAtmPID, nverts, nelem, nblocks, nsbc, ndbc );
00520             CHECKIERR( ierr, "failed to get num primary elems" );
00521             int numAllElem = nelem[2];
00522             std::vector< double > vals;
00523             int storLeng = atmCompNDoFs * numAllElem;
00524             vals.resize( storLeng );
00525             for( int k = 0; k < storLeng; k++ )
00526                 vals[k] = 0.;
00527             int eetype = 1;
00528             ierr       = iMOAB_SetDoubleTagStorage( cplAtmPID, bottomTempField, &storLeng, &eetype, &vals[0],
00529                                               strlen( bottomTempField ) );
00530             CHECKIERR( ierr, "cannot make tag null" )
00531             ierr = iMOAB_SetDoubleTagStorage( cplAtmPID, bottomUVelField, &storLeng, &eetype, &vals[0],
00532                                               strlen( bottomUVelField ) );
00533             CHECKIERR( ierr, "cannot make tag null" )
00534             ierr = iMOAB_SetDoubleTagStorage( cplAtmPID, bottomVVelField, &storLeng, &eetype, &vals[0],
00535                                               strlen( bottomVVelField ) );
00536             CHECKIERR( ierr, "cannot make tag null" )
00537             // set the tag to 0
00538         }
00539     }
00540 
00541 #ifdef ENABLE_ATMOCN_COUPLING
00542     PUSH_TIMER( "Send/receive data from atm component to coupler in ocn context" )
00543     if( atmComm != MPI_COMM_NULL )
00544     {
00545         // as always, use nonblocking sends
00546         // this is for projection to ocean:
00547         ierr =
00548             iMOAB_SendElementTag( cmpPhAtmPID, "T_ph;u_ph;v_ph;", &atmCouComm, &atmocnid, strlen( "T_ph;u_ph;v_ph;" ) );
00549         CHECKIERR( ierr, "cannot send tag values" )
00550 #ifdef VERBOSE
00551         int is_sender = 1;
00552         int context   = atmocnid;  // used to identify the parcommgraph in charge
00553         iMOAB_DumpCommGraph( cmpPhAtmPID, &context, &is_sender, "PhysAtmA2OS", strlen( "PhysAtmA2OS" ) );
00554 #endif
00555     }
00556     if( couComm != MPI_COMM_NULL )
00557     {
00558         // receive on atm on coupler pes, that was redistributed according to coverage
00559         ierr = iMOAB_ReceiveElementTag( cplAtmOcnPID, "T16_ph;u16_ph;v16_ph;", &atmCouComm, &cmpatm,
00560                                         strlen( "T16_ph;u16_ph;v16_ph;" ) );
00561         CHECKIERR( ierr, "cannot receive tag values" )
00562 #ifdef VERBOSE
00563         int is_sender = 0;
00564         int context   = cmpatm;  // used to identify the parcommgraph in charge
00565         iMOAB_DumpCommGraph( cplAtmOcnPID, &context, &is_sender, "PhysAtmA2OR", strlen( "PhysAtmA2OR" ) );
00566 #endif
00567     }
00568     POP_TIMER( MPI_COMM_WORLD, rankInGlobalComm )
00569 
00570     // we can now free the sender buffers
00571     if( atmComm != MPI_COMM_NULL )
00572     {
00573         ierr = iMOAB_FreeSenderBuffers( cmpPhAtmPID, &atmocnid );  // context is for ocean
00574         CHECKIERR( ierr, "cannot free buffers used to resend atm tag towards the coverage mesh" )
00575     }
00576     /*
00577     #ifdef VERBOSE
00578       if (couComm != MPI_COMM_NULL) {
00579         char outputFileRecvd[] = "recvAtmCoupOcn.h5m";
00580         ierr = iMOAB_WriteMesh(cplAtmPID, outputFileRecvd, fileWriteOptions,
00581             strlen(outputFileRecvd), strlen(fileWriteOptions) );
00582       }
00583     #endif
00584     */
00585 
00586     const char* concat_fieldname  = "T16_ph;u16_ph;v16_ph;";
00587     const char* concat_fieldnameT = "T_proj;u_proj;v_proj;";
00588 
00589     if( couComm != MPI_COMM_NULL )
00590     {
00591         /* We have the remapping weights now. Let us apply the weights onto the tag we defined
00592            on the source mesh and get the projection on the target mesh */
00593         PUSH_TIMER( "Apply Scalar projection weights" )
00594         ierr = iMOAB_ApplyScalarProjectionWeights( cplAtmOcnPID, weights_identifiers[0], concat_fieldname,
00595                                                    concat_fieldnameT, strlen( weights_identifiers[0] ),
00596                                                    strlen( concat_fieldname ), strlen( concat_fieldnameT ) );
00597         CHECKIERR( ierr, "failed to compute projection weight application" );
00598         POP_TIMER( couComm, rankInCouComm )
00599 
00600         char outputFileTgt[] = "fOcnOnCpl2.h5m";
00601         ierr                 = iMOAB_WriteMesh( cplOcnPID, outputFileTgt, fileWriteOptions, strlen( outputFileTgt ),
00602                                 strlen( fileWriteOptions ) );
00603     }
00604     // send the projected tag back to ocean pes, with send/receive tag
00605     if( ocnComm != MPI_COMM_NULL )
00606     {
00607         int tagIndexIn2;
00608         ierr = iMOAB_DefineTagStorage( cmpOcnPID, bottomTempProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2,
00609                                        strlen( bottomTempProjectedField ) );
00610         CHECKIERR( ierr, "failed to define the field tag for receiving back the tag T_proj on ocn pes" );
00611         ierr = iMOAB_DefineTagStorage( cmpOcnPID, bottomUVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2,
00612                                        strlen( bottomUVelProjectedField ) );
00613         CHECKIERR( ierr, "failed to define the field tag for receiving back the tag u_proj on ocn pes" );
00614         ierr = iMOAB_DefineTagStorage( cmpOcnPID, bottomVVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2,
00615                                        strlen( bottomVVelProjectedField ) );
00616         CHECKIERR( ierr, "failed to define the field tag for receiving back the tag v_proj on ocn pes" );
00617     }
00618     // send the tag to ocean pes, from ocean mesh on coupler pes
00619     //   from couComm, using common joint comm ocn_coupler
00620     // as always, use nonblocking sends
00621     // original graph (context is -1)
00622     if( couComm != MPI_COMM_NULL )
00623     {
00624         ierr = iMOAB_SendElementTag( cplOcnPID, "T_proj;u_proj;v_proj;", &ocnCouComm, &context_id,
00625                                      strlen( "T_proj;u_proj;v_proj;" ) );
00626         CHECKIERR( ierr, "cannot send tag values back to ocean pes" )
00627     }
00628 
00629     // receive on component 2, ocean
00630     if( ocnComm != MPI_COMM_NULL )
00631     {
00632         ierr = iMOAB_ReceiveElementTag( cmpOcnPID, "T_proj;u_proj;v_proj;", &ocnCouComm, &context_id,
00633                                         strlen( "T_proj;u_proj;v_proj;" ) );
00634         CHECKIERR( ierr, "cannot receive tag values from ocean mesh on coupler pes" )
00635     }
00636 
00637     MPI_Barrier( MPI_COMM_WORLD );
00638 
00639     if( couComm != MPI_COMM_NULL ) { ierr = iMOAB_FreeSenderBuffers( cplOcnPID, &context_id ); }
00640     if( ocnComm != MPI_COMM_NULL )
00641     {
00642         char outputFileOcn[] = "OcnWithProj2.h5m";
00643         ierr                 = iMOAB_WriteMesh( cmpOcnPID, outputFileOcn, fileWriteOptions, strlen( outputFileOcn ),
00644                                 strlen( fileWriteOptions ) );
00645     }
00646 #endif
00647 
00648     MPI_Barrier( MPI_COMM_WORLD );
00649 
00650 #ifdef ENABLE_ATMLND_COUPLING
00651     // start land proj:
00652 
00653     // the comm graph needed here was computed earlier with:
00654     //   iMOAB_ComputeCommGraph( cmpPhAtmPID, cplLndPID, &atmCouComm, &atmPEGroup, &couPEGroup,
00655     //                           &typeA, &typeB, &cmpatm, &cpllnd );
00656 
00658     PUSH_TIMER( "Send/receive data from phys comp atm to coupler land, using computed graph" )
00659     if( atmComm != MPI_COMM_NULL )
00660     {
00661 
00662         // as always, use nonblocking sends
00663         // this is for projection to land:
00664         ierr =
00665             iMOAB_SendElementTag( cmpPhAtmPID, "T_ph;u_ph;v_ph;", &atmCouComm, &cpllnd, strlen( "T_ph;u_ph;v_ph;" ) );
00666         CHECKIERR( ierr, "cannot send tag values towards cpl on land" )
00667     }
00668     if( couComm != MPI_COMM_NULL )
00669     {
00670         // receive on lnd on coupler pes
00671         ierr = iMOAB_ReceiveElementTag( cplLndPID, "T_proj;u_proj;v_proj;", &atmCouComm, &cmpatm,
00672                                         strlen( "T_proj;u_proj;v_proj;" ) );
00673         CHECKIERR( ierr, "cannot receive tag values on land on coupler" )
00674     }
00675     POP_TIMER( MPI_COMM_WORLD, rankInGlobalComm )
00676 
00677     // we can now free the sender buffers
00678     if( atmComm != MPI_COMM_NULL )
00679     {
00680         ierr = iMOAB_FreeSenderBuffers( cmpPhAtmPID, &cpllnd );
00681         CHECKIERR( ierr, "cannot free buffers used to resend atm tag towards the land on coupler" )
00682     }
00683 
00684 #ifdef VERBOSE
00685     if( couComm != MPI_COMM_NULL )
00686     {
00687         char outputFileTgtLnd[] = "fLndOnCpl.h5m";
00688         ierr = iMOAB_WriteMesh( cplLndPID, outputFileTgtLnd, fileWriteOptions, strlen( outputFileTgtLnd ),
00689                                 strlen( fileWriteOptions ) );
00690     }
00691 #endif
00692 
00693     // end land proj
00694     // send the tags back to land pes, from land mesh on coupler pes
00695     // send from cplLndPID to cmpLndPID, using common joint comm
00696     // as always, use nonblocking sends
00697     // original graph
00698     // int context_id = -1;
00699     // the land might not have these tags yet; it should be a different name for land
00700     // in e3sm we do have different names
00701     if( lndComm != MPI_COMM_NULL )
00702     {
00703         int tagIndexIn2;
00704         ierr = iMOAB_DefineTagStorage( cmpLndPID, bottomTempProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2,
00705                                        strlen( bottomTempProjectedField ) );
00706         CHECKIERR( ierr, "failed to define the field tag for receiving back the tag T_proj on lnd pes" );
00707         ierr = iMOAB_DefineTagStorage( cmpLndPID, bottomUVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2,
00708                                        strlen( bottomUVelProjectedField ) );
00709         CHECKIERR( ierr, "failed to define the field tag for receiving back the tag u_proj on lnd pes" );
00710         ierr = iMOAB_DefineTagStorage( cmpLndPID, bottomVVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2,
00711                                        strlen( bottomVVelProjectedField ) );
00712         CHECKIERR( ierr, "failed to define the field tag for receiving back the tag v_proj on lnd pes" );
00713     }
00714     if( couComm != MPI_COMM_NULL )
00715     {
00716         ierr = iMOAB_SendElementTag( cplLndPID, "T_proj;u_proj;v_proj;", &lndCouComm, &context_id,
00717                                      strlen( "T_proj;u_proj;v_proj;" ) );
00718         CHECKIERR( ierr, "cannot send tag values back to land pes" )
00719     }
00720     // receive on component 3, land
00721     if( lndComm != MPI_COMM_NULL )
00722     {
00723         ierr = iMOAB_ReceiveElementTag( cmpLndPID, "T_proj;u_proj;v_proj;", &lndCouComm, &context_id,
00724                                         strlen( "T_proj;u_proj;v_proj;" ) );
00725         CHECKIERR( ierr, "cannot receive tag values from land mesh on coupler pes" )
00726     }
00727 
00728     MPI_Barrier( MPI_COMM_WORLD );
00729     if( couComm != MPI_COMM_NULL ) { ierr = iMOAB_FreeSenderBuffers( cplLndPID, &context_id ); }
00730     if( lndComm != MPI_COMM_NULL )
00731     {
00732         char outputFileLnd[] = "LndWithProj2.h5m";
00733         ierr                 = iMOAB_WriteMesh( cmpLndPID, outputFileLnd, fileWriteOptions, strlen( outputFileLnd ),
00734                                 strlen( fileWriteOptions ) );
00735     }
00736 
00737 #endif  // ENABLE_ATMLND_COUPLING
00738 
00739 #ifdef ENABLE_ATMOCN_COUPLING
00740     if( couComm != MPI_COMM_NULL )
00741     {
00742         ierr = iMOAB_DeregisterApplication( cplAtmOcnPID );
00743         CHECKIERR( ierr, "cannot deregister app intx AO" )
00744     }
00745 #endif
00746 
00747 #ifdef ENABLE_ATMLND_COUPLING
00748     if( lndComm != MPI_COMM_NULL )
00749     {
00750         ierr = iMOAB_DeregisterApplication( cmpLndPID );
00751         CHECKIERR( ierr, "cannot deregister app LND1" )
00752     }
00753 #endif  // ENABLE_ATMLND_COUPLING
00754     if( atmComm != MPI_COMM_NULL )
00755     {
00756         ierr = iMOAB_DeregisterApplication( cmpPhAtmPID );
00757         CHECKIERR( ierr, "cannot deregister app PhysAtm " )
00758     }
00759 #ifdef ENABLE_ATMOCN_COUPLING
00760     if( ocnComm != MPI_COMM_NULL )
00761     {
00762         ierr = iMOAB_DeregisterApplication( cmpOcnPID );
00763         CHECKIERR( ierr, "cannot deregister app OCN1" )
00764     }
00765 #endif  // ENABLE_ATMOCN_COUPLING
00766 
00767     if( atmComm != MPI_COMM_NULL )
00768     {
00769         ierr = iMOAB_DeregisterApplication( cmpAtmPID );
00770         CHECKIERR( ierr, "cannot deregister app ATM1" )
00771     }
00772 
00773 #ifdef ENABLE_ATMLND_COUPLING
00774     if( couComm != MPI_COMM_NULL )
00775     {
00776         ierr = iMOAB_DeregisterApplication( cplLndPID );
00777         CHECKIERR( ierr, "cannot deregister app LNDX" )
00778     }
00779 #endif  // ENABLE_ATMLND_COUPLING
00780 
00781 #ifdef ENABLE_ATMOCN_COUPLING
00782     if( couComm != MPI_COMM_NULL )
00783     {
00784         ierr = iMOAB_DeregisterApplication( cplOcnPID );
00785         CHECKIERR( ierr, "cannot deregister app OCNX" )
00786     }
00787 #endif  // ENABLE_ATMOCN_COUPLING
00788 
00789     if( couComm != MPI_COMM_NULL )
00790     {
00791         ierr = iMOAB_DeregisterApplication( cplAtmPID );
00792         CHECKIERR( ierr, "cannot deregister app ATMX" )
00793     }
00794 
00795     //#endif
00796     ierr = iMOAB_Finalize();
00797     CHECKIERR( ierr, "did not finalize iMOAB" )
00798 
00799     // free atm coupler group and comm
00800     if( MPI_COMM_NULL != atmCouComm ) MPI_Comm_free( &atmCouComm );
00801     MPI_Group_free( &joinAtmCouGroup );
00802     if( MPI_COMM_NULL != atmComm ) MPI_Comm_free( &atmComm );
00803 
00804 #ifdef ENABLE_ATMOCN_COUPLING
00805     if( MPI_COMM_NULL != ocnComm ) MPI_Comm_free( &ocnComm );
00806     // free ocn - coupler group and comm
00807     if( MPI_COMM_NULL != ocnCouComm ) MPI_Comm_free( &ocnCouComm );
00808     MPI_Group_free( &joinOcnCouGroup );
00809 #endif
00810 
00811 #ifdef ENABLE_ATMLND_COUPLING
00812     if( MPI_COMM_NULL != lndComm ) MPI_Comm_free( &lndComm );
00813     // free land - coupler group and comm
00814     if( MPI_COMM_NULL != lndCouComm ) MPI_Comm_free( &lndCouComm );
00815     MPI_Group_free( &joinLndCouGroup );
00816 #endif
00817 
00818     if( MPI_COMM_NULL != couComm ) MPI_Comm_free( &couComm );
00819 
00820     MPI_Group_free( &atmPEGroup );
00821 #ifdef ENABLE_ATMOCN_COUPLING
00822     MPI_Group_free( &ocnPEGroup );
00823 #endif
00824 #ifdef ENABLE_ATMLND_COUPLING
00825     MPI_Group_free( &lndPEGroup );
00826 #endif
00827     MPI_Group_free( &couPEGroup );
00828     MPI_Group_free( &jgroup );
00829 
00830     MPI_Finalize();
00831     // endif #if 0
00832 
00833     return 0;
00834 }