MOAB: Mesh Oriented datABase
(version 5.2.1)
|
00001 /** 00002 * imoab_apg2_ol_coupler.cpp 00003 * 00004 * This imoab_apg2_ol_coupler test will simulate coupling between 3 components 00005 * meshes will be loaded from 3 files (atm phys + atm pg2, ocean, and land), and 00006 * Atm and ocn will be migrated to coupler pes and compute intx on them 00007 * then, intx will be performed between migrated meshes 00008 * and weights will be generated, such that a field from phys atm will be transferred to ocn 00009 * currently, the phys atm will send some data to be projected to ocean 00010 * similar for land, but land will be intersected too with the atm pg2 00011 * Land is defined by the domain mesh 00012 * 00013 * first, intersect atm and ocn, and recompute comm graph 1 between atm phys and atm_cx, for ocn intx 00014 * repeat the same for land; for atm/lnd coupling will use similar Fv -Fv maps; maybe later will identify some 00015 * equivalent to bilinear maps 00016 */ 00017 00018 #include "moab/Core.hpp" 00019 #ifndef MOAB_HAVE_MPI 00020 #error imoab coupler test requires MPI configuration 00021 #endif 00022 00023 // MPI includes 00024 #include "moab_mpi.h" 00025 #include "moab/ParallelComm.hpp" 00026 #include "MBParallelConventions.h" 00027 00028 #include "moab/iMOAB.h" 00029 #include "TestUtil.hpp" 00030 #include "moab/CpuTimer.hpp" 00031 #include "moab/ProgOptions.hpp" 00032 #include <iostream> 00033 #include <sstream> 00034 00035 #include "imoab_coupler_utils.hpp" 00036 00037 using namespace moab; 00038 00039 // #define VERBOSE 00040 00041 #ifndef MOAB_HAVE_TEMPESTREMAP 00042 #error The climate coupler test example requires MOAB configuration with TempestRemap 00043 #endif 00044 00045 #define ENABLE_ATMOCN_COUPLING 00046 // for land coupling with phys atm, we do not have to "compute" intersection 00047 // it is enough to know that the ids are the same, we can project from atm to land that way, using a computed graph 00048 #define ENABLE_ATMLND_COUPLING 00049 00050 int main( int argc, char* argv[] ) 00051 { 00052 int ierr; 00053 int rankInGlobalComm, numProcesses; 00054 MPI_Group jgroup; 00055 std::string readopts( "PARALLEL=READ_PART;PARTITION=PARALLEL_PARTITION;PARALLEL_RESOLVE_SHARED_ENTS" ); 00056 std::string readoptsPhysAtm( "PARALLEL=READ_PART;PARTITION=PARALLEL_PARTITION" ); 00057 00058 /*if (argc < 2) 00059 return 0; // no test; we will force naming the land domain file*/ 00060 // Timer data 00061 moab::CpuTimer timer; 00062 double timer_ops; 00063 std::string opName; 00064 00065 int repartitioner_scheme = 0; 00066 #ifdef MOAB_HAVE_ZOLTAN 00067 repartitioner_scheme = 2; // use the geometry partitioner in that case 00068 #endif 00069 00070 MPI_Init( &argc, &argv ); 00071 MPI_Comm_rank( MPI_COMM_WORLD, &rankInGlobalComm ); 00072 MPI_Comm_size( MPI_COMM_WORLD, &numProcesses ); 00073 00074 MPI_Comm_group( MPI_COMM_WORLD, &jgroup ); // all processes in jgroup 00075 00076 std::string atmFilename = 00077 "../../sandbox/MeshFiles/e3sm/ne4pg2_o240/ne4pg2_p8.h5m"; // we should use only mesh from here 00078 std::string atmPhysMesh = 00079 "../../sandbox/MeshFiles/e3sm/ne4pg2_o240/AtmPhys_pg2.h5m"; // it has some data associated to vertices, T_ph, 00080 // u_ph, v_ph 00081 // we will eventually project that data to ocean mesh, after intx atm/ocn 00082 00083 // on a regular case, 5 ATM, 6 CPLATM (ATMX), 17 OCN , 18 CPLOCN (OCNX) ; 00084 // intx atm/ocn is not in e3sm yet, give a number 00085 // 6 * 100 + 18 = 618 : atmocnid 00086 // 18 * 100 + 6 = 1806 : ocnatmid on coupler pes! 00087 // 9 LND, 10 CPLLND 00088 // 6 * 100 + 10 = 610 atmlndid: 00089 // 10 * 100 + 6 = 1006 lndatmid: on coupler pes 00090 // cmpatm is for atm on atm pes 00091 // cmpocn is for ocean, on ocean pe 00092 // cplatm is for atm on coupler pes 00093 // cplocn is for ocean on coupler pes 00094 // atmocnid is for intx atm / ocn on coupler pes 00095 // 00096 int rankInAtmComm = -1; 00097 int cmpatm = 5, cplatm = 6; // component ids are unique over all pes, and established in advance; 00098 int cmpPhysAtm = 105; // different from atm spectral ? 00099 #ifdef ENABLE_ATMOCN_COUPLING 00100 std::string ocnFilename = TestDir + "/recMeshOcn.h5m"; 00101 int rankInOcnComm = -1; 00102 int cmpocn = 17, cplocn = 18, atmocnid = 618, 00103 ocnatmid = 1806; // component ids are unique over all pes, and established in advance; 00104 #endif 00105 #ifdef ENABLE_ATMLND_COUPLING 00106 std::string lndFilename = "../../sandbox/MeshFiles/e3sm/ne4pg2_o240/land_p8.h5m"; 00107 int rankInLndComm = -1; 00108 int cpllnd = 10, cmplnd = 9, atmlndid = 610, 00109 lndatmid = 1006; // component ids are unique over all pes, and established in advance; 00110 #endif 00111 00112 int rankInCouComm = -1; 00113 00114 int nghlay = 0; // number of ghost layers for loading the file 00115 std::vector< int > groupTasks; 00116 int startG1 = 0, startG2 = 0, endG1 = numProcesses - 1, endG2 = numProcesses - 1, startG3 = startG1, endG3 = endG1; 00117 int startG4 = startG1, endG4 = endG1; // these are for coupler layout 00118 int context_id = -1; // used now for freeing buffers 00119 00120 // default: load atm on 2 proc, ocean on 2, land on 2; migrate to 2 procs, then compute intx 00121 // later, we need to compute weight matrix with tempestremap 00122 00123 ProgOptions opts; 00124 opts.addOpt< std::string >( "atmosphere,t", "atm mesh filename (source)", &atmFilename ); 00125 #ifdef ENABLE_ATMOCN_COUPLING 00126 opts.addOpt< std::string >( "ocean,m", "ocean mesh filename (target)", &ocnFilename ); 00127 #endif 00128 00129 #ifdef ENABLE_ATMLND_COUPLING 00130 opts.addOpt< std::string >( "land,l", "land mesh filename (target)", &lndFilename ); 00131 #endif 00132 00133 opts.addOpt< std::string >( "physgrid,q", "physics grid file", &atmPhysMesh ); 00134 00135 opts.addOpt< int >( "startAtm,a", "start task for atmosphere layout", &startG1 ); 00136 opts.addOpt< int >( "endAtm,b", "end task for atmosphere layout", &endG1 ); 00137 #ifdef ENABLE_ATMOCN_COUPLING 00138 opts.addOpt< int >( "startOcn,c", "start task for ocean layout", &startG2 ); 00139 opts.addOpt< int >( "endOcn,d", "end task for ocean layout", &endG2 ); 00140 #endif 00141 00142 #ifdef ENABLE_ATMLND_COUPLING 00143 opts.addOpt< int >( "startLnd,e", "start task for land layout", &startG3 ); 00144 opts.addOpt< int >( "endLnd,f", "end task for land layout", &endG3 ); 00145 #endif 00146 00147 opts.addOpt< int >( "startCoupler,g", "start task for coupler layout", &startG4 ); 00148 opts.addOpt< int >( "endCoupler,j", "end task for coupler layout", &endG4 ); 00149 00150 opts.addOpt< int >( "partitioning,p", "partitioning option for migration", &repartitioner_scheme ); 00151 00152 opts.parseCommandLine( argc, argv ); 00153 00154 char fileWriteOptions[] = "PARALLEL=WRITE_PART"; 00155 00156 if( !rankInGlobalComm ) 00157 { 00158 std::cout << " atm file: " << atmFilename << "\n on tasks : " << startG1 << ":" << endG1 << 00159 #ifdef ENABLE_ATMOCN_COUPLING 00160 "\n ocn file: " << ocnFilename << "\n on tasks : " << startG2 << ":" << endG2 << 00161 #endif 00162 #ifdef ENABLE_ATMLND_COUPLING 00163 "\n land file: " << lndFilename << "\n on tasks : " << startG3 << ":" << endG3 << 00164 #endif 00165 00166 "\n atm phys file: " << atmPhysMesh << "\n on tasks : " << startG1 << ":" << endG1 << 00167 00168 "\n partitioning (0 trivial, 1 graph, 2 geometry) " << repartitioner_scheme << "\n "; 00169 } 00170 // load files on 3 different communicators, groups 00171 // first groups has task 0, second group tasks 0 and 1 00172 // coupler will be on joint tasks, will be on a third group (0 and 1, again) 00173 MPI_Group atmPEGroup; 00174 MPI_Comm atmComm; 00175 ierr = create_group_and_comm(startG1, endG1, jgroup, &atmPEGroup, &atmComm); 00176 CHECKIERR(ierr, "Cannot create atm MPI group and communicator ") 00177 00178 #ifdef ENABLE_ATMOCN_COUPLING 00179 MPI_Group ocnPEGroup; 00180 MPI_Comm ocnComm; 00181 ierr = create_group_and_comm(startG2, endG2, jgroup, &ocnPEGroup, &ocnComm); 00182 CHECKIERR(ierr, "Cannot create ocn MPI group and communicator ") 00183 #endif 00184 00185 #ifdef ENABLE_ATMLND_COUPLING 00186 MPI_Group lndPEGroup; 00187 MPI_Comm lndComm; 00188 ierr = create_group_and_comm(startG3, endG3, jgroup, &lndPEGroup, &lndComm); 00189 CHECKIERR(ierr, "Cannot create lnd MPI group and communicator ") 00190 #endif 00191 00192 // we will always have a coupler 00193 MPI_Group couPEGroup; 00194 MPI_Comm couComm; 00195 ierr = create_group_and_comm(startG4, endG4, jgroup, &couPEGroup, &couComm); 00196 CHECKIERR(ierr, "Cannot create cpl MPI group and communicator ") 00197 00198 // now, create the joint communicators atm_coupler, ocn_coupler, lnd_coupler 00199 // for each, we will have to create the group first, then the communicator 00200 00201 // atm_coupler 00202 MPI_Group joinAtmCouGroup; 00203 MPI_Comm atmCouComm; 00204 ierr = create_joint_comm_group(atmPEGroup, couPEGroup, &joinAtmCouGroup, 00205 &atmCouComm); 00206 CHECKIERR(ierr, "Cannot create joint atm cou communicator") 00207 00208 #ifdef ENABLE_ATMOCN_COUPLING 00209 // ocn_coupler 00210 MPI_Group joinOcnCouGroup; 00211 MPI_Comm ocnCouComm; 00212 ierr = create_joint_comm_group(ocnPEGroup, couPEGroup, &joinOcnCouGroup, 00213 &ocnCouComm); 00214 CHECKIERR(ierr, "Cannot create joint ocn cou communicator") 00215 #endif 00216 00217 #ifdef ENABLE_ATMLND_COUPLING 00218 // lnd_coupler 00219 MPI_Group joinLndCouGroup; 00220 MPI_Comm lndCouComm; 00221 ierr = create_joint_comm_group(lndPEGroup, couPEGroup, &joinLndCouGroup, 00222 &lndCouComm); 00223 CHECKIERR(ierr, "Cannot create joint ocn cou communicator") 00224 #endif 00225 00226 ierr = iMOAB_Initialize( argc, argv ); // not really needed anything from argc, argv, yet; maybe we should 00227 CHECKIERR( ierr, "Cannot initialize iMOAB" ) 00228 00229 int cmpAtmAppID = -1; 00230 iMOAB_AppID cmpAtmPID = &cmpAtmAppID; // atm 00231 int cplAtmAppID = -1; // -1 means it is not initialized 00232 iMOAB_AppID cplAtmPID = &cplAtmAppID; // atm on coupler PEs 00233 #ifdef ENABLE_ATMOCN_COUPLING 00234 int cmpOcnAppID = -1; 00235 iMOAB_AppID cmpOcnPID = &cmpOcnAppID; // ocn 00236 int cplOcnAppID = -1, cplAtmOcnAppID = -1, cplOcnAtmAppID = -1; // -1 means it is not initialized 00237 iMOAB_AppID cplOcnPID = &cplOcnAppID; // ocn on coupler PEs 00238 iMOAB_AppID cplAtmOcnPID = &cplAtmOcnAppID; // intx atm - ocn on coupler PEs 00239 iMOAB_AppID cplOcnAtmPID = &cplOcnAtmAppID; // intx ocn - atm on coupler PEs 00240 00241 #endif 00242 00243 #ifdef ENABLE_ATMLND_COUPLING 00244 int cmpLndAppID = -1; 00245 iMOAB_AppID cmpLndPID = &cmpLndAppID; // lnd 00246 int cplLndAppID = -1, cplAtmLndAppID = -1, cplLndAtmAppID = -1; // -1 means it is not initialized 00247 iMOAB_AppID cplLndPID = &cplLndAppID; // land on coupler PEs 00248 iMOAB_AppID cplAtmLndPID = &cplAtmLndAppID; // intx atm - lnd on coupler PEs will be similar to ocn atm intx 00249 00250 iMOAB_AppID cplLndAtmPID = &cplLndAtmAppID; 00251 #endif 00252 00253 int cmpPhysAtmID = -1; 00254 iMOAB_AppID cmpPhAtmPID = &cmpPhysAtmID; // phys atm; we do not need to move it to cpl! 00255 00256 if( couComm != MPI_COMM_NULL ) 00257 { 00258 MPI_Comm_rank( couComm, &rankInCouComm ); 00259 // Register all the applications on the coupler PEs 00260 ierr = iMOAB_RegisterApplication( "ATMX", &couComm, &cplatm, cplAtmPID ); // atm on coupler pes 00261 CHECKIERR( ierr, "Cannot register ATM over coupler PEs" ) 00262 #ifdef ENABLE_ATMOCN_COUPLING 00263 ierr = iMOAB_RegisterApplication( "OCNX", &couComm, &cplocn, cplOcnPID ); // ocn on coupler pes 00264 CHECKIERR( ierr, "Cannot register OCN over coupler PEs" ) 00265 #endif 00266 00267 #ifdef ENABLE_ATMLND_COUPLING 00268 ierr = iMOAB_RegisterApplication( "LNDX", &couComm, &cpllnd, cplLndPID ); // lnd on coupler pes 00269 CHECKIERR( ierr, "Cannot register LND over coupler PEs" ) 00270 #endif 00271 } 00272 00273 00274 if( atmComm != MPI_COMM_NULL ) 00275 { 00276 MPI_Comm_rank( atmComm, &rankInAtmComm ); 00277 ierr = iMOAB_RegisterApplication( "ATM1", &atmComm, &cmpatm, cmpAtmPID ); 00278 CHECKIERR( ierr, "Cannot register ATM App" ) 00279 } 00280 00281 #ifdef ENABLE_ATMOCN_COUPLING 00282 if( ocnComm != MPI_COMM_NULL ) 00283 { 00284 MPI_Comm_rank( ocnComm, &rankInOcnComm ); 00285 ierr = iMOAB_RegisterApplication( "OCN1", &ocnComm, &cmpocn, cmpOcnPID ); 00286 CHECKIERR( ierr, "Cannot register OCN App" ) 00287 } 00288 #endif 00289 00290 //atm 00291 ierr = setup_component_coupler_meshes(cmpAtmPID, cmpatm, cplAtmPID, cplatm, &atmComm, &atmPEGroup, &couComm, 00292 &couPEGroup, &atmCouComm, atmFilename, readopts, nghlay, repartitioner_scheme); 00293 CHECKIERR( ierr, "Cannot load and migrate atm mesh" ) 00294 00295 MPI_Barrier( MPI_COMM_WORLD ); 00296 00297 00298 #ifdef ENABLE_ATMOCN_COUPLING 00299 // ocean 00300 ierr = setup_component_coupler_meshes(cmpOcnPID, cmpocn, cplOcnPID, cplocn, &ocnComm, &ocnPEGroup, &couComm, 00301 &couPEGroup, &ocnCouComm, ocnFilename, readopts, nghlay, repartitioner_scheme); 00302 CHECKIERR( ierr, "Cannot load and migrate ocn mesh" ) 00303 00304 if( couComm != MPI_COMM_NULL ) 00305 { 00306 char outputFileTgt3[] = "recvOcn3.h5m"; 00307 PUSH_TIMER( "Write migrated OCN mesh on coupler PEs" ) 00308 ierr = iMOAB_WriteMesh( cplOcnPID, outputFileTgt3, fileWriteOptions, strlen( outputFileTgt3 ), 00309 strlen( fileWriteOptions ) ); 00310 CHECKIERR( ierr, "cannot write ocn mesh after receiving" ) 00311 POP_TIMER( couComm, rankInCouComm ) 00312 } 00313 #endif // #ifdef ENABLE_ATMOCN_COUPLING 00314 00315 MPI_Barrier( MPI_COMM_WORLD ); 00316 // load phys atm mesh, with some data on it already 00317 if( atmComm != MPI_COMM_NULL ) 00318 { 00319 00320 ierr = iMOAB_RegisterApplication( "PhysAtm", &atmComm, &cmpPhysAtm, cmpPhAtmPID ); 00321 CHECKIERR( ierr, "Cannot register Phys Atm App " ) 00322 00323 // load the next component mesh 00324 PUSH_TIMER( "Load Phys Atm mesh" ) 00325 ierr = iMOAB_LoadMesh( cmpPhAtmPID, atmPhysMesh.c_str(), readoptsPhysAtm.c_str(), &nghlay, atmPhysMesh.length(), 00326 readoptsPhysAtm.length() ); 00327 CHECKIERR( ierr, "Cannot load Atm Phys mesh on atm pes" ) 00328 POP_TIMER( atmComm, rankInAtmComm ) 00329 00330 int nverts[3], nelem[3]; 00331 ierr = iMOAB_GetMeshInfo( cmpPhAtmPID, nverts, nelem, 0, 0, 0 ); 00332 CHECKIERR( ierr, "failed to get mesh info" ); 00333 printf( "Phys Atm Component Mesh: %d vertices and %d elements\n", nverts[0], nelem[0] ); 00334 } 00335 00336 MPI_Barrier( MPI_COMM_WORLD ); 00337 00338 #ifdef ENABLE_ATMLND_COUPLING 00339 // land 00340 if( lndComm != MPI_COMM_NULL ) 00341 { 00342 ierr = iMOAB_RegisterApplication( "LND1", &lndComm, &cmplnd, cmpLndPID ); 00343 CHECKIERR( ierr, "Cannot register LND App " ) 00344 } 00345 ierr = setup_component_coupler_meshes(cmpLndPID, cmplnd, cplLndPID, cpllnd, &lndComm, &lndPEGroup, &couComm, 00346 &couPEGroup, &lndCouComm, lndFilename, readoptsPhysAtm, nghlay, repartitioner_scheme); 00347 00348 if( couComm != MPI_COMM_NULL ) 00349 { // write only for n==1 case 00350 char outputFileLnd[] = "recvLnd.h5m"; 00351 ierr = iMOAB_WriteMesh( cplLndPID, outputFileLnd, fileWriteOptions, strlen( outputFileLnd ), 00352 strlen( fileWriteOptions ) ); 00353 CHECKIERR( ierr, "cannot write lnd mesh after receiving" ) 00354 } 00355 00356 #endif // #ifdef ENABLE_ATMLND_COUPLING 00357 00358 #ifdef ENABLE_ATMOCN_COUPLING 00359 if( couComm != MPI_COMM_NULL ) 00360 { 00361 // now compute intersection between OCNx and ATMx on coupler PEs 00362 ierr = iMOAB_RegisterApplication( "ATMOCN", &couComm, &atmocnid, cplAtmOcnPID ); 00363 CHECKIERR( ierr, "Cannot register ocn_atm intx over coupler pes " ) 00364 00365 ierr = iMOAB_RegisterApplication( "OCNATM", &couComm, &ocnatmid, cplOcnAtmPID ); 00366 CHECKIERR( ierr, "Cannot register atm_ocn intx over coupler pes " ) 00367 } 00368 #endif 00369 00370 #ifdef ENABLE_ATMLND_COUPLING 00371 if( couComm != MPI_COMM_NULL ) 00372 { 00373 // now compute intersection between LNDx and ATMx on coupler PEs 00374 ierr = iMOAB_RegisterApplication( "ATMLND", &couComm, &atmlndid, cplAtmLndPID ); 00375 CHECKIERR( ierr, "Cannot register atm_lnd intx over coupler pes " ) 00376 // now compute intersection between ATMx and LNDx on coupler PEs 00377 ierr = iMOAB_RegisterApplication( "LNDATM", &couComm, &lndatmid, cplLndAtmPID ); 00378 CHECKIERR( ierr, "Cannot register lnd_atm intx over coupler pes " ) 00379 } 00380 #endif 00381 00382 const char* weights_identifiers[2] = { "scalar", "scalar-pc" }; 00383 int disc_orders[3] = { 4, 1, 1 }; 00384 const char* disc_methods[3] = { "cgll", "fv", "pcloud" }; 00385 const char* dof_tag_names[3] = { "GLOBAL_DOFS", "GLOBAL_ID", "GLOBAL_ID" }; 00386 #ifdef ENABLE_ATMOCN_COUPLING 00387 if( couComm != MPI_COMM_NULL ) 00388 { 00389 PUSH_TIMER( "Compute ATM-OCN mesh intersection" ) 00390 ierr = iMOAB_ComputeMeshIntersectionOnSphere( 00391 cplAtmPID, cplOcnPID, cplAtmOcnPID ); // coverage mesh was computed here, for cplAtmPID, atm on coupler pes 00392 // basically, atm was redistributed according to target (ocean) partition, to "cover" the ocean partitions 00393 // check if intx valid, write some h5m intx file 00394 CHECKIERR( ierr, "cannot compute intersection" ) 00395 POP_TIMER( couComm, rankInCouComm ) 00396 00397 PUSH_TIMER( "Compute OCN-ATM mesh intersection" ) 00398 ierr = 00399 iMOAB_ComputeMeshIntersectionOnSphere( cplOcnPID, cplAtmPID, cplOcnAtmPID ); // coverage mesh was computed 00400 CHECKIERR( ierr, "cannot compute intersection" ) 00401 POP_TIMER( couComm, rankInCouComm ) 00402 } 00403 00404 if( atmCouComm != MPI_COMM_NULL ) 00405 { 00406 // the new graph will be for sending data from atm comp to coverage mesh; 00407 // it involves initial atm app; cmpAtmPID; also migrate atm mesh on coupler pes, cplAtmPID 00408 // results are in cplAtmOcnPID, intx mesh; remapper also has some info about coverage mesh 00409 // after this, the sending of tags from atm pes to coupler pes will use the new par comm graph, that has more 00410 // precise info about what to send for ocean cover ; every time, we will 00411 // use the element global id, which should uniquely identify the element 00412 PUSH_TIMER( "Compute OCN coverage graph for ATM mesh" ) 00413 ierr = iMOAB_CoverageGraph( &atmCouComm, cmpAtmPID, cplAtmPID, cplAtmOcnPID, 00414 &cplocn ); // it happens over joint communicator 00415 CHECKIERR( ierr, "cannot recompute direct coverage graph for ocean" ) 00416 POP_TIMER( atmCouComm, rankInAtmComm ) // hijack this rank 00417 } 00418 if( ocnCouComm != MPI_COMM_NULL ) 00419 { 00420 // now for the second intersection, ocn-atm; will be sending data from ocean to atm 00421 // Can we reuse the intx atm-ocn? Not sure yet; we will compute everything again :( 00422 // the new graph will be for sending data from ocn comp to coverage mesh over atm; 00423 // it involves initial ocn app; cmpOcnPID; also migrated ocn mesh on coupler pes, cplOcnPID 00424 // results are in cplOcnAtmPID, intx mesh; remapper also has some info about coverage mesh 00425 // after this, the sending of tags from ocn pes to coupler pes will use the new par comm graph, that has more 00426 // precise info about what to send for atm cover ; every time, we will 00427 // use the element global id, which should uniquely identify the element 00428 PUSH_TIMER( "Compute ATM coverage graph for OCN mesh" ) 00429 ierr = iMOAB_CoverageGraph( &ocnCouComm, cmpOcnPID, cplOcnPID, cplOcnAtmPID, 00430 &cplatm ); // it happens over joint communicator, ocean + coupler 00431 CHECKIERR( ierr, "cannot recompute direct coverage graph for atm" ) 00432 POP_TIMER( ocnCouComm, rankInOcnComm ) // hijack this rank 00433 } 00434 00435 // need to compute graph between phys atm and atm/ocn intx coverage 00436 if( atmCouComm != MPI_COMM_NULL ) 00437 { 00438 int typeA = 2; // point cloud, phys mesh 00439 int typeB = 3; // cells of atmosphere, dof based; maybe need another type for ParCommGraph graphtype ? 00440 ierr = iMOAB_ComputeCommGraph( cmpPhAtmPID, cplAtmOcnPID, &atmCouComm, &atmPEGroup, &couPEGroup, &typeA, &typeB, 00441 &cmpatm, &atmocnid ); 00442 CHECKIERR( ierr, "cannot compute graph between phys grid on atm and intx between FV atm and ocn" ) 00443 } 00444 00445 // also 00446 // need to compute graph between ocn/atm intx and phys atm mesh 00447 /*if( atmCouComm != MPI_COMM_NULL ) 00448 { 00449 int typeA = 3; // cells of atmosphere, dof based; maybe need another type for ParCommGraph graphtype ? 00450 int typeB = 2; // point cloud, phys mesh 00451 ierr = iMOAB_ComputeCommGraph( cplOcnAtmPID, cmpPhAtmPID, &atmCouComm, &couPEGroup, &atmPEGroup, &typeA, &typeB, 00452 &ocnatmid, &cmpatm ); 00453 }*/ 00454 // need to compute graph between atm on coupler and phys atm mesh on component 00455 if( atmCouComm != MPI_COMM_NULL ) 00456 { 00457 int typeA = 2; // point cloud, phys mesh 00458 int typeB = 3; // cells of atmosphere, dof based; need another type for ParCommGraph graphtype ? 00459 ierr = iMOAB_ComputeCommGraph( cmpPhAtmPID, cplAtmPID, &atmCouComm, &atmPEGroup, &couPEGroup, &typeA, &typeB, 00460 &cmpatm, &cplatm ); 00461 CHECKIERR( ierr, "cannot compute graph between phys grid on atm and FV atm on coupler" ) 00462 } 00463 #endif 00464 00465 #ifdef ENABLE_ATMLND_COUPLING 00466 if( couComm != MPI_COMM_NULL ) 00467 { 00468 PUSH_TIMER( "Compute ATM-LND mesh intersection" ) 00469 ierr = iMOAB_ComputeMeshIntersectionOnSphere( cplAtmPID, cplLndPID, cplAtmLndPID ); 00470 CHECKIERR( ierr, "failed to compute atm - land intx for mapping" ); 00471 POP_TIMER( couComm, rankInCouComm ) 00472 00473 PUSH_TIMER( "Compute LND-ATM mesh intersection" ) 00474 ierr = 00475 iMOAB_ComputeMeshIntersectionOnSphere( cplLndPID, cplAtmPID, cplLndAtmPID ); // coverage mesh was computed 00476 CHECKIERR( ierr, "cannot compute intersection" ) 00477 POP_TIMER( couComm, rankInCouComm ) 00478 } 00479 if( atmCouComm != MPI_COMM_NULL ) 00480 { 00481 // the new graph will be for sending data from atm comp to coverage mesh for land mesh; 00482 // it involves initial atm app; cmpAtmPID; also migrate atm mesh on coupler pes, cplAtmPID 00483 // results are in cplAtmLndPID, intx mesh; remapper also has some info about coverage mesh 00484 // after this, the sending of tags from atm pes to coupler pes will use the new par comm graph, that has more 00485 // precise info about what to send (specifically for land cover); every time, 00486 /// we will use the element global id, which should uniquely identify the element 00487 PUSH_TIMER( "Compute LND coverage graph for ATM mesh" ) 00488 ierr = iMOAB_CoverageGraph( &atmCouComm, cmpAtmPID, cplAtmPID, cplAtmLndPID, 00489 &cpllnd ); // it happens over joint communicator 00490 CHECKIERR( ierr, "cannot recompute direct coverage graph for land" ) 00491 POP_TIMER( atmCouComm, rankInAtmComm ) // hijack this rank 00492 } 00493 00494 // we will compute comm graph between atm phys and land, directly; 00495 // on the new TriGrid workflow, we do need intersection between atm and land; 00496 if( atmCouComm != MPI_COMM_NULL ) 00497 { 00498 int typeA = 2; // point cloud 00499 int typeB = 3; // type 3 for land on coupler, based on global ids for land cells ? 00500 ierr = iMOAB_ComputeCommGraph( cmpPhAtmPID, cplAtmLndPID, &atmCouComm, &atmPEGroup, &couPEGroup, &typeA, &typeB, 00501 &cmpatm, &atmlndid ); 00502 CHECKIERR( ierr, "cannot compute comm graph between atm and atm/lnd intersection" ) 00503 } 00504 00505 // for reverse direction, lnd - atm intx: 00506 if( lndCouComm != MPI_COMM_NULL ) 00507 { 00508 // now for the second intersection, lnd-atm; will be sending data from lnd to atm 00509 // Can we reuse the intx atm-lnd? Not sure yet; we will compute everything again :( 00510 // the new graph will be for sending data from lnd comp to coverage mesh over atm; 00511 // it involves initial lnd app; cmpLndPID; also migrated lnd mesh on coupler pes, cplLndPID 00512 // results are in cplLndAtmPID, intx mesh; remapper also has some info about coverage mesh 00513 // after this, the sending of tags from lnd pes to coupler pes will use the new par comm graph, that has more 00514 // precise info about what to send for atm cover ; every time, we will 00515 // use the element global id, which should uniquely identify the element 00516 PUSH_TIMER( "Compute ATM coverage graph for LND mesh" ) 00517 ierr = iMOAB_CoverageGraph( &lndCouComm, cmpLndPID, cplLndPID, cplLndAtmPID, 00518 &cplatm ); // it happens over joint communicator, ocean + coupler 00519 CHECKIERR( ierr, "cannot recompute direct coverage graph for atm for intx with land" ) 00520 POP_TIMER( lndCouComm, rankInLndComm ) 00521 } 00522 #endif 00523 MPI_Barrier( MPI_COMM_WORLD ); 00524 00525 int fMonotoneTypeID = 0, fVolumetric = 0, fValidate = 1, fNoConserve = 0; 00526 00527 #ifdef ENABLE_ATMOCN_COUPLING 00528 #ifdef VERBOSE 00529 if( couComm != MPI_COMM_NULL ) 00530 { 00531 char serialWriteOptions[] = ""; // for writing in serial 00532 std::stringstream outf; 00533 outf << "intxAtmOcn_" << rankInCouComm << ".h5m"; 00534 std::string intxfile = outf.str(); // write in serial the intx file, for debugging 00535 ierr = iMOAB_WriteMesh( cplAtmOcnPID, (char*)intxfile.c_str(), serialWriteOptions, (int)intxfile.length(), 00536 strlen( serialWriteOptions ) ); 00537 CHECKIERR( ierr, "cannot write intx file result" ) 00538 } 00539 #endif 00540 00541 if( couComm != MPI_COMM_NULL ) 00542 { 00543 PUSH_TIMER( "Compute the projection weights with TempestRemap" ) 00544 ierr = iMOAB_ComputeScalarProjectionWeights( 00545 cplAtmOcnPID, weights_identifiers[0], disc_methods[1], &disc_orders[1], // fv 00546 disc_methods[1], &disc_orders[1], // fv 00547 &fMonotoneTypeID, &fVolumetric, &fNoConserve, &fValidate, dof_tag_names[1], dof_tag_names[1], 00548 strlen( weights_identifiers[0] ), strlen( disc_methods[1] ), strlen( disc_methods[1] ), 00549 strlen( dof_tag_names[1] ), strlen( dof_tag_names[1] ) ); 00550 CHECKIERR( ierr, "cannot compute scalar projection weights" ) 00551 POP_TIMER( couComm, rankInCouComm ) 00552 } 00553 00554 // now compute weight maps for ocn to atm mapping; 00555 if( couComm != MPI_COMM_NULL ) 00556 { 00557 PUSH_TIMER( "Compute the projection weights with TempestRemap for ocn - atm map" ) 00558 ierr = iMOAB_ComputeScalarProjectionWeights( 00559 cplOcnAtmPID, weights_identifiers[0], disc_methods[1], &disc_orders[1], // fv 00560 disc_methods[1], &disc_orders[1], // fv 00561 &fMonotoneTypeID, &fVolumetric, &fNoConserve, &fValidate, dof_tag_names[1], dof_tag_names[1], 00562 strlen( weights_identifiers[0] ), strlen( disc_methods[1] ), strlen( disc_methods[1] ), 00563 strlen( dof_tag_names[1] ), strlen( dof_tag_names[1] ) ); 00564 CHECKIERR( ierr, "cannot compute scalar projection weights" ) 00565 POP_TIMER( couComm, rankInCouComm ) 00566 } 00567 00568 #endif 00569 00570 MPI_Barrier( MPI_COMM_WORLD ); 00571 00572 // we do not need to compute weights ? just send tags between dofs ? 00573 00574 #ifdef ENABLE_ATMLND_COUPLING 00575 if( couComm != MPI_COMM_NULL ) 00576 { 00577 // Compute the weights to project the solution from ATM component to LND component 00578 PUSH_TIMER( "Compute ATM-LND remapping weights" ) 00579 ierr = iMOAB_ComputeScalarProjectionWeights( 00580 cplAtmLndPID, weights_identifiers[0], disc_methods[1], &disc_orders[1], disc_methods[1], &disc_orders[1], 00581 &fMonotoneTypeID, &fVolumetric, &fNoConserve, &fValidate, dof_tag_names[1], dof_tag_names[1], 00582 strlen( weights_identifiers[0] ), strlen( disc_methods[1] ), strlen( disc_methods[1] ), 00583 strlen( dof_tag_names[1] ), strlen( dof_tag_names[1] ) ); 00584 CHECKIERR( ierr, "failed to compute remapping projection weights for ATM-LND scalar non-conservative field" ); 00585 POP_TIMER( couComm, rankInCouComm ) 00586 00587 // Compute the weights to project the solution from LND component to ATM component 00588 PUSH_TIMER( "Compute LND-ATM remapping weights" ) 00589 ierr = iMOAB_ComputeScalarProjectionWeights( 00590 cplLndAtmPID, weights_identifiers[0], disc_methods[1], &disc_orders[1], disc_methods[1], &disc_orders[1], 00591 &fMonotoneTypeID, &fVolumetric, &fNoConserve, &fValidate, dof_tag_names[1], dof_tag_names[1], 00592 strlen( weights_identifiers[0] ), strlen( disc_methods[1] ), strlen( disc_methods[1] ), 00593 strlen( dof_tag_names[1] ), strlen( dof_tag_names[1] ) ); 00594 CHECKIERR( ierr, "failed to compute remapping projection weights for LND-ATM scalar non-conservative field" ); 00595 POP_TIMER( couComm, rankInCouComm ) 00596 } 00597 #endif 00598 00599 int tagIndex[2]; 00600 int tagTypes[2] = { DENSE_DOUBLE, DENSE_DOUBLE }; 00601 int atmCompNDoFs = 1 /* FV disc_orders[0]*disc_orders[0] */, ocnCompNDoFs = 1 /*FV*/; 00602 00603 const char* bottomTempField = "T_ph"; // same as on phys atm mesh 00604 const char* bottomTempProjectedField = "T_proj"; 00605 // Define more fields 00606 const char* bottomUVelField = "u_ph"; 00607 const char* bottomUVelProjectedField = "u_proj"; 00608 const char* bottomVVelField = "v_ph"; 00609 const char* bottomVVelProjectedField = "v_proj"; 00610 00611 // coming from ocn to atm, project back from T_proj 00612 const char* bottomTempField2 = "T2_ph"; // same as on phys atm mesh 00613 const char* bottomUVelField2 = "u2_ph"; 00614 const char* bottomVVelField2 = "v2_ph"; 00615 00616 // coming from lnd to atm, project back from T_proj 00617 const char* bottomTempField3 = "T3_ph"; // same as on phys atm mesh 00618 const char* bottomUVelField3 = "u3_ph"; 00619 const char* bottomVVelField3 = "v3_ph"; 00620 00621 // tags on phys grid atm mesh 00622 const char* bottomTempPhProjectedField = "Tph_proj"; 00623 const char* bottomUVelPhProjectedField = "uph_proj"; 00624 const char* bottomVVelPhProjectedField = "vph_proj"; 00625 00626 // tags on phys grid atm mesh 00627 const char* bottomTempPhLndProjectedField = "TphL_proj"; // L and Lnd signify the fields projected from land 00628 const char* bottomUVelPhLndProjectedField = "uphL_proj"; 00629 const char* bottomVVelPhLndProjectedField = "vphL_proj"; 00630 00631 if( couComm != MPI_COMM_NULL ) 00632 { 00633 ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomTempField, &tagTypes[0], &atmCompNDoFs, &tagIndex[0], 00634 strlen( bottomTempField ) ); 00635 CHECKIERR( ierr, "failed to define the field tag T_ph" ); 00636 #ifdef ENABLE_ATMOCN_COUPLING 00637 00638 ierr = iMOAB_DefineTagStorage( cplOcnPID, bottomTempProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1], 00639 strlen( bottomTempProjectedField ) ); 00640 CHECKIERR( ierr, "failed to define the field tag T_proj" ); 00641 #endif 00642 00643 ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomUVelField, &tagTypes[0], &atmCompNDoFs, &tagIndex[0], 00644 strlen( bottomUVelField ) ); 00645 CHECKIERR( ierr, "failed to define the field tag u_ph" ); 00646 #ifdef ENABLE_ATMOCN_COUPLING 00647 00648 ierr = iMOAB_DefineTagStorage( cplOcnPID, bottomUVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1], 00649 strlen( bottomUVelProjectedField ) ); 00650 CHECKIERR( ierr, "failed to define the field tag u_proj" ); 00651 #endif 00652 00653 ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomVVelField, &tagTypes[0], &atmCompNDoFs, &tagIndex[0], 00654 strlen( bottomVVelField ) ); 00655 CHECKIERR( ierr, "failed to define the field tag v_ph" ); 00656 #ifdef ENABLE_ATMOCN_COUPLING 00657 ierr = iMOAB_DefineTagStorage( cplOcnPID, bottomVVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1], 00658 strlen( bottomVVelProjectedField ) ); 00659 CHECKIERR( ierr, "failed to define the field tag v_proj" ); 00660 #endif 00661 00662 #ifdef ENABLE_ATMLND_COUPLING 00663 // need to define tag storage for land; will use the same T_proj, u_proj, v_proj name, because it will be 00664 // used to send between point clouds ! 00665 // use the same ndof and same size as ocnCompNDoFs (1) !! 00666 ierr = iMOAB_DefineTagStorage( cplLndPID, bottomTempProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1], 00667 strlen( bottomTempProjectedField ) ); 00668 CHECKIERR( ierr, "failed to define the field tag T_proj" ); 00669 ierr = iMOAB_DefineTagStorage( cplLndPID, bottomUVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1], 00670 strlen( bottomUVelProjectedField ) ); 00671 CHECKIERR( ierr, "failed to define the field tag u_proj" ); 00672 ierr = iMOAB_DefineTagStorage( cplLndPID, bottomVVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndex[1], 00673 strlen( bottomVVelProjectedField ) ); 00674 CHECKIERR( ierr, "failed to define the field tag v_proj" ); 00675 #endif 00676 } 00677 // need to make sure that the coverage mesh (created during intx method) received the tag that need to be projected 00678 // to target so far, the coverage mesh has only the ids and global dofs; need to change the migrate method to 00679 // accommodate any GLL tag now send a tag from original atmosphere (cmpAtmPID) towards migrated coverage mesh 00680 // (cplAtmPID), using the new coverage graph communicator 00681 00682 // make the tag 0, to check we are actually sending needed data 00683 { 00684 if( cplAtmAppID >= 0 ) 00685 { 00686 int nverts[3], nelem[3], nblocks[3], nsbc[3], ndbc[3]; 00687 /* 00688 * Each process in the communicator will have access to a local mesh instance, which will contain the 00689 * original cells in the local partition and ghost entities. Number of vertices, primary cells, visible 00690 * blocks, number of sidesets and nodesets boundary conditions will be returned in numProcesses 3 arrays, 00691 * for local, ghost and total numbers. 00692 */ 00693 ierr = iMOAB_GetMeshInfo( cplAtmPID, nverts, nelem, nblocks, nsbc, ndbc ); 00694 CHECKIERR( ierr, "failed to get num primary elems" ); 00695 int numAllElem = nelem[2]; 00696 std::vector< double > vals; 00697 int storLeng = atmCompNDoFs * numAllElem; 00698 vals.resize( storLeng ); 00699 for( int k = 0; k < storLeng; k++ ) 00700 vals[k] = 0.; 00701 int eetype = 1; 00702 ierr = iMOAB_SetDoubleTagStorage( cplAtmPID, bottomTempField, &storLeng, &eetype, &vals[0], 00703 strlen( bottomTempField ) ); 00704 CHECKIERR( ierr, "cannot make tag nul" ) 00705 ierr = iMOAB_SetDoubleTagStorage( cplAtmPID, bottomUVelField, &storLeng, &eetype, &vals[0], 00706 strlen( bottomUVelField ) ); 00707 CHECKIERR( ierr, "cannot make tag nul" ) 00708 ierr = iMOAB_SetDoubleTagStorage( cplAtmPID, bottomVVelField, &storLeng, &eetype, &vals[0], 00709 strlen( bottomVVelField ) ); 00710 CHECKIERR( ierr, "cannot make tag nul" ) 00711 // set the tag to 0 00712 } 00713 } 00714 00715 #ifdef ENABLE_ATMOCN_COUPLING 00716 PUSH_TIMER( "Send/receive data from atm component to coupler in ocn context" ) 00717 if( atmComm != MPI_COMM_NULL ) 00718 { 00719 // as always, use nonblocking sends 00720 // this is for projection to ocean: 00721 ierr = 00722 iMOAB_SendElementTag( cmpPhAtmPID, "T_ph;u_ph;v_ph;", &atmCouComm, &atmocnid, strlen( "T_ph;u_ph;v_ph;" ) ); 00723 CHECKIERR( ierr, "cannot send tag values" ) 00724 } 00725 if( couComm != MPI_COMM_NULL ) 00726 { 00727 // receive on atm on coupler pes, that was redistributed according to coverage 00728 ierr = iMOAB_ReceiveElementTag( cplAtmOcnPID, "T_ph;u_ph;v_ph;", &atmCouComm, &cmpatm, 00729 strlen( "T_ph;u_ph;v_ph;" ) ); 00730 CHECKIERR( ierr, "cannot receive tag values" ) 00731 } 00732 POP_TIMER( MPI_COMM_WORLD, rankInGlobalComm ) 00733 00734 // we can now free the sender buffers 00735 if( atmComm != MPI_COMM_NULL ) 00736 { 00737 ierr = iMOAB_FreeSenderBuffers( cmpPhAtmPID, &atmocnid ); // context is for ocean 00738 CHECKIERR( ierr, "cannot free buffers used to resend atm tag towards the coverage mesh" ) 00739 } 00740 /* 00741 #ifdef VERBOSE 00742 if (couComm != MPI_COMM_NULL) { 00743 char outputFileRecvd[] = "recvAtmCoupOcn.h5m"; 00744 ierr = iMOAB_WriteMesh(cplAtmPID, outputFileRecvd, fileWriteOptions, 00745 strlen(outputFileRecvd), strlen(fileWriteOptions) ); 00746 } 00747 #endif 00748 */ 00749 00750 if( couComm != MPI_COMM_NULL ) 00751 { 00752 const char* concat_fieldname = "T_ph;u_ph;v_ph;"; 00753 const char* concat_fieldnameT = "T_proj;u_proj;v_proj;"; 00754 00755 /* We have the remapping weights now. Let us apply the weights onto the tag we defined 00756 on the source mesh and get the projection on the target mesh */ 00757 PUSH_TIMER( "Apply Scalar projection weights" ) 00758 ierr = iMOAB_ApplyScalarProjectionWeights( cplAtmOcnPID, weights_identifiers[0], concat_fieldname, 00759 concat_fieldnameT, strlen( weights_identifiers[0] ), 00760 strlen( concat_fieldname ), strlen( concat_fieldnameT ) ); 00761 CHECKIERR( ierr, "failed to compute projection weight application" ); 00762 POP_TIMER( couComm, rankInCouComm ) 00763 00764 char outputFileTgt[] = "fOcnOnCpl3.h5m"; 00765 ierr = iMOAB_WriteMesh( cplOcnPID, outputFileTgt, fileWriteOptions, strlen( outputFileTgt ), 00766 strlen( fileWriteOptions ) ); 00767 CHECKIERR( ierr, "failed to write fOcnOnCpl3.h5m " ); 00768 } 00769 // send the projected tag back to ocean pes, with send/receive tag 00770 if( ocnComm != MPI_COMM_NULL ) 00771 { 00772 int tagIndexIn2; 00773 ierr = iMOAB_DefineTagStorage( cmpOcnPID, bottomTempProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2, 00774 strlen( bottomTempProjectedField ) ); 00775 CHECKIERR( ierr, "failed to define the field tag for receiving back the tag T_proj on ocn pes" ); 00776 ierr = iMOAB_DefineTagStorage( cmpOcnPID, bottomUVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2, 00777 strlen( bottomUVelProjectedField ) ); 00778 CHECKIERR( ierr, "failed to define the field tag for receiving back the tag u_proj on ocn pes" ); 00779 ierr = iMOAB_DefineTagStorage( cmpOcnPID, bottomVVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2, 00780 strlen( bottomVVelProjectedField ) ); 00781 CHECKIERR( ierr, "failed to define the field tag for receiving back the tag v_proj on ocn pes" ); 00782 } 00783 // send the tag to ocean pes, from ocean mesh on coupler pes 00784 // from couComm, using common joint comm ocn_coupler 00785 // as always, use nonblocking sends 00786 // original graph (context is -1_ 00787 if( couComm != MPI_COMM_NULL ) 00788 { 00789 ierr = iMOAB_SendElementTag( cplOcnPID, "T_proj;u_proj;v_proj;", &ocnCouComm, &context_id, 00790 strlen( "T_proj;u_proj;v_proj;" ) ); 00791 CHECKIERR( ierr, "cannot send tag values back to ocean pes" ) 00792 } 00793 00794 // receive on component 2, ocean 00795 if( ocnComm != MPI_COMM_NULL ) 00796 { 00797 ierr = iMOAB_ReceiveElementTag( cmpOcnPID, "T_proj;u_proj;v_proj;", &ocnCouComm, &context_id, 00798 strlen( "T_proj;u_proj;v_proj;" ) ); 00799 CHECKIERR( ierr, "cannot receive tag values from ocean mesh on coupler pes" ) 00800 } 00801 00802 MPI_Barrier( MPI_COMM_WORLD ); 00803 00804 if( couComm != MPI_COMM_NULL ) { 00805 ierr = iMOAB_FreeSenderBuffers( cplOcnPID, &context_id ); 00806 CHECKIERR( ierr, "cannot free buffers related to send tag") 00807 } 00808 if( ocnComm != MPI_COMM_NULL ) 00809 { 00810 char outputFileOcn[] = "OcnWithProj3.h5m"; 00811 ierr = iMOAB_WriteMesh( cmpOcnPID, outputFileOcn, fileWriteOptions, strlen( outputFileOcn ), 00812 strlen( fileWriteOptions ) ); 00813 CHECKIERR( ierr, "cannot write OcnWithProj3.h5m") 00814 } 00815 #endif 00816 00817 MPI_Barrier( MPI_COMM_WORLD ); 00818 00819 #ifdef ENABLE_ATMLND_COUPLING 00820 // start land proj: 00821 00822 // we used this to compute 00823 // ierr = iMOAB_ComputeCommGraph(cmpPhAtmPID, cplLndPID, &atmCouComm, &atmPEGroup, &couPEGroup, 00824 // &typeA, &typeB, &cmpatm, &atmlndid); 00825 00826 // end copy 00827 PUSH_TIMER( "Send/receive data from phys comp atm to coupler land, using computed graph" ) 00828 if( atmComm != MPI_COMM_NULL ) 00829 { 00830 00831 // as always, use nonblocking sends 00832 // this is for projection to land: 00833 ierr = 00834 iMOAB_SendElementTag( cmpPhAtmPID, "T_ph;u_ph;v_ph;", &atmCouComm, &atmlndid, strlen( "T_ph;u_ph;v_ph;" ) ); 00835 CHECKIERR( ierr, "cannot send tag values towards cpl on land" ) 00836 } 00837 if( couComm != MPI_COMM_NULL ) 00838 { 00839 // receive on lnd on coupler pes 00840 ierr = iMOAB_ReceiveElementTag( cplAtmLndPID, "T_ph;u_ph;v_ph;", &atmCouComm, &cmpatm, 00841 strlen( "T_ph;u_ph;v_ph;" ) ); 00842 CHECKIERR( ierr, "cannot receive tag values on land on coupler, for atm coupling" ) 00843 } 00844 POP_TIMER( MPI_COMM_WORLD, rankInGlobalComm ) 00845 00846 // we can now free the sender buffers 00847 if( atmComm != MPI_COMM_NULL ) 00848 { 00849 ierr = iMOAB_FreeSenderBuffers( cmpPhAtmPID, &atmlndid ); 00850 CHECKIERR( ierr, "cannot free buffers used to resend atm tag towards the land on coupler" ) 00851 } 00852 00853 if( couComm != MPI_COMM_NULL ) 00854 { 00855 const char* concat_fieldname = "T_ph;u_ph;v_ph;"; 00856 const char* concat_fieldnameT = "T_proj;u_proj;v_proj;"; 00857 00858 /* We have the remapping weights now. Let us apply the weights onto the tag we defined 00859 on the source mesh and get the projection on the target mesh */ 00860 PUSH_TIMER( "Apply Scalar projection weights" ) 00861 ierr = iMOAB_ApplyScalarProjectionWeights( cplAtmLndPID, weights_identifiers[0], concat_fieldname, 00862 concat_fieldnameT, strlen( weights_identifiers[0] ), 00863 strlen( concat_fieldname ), strlen( concat_fieldnameT ) ); 00864 CHECKIERR( ierr, "failed to compute projection weight application" ); 00865 POP_TIMER( couComm, rankInCouComm ) 00866 00867 char outputFileTgt[] = "fLndOnCpl3.h5m"; 00868 ierr = iMOAB_WriteMesh( cplLndPID, outputFileTgt, fileWriteOptions, strlen( outputFileTgt ), 00869 strlen( fileWriteOptions ) ); 00870 CHECKIERR( ierr, "cannot write land on coupler" ) 00871 } 00872 00873 // end land proj 00874 // send the tags back to land pes, from land mesh on coupler pes 00875 // send from cplLndPID to cmpLndPID, using common joint comm 00876 // as always, use nonblocking sends 00877 // original graph 00878 // int context_id = -1; 00879 // the land might not have these tags yet; it should be a different name for land 00880 // in e3sm we do have different names 00881 if( lndComm != MPI_COMM_NULL ) 00882 { 00883 int tagIndexIn2; 00884 ierr = iMOAB_DefineTagStorage( cmpLndPID, bottomTempProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2, 00885 strlen( bottomTempProjectedField ) ); 00886 CHECKIERR( ierr, "failed to define the field tag for receiving back the tag a2oTbot_proj on lnd pes" ); 00887 ierr = iMOAB_DefineTagStorage( cmpLndPID, bottomUVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2, 00888 strlen( bottomUVelProjectedField ) ); 00889 CHECKIERR( ierr, "failed to define the field tag for receiving back the tag a2oUbot_proj on lnd pes" ); 00890 ierr = iMOAB_DefineTagStorage( cmpLndPID, bottomVVelProjectedField, &tagTypes[1], &ocnCompNDoFs, &tagIndexIn2, 00891 strlen( bottomVVelProjectedField ) ); 00892 CHECKIERR( ierr, "failed to define the field tag for receiving back the tag a2oVbot_proj on lnd pes" ); 00893 } 00894 if( couComm != MPI_COMM_NULL ) 00895 { 00896 ierr = iMOAB_SendElementTag( cplLndPID, "T_proj;u_proj;v_proj;", &lndCouComm, &context_id, 00897 strlen( "T_proj;u_proj;v_proj;" ) ); 00898 CHECKIERR( ierr, "cannot send tag values back to land pes" ) 00899 } 00900 // receive on component 3, land 00901 if( lndComm != MPI_COMM_NULL ) 00902 { 00903 ierr = iMOAB_ReceiveElementTag( cmpLndPID, "T_proj;u_proj;v_proj;", &lndCouComm, &context_id, 00904 strlen( "T_proj;u_proj;v_proj;" ) ); 00905 CHECKIERR( ierr, "cannot receive tag values from land mesh on coupler pes" ) 00906 } 00907 00908 MPI_Barrier( MPI_COMM_WORLD ); 00909 if( couComm != MPI_COMM_NULL ) { 00910 ierr = iMOAB_FreeSenderBuffers( cplLndPID, &context_id ); 00911 CHECKIERR( ierr, "cannot free buffers related to sending tags from coupler to land pes" ) 00912 } 00913 if( lndComm != MPI_COMM_NULL ) 00914 { 00915 char outputFileLnd[] = "LndWithProj3.h5m"; 00916 ierr = iMOAB_WriteMesh( cmpLndPID, outputFileLnd, fileWriteOptions, strlen( outputFileLnd ), 00917 strlen( fileWriteOptions ) ); 00918 CHECKIERR( ierr, "cannot write land file with projection" ) 00919 } 00920 00921 // we have received on ocean component some data projected from atm, on coupler 00922 00923 // path was T_ph (atm phys) towards atm on FV mesh, we projected, then we sent back to ocean comp 00924 // start copy ocn atm coupling; go reverse direction now !! 00925 #ifdef ENABLE_ATMOCN_COUPLING 00926 PUSH_TIMER( "Send/receive data from ocn component to coupler in atm context" ) 00927 if( ocnComm != MPI_COMM_NULL ) 00928 { 00929 // as always, use nonblocking sends 00930 // this is for projection to ocean: 00931 ierr = iMOAB_SendElementTag( cmpOcnPID, "T_proj;u_proj;v_proj;", &ocnCouComm, &cplatm, 00932 strlen( "T_proj;u_proj;v_proj;" ) ); 00933 CHECKIERR( ierr, "cannot send tag values T_proj, etc towards ocn coupler" ) 00934 } 00935 if( couComm != MPI_COMM_NULL ) 00936 { 00937 // receive on ocn on coupler pes, that was redistributed according to coverage 00938 ierr = iMOAB_ReceiveElementTag( cplOcnPID, "T_proj;u_proj;v_proj;", &ocnCouComm, &cplatm, 00939 strlen( "T_proj;u_proj;v_proj;" ) ); 00940 CHECKIERR( ierr, "cannot receive tag values" ) 00941 } 00942 POP_TIMER( MPI_COMM_WORLD, rankInGlobalComm ) 00943 00944 // we can now free the sender buffers 00945 if( ocnComm != MPI_COMM_NULL ) 00946 { 00947 ierr = iMOAB_FreeSenderBuffers( cmpOcnPID, &cplatm ); // context is for atm 00948 CHECKIERR( ierr, "cannot free buffers used to send ocn tag towards the coverage mesh for atm" ) 00949 } 00950 //#ifdef VERBOSE 00951 if( couComm != MPI_COMM_NULL ) 00952 { 00953 // write only for n==1 case 00954 char outputFileRecvd[] = "recvOcnCpl.h5m"; 00955 ierr = iMOAB_WriteMesh( cplOcnPID, outputFileRecvd, fileWriteOptions, strlen( outputFileRecvd ), 00956 strlen( fileWriteOptions ) ); 00957 CHECKIERR( ierr, "could not write recvOcnCplOcn.h5m to disk" ) 00958 } 00959 //#endif 00960 00961 if( couComm != MPI_COMM_NULL ) 00962 { 00963 /* We have the remapping weights now. Let us apply the weights onto the tag we defined 00964 on the source mesh and get the projection on the target mesh */ 00965 PUSH_TIMER( "Apply Scalar projection weights" ) 00966 const char* concat_fieldname = "T_proj;u_proj;v_proj;"; // this is now source tag 00967 const char* concat_fieldnameT = "T2_ph;u2_ph;v2_ph;"; // projected tag on 00968 // make sure the new tags exist on atm coupler mesh; 00969 ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomTempField2, &tagTypes[0], &atmCompNDoFs, &tagIndex[0], 00970 strlen( bottomTempField2 ) ); 00971 CHECKIERR( ierr, "failed to define the field tag T2_ph" ); 00972 00973 ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomUVelField2, &tagTypes[0], &atmCompNDoFs, &tagIndex[0], 00974 strlen( bottomUVelField2 ) ); 00975 CHECKIERR( ierr, "failed to define the field tag u2_ph" ); 00976 00977 ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomVVelField2, &tagTypes[0], &atmCompNDoFs, &tagIndex[0], 00978 strlen( bottomVVelField2 ) ); 00979 CHECKIERR( ierr, "failed to define the field tag v2_ph" ); 00980 00981 ierr = iMOAB_ApplyScalarProjectionWeights( cplOcnAtmPID, weights_identifiers[0], concat_fieldname, 00982 concat_fieldnameT, strlen( weights_identifiers[0] ), 00983 strlen( concat_fieldname ), strlen( concat_fieldnameT ) ); 00984 CHECKIERR( ierr, "failed to compute projection weight application from ocn to atm " ); 00985 POP_TIMER( couComm, rankInCouComm ) 00986 00987 char outputFileTgt[] = "fAtm2OnCpl2.h5m"; 00988 ierr = iMOAB_WriteMesh( cplAtmPID, outputFileTgt, fileWriteOptions, strlen( outputFileTgt ), 00989 strlen( fileWriteOptions ) ); 00990 CHECKIERR( ierr, "could not write fAtm2OnCpl.h5m to disk" ) 00991 } 00992 00993 // send the projected tag back to atm pes, with send/receive partial par graph computed 00994 // from intx ocn/atm towards atm physics mesh ! 00995 if( atmComm != MPI_COMM_NULL ) 00996 { 00997 int tagIndexIn2; 00998 ierr = iMOAB_DefineTagStorage( cmpPhAtmPID, bottomTempPhProjectedField, &tagTypes[1], &atmCompNDoFs, 00999 &tagIndexIn2, strlen( bottomTempPhProjectedField ) ); 01000 CHECKIERR( ierr, "failed to define the field tag for receiving back the tag " 01001 "Tph_proj on atm pes" ); 01002 ierr = iMOAB_DefineTagStorage( cmpPhAtmPID, bottomUVelPhProjectedField, &tagTypes[1], &atmCompNDoFs, 01003 &tagIndexIn2, strlen( bottomUVelPhProjectedField ) ); 01004 CHECKIERR( ierr, "failed to define the field tag for receiving back the tag " 01005 "uph_proj on atm pes" ); 01006 ierr = iMOAB_DefineTagStorage( cmpPhAtmPID, bottomVVelPhProjectedField, &tagTypes[1], &atmCompNDoFs, 01007 &tagIndexIn2, strlen( bottomVVelPhProjectedField ) ); 01008 CHECKIERR( ierr, "failed to define the field tag for receiving back the tag " 01009 "vph_proj on atm pes" ); 01010 } 01011 // send the tag to atm pes, from atm pg2 mesh on coupler pes 01012 // from couComm, using common joint comm atm_coupler, and computed graph, phys-grid -> atm FV on cpl, in reverse 01013 // as always, use nonblocking sends 01014 // graph context comes from commgraph ? 01015 01016 // ierr = iMOAB_ComputeCommGraph( cmpPhAtmPID, cplAtmPID, &atmCouComm, &atmPEGroup, &couPEGroup, &typeA, 01017 // &typeB, 01018 // &cmpatm, &cplatm ); 01019 01020 if( couComm != MPI_COMM_NULL ) 01021 { 01022 context_id = cmpatm; 01023 ierr = iMOAB_SendElementTag( cplAtmPID, "T2_ph;u2_ph;v2_ph;", &atmCouComm, &context_id, 01024 strlen( "T2_ph;u2_ph;v2_ph;" ) ); 01025 CHECKIERR( ierr, "cannot send tag values back to atm pes" ) 01026 } 01027 01028 // receive on component atm phys mesh 01029 if( atmComm != MPI_COMM_NULL ) 01030 { 01031 context_id = cplatm; 01032 ierr = iMOAB_ReceiveElementTag( cmpPhAtmPID, "Tph_proj;uph_proj;vph_proj;", &atmCouComm, &context_id, 01033 strlen( "Tph_proj;uph_proj;vph_proj;" ) ); 01034 CHECKIERR( ierr, "cannot receive tag values from atm pg2 mesh on coupler pes" ) 01035 } 01036 01037 MPI_Barrier( MPI_COMM_WORLD ); 01038 01039 if( couComm != MPI_COMM_NULL ) 01040 { 01041 context_id = cmpatm; 01042 ierr = iMOAB_FreeSenderBuffers( cplAtmPID, &context_id ); 01043 CHECKIERR( ierr, "cannot free buffers for sending T2_ph from cpl to phys atm" ) 01044 } 01045 if( atmComm != MPI_COMM_NULL ) // write only for n==1 case 01046 { 01047 char outputFileAtmPh[] = "AtmPhysProj.h5m"; 01048 ierr = iMOAB_WriteMesh( cmpPhAtmPID, outputFileAtmPh, fileWriteOptions, strlen( outputFileAtmPh ), 01049 strlen( fileWriteOptions ) ); 01050 CHECKIERR( ierr, "could not write AtmPhysProj.h5m to disk" ) 01051 } 01052 #endif 01053 // end copy need more work for land 01054 // start copy 01055 // lnd atm coupling; go reverse direction now, from lnd to atm , using lnd - atm intx, weight gen 01056 // send back the T_proj , etc , from lan comp to land coupler 01057 #ifdef ENABLE_ATMLND_COUPLING 01058 PUSH_TIMER( "Send/receive data from lnd component to coupler in atm context" ) 01059 if( lndComm != MPI_COMM_NULL ) 01060 { 01061 // as always, use nonblocking sends 01062 ierr = iMOAB_SendElementTag( cmpLndPID, "T_proj;u_proj;v_proj;", &lndCouComm, &cplatm, 01063 strlen( "T_proj;u_proj;v_proj;" ) ); 01064 CHECKIERR( ierr, "cannot send tag values T_proj, etc towards lnd coupler" ) 01065 } 01066 if( couComm != MPI_COMM_NULL ) 01067 { 01068 // receive on ocn on coupler pes, that was redistributed according to coverage 01069 ierr = iMOAB_ReceiveElementTag( cplLndPID, "T_proj;u_proj;v_proj;", &lndCouComm, &cplatm, 01070 strlen( "T_proj;u_proj;v_proj;" ) ); 01071 CHECKIERR( ierr, "cannot receive tag values" ) 01072 } 01073 POP_TIMER( MPI_COMM_WORLD, rankInGlobalComm ) 01074 01075 // we can now free the sender buffers 01076 if( lndComm != MPI_COMM_NULL ) 01077 { 01078 ierr = iMOAB_FreeSenderBuffers( cmpLndPID, &cplatm ); // context is for atm 01079 CHECKIERR( ierr, "cannot free buffers used to send lnd tag towards the coverage mesh for atm" ) 01080 } 01081 //#ifdef VERBOSE 01082 if( couComm != MPI_COMM_NULL ) 01083 { 01084 01085 char outputFileRecvd[] = "recvLndCpl.h5m"; 01086 ierr = iMOAB_WriteMesh( cplLndPID, outputFileRecvd, fileWriteOptions, strlen( outputFileRecvd ), 01087 strlen( fileWriteOptions ) ); 01088 CHECKIERR( ierr, "could not write recvLndCpl.h5m to disk" ) 01089 } 01090 //#endif 01091 01092 if( couComm != MPI_COMM_NULL ) 01093 { 01094 PUSH_TIMER( "Apply Scalar projection weights for lnd - atm coupling" ) 01095 const char* concat_fieldname = "T_proj;u_proj;v_proj;"; // this is now source tag, on land cpl 01096 const char* concat_fieldnameT = "T3_ph;u3_ph;v3_ph;"; // projected tag on 01097 // make sure the new tags exist on atm coupler mesh; 01098 ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomTempField3, &tagTypes[0], &atmCompNDoFs, &tagIndex[0], 01099 strlen( bottomTempField3 ) ); 01100 CHECKIERR( ierr, "failed to define the field tag T3_ph" ); 01101 01102 ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomUVelField3, &tagTypes[0], &atmCompNDoFs, &tagIndex[0], 01103 strlen( bottomUVelField3 ) ); 01104 CHECKIERR( ierr, "failed to define the field tag u3_ph" ); 01105 01106 ierr = iMOAB_DefineTagStorage( cplAtmPID, bottomVVelField3, &tagTypes[0], &atmCompNDoFs, &tagIndex[0], 01107 strlen( bottomVVelField3 ) ); 01108 CHECKIERR( ierr, "failed to define the field tag v3_ph" ); 01109 01110 ierr = iMOAB_ApplyScalarProjectionWeights( cplLndAtmPID, weights_identifiers[0], concat_fieldname, 01111 concat_fieldnameT, strlen( weights_identifiers[0] ), 01112 strlen( concat_fieldname ), strlen( concat_fieldnameT ) ); 01113 CHECKIERR( ierr, "failed to compute projection weight application from lnd to atm " ); 01114 POP_TIMER( couComm, rankInCouComm ) 01115 01116 char outputFileTgt[] = "fAtm3OnCpl.h5m"; // this is for T3_ph, etc 01117 ierr = iMOAB_WriteMesh( cplAtmPID, outputFileTgt, fileWriteOptions, strlen( outputFileTgt ), 01118 strlen( fileWriteOptions ) ); 01119 CHECKIERR( ierr, "could not write fAtm3OnCpl.h5m to disk" ) 01120 } 01121 01122 // send the projected tag back to atm pes, with send/receive partial par graph computed 01123 // from intx lnd/atm towards atm physics mesh ! 01124 if( atmComm != MPI_COMM_NULL ) 01125 { 01126 int tagIndexIn2; 01127 ierr = iMOAB_DefineTagStorage( cmpPhAtmPID, bottomTempPhLndProjectedField, &tagTypes[1], &atmCompNDoFs, 01128 &tagIndexIn2, strlen( bottomTempPhLndProjectedField ) ); 01129 CHECKIERR( ierr, "failed to define the field tag for receiving back the tag " 01130 "TphL_proj on atm pes" ); 01131 ierr = iMOAB_DefineTagStorage( cmpPhAtmPID, bottomUVelPhLndProjectedField, &tagTypes[1], &atmCompNDoFs, 01132 &tagIndexIn2, strlen( bottomUVelPhLndProjectedField ) ); 01133 CHECKIERR( ierr, "failed to define the field tag for receiving back the tag " 01134 "uphL_proj on atm pes" ); 01135 ierr = iMOAB_DefineTagStorage( cmpPhAtmPID, bottomVVelPhLndProjectedField, &tagTypes[1], &atmCompNDoFs, 01136 &tagIndexIn2, strlen( bottomVVelPhLndProjectedField ) ); 01137 CHECKIERR( ierr, "failed to define the field tag for receiving back the tag " 01138 "vphL_proj on atm pes" ); 01139 } 01140 // send the tag to atm pes, from atm pg2 mesh on coupler pes 01141 // from couComm, using common joint comm atm_coupler, and computed graph, phys-grid -> atm FV on cpl, in reverse 01142 // as always, use nonblocking sends 01143 // graph context comes from commgraph ? 01144 01145 // ierr = iMOAB_ComputeCommGraph( cmpPhAtmPID, cplAtmPID, &atmCouComm, &atmPEGroup, &couPEGroup, &typeA, 01146 // &typeB, 01147 // &cmpatm, &cplatm ); 01148 01149 if( couComm != MPI_COMM_NULL ) 01150 { 01151 context_id = cmpatm; 01152 ierr = iMOAB_SendElementTag( cplAtmPID, "T3_ph;u3_ph;v3_ph;", &atmCouComm, &context_id, 01153 strlen( "T3_ph;u3_ph;v3_ph;" ) ); 01154 CHECKIERR( ierr, "cannot send tag values back to atm pes" ) 01155 } 01156 01157 // receive on component atm phys mesh 01158 if( atmComm != MPI_COMM_NULL ) 01159 { 01160 context_id = cplatm; 01161 ierr = iMOAB_ReceiveElementTag( cmpPhAtmPID, "TphL_proj;uphL_proj;vphL_proj;", &atmCouComm, &context_id, 01162 strlen( "TphL_proj;uphL_proj;vphL_proj;" ) ); 01163 CHECKIERR( ierr, "cannot receive tag values from atm pg2 mesh on coupler pes" ) 01164 } 01165 01166 MPI_Barrier( MPI_COMM_WORLD ); 01167 01168 if( couComm != MPI_COMM_NULL ) 01169 { 01170 context_id = cmpatm; 01171 ierr = iMOAB_FreeSenderBuffers( cplAtmPID, &context_id ); 01172 CHECKIERR( ierr, "cannot free buffers used for sending back atm tags " ) 01173 } 01174 if( atmComm != MPI_COMM_NULL ) // write only for n==1 case 01175 { 01176 char outputFileAtmPh[] = "AtmPhysProj3.h5m"; 01177 ierr = iMOAB_WriteMesh( cmpPhAtmPID, outputFileAtmPh, fileWriteOptions, strlen( outputFileAtmPh ), 01178 strlen( fileWriteOptions ) ); 01179 CHECKIERR( ierr, "could not write AtmPhysProj3.h5m to disk" ) 01180 } 01181 #endif 01182 // we could deregister cplLndAtmPID 01183 if( couComm != MPI_COMM_NULL ) 01184 { 01185 ierr = iMOAB_DeregisterApplication( cplLndAtmPID ); 01186 CHECKIERR( ierr, "cannot deregister app intx LA" ) 01187 } 01188 01189 // we could deregister cplAtmLndPID 01190 if( couComm != MPI_COMM_NULL ) 01191 { 01192 ierr = iMOAB_DeregisterApplication( cplAtmLndPID ); 01193 CHECKIERR( ierr, "cannot deregister app intx AL" ) 01194 } 01195 #endif // ENABLE_ATMLND_COUPLING 01196 01197 #ifdef ENABLE_ATMOCN_COUPLING 01198 if( couComm != MPI_COMM_NULL ) 01199 { 01200 ierr = iMOAB_DeregisterApplication( cplOcnAtmPID ); 01201 CHECKIERR( ierr, "cannot deregister app intx OA" ) 01202 } 01203 if( couComm != MPI_COMM_NULL ) 01204 { 01205 ierr = iMOAB_DeregisterApplication( cplAtmOcnPID ); 01206 CHECKIERR( ierr, "cannot deregister app intx AO" ) 01207 } 01208 #endif 01209 01210 #ifdef ENABLE_ATMLND_COUPLING 01211 if( lndComm != MPI_COMM_NULL ) 01212 { 01213 ierr = iMOAB_DeregisterApplication( cmpLndPID ); 01214 CHECKIERR( ierr, "cannot deregister app LND1" ) 01215 } 01216 #endif // ENABLE_ATMLND_COUPLING 01217 if( atmComm != MPI_COMM_NULL ) 01218 { 01219 ierr = iMOAB_DeregisterApplication( cmpPhAtmPID ); 01220 CHECKIERR( ierr, "cannot deregister app PhysAtm " ) 01221 } 01222 #ifdef ENABLE_ATMOCN_COUPLING 01223 if( ocnComm != MPI_COMM_NULL ) 01224 { 01225 ierr = iMOAB_DeregisterApplication( cmpOcnPID ); 01226 CHECKIERR( ierr, "cannot deregister app OCN1" ) 01227 } 01228 #endif // ENABLE_ATMOCN_COUPLING 01229 01230 if( atmComm != MPI_COMM_NULL ) 01231 { 01232 ierr = iMOAB_DeregisterApplication( cmpAtmPID ); 01233 CHECKIERR( ierr, "cannot deregister app ATM1" ) 01234 } 01235 01236 #ifdef ENABLE_ATMLND_COUPLING 01237 if( couComm != MPI_COMM_NULL ) 01238 { 01239 ierr = iMOAB_DeregisterApplication( cplLndPID ); 01240 CHECKIERR( ierr, "cannot deregister app LNDX" ) 01241 } 01242 #endif // ENABLE_ATMLND_COUPLING 01243 01244 #ifdef ENABLE_ATMOCN_COUPLING 01245 if( couComm != MPI_COMM_NULL ) 01246 { 01247 ierr = iMOAB_DeregisterApplication( cplOcnPID ); 01248 CHECKIERR( ierr, "cannot deregister app OCNX" ) 01249 } 01250 #endif // ENABLE_ATMOCN_COUPLING 01251 01252 if( couComm != MPI_COMM_NULL ) 01253 { 01254 ierr = iMOAB_DeregisterApplication( cplAtmPID ); 01255 CHECKIERR( ierr, "cannot deregister app ATMX" ) 01256 } 01257 01258 //#endif 01259 ierr = iMOAB_Finalize(); 01260 CHECKIERR( ierr, "did not finalize iMOAB" ) 01261 01262 // free atm coupler group and comm 01263 if( MPI_COMM_NULL != atmCouComm ) MPI_Comm_free( &atmCouComm ); 01264 MPI_Group_free( &joinAtmCouGroup ); 01265 if( MPI_COMM_NULL != atmComm ) MPI_Comm_free( &atmComm ); 01266 01267 #ifdef ENABLE_ATMOCN_COUPLING 01268 if( MPI_COMM_NULL != ocnComm ) MPI_Comm_free( &ocnComm ); 01269 // free ocn - coupler group and comm 01270 if( MPI_COMM_NULL != ocnCouComm ) MPI_Comm_free( &ocnCouComm ); 01271 MPI_Group_free( &joinOcnCouGroup ); 01272 #endif 01273 01274 #ifdef ENABLE_ATMLND_COUPLING 01275 if( MPI_COMM_NULL != lndComm ) MPI_Comm_free( &lndComm ); 01276 // free land - coupler group and comm 01277 if( MPI_COMM_NULL != lndCouComm ) MPI_Comm_free( &lndCouComm ); 01278 MPI_Group_free( &joinLndCouGroup ); 01279 #endif 01280 01281 if( MPI_COMM_NULL != couComm ) MPI_Comm_free( &couComm ); 01282 01283 MPI_Group_free( &atmPEGroup ); 01284 #ifdef ENABLE_ATMOCN_COUPLING 01285 MPI_Group_free( &ocnPEGroup ); 01286 #endif 01287 #ifdef ENABLE_ATMLND_COUPLING 01288 MPI_Group_free( &lndPEGroup ); 01289 #endif 01290 MPI_Group_free( &couPEGroup ); 01291 MPI_Group_free( &jgroup ); 01292 01293 MPI_Finalize(); 01294 // endif #if 0 01295 01296 return 0; 01297 }