MOAB: Mesh Oriented datABase (version 5.4.1)
00001 #include "moab/ParallelComm.hpp" 00002 #include "MBParallelConventions.h" 00003 #include "ReadParallel.hpp" 00004 #include "moab/FileOptions.hpp" 00005 #include "MBTagConventions.hpp" 00006 #include "moab/Core.hpp" 00007 #include "moab_mpi.h" 00008 #include "TestUtil.hpp" 00009 00010 #include <iostream> 00011 #include <algorithm> 00012 #include <sstream> 00013 #include <cassert> 00014 #if !defined( _MSC_VER ) && !defined( __MINGW32__ ) 00015 #include <unistd.h> 00016 #endif 00017 00018 using namespace moab; 00019 00020 #define CHKERR( a ) \ 00021 do \ 00022 { \ 00023 ErrorCode val = ( a ); \ 00024 if( MB_SUCCESS != val ) \ 00025 { \ 00026 std::cerr << "Error code " << val << " at " << __FILE__ << ":" << __LINE__ << std::endl; \ 00027 return val; \ 00028 } \ 00029 } while( false ) 00030 00031 #define PCHECK( A ) \ 00032 if( is_any_proc_error( !( A ) ) ) return report_error( __FILE__, __LINE__ ) 00033 00034 ErrorCode report_error( const char* file, int line ) 00035 { 00036 std::cerr << "Failure at " << file << ':' << line << std::endl; 00037 return MB_FAILURE; 00038 } 00039 00040 ErrorCode test_read( const char* filename, const char* option ); 00041 00042 #define RUN_TEST_ARG3( A, B, C ) run_test( &( A ), #A, B, C ) 00043 00044 int is_any_proc_error( int is_my_error ) 00045 { 00046 int result = 0; 00047 int err = MPI_Allreduce( &is_my_error, &result, 1, MPI_INT, MPI_MAX, MPI_COMM_WORLD ); 00048 return err || result; 00049 } 00050 00051 int run_test( ErrorCode ( *func )( const char*, const char* ), 00052 const char* func_name, 00053 const std::string& file_name, 00054 const char* option ) 00055 { 00056 ErrorCode result = ( *func )( file_name.c_str(), option ); 00057 int is_err = is_any_proc_error( ( MB_SUCCESS != result ) ); 00058 int rank; 00059 MPI_Comm_rank( MPI_COMM_WORLD, &rank ); 00060 if( rank == 0 ) 00061 { 00062 if( is_err ) 00063 std::cout << func_name << " : FAILED!!" 
<< std::endl; 00064 else 00065 std::cout << func_name << " : success" << std::endl; 00066 } 00067 00068 return is_err; 00069 } 00070 00071 int main( int argc, char* argv[] ) 00072 { 00073 int rank, size; 00074 MPI_Init( &argc, &argv ); 00075 MPI_Comm_rank( MPI_COMM_WORLD, &rank ); 00076 MPI_Comm_size( MPI_COMM_WORLD, &size ); 00077 int num_errors = 0; 00078 00079 const char* option; 00080 std::string vtk_test_filename = TestDir + "unittest/hex_2048.vtk"; 00081 00082 #ifdef MOAB_HAVE_HDF5 00083 std::string filename; 00084 if( 1 < argc ) 00085 filename = std::string( argv[1] ); 00086 else 00087 filename = TestDir + "unittest/64bricks_512hex.h5m"; 00088 00089 //=========== read_delete, geom_dimension, resolve_shared 00090 option = "PARALLEL=READ_DELETE;PARTITION=GEOM_DIMENSION;PARTITION_VAL=3;PARTITION_DISTRIBUTE;" 00091 "PARALLEL_RESOLVE_SHARED_ENTS;"; 00092 num_errors += RUN_TEST_ARG3( test_read, filename, option ); 00093 00094 //=========== read_delete, material_set, resolve_shared 00095 option = "PARALLEL=READ_DELETE;PARTITION=MATERIAL_SET;PARTITION_DISTRIBUTE;PARALLEL_RESOLVE_" 00096 "SHARED_ENTS;"; 00097 num_errors += RUN_TEST_ARG3( test_read, filename, option ); 00098 00099 //=========== bcast_delete, geom_dimension, resolve_shared 00100 option = "PARALLEL=BCAST_DELETE;PARTITION=GEOM_DIMENSION;PARTITION_VAL=3;PARTITION_DISTRIBUTE;" 00101 "PARALLEL_RESOLVE_SHARED_ENTS;"; 00102 num_errors += RUN_TEST_ARG3( test_read, filename, option ); 00103 00104 //=========== bcast_delete, material_set, resolve_shared 00105 option = "PARALLEL=BCAST_DELETE;PARTITION=MATERIAL_SET;PARTITION_DISTRIBUTE;PARALLEL_RESOLVE_" 00106 "SHARED_ENTS;"; 00107 num_errors += RUN_TEST_ARG3( test_read, filename, option ); 00108 00109 //=========== read_delete, geom_dimension, resolve_shared, exch ghost 00110 option = "PARALLEL=READ_DELETE;PARTITION=GEOM_DIMENSION;PARTITION_VAL=3;PARTITION_DISTRIBUTE;" 00111 "PARALLEL_RESOLVE_SHARED_ENTS;PARALLEL_GHOSTS=3.0.1;"; 00112 num_errors += RUN_TEST_ARG3( 
test_read, filename, option ); 00113 00114 //=========== read_delete, material_set, resolve_shared, exch ghost 00115 option = "PARALLEL=READ_DELETE;PARTITION=MATERIAL_SET;PARTITION_DISTRIBUTE;PARALLEL_RESOLVE_" 00116 "SHARED_ENTS;PARALLEL_GHOSTS=3.0.1;"; 00117 num_errors += RUN_TEST_ARG3( test_read, filename, option ); 00118 00119 //=========== bcast_delete, geom_dimension, resolve_shared, exch ghost 00120 option = "PARALLEL=BCAST_DELETE;PARTITION=GEOM_DIMENSION;PARTITION_VAL=3;PARTITION_DISTRIBUTE;" 00121 "PARALLEL_RESOLVE_SHARED_ENTS;PARALLEL_GHOSTS=3.0.1;"; 00122 num_errors += RUN_TEST_ARG3( test_read, filename, option ); 00123 00124 //=========== bcast_delete, material_set, resolve_shared, exch ghost 00125 option = "PARALLEL=BCAST_DELETE;PARTITION=MATERIAL_SET;PARTITION_DISTRIBUTE;PARALLEL_RESOLVE_" 00126 "SHARED_ENTS;PARALLEL_GHOSTS=3.0.1;"; 00127 num_errors += RUN_TEST_ARG3( test_read, filename, option ); 00128 #endif 00129 if( vtk_test_filename.size() ) 00130 { 00131 //=========== bcast_delete, trivial, resolve_shared 00132 option = "PARALLEL=BCAST_DELETE;PARTITION=TRIVIAL;PARTITION_DISTRIBUTE;PARALLEL_RESOLVE_" 00133 "SHARED_ENTS;"; 00134 num_errors += RUN_TEST_ARG3( test_read, vtk_test_filename, option ); 00135 //=========== bcast_delete, trivial, resolve_shared + ghosting 00136 option = "PARALLEL=BCAST_DELETE;PARTITION=TRIVIAL;PARTITION_DISTRIBUTE;PARALLEL_RESOLVE_" 00137 "SHARED_ENTS;PARALLEL_GHOSTS=3.0.1;"; 00138 num_errors += RUN_TEST_ARG3( test_read, vtk_test_filename, option ); 00139 } 00140 MPI_Finalize(); 00141 00142 return num_errors; 00143 } 00144 00145 ErrorCode test_read( const char* filename, const char* option ) 00146 { 00147 Core mb_instance; 00148 Interface& moab = mb_instance; 00149 ErrorCode rval; 00150 00151 rval = moab.load_file( filename, 0, option );CHKERR( rval ); 00152 00153 ParallelComm* pcomm = ParallelComm::get_pcomm( &moab, 0 ); 00154 00155 rval = pcomm->check_all_shared_handles();CHKERR( rval ); 00156 00157 return MB_SUCCESS; 
00158 }