MOAB: Mesh Oriented datABase (version 5.4.1)
#include "moab/ParallelComm.hpp"
#include "MBParallelConventions.h"
#include "ReadParallel.hpp"
#include "moab/FileOptions.hpp"
#include "MBTagConventions.hpp"
#include "moab/Core.hpp"
#include "moab_mpi.h"
#include "TestUtil.hpp"
#include <iostream>
#include <algorithm>
#include <sstream>
#include <cassert>
#include <unistd.h>
Go to the source code of this file.
Defines
#define | CHKERR(a) |
#define | PCHECK(A) if( is_any_proc_error( !( A ) ) ) return report_error( __FILE__, __LINE__ ) |
#define | RUN_TEST_ARG3(A, B, C) run_test( &( A ), #A, B, C ) |
Functions
ErrorCode | report_error (const char *file, int line) |
ErrorCode | test_read (const char *filename, const char *option) |
int | is_any_proc_error (int is_my_error) |
int | run_test (ErrorCode(*func)(const char *, const char *), const char *func_name, const std::string &file_name, const char *option) |
int | main (int argc, char *argv[]) |
#define CHKERR | ( | a | ) |
do \ { \ ErrorCode val = ( a ); \ if( MB_SUCCESS != val ) \ { \ std::cerr << "Error code " << val << " at " << __FILE__ << ":" << __LINE__ << std::endl; \ return val; \ } \ } while( false )
Definition at line 20 of file uber_parallel_test.cpp.
Referenced by test_read().
#define PCHECK | ( | A | ) | if( is_any_proc_error( !( A ) ) ) return report_error( __FILE__, __LINE__ ) |
Definition at line 31 of file uber_parallel_test.cpp.
#define RUN_TEST_ARG3 | ( | A, | |
B, | |||
C | |||
) | run_test( &( A ), #A, B, C ) |
Definition at line 42 of file uber_parallel_test.cpp.
Referenced by main().
int is_any_proc_error | ( | int | is_my_error | ) |
Definition at line 44 of file uber_parallel_test.cpp.
References MPI_COMM_WORLD.
{ int result = 0; int err = MPI_Allreduce( &is_my_error, &result, 1, MPI_INT, MPI_MAX, MPI_COMM_WORLD ); return err || result; }
int main | ( | int | argc, |
char * | argv[] | ||
) |
Definition at line 71 of file uber_parallel_test.cpp.
References filename, MPI_COMM_WORLD, rank, RUN_TEST_ARG3, size, and test_read().
{ int rank, size; MPI_Init( &argc, &argv ); MPI_Comm_rank( MPI_COMM_WORLD, &rank ); MPI_Comm_size( MPI_COMM_WORLD, &size ); int num_errors = 0; const char* option; std::string vtk_test_filename = TestDir + "unittest/hex_2048.vtk"; #ifdef MOAB_HAVE_HDF5 std::string filename; if( 1 < argc ) filename = std::string( argv[1] ); else filename = TestDir + "unittest/64bricks_512hex.h5m"; //=========== read_delete, geom_dimension, resolve_shared option = "PARALLEL=READ_DELETE;PARTITION=GEOM_DIMENSION;PARTITION_VAL=3;PARTITION_DISTRIBUTE;" "PARALLEL_RESOLVE_SHARED_ENTS;"; num_errors += RUN_TEST_ARG3( test_read, filename, option ); //=========== read_delete, material_set, resolve_shared option = "PARALLEL=READ_DELETE;PARTITION=MATERIAL_SET;PARTITION_DISTRIBUTE;PARALLEL_RESOLVE_" "SHARED_ENTS;"; num_errors += RUN_TEST_ARG3( test_read, filename, option ); //=========== bcast_delete, geom_dimension, resolve_shared option = "PARALLEL=BCAST_DELETE;PARTITION=GEOM_DIMENSION;PARTITION_VAL=3;PARTITION_DISTRIBUTE;" "PARALLEL_RESOLVE_SHARED_ENTS;"; num_errors += RUN_TEST_ARG3( test_read, filename, option ); //=========== bcast_delete, material_set, resolve_shared option = "PARALLEL=BCAST_DELETE;PARTITION=MATERIAL_SET;PARTITION_DISTRIBUTE;PARALLEL_RESOLVE_" "SHARED_ENTS;"; num_errors += RUN_TEST_ARG3( test_read, filename, option ); //=========== read_delete, geom_dimension, resolve_shared, exch ghost option = "PARALLEL=READ_DELETE;PARTITION=GEOM_DIMENSION;PARTITION_VAL=3;PARTITION_DISTRIBUTE;" "PARALLEL_RESOLVE_SHARED_ENTS;PARALLEL_GHOSTS=3.0.1;"; num_errors += RUN_TEST_ARG3( test_read, filename, option ); //=========== read_delete, material_set, resolve_shared, exch ghost option = "PARALLEL=READ_DELETE;PARTITION=MATERIAL_SET;PARTITION_DISTRIBUTE;PARALLEL_RESOLVE_" "SHARED_ENTS;PARALLEL_GHOSTS=3.0.1;"; num_errors += RUN_TEST_ARG3( test_read, filename, option ); //=========== bcast_delete, geom_dimension, resolve_shared, exch ghost option = 
"PARALLEL=BCAST_DELETE;PARTITION=GEOM_DIMENSION;PARTITION_VAL=3;PARTITION_DISTRIBUTE;" "PARALLEL_RESOLVE_SHARED_ENTS;PARALLEL_GHOSTS=3.0.1;"; num_errors += RUN_TEST_ARG3( test_read, filename, option ); //=========== bcast_delete, material_set, resolve_shared, exch ghost option = "PARALLEL=BCAST_DELETE;PARTITION=MATERIAL_SET;PARTITION_DISTRIBUTE;PARALLEL_RESOLVE_" "SHARED_ENTS;PARALLEL_GHOSTS=3.0.1;"; num_errors += RUN_TEST_ARG3( test_read, filename, option ); #endif if( vtk_test_filename.size() ) { //=========== bcast_delete, trivial, resolve_shared option = "PARALLEL=BCAST_DELETE;PARTITION=TRIVIAL;PARTITION_DISTRIBUTE;PARALLEL_RESOLVE_" "SHARED_ENTS;"; num_errors += RUN_TEST_ARG3( test_read, vtk_test_filename, option ); //=========== bcast_delete, trivial, resolve_shared + ghosting option = "PARALLEL=BCAST_DELETE;PARTITION=TRIVIAL;PARTITION_DISTRIBUTE;PARALLEL_RESOLVE_" "SHARED_ENTS;PARALLEL_GHOSTS=3.0.1;"; num_errors += RUN_TEST_ARG3( test_read, vtk_test_filename, option ); } MPI_Finalize(); return num_errors; }
ErrorCode report_error | ( | const char * | file, |
int | line | ||
) |
Definition at line 34 of file uber_parallel_test.cpp.
{ std::cerr << "Failure at " << file << ':' << line << std::endl; return MB_FAILURE; }
int run_test | ( | ErrorCode(*)(const char *, const char *) | func, |
const char * | func_name, | ||
const std::string & | file_name, | ||
const char * | option | ||
) |
Definition at line 51 of file uber_parallel_test.cpp.
References ErrorCode, is_any_proc_error(), MB_SUCCESS, MPI_COMM_WORLD, and rank.
{ ErrorCode result = ( *func )( file_name.c_str(), option ); int is_err = is_any_proc_error( ( MB_SUCCESS != result ) ); int rank; MPI_Comm_rank( MPI_COMM_WORLD, &rank ); if( rank == 0 ) { if( is_err ) std::cout << func_name << " : FAILED!!" << std::endl; else std::cout << func_name << " : success" << std::endl; } return is_err; }
Definition at line 145 of file uber_parallel_test.cpp.
References moab::ParallelComm::check_all_shared_handles(), CHKERR, ErrorCode, moab::ParallelComm::get_pcomm(), moab::Interface::load_file(), mb_instance(), and MB_SUCCESS.
{ Core mb_instance; Interface& moab = mb_instance; ErrorCode rval; rval = moab.load_file( filename, 0, option );CHKERR( rval ); ParallelComm* pcomm = ParallelComm::get_pcomm( &moab, 0 ); rval = pcomm->check_all_shared_handles();CHKERR( rval ); return MB_SUCCESS; }