Actual source code: bddc.c

petsc-dev 2014-02-02
  1: /* TODOLIST

  3:    ConstraintsSetup
  4:    - tolerances for constraints as an option (take care of single precision!)
  5:    - Can MAT_IGNORE_ZERO_ENTRIES be used for Constraints Matrix?

  7:    Solvers
  8:    - Add support for reuse of fill and Cholesky factor for the coarse solver (similar to local solvers)
  9:    - Propagate ksp prefixes for solvers to mat objects?
 10:    - Propagate nearnullspace info among levels

 12:    User interface
 13:    - Change SetNeumannBoundaries to SetNeumannBoundariesLocal and provide a new SetNeumannBoundaries (same for Dirichlet)
 14:    - Negative indices in Dirichlet and Neumann ISs should be skipped (now they cause out-of-bounds access)
 15:    - Provide PCApplyTranspose_BDDC
 16:    - DofSplitting and DM attached to pc?

 18:    Debugging output
 19:    - Better management of verbosity levels of debugging output

 21:    Build
 22:    - make runexe59

 24:    Extra
 25:    - Is it possible to work with PCBDDCGraph on boundary indices only (less memory consumed)?
 26:    - Why do options for the "pc_bddc_coarse" solver get propagated to the "pc_bddc_coarse_1" solver?
 27:    - Add support for computing h, H and related quantities using coordinates?
 28:    - Change of basis approach does not work with my nonlinear mechanics example. Why? (seems not to be an issue with the l2gmap)
 29:    - Better management in PCIS code
 30:    - BDDC with MG framework?

 32:    FETIDP
 33:    - Move FETIDP code to its own classes

 35:    MATIS related operations contained in BDDC code
 36:    - Provide general case for subassembling
 37:    - Preallocation routines in MatISGetMPIXAIJ

 39: */

 41: /* ----------------------------------------------------------------------------------------------------------------------------------------------
 42:    Implementation of BDDC preconditioner based on:
 43:    C. Dohrmann "An approximate BDDC preconditioner", Numerical Linear Algebra with Applications Volume 14, Issue 2, pages 149-168, March 2007
 44:    ---------------------------------------------------------------------------------------------------------------------------------------------- */

 46: #include <bddc.h> /*I "petscpc.h" I*/  /* includes for fortran wrappers */
 47: #include <bddcprivate.h>
 48: #include <petscblaslapack.h>

 50: /* -------------------------------------------------------------------------- */
 53: PetscErrorCode PCSetFromOptions_BDDC(PC pc)
 54: {
 55:   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;

 59:   PetscOptionsHead("BDDC options");
 60:   /* Verbose debugging */
 61:   PetscOptionsInt("-pc_bddc_check_level","Verbose output for PCBDDC (intended for debug)","none",pcbddc->dbg_flag,&pcbddc->dbg_flag,NULL);
 62:   /* Primal space customization */
 63:   PetscOptionsBool("-pc_bddc_use_vertices","Use or not corner dofs in coarse space","none",pcbddc->use_vertices,&pcbddc->use_vertices,NULL);
 64:   PetscOptionsBool("-pc_bddc_use_edges","Use or not edge constraints in coarse space","none",pcbddc->use_edges,&pcbddc->use_edges,NULL);
 65:   PetscOptionsBool("-pc_bddc_use_faces","Use or not face constraints in coarse space","none",pcbddc->use_faces,&pcbddc->use_faces,NULL);
 66:   /* Change of basis */
 67:   PetscOptionsBool("-pc_bddc_use_change_of_basis","Use or not change of basis on local edge nodes","none",pcbddc->use_change_of_basis,&pcbddc->use_change_of_basis,NULL);
 68:   PetscOptionsBool("-pc_bddc_use_change_on_faces","Use or not change of basis on local face nodes","none",pcbddc->use_change_on_faces,&pcbddc->use_change_on_faces,NULL);
 69:   if (!pcbddc->use_change_of_basis) {
 70:     pcbddc->use_change_on_faces = PETSC_FALSE;
 71:   }
 72:   /* Switch between M_2 (default) and M_3 preconditioners (as defined by C. Dohrmann in the ref. article) */
 73:   PetscOptionsBool("-pc_bddc_switch_static","Switch on static condensation ops around the interface preconditioner","none",pcbddc->switch_static,&pcbddc->switch_static,NULL);
 74:   PetscOptionsInt("-pc_bddc_coarsening_ratio","Set coarsening ratio used in multilevel coarsening","none",pcbddc->coarsening_ratio,&pcbddc->coarsening_ratio,NULL);
 75:   PetscOptionsInt("-pc_bddc_levels","Set maximum number of levels for multilevel","none",pcbddc->max_levels,&pcbddc->max_levels,NULL);
 76:   PetscOptionsBool("-pc_bddc_use_deluxe_scaling","Use deluxe scaling for BDDC","none",pcbddc->use_deluxe_scaling,&pcbddc->use_deluxe_scaling,NULL);
 77:   PetscOptionsTail();
 78:   return(0);
 79: }
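/*
   Example (sketch): the options registered above can also be set programmatically before
   PCSetFromOptions is called; a minimal illustration, assuming 'pc' already has type PCBDDC:

     PetscOptionsSetValue("-pc_bddc_use_faces","true");
     PetscOptionsSetValue("-pc_bddc_levels","1");
     PetscOptionsSetValue("-pc_bddc_coarsening_ratio","4");
     PCSetFromOptions(pc);
*/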
 80: /* -------------------------------------------------------------------------- */
 83: static PetscErrorCode PCBDDCSetPrimalVerticesLocalIS_BDDC(PC pc, IS PrimalVertices)
 84: {
 85:   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;

 89:   ISDestroy(&pcbddc->user_primal_vertices);
 90:   PetscObjectReference((PetscObject)PrimalVertices);
 91:   pcbddc->user_primal_vertices = PrimalVertices;
 92:   return(0);
 93: }
 96: /*@
 97:  PCBDDCSetPrimalVerticesLocalIS - Set additional user-defined primal vertices in PCBDDC

 99:    Not collective

101:    Input Parameters:
102: +  pc - the preconditioning context
103: -  PrimalVertices - index set of primal vertices in local numbering

105:    Level: intermediate

107:    Notes:

109: .seealso: PCBDDC
110: @*/
111: PetscErrorCode PCBDDCSetPrimalVerticesLocalIS(PC pc, IS PrimalVertices)
112: {

118:   PetscTryMethod(pc,"PCBDDCSetPrimalVerticesLocalIS_C",(PC,IS),(pc,PrimalVertices));
119:   return(0);
120: }
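/*
   Example (sketch): mark user-known corner dofs as primal vertices. Assumes 'pc' has type PCBDDC
   and 'nv'/'vidx' (hypothetical names) describe the vertex dofs in local numbering:

     IS primal_is;
     ISCreateGeneral(PETSC_COMM_SELF,nv,vidx,PETSC_COPY_VALUES,&primal_is);
     PCBDDCSetPrimalVerticesLocalIS(pc,primal_is);
     ISDestroy(&primal_is);
*/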
121: /* -------------------------------------------------------------------------- */
124: static PetscErrorCode PCBDDCSetCoarseningRatio_BDDC(PC pc,PetscInt k)
125: {
126:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

129:   pcbddc->coarsening_ratio = k;
130:   return(0);
131: }

135: /*@
136:  PCBDDCSetCoarseningRatio - Set coarsening ratio used in multilevel

138:    Logically collective on PC

140:    Input Parameters:
141: +  pc - the preconditioning context
142: -  k - coarsening ratio (H/h at the coarser level)

 144:    Approximately k subdomains at the finer level will be aggregated into a single subdomain at the coarser level

146:    Level: intermediate

148:    Notes:

150: .seealso: PCBDDC
151: @*/
152: PetscErrorCode PCBDDCSetCoarseningRatio(PC pc,PetscInt k)
153: {

159:   PetscTryMethod(pc,"PCBDDCSetCoarseningRatio_C",(PC,PetscInt),(pc,k));
160:   return(0);
161: }

163: /* The following functions (PCBDDCSetUseExactDirichlet PCBDDCSetLevel) are not public */
166: static PetscErrorCode PCBDDCSetUseExactDirichlet_BDDC(PC pc,PetscBool flg)
167: {
168:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

171:   pcbddc->use_exact_dirichlet_trick = flg;
172:   return(0);
173: }

177: PetscErrorCode PCBDDCSetUseExactDirichlet(PC pc,PetscBool flg)
178: {

184:   PetscTryMethod(pc,"PCBDDCSetUseExactDirichlet_C",(PC,PetscBool),(pc,flg));
185:   return(0);
186: }

190: static PetscErrorCode PCBDDCSetLevel_BDDC(PC pc,PetscInt level)
191: {
192:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

195:   pcbddc->current_level = level;
196:   return(0);
197: }

201: PetscErrorCode PCBDDCSetLevel(PC pc,PetscInt level)
202: {

208:   PetscTryMethod(pc,"PCBDDCSetLevel_C",(PC,PetscInt),(pc,level));
209:   return(0);
210: }

214: static PetscErrorCode PCBDDCSetLevels_BDDC(PC pc,PetscInt levels)
215: {
216:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

219:   pcbddc->max_levels = levels;
220:   return(0);
221: }

225: /*@
226:  PCBDDCSetLevels - Sets the maximum number of levels for multilevel

228:    Logically collective on PC

230:    Input Parameters:
231: +  pc - the preconditioning context
232: -  levels - the maximum number of levels (max 9)

234:    Default value is 0, i.e. traditional one-level BDDC

236:    Level: intermediate

238:    Notes:

240: .seealso: PCBDDC
241: @*/
242: PetscErrorCode PCBDDCSetLevels(PC pc,PetscInt levels)
243: {

249:   PetscTryMethod(pc,"PCBDDCSetLevels_C",(PC,PetscInt),(pc,levels));
250:   return(0);
251: }
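/*
   Example (sketch): request a two-level method, aggregating roughly four subdomains into each
   coarse subdomain. Assumes 'pc' has type PCBDDC:

     PCBDDCSetLevels(pc,1);
     PCBDDCSetCoarseningRatio(pc,4);
*/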
252: /* -------------------------------------------------------------------------- */

256: static PetscErrorCode PCBDDCSetNullSpace_BDDC(PC pc,MatNullSpace NullSpace)
257: {
258:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

262:   PetscObjectReference((PetscObject)NullSpace);
263:   MatNullSpaceDestroy(&pcbddc->NullSpace);
264:   pcbddc->NullSpace = NullSpace;
265:   return(0);
266: }

270: /*@
271:  PCBDDCSetNullSpace - Set nullspace for BDDC operator

273:    Logically collective on PC and MatNullSpace

275:    Input Parameters:
276: +  pc - the preconditioning context
277: -  NullSpace - Null space of the linear operator to be preconditioned (Pmat)

279:    Level: intermediate

281:    Notes:

283: .seealso: PCBDDC
284: @*/
285: PetscErrorCode PCBDDCSetNullSpace(PC pc,MatNullSpace NullSpace)
286: {

293:   PetscTryMethod(pc,"PCBDDCSetNullSpace_C",(PC,MatNullSpace),(pc,NullSpace));
294:   return(0);
295: }
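/*
   Example (sketch): inform BDDC that the operator is singular with the constant vector in its
   null space (e.g. a pure Neumann scalar problem). Assumes 'pc' has type PCBDDC and 'comm' is
   the communicator of the preconditioning matrix:

     MatNullSpace nsp;
     MatNullSpaceCreate(comm,PETSC_TRUE,0,NULL,&nsp);
     PCBDDCSetNullSpace(pc,nsp);
     MatNullSpaceDestroy(&nsp);
*/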
296: /* -------------------------------------------------------------------------- */

300: static PetscErrorCode PCBDDCSetDirichletBoundaries_BDDC(PC pc,IS DirichletBoundaries)
301: {
302:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

306:   ISDestroy(&pcbddc->DirichletBoundaries);
307:   PetscObjectReference((PetscObject)DirichletBoundaries);
308:   pcbddc->DirichletBoundaries=DirichletBoundaries;
309:   pcbddc->recompute_topography = PETSC_TRUE;
310:   return(0);
311: }

315: /*@
316:  PCBDDCSetDirichletBoundaries - Set IS defining Dirichlet boundaries for the global problem.

318:    Not collective

320:    Input Parameters:
321: +  pc - the preconditioning context
322: -  DirichletBoundaries - sequential IS defining the subdomain part of Dirichlet boundaries (in local ordering)

324:    Level: intermediate

326:    Notes:

328: .seealso: PCBDDC
329: @*/
330: PetscErrorCode PCBDDCSetDirichletBoundaries(PC pc,IS DirichletBoundaries)
331: {

337:   PetscTryMethod(pc,"PCBDDCSetDirichletBoundaries_C",(PC,IS),(pc,DirichletBoundaries));
338:   return(0);
339: }
340: /* -------------------------------------------------------------------------- */

344: static PetscErrorCode PCBDDCSetNeumannBoundaries_BDDC(PC pc,IS NeumannBoundaries)
345: {
346:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

350:   ISDestroy(&pcbddc->NeumannBoundaries);
351:   PetscObjectReference((PetscObject)NeumannBoundaries);
352:   pcbddc->NeumannBoundaries=NeumannBoundaries;
353:   pcbddc->recompute_topography = PETSC_TRUE;
354:   return(0);
355: }

359: /*@
360:  PCBDDCSetNeumannBoundaries - Set IS defining Neumann boundaries for the global problem.

362:    Not collective

364:    Input Parameters:
365: +  pc - the preconditioning context
366: -  NeumannBoundaries - sequential IS defining the subdomain part of Neumann boundaries (in local ordering)

368:    Level: intermediate

370:    Notes:

372: .seealso: PCBDDC
373: @*/
374: PetscErrorCode PCBDDCSetNeumannBoundaries(PC pc,IS NeumannBoundaries)
375: {

381:   PetscTryMethod(pc,"PCBDDCSetNeumannBoundaries_C",(PC,IS),(pc,NeumannBoundaries));
382:   return(0);
383: }
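/*
   Example (sketch): provide the subdomain parts of the Dirichlet and Neumann boundaries in local
   numbering. Assumes 'pc' has type PCBDDC and 'nd'/'didx' and 'nn'/'nidx' (hypothetical names)
   list the corresponding local dofs:

     IS dir_is,neu_is;
     ISCreateGeneral(PETSC_COMM_SELF,nd,didx,PETSC_COPY_VALUES,&dir_is);
     ISCreateGeneral(PETSC_COMM_SELF,nn,nidx,PETSC_COPY_VALUES,&neu_is);
     PCBDDCSetDirichletBoundaries(pc,dir_is);
     PCBDDCSetNeumannBoundaries(pc,neu_is);
     ISDestroy(&dir_is);
     ISDestroy(&neu_is);
*/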
384: /* -------------------------------------------------------------------------- */

388: static PetscErrorCode PCBDDCGetDirichletBoundaries_BDDC(PC pc,IS *DirichletBoundaries)
389: {
390:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

393:   *DirichletBoundaries = pcbddc->DirichletBoundaries;
394:   return(0);
395: }

399: /*@
400:  PCBDDCGetDirichletBoundaries - Get IS for local Dirichlet boundaries

402:    Not collective

404:    Input Parameters:
405: .  pc - the preconditioning context

407:    Output Parameters:
408: .  DirichletBoundaries - index set defining the subdomain part of Dirichlet boundaries

410:    Level: intermediate

412:    Notes:

414: .seealso: PCBDDC
415: @*/
416: PetscErrorCode PCBDDCGetDirichletBoundaries(PC pc,IS *DirichletBoundaries)
417: {

422:   PetscUseMethod(pc,"PCBDDCGetDirichletBoundaries_C",(PC,IS*),(pc,DirichletBoundaries));
423:   return(0);
424: }
425: /* -------------------------------------------------------------------------- */

429: static PetscErrorCode PCBDDCGetNeumannBoundaries_BDDC(PC pc,IS *NeumannBoundaries)
430: {
431:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

434:   *NeumannBoundaries = pcbddc->NeumannBoundaries;
435:   return(0);
436: }

440: /*@
441:  PCBDDCGetNeumannBoundaries - Get IS for local Neumann boundaries

443:    Not collective

445:    Input Parameters:
446: .  pc - the preconditioning context

448:    Output Parameters:
449: .  NeumannBoundaries - index set defining the subdomain part of Neumann boundaries

451:    Level: intermediate

453:    Notes:

455: .seealso: PCBDDC
456: @*/
457: PetscErrorCode PCBDDCGetNeumannBoundaries(PC pc,IS *NeumannBoundaries)
458: {

463:   PetscUseMethod(pc,"PCBDDCGetNeumannBoundaries_C",(PC,IS*),(pc,NeumannBoundaries));
464:   return(0);
465: }
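/*
   Example (sketch): query the Neumann boundary IS previously set on 'pc' (a PCBDDC) and view it;
   the returned IS is a borrowed reference and must not be destroyed by the caller:

     IS nbis;
     PCBDDCGetNeumannBoundaries(pc,&nbis);
     if (nbis) { ISView(nbis,PETSC_VIEWER_STDOUT_SELF); }
*/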
466: /* -------------------------------------------------------------------------- */

470: static PetscErrorCode PCBDDCSetLocalAdjacencyGraph_BDDC(PC pc, PetscInt nvtxs,const PetscInt xadj[],const PetscInt adjncy[], PetscCopyMode copymode)
471: {
472:   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
473:   PCBDDCGraph    mat_graph = pcbddc->mat_graph;

477:   /* free old CSR */
478:   PCBDDCGraphResetCSR(mat_graph);
479:   /* TODO: PCBDDCGraphSetAdjacency */
480:   /* get CSR into graph structure */
481:   if (copymode == PETSC_COPY_VALUES) {
482:     PetscMalloc1((nvtxs+1),&mat_graph->xadj);
483:     PetscMalloc1(xadj[nvtxs],&mat_graph->adjncy);
484:     PetscMemcpy(mat_graph->xadj,xadj,(nvtxs+1)*sizeof(PetscInt));
485:     PetscMemcpy(mat_graph->adjncy,adjncy,xadj[nvtxs]*sizeof(PetscInt));
486:   } else if (copymode == PETSC_OWN_POINTER) {
487:     mat_graph->xadj = (PetscInt*)xadj;
488:     mat_graph->adjncy = (PetscInt*)adjncy;
489:   }
490:   mat_graph->nvtxs_csr = nvtxs;
491:   return(0);
492: }

496: /*@
497:  PCBDDCSetLocalAdjacencyGraph - Set adjacency structure (CSR graph) of the local Neumann matrix

499:    Not collective

501:    Input Parameters:
502: +  pc - the preconditioning context
503: .  nvtxs - number of local vertices of the graph (i.e., the local size of your problem)
504: .  xadj, adjncy - the CSR graph
505: -  copymode - either PETSC_COPY_VALUES or PETSC_OWN_POINTER.

507:    Level: intermediate

509:    Notes:

511: .seealso: PCBDDC,PetscCopyMode
512: @*/
513: PetscErrorCode PCBDDCSetLocalAdjacencyGraph(PC pc,PetscInt nvtxs,const PetscInt xadj[],const PetscInt adjncy[], PetscCopyMode copymode)
514: {
515:   void (*f)(void) = 0;

522:   if (copymode != PETSC_COPY_VALUES && copymode != PETSC_OWN_POINTER) {
523:     SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unsupported copy mode %d in %s\n",copymode,__FUNCT__);
524:   }
525:   PetscTryMethod(pc,"PCBDDCSetLocalAdjacencyGraph_C",(PC,PetscInt,const PetscInt[],const PetscInt[],PetscCopyMode),(pc,nvtxs,xadj,adjncy,copymode));
526:   /* free arrays if PCBDDC is not the PC type */
527:   PetscObjectQueryFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",&f);
528:   if (!f && copymode == PETSC_OWN_POINTER) {
529:     PetscFree(xadj);
530:     PetscFree(adjncy);
531:   }
532:   return(0);
533: }
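/*
   Example (sketch): pass the CSR connectivity of a three-node local chain 0-1-2, letting PETSc
   copy the arrays. Assumes 'pc' has type PCBDDC:

     PetscInt xadj[4]   = {0,1,3,4};
     PetscInt adjncy[4] = {1,0,2,1};
     PCBDDCSetLocalAdjacencyGraph(pc,3,xadj,adjncy,PETSC_COPY_VALUES);
*/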
534: /* -------------------------------------------------------------------------- */

538: static PetscErrorCode PCBDDCSetDofsSplitting_BDDC(PC pc,PetscInt n_is, IS ISForDofs[])
539: {
540:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
541:   PetscInt i;

545:   /* Destroy ISes if they were already set */
546:   for (i=0;i<pcbddc->n_ISForDofs;i++) {
547:     ISDestroy(&pcbddc->ISForDofs[i]);
548:   }
549:   PetscFree(pcbddc->ISForDofs);
550:   /* allocate space then set */
551:   PetscMalloc1(n_is,&pcbddc->ISForDofs);
552:   for (i=0;i<n_is;i++) {
553:     PetscObjectReference((PetscObject)ISForDofs[i]);
554:     pcbddc->ISForDofs[i]=ISForDofs[i];
555:   }
556:   pcbddc->n_ISForDofs=n_is;
557:   pcbddc->user_provided_isfordofs = PETSC_TRUE;
558:   return(0);
559: }

563: /*@
564:  PCBDDCSetDofsSplitting - Set index sets defining fields of the local Neumann matrix

566:    Not collective

568:    Input Parameters:
569: +  pc - the preconditioning context
 570: .  n_is - number of index sets defining the fields
 571: -  ISForDofs - array of IS describing the fields

573:    Level: intermediate

575:    Notes:

577: .seealso: PCBDDC
578: @*/
579: PetscErrorCode PCBDDCSetDofsSplitting(PC pc,PetscInt n_is, IS ISForDofs[])
580: {
581:   PetscInt       i;

 586:   for (i=0;i<n_is;i++) {
 587:     PetscValidHeaderSpecific(ISForDofs[i],IS_CLASSID,3);
 588:   }
589:   PetscTryMethod(pc,"PCBDDCSetDofsSplitting_C",(PC,PetscInt,IS[]),(pc,n_is,ISForDofs));
590:   return(0);
591: }
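/*
   Example (sketch): describe two interleaved fields (e.g. velocity and pressure) of a local
   problem with 'nloc' nodes, dofs ordered u0,p0,u1,p1,... Assumes 'pc' has type PCBDDC:

     IS fields[2];
     ISCreateStride(PETSC_COMM_SELF,nloc,0,2,&fields[0]);
     ISCreateStride(PETSC_COMM_SELF,nloc,1,2,&fields[1]);
     PCBDDCSetDofsSplitting(pc,2,fields);
     ISDestroy(&fields[0]);
     ISDestroy(&fields[1]);
*/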
592: /* -------------------------------------------------------------------------- */
595: /* -------------------------------------------------------------------------- */
596: /*
597:    PCPreSolve_BDDC - Changes the right hand side and (if necessary) the initial
598:                      guess if a transformation of basis approach has been selected.

600:    Input Parameter:
 601: .  pc - the preconditioner context

603:    Application Interface Routine: PCPreSolve()

605:    Notes:
606:    The interface routine PCPreSolve() is not usually called directly by
607:    the user, but instead is called by KSPSolve().
608: */
609: static PetscErrorCode PCPreSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
610: {
612:   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
613:   PC_IS          *pcis = (PC_IS*)(pc->data);
614:   Mat_IS         *matis = (Mat_IS*)pc->pmat->data;
615:   Mat            temp_mat;
616:   IS             dirIS;
617:   PetscInt       dirsize,i,*is_indices;
618:   PetscScalar    *array_x,*array_diagonal;
619:   Vec            used_vec;
620:   PetscBool      guess_nonzero,flg,bddc_has_dirichlet_boundaries;

623:   /* if we are working with cg, one dirichlet solve can be avoided during Krylov iterations */
624:   if (ksp) {
625:     PetscBool iscg;
626:     PetscObjectTypeCompare((PetscObject)ksp,KSPCG,&iscg);
627:     if (!iscg) {
628:       PCBDDCSetUseExactDirichlet(pc,PETSC_FALSE);
629:     }
630:   }
631:   /* Creates parallel work vectors used in presolve */
632:   if (!pcbddc->original_rhs) {
633:     VecDuplicate(pcis->vec1_global,&pcbddc->original_rhs);
634:   }
635:   if (!pcbddc->temp_solution) {
636:     VecDuplicate(pcis->vec1_global,&pcbddc->temp_solution);
637:   }
638:   if (x) {
639:     PetscObjectReference((PetscObject)x);
640:     used_vec = x;
641:   } else {
642:     PetscObjectReference((PetscObject)pcbddc->temp_solution);
643:     used_vec = pcbddc->temp_solution;
644:     VecSet(used_vec,0.0);
645:   }
 646:   /* hack into the ksp data structure since PCPreSolve is called before the initial guess is zeroed in src/ksp/ksp/interface/itfunc.c */
647:   if (ksp) {
648:     KSPGetInitialGuessNonzero(ksp,&guess_nonzero);
649:     if (!guess_nonzero) {
650:       VecSet(used_vec,0.0);
651:     }
652:   }

 654:   /* TODO: remove when Dirichlet boundaries are shared */
655:   PCBDDCGetDirichletBoundaries(pc,&dirIS);
656:   flg = PETSC_FALSE;
657:   if (dirIS) flg = PETSC_TRUE;
658:   MPI_Allreduce(&flg,&bddc_has_dirichlet_boundaries,1,MPIU_BOOL,MPI_LOR,PetscObjectComm((PetscObject)pc));

660:   /* store the original rhs */
661:   VecCopy(rhs,pcbddc->original_rhs);

 663:   /* Take into account zeroed rows -> change the rhs and store the removed solution */
664:   if (rhs && bddc_has_dirichlet_boundaries) {
665:     MatGetDiagonal(pc->pmat,pcis->vec1_global);
666:     VecPointwiseDivide(pcis->vec1_global,rhs,pcis->vec1_global);
667:     VecScatterBegin(matis->ctx,pcis->vec1_global,pcis->vec2_N,INSERT_VALUES,SCATTER_FORWARD);
668:     VecScatterEnd(matis->ctx,pcis->vec1_global,pcis->vec2_N,INSERT_VALUES,SCATTER_FORWARD);
669:     VecScatterBegin(matis->ctx,used_vec,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);
670:     VecScatterEnd(matis->ctx,used_vec,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);
671:     if (dirIS) {
672:       ISGetSize(dirIS,&dirsize);
673:       VecGetArray(pcis->vec1_N,&array_x);
674:       VecGetArray(pcis->vec2_N,&array_diagonal);
675:       ISGetIndices(dirIS,(const PetscInt**)&is_indices);
676:       for (i=0; i<dirsize; i++) array_x[is_indices[i]] = array_diagonal[is_indices[i]];
677:       ISRestoreIndices(dirIS,(const PetscInt**)&is_indices);
678:       VecRestoreArray(pcis->vec2_N,&array_diagonal);
679:       VecRestoreArray(pcis->vec1_N,&array_x);
680:     }
681:     VecScatterBegin(matis->ctx,pcis->vec1_N,used_vec,INSERT_VALUES,SCATTER_REVERSE);
682:     VecScatterEnd(matis->ctx,pcis->vec1_N,used_vec,INSERT_VALUES,SCATTER_REVERSE);

684:     /* remove the computed solution from the rhs */
685:     VecScale(used_vec,-1.0);
686:     MatMultAdd(pc->pmat,used_vec,rhs,rhs);
687:     VecScale(used_vec,-1.0);
688:   }

690:   /* store partially computed solution and set initial guess */
691:   if (x) {
692:     VecCopy(used_vec,pcbddc->temp_solution);
693:     VecSet(used_vec,0.0);
694:     if (pcbddc->use_exact_dirichlet_trick) {
695:       VecScatterBegin(pcis->global_to_D,rhs,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);
696:       VecScatterEnd  (pcis->global_to_D,rhs,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);
697:       KSPSolve(pcbddc->ksp_D,pcis->vec1_D,pcis->vec2_D);
698:       VecScatterBegin(pcis->global_to_D,pcis->vec2_D,used_vec,INSERT_VALUES,SCATTER_REVERSE);
699:       VecScatterEnd  (pcis->global_to_D,pcis->vec2_D,used_vec,INSERT_VALUES,SCATTER_REVERSE);
700:       if (ksp) {
701:         KSPSetInitialGuessNonzero(ksp,PETSC_TRUE);
702:       }
703:     }
704:   }

706:   /* prepare MatMult and rhs for solver */
707:   if (pcbddc->use_change_of_basis) {
708:     /* swap pointers for local matrices */
709:     temp_mat = matis->A;
710:     matis->A = pcbddc->local_mat;
711:     pcbddc->local_mat = temp_mat;
712:     if (rhs) {
713:       /* Get local rhs and apply transformation of basis */
714:       VecScatterBegin(pcis->global_to_B,rhs,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
715:       VecScatterEnd  (pcis->global_to_B,rhs,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
716:       /* from original basis to modified basis */
717:       MatMultTranspose(pcbddc->ChangeOfBasisMatrix,pcis->vec1_B,pcis->vec2_B);
718:       /* put back modified values into the global vec using INSERT_VALUES copy mode */
719:       VecScatterBegin(pcis->global_to_B,pcis->vec2_B,rhs,INSERT_VALUES,SCATTER_REVERSE);
720:       VecScatterEnd  (pcis->global_to_B,pcis->vec2_B,rhs,INSERT_VALUES,SCATTER_REVERSE);
721:     }
722:   }

724:   /* remove nullspace if present */
725:   if (ksp && pcbddc->NullSpace) {
726:     MatNullSpaceRemove(pcbddc->NullSpace,used_vec);
727:     MatNullSpaceRemove(pcbddc->NullSpace,rhs);
728:   }
729:   VecDestroy(&used_vec);
730:   return(0);
731: }
732: /* -------------------------------------------------------------------------- */
735: /* -------------------------------------------------------------------------- */
736: /*
737:    PCPostSolve_BDDC - Changes the computed solution if a transformation of basis
738:                      approach has been selected. Also, restores rhs to its original state.

740:    Input Parameter:
741: +  pc - the preconditioner contex

743:    Application Interface Routine: PCPostSolve()

745:    Notes:
746:    The interface routine PCPostSolve() is not usually called directly by
747:    the user, but instead is called by KSPSolve().
748: */
749: static PetscErrorCode PCPostSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
750: {
752:   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
753:   PC_IS          *pcis   = (PC_IS*)(pc->data);
754:   Mat_IS         *matis = (Mat_IS*)pc->pmat->data;
755:   Mat            temp_mat;

758:   if (pcbddc->use_change_of_basis) {
759:     /* swap pointers for local matrices */
760:     temp_mat = matis->A;
761:     matis->A = pcbddc->local_mat;
762:     pcbddc->local_mat = temp_mat;
763:   }
764:   if (pcbddc->use_change_of_basis && x) {
765:     /* Get Local boundary and apply transformation of basis to solution vector */
766:     VecScatterBegin(pcis->global_to_B,x,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
767:     VecScatterEnd  (pcis->global_to_B,x,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
768:     /* from modified basis to original basis */
769:     MatMult(pcbddc->ChangeOfBasisMatrix,pcis->vec1_B,pcis->vec2_B);
770:     /* put back modified values into the global vec using INSERT_VALUES copy mode */
771:     VecScatterBegin(pcis->global_to_B,pcis->vec2_B,x,INSERT_VALUES,SCATTER_REVERSE);
772:     VecScatterEnd  (pcis->global_to_B,pcis->vec2_B,x,INSERT_VALUES,SCATTER_REVERSE);
773:   }
774:   /* add solution removed in presolve */
775:   if (x) {
776:     VecAXPY(x,1.0,pcbddc->temp_solution);
777:   }
778:   /* restore rhs to its original state */
779:   if (rhs) {
780:     VecCopy(pcbddc->original_rhs,rhs);
781:   }
782:   return(0);
783: }
784: /* -------------------------------------------------------------------------- */
787: /* -------------------------------------------------------------------------- */
788: /*
789:    PCSetUp_BDDC - Prepares for the use of the BDDC preconditioner
790:                   by setting data structures and options.

792:    Input Parameter:
793: +  pc - the preconditioner context

795:    Application Interface Routine: PCSetUp()

797:    Notes:
798:    The interface routine PCSetUp() is not usually called directly by
799:    the user, but instead is called by PCApply() if necessary.
800: */
801: PetscErrorCode PCSetUp_BDDC(PC pc)
802: {
803:   PetscErrorCode   ierr;
804:   PC_BDDC*         pcbddc = (PC_BDDC*)pc->data;
805:   MatNullSpace     nearnullspace;
806:   MatStructure     flag;
807:   PetscBool        computeis,computetopography,computesolvers;
808:   PetscBool        new_nearnullspace_provided;

811:   /* the following lines of code should be replaced by a better logic between PCIS, PCNN, PCBDDC and other future nonoverlapping preconditioners */
812:   /* PCIS does not support MatStructure flags different from SAME_PRECONDITIONER */
 813:   /* For BDDC we need to define a local "Neumann" problem different from that defined in PCISSetUp;
 814:      also, BDDC directly builds the Dirichlet problem */

816:   /* split work */
817:   if (pc->setupcalled) {
818:     computeis = PETSC_FALSE;
819:     PCGetOperators(pc,NULL,NULL,&flag);
820:     if (flag == SAME_PRECONDITIONER) {
821:       return(0);
822:     } else if (flag == SAME_NONZERO_PATTERN) {
823:       computetopography = PETSC_FALSE;
824:       computesolvers = PETSC_TRUE;
825:     } else { /* DIFFERENT_NONZERO_PATTERN */
826:       computetopography = PETSC_TRUE;
827:       computesolvers = PETSC_TRUE;
828:     }
829:   } else {
830:     computeis = PETSC_TRUE;
831:     computetopography = PETSC_TRUE;
832:     computesolvers = PETSC_TRUE;
833:   }
834:   if (pcbddc->recompute_topography) {
835:     computetopography = PETSC_TRUE;
836:   }

838:   /* Get stdout for dbg */
839:   if (pcbddc->dbg_flag && !pcbddc->dbg_viewer) {
840:     PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&pcbddc->dbg_viewer);
841:     PetscViewerASCIISynchronizedAllow(pcbddc->dbg_viewer,PETSC_TRUE);
842:     if (pcbddc->current_level) {
843:       PetscViewerASCIIAddTab(pcbddc->dbg_viewer,2);
844:     }
845:   }

847:   /* Set up all the "iterative substructuring" common block without computing solvers */
848:   if (computeis) {
849:     /* HACK INTO PCIS */
850:     PC_IS* pcis = (PC_IS*)pc->data;
851:     pcis->computesolvers = PETSC_FALSE;
852:     PCISSetUp(pc);
853:     ISLocalToGlobalMappingCreateIS(pcis->is_B_local,&pcbddc->BtoNmap);
854:   }

856:   /* Analyze interface */
857:   if (computetopography) {
858:     PCBDDCAnalyzeInterface(pc);
859:   }

861:   /* infer if NullSpace object attached to Mat via MatSetNearNullSpace has changed */
862:   new_nearnullspace_provided = PETSC_FALSE;
863:   MatGetNearNullSpace(pc->pmat,&nearnullspace);
864:   if (pcbddc->onearnullspace) { /* already used nearnullspace */
865:     if (!nearnullspace) { /* near null space attached to mat has been destroyed */
866:       new_nearnullspace_provided = PETSC_TRUE;
867:     } else {
868:       /* determine if the two nullspaces are different (should be lightweight) */
869:       if (nearnullspace != pcbddc->onearnullspace) {
870:         new_nearnullspace_provided = PETSC_TRUE;
871:       } else { /* maybe the user has changed the content of the nearnullspace so check vectors ObjectStateId */
872:         PetscInt         i;
873:         const Vec        *nearnullvecs;
874:         PetscObjectState state;
875:         PetscInt         nnsp_size;
876:         MatNullSpaceGetVecs(nearnullspace,NULL,&nnsp_size,&nearnullvecs);
877:         for (i=0;i<nnsp_size;i++) {
878:           PetscObjectStateGet((PetscObject)nearnullvecs[i],&state);
879:           if (pcbddc->onearnullvecs_state[i] != state) {
880:             new_nearnullspace_provided = PETSC_TRUE;
881:             break;
882:           }
883:         }
884:       }
885:     }
886:   } else {
887:     if (!nearnullspace) { /* both nearnullspaces are null */
888:       new_nearnullspace_provided = PETSC_FALSE;
889:     } else { /* nearnullspace attached later */
890:       new_nearnullspace_provided = PETSC_TRUE;
891:     }
892:   }

894:   /* Setup constraints and related work vectors */
895:   /* reset primal space flags */
896:   pcbddc->new_primal_space = PETSC_FALSE;
897:   pcbddc->new_primal_space_local = PETSC_FALSE;
898:   if (computetopography || new_nearnullspace_provided) {
899:     /* It also sets the primal space flags */
900:     PCBDDCConstraintsSetUp(pc);
901:     /* Allocate needed local vectors (which depends on quantities defined during ConstraintsSetUp) */
902:     PCBDDCSetUpLocalWorkVectors(pc);
903:   }

905:   if (computesolvers || pcbddc->new_primal_space) {
906:     /* reset data */
907:     PCBDDCScalingDestroy(pc);
908:     /* Create coarse and local stuffs */
909:     PCBDDCSetUpSolvers(pc);
910:     PCBDDCScalingSetUp(pc);
911:   }
912:   if (pcbddc->dbg_flag && pcbddc->current_level) {
913:     PetscViewerASCIISubtractTab(pcbddc->dbg_viewer,2);
914:   }
915:   return(0);
916: }

918: /* -------------------------------------------------------------------------- */
919: /*
920:    PCApply_BDDC - Applies the BDDC preconditioner to a vector.

922:    Input Parameters:
923: .  pc - the preconditioner context
924: .  r - input vector (global)

926:    Output Parameter:
927: .  z - output vector (global)

929:    Application Interface Routine: PCApply()
930:  */
933: PetscErrorCode PCApply_BDDC(PC pc,Vec r,Vec z)
934: {
935:   PC_IS             *pcis = (PC_IS*)(pc->data);
936:   PC_BDDC           *pcbddc = (PC_BDDC*)(pc->data);
937:   PetscErrorCode    ierr;
938:   const PetscScalar one = 1.0;
939:   const PetscScalar m_one = -1.0;
940:   const PetscScalar zero = 0.0;

942: /* This code is similar to that provided in nn.c for PCNN
943:    NN interface preconditioner changed to BDDC
944:    Added support for M_3 preconditioner in the reference article (code is active if pcbddc->switch_static = PETSC_TRUE) */

947:   if (!pcbddc->use_exact_dirichlet_trick) {
948:     /* First Dirichlet solve */
949:     VecScatterBegin(pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);
950:     VecScatterEnd  (pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);
951:     KSPSolve(pcbddc->ksp_D,pcis->vec1_D,pcis->vec2_D);
952:     /*
953:       Assembling right hand side for BDDC operator
 954:       - pcis->vec1_D for the Dirichlet part (if needed, i.e. when pcbddc->switch_static == PETSC_TRUE)
955:       - pcis->vec1_B the interface part of the global vector z
956:     */
957:     VecScale(pcis->vec2_D,m_one);
958:     MatMult(pcis->A_BI,pcis->vec2_D,pcis->vec1_B);
959:     if (pcbddc->switch_static) { MatMultAdd(pcis->A_II,pcis->vec2_D,pcis->vec1_D,pcis->vec1_D); }
960:     VecScale(pcis->vec2_D,m_one);
961:     VecCopy(r,z);
962:     VecScatterBegin(pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE);
963:     VecScatterEnd  (pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE);
964:     PCBDDCScalingRestriction(pc,z,pcis->vec1_B);
965:   } else {
966:     VecSet(pcis->vec1_D,zero);
967:     VecSet(pcis->vec2_D,zero);
968:     PCBDDCScalingRestriction(pc,r,pcis->vec1_B);
969:   }

971:   /* Apply interface preconditioner
972:      input/output vecs: pcis->vec1_B and pcis->vec1_D */
973:   PCBDDCApplyInterfacePreconditioner(pc);

975:   /* Apply transpose of partition of unity operator */
976:   PCBDDCScalingExtension(pc,pcis->vec1_B,z);

978:   /* Second Dirichlet solve and assembling of output */
979:   VecScatterBegin(pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
980:   VecScatterEnd  (pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
981:   MatMult(pcis->A_IB,pcis->vec1_B,pcis->vec3_D);
982:   if (pcbddc->switch_static) { MatMultAdd(pcis->A_II,pcis->vec1_D,pcis->vec3_D,pcis->vec3_D); }
983:   KSPSolve(pcbddc->ksp_D,pcis->vec3_D,pcis->vec4_D);
984:   VecScale(pcis->vec4_D,m_one);
985:   if (pcbddc->switch_static) { VecAXPY (pcis->vec4_D,one,pcis->vec1_D); }
986:   VecAXPY (pcis->vec2_D,one,pcis->vec4_D);
987:   VecScatterBegin(pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE);
988:   VecScatterEnd  (pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE);
989:   return(0);
990: }
991: /* -------------------------------------------------------------------------- */

995: PetscErrorCode PCDestroy_BDDC(PC pc)
996: {
997:   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;

1001:   /* free data created by PCIS */
1002:   PCISDestroy(pc);
1003:   /* free BDDC custom data  */
1004:   PCBDDCResetCustomization(pc);
1005:   /* destroy objects related to topography */
1006:   PCBDDCResetTopography(pc);
1007:   /* free allocated graph structure */
1008:   PetscFree(pcbddc->mat_graph);
1009:   /* free data for scaling operator */
1010:   PCBDDCScalingDestroy(pc);
1011:   /* free solvers stuff */
1012:   PCBDDCResetSolvers(pc);
1013:   /* free global vectors needed in presolve */
1014:   VecDestroy(&pcbddc->temp_solution);
1015:   VecDestroy(&pcbddc->original_rhs);
1016:   ISLocalToGlobalMappingDestroy(&pcbddc->BtoNmap);
1017:   /* remove functions */
1018:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetPrimalVerticesLocalIS_C",NULL);
1019:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseningRatio_C",NULL);
1020:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLevel_C",NULL);
1021:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetUseExactDirichlet_C",NULL);
1022:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLevels_C",NULL);
1023:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNullSpace_C",NULL);
1024:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundaries_C",NULL);
1025:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundaries_C",NULL);
1026:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundaries_C",NULL);
1027:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundaries_C",NULL);
1028:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplitting_C",NULL);
1029:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",NULL);
1030:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCCreateFETIDPOperators_C",NULL);
1031:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetRHS_C",NULL);
1032:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetSolution_C",NULL);
1033:   /* Free the private data structure */
1034:   PetscFree(pc->data);
1035:   return(0);
1036: }
1037: /* -------------------------------------------------------------------------- */

1041: static PetscErrorCode PCBDDCMatFETIDPGetRHS_BDDC(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
1042: {
1043:   FETIDPMat_ctx  mat_ctx;
1044:   PC_IS*         pcis;
1045:   PC_BDDC*       pcbddc;

1049:   MatShellGetContext(fetidp_mat,&mat_ctx);
1050:   pcis = (PC_IS*)mat_ctx->pc->data;
1051:   pcbddc = (PC_BDDC*)mat_ctx->pc->data;

1053:   /* change of basis for physical rhs if needed
 1054:      It also changes the rhs in case of Dirichlet boundaries */
1055:   PCPreSolve_BDDC(mat_ctx->pc,NULL,standard_rhs,NULL);
1056:   /* store vectors for computation of fetidp final solution */
1057:   VecScatterBegin(pcis->global_to_D,standard_rhs,mat_ctx->temp_solution_D,INSERT_VALUES,SCATTER_FORWARD);
1058:   VecScatterEnd(pcis->global_to_D,standard_rhs,mat_ctx->temp_solution_D,INSERT_VALUES,SCATTER_FORWARD);
1059:   /* scale rhs since it should be unassembled */
1060:   /* TODO use counter scaling? (also below) */
1061:   VecScatterBegin(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);
1062:   VecScatterEnd(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);
1063:   /* Apply partition of unity */
1064:   VecPointwiseMult(mat_ctx->temp_solution_B,pcis->D,mat_ctx->temp_solution_B);
1065:   /* PCBDDCScalingRestriction(mat_ctx->pc,standard_rhs,mat_ctx->temp_solution_B); */
1066:   if (!pcbddc->switch_static) {
1067:     /* compute partially subassembled Schur complement right-hand side */
1068:     KSPSolve(pcbddc->ksp_D,mat_ctx->temp_solution_D,pcis->vec1_D);
1069:     MatMult(pcis->A_BI,pcis->vec1_D,pcis->vec1_B);
1070:     VecAXPY(mat_ctx->temp_solution_B,-1.0,pcis->vec1_B);
1071:     VecSet(standard_rhs,0.0);
1072:     VecScatterBegin(pcis->global_to_B,mat_ctx->temp_solution_B,standard_rhs,ADD_VALUES,SCATTER_REVERSE);
1073:     VecScatterEnd(pcis->global_to_B,mat_ctx->temp_solution_B,standard_rhs,ADD_VALUES,SCATTER_REVERSE);
1074:     /* PCBDDCScalingRestriction(mat_ctx->pc,standard_rhs,mat_ctx->temp_solution_B); */
1075:     VecScatterBegin(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);
1076:     VecScatterEnd(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);
1077:     VecPointwiseMult(mat_ctx->temp_solution_B,pcis->D,mat_ctx->temp_solution_B);
1078:   }
1079:   /* BDDC rhs */
1080:   VecCopy(mat_ctx->temp_solution_B,pcis->vec1_B);
1081:   if (pcbddc->switch_static) {
1082:     VecCopy(mat_ctx->temp_solution_D,pcis->vec1_D);
1083:   }
1084:   /* apply BDDC */
1085:   PCBDDCApplyInterfacePreconditioner(mat_ctx->pc);
1086:   /* Application of B_delta and assembling of rhs for fetidp fluxes */
1087:   VecSet(fetidp_flux_rhs,0.0);
1088:   MatMult(mat_ctx->B_delta,pcis->vec1_B,mat_ctx->lambda_local);
1089:   VecScatterBegin(mat_ctx->l2g_lambda,mat_ctx->lambda_local,fetidp_flux_rhs,ADD_VALUES,SCATTER_FORWARD);
1090:   VecScatterEnd  (mat_ctx->l2g_lambda,mat_ctx->lambda_local,fetidp_flux_rhs,ADD_VALUES,SCATTER_FORWARD);
1091:   /* restore original rhs */
1092:   VecCopy(pcbddc->original_rhs,standard_rhs);
1093:   return(0);
1094: }

1098: /*@
1099:  PCBDDCMatFETIDPGetRHS - Compute the right-hand side for FETIDP linear system

1101:    Collective

1103:    Input Parameters:
 1104: +  fetidp_mat   - the FETIDP matrix object obtained by calling PCBDDCCreateFETIDPOperators
 1105: -  standard_rhs - the right-hand side for your linear system

 1107:    Output Parameters:
 1108: .  fetidp_flux_rhs   - the right-hand side for the FETIDP linear system

1110:    Level: developer

1112:    Notes:

1114: .seealso: PCBDDC,PCBDDCCreateFETIDPOperators
1115: @*/
1116: PetscErrorCode PCBDDCMatFETIDPGetRHS(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
1117: {
1118:   FETIDPMat_ctx  mat_ctx;

1122:   MatShellGetContext(fetidp_mat,&mat_ctx);
1123:   PetscTryMethod(mat_ctx->pc,"PCBDDCMatFETIDPGetRHS_C",(Mat,Vec,Vec),(fetidp_mat,standard_rhs,fetidp_flux_rhs));
1124:   return(0);
1125: }
1126: /* -------------------------------------------------------------------------- */

1130: static PetscErrorCode PCBDDCMatFETIDPGetSolution_BDDC(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
1131: {
1132:   FETIDPMat_ctx  mat_ctx;
1133:   PC_IS*         pcis;
1134:   PC_BDDC*       pcbddc;

1138:   MatShellGetContext(fetidp_mat,&mat_ctx);
1139:   pcis = (PC_IS*)mat_ctx->pc->data;
1140:   pcbddc = (PC_BDDC*)mat_ctx->pc->data;

1142:   /* apply B_delta^T */
1143:   VecScatterBegin(mat_ctx->l2g_lambda,fetidp_flux_sol,mat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);
1144:   VecScatterEnd  (mat_ctx->l2g_lambda,fetidp_flux_sol,mat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);
1145:   MatMultTranspose(mat_ctx->B_delta,mat_ctx->lambda_local,pcis->vec1_B);
1146:   /* compute rhs for BDDC application */
1147:   VecAYPX(pcis->vec1_B,-1.0,mat_ctx->temp_solution_B);
1148:   if (pcbddc->switch_static) {
1149:     VecCopy(mat_ctx->temp_solution_D,pcis->vec1_D);
1150:   }
1151:   /* apply BDDC */
1152:   PCBDDCApplyInterfacePreconditioner(mat_ctx->pc);
1153:   /* put values into standard global vector */
1154:   VecScatterBegin(pcis->global_to_B,pcis->vec1_B,standard_sol,INSERT_VALUES,SCATTER_REVERSE);
1155:   VecScatterEnd  (pcis->global_to_B,pcis->vec1_B,standard_sol,INSERT_VALUES,SCATTER_REVERSE);
1156:   if (!pcbddc->switch_static) {
 1157:     /* compute interior values if we solved for the partially subassembled Schur complement */
1158:     MatMult(pcis->A_IB,pcis->vec1_B,pcis->vec1_D);
1159:     VecAXPY(mat_ctx->temp_solution_D,-1.0,pcis->vec1_D);
1160:     KSPSolve(pcbddc->ksp_D,mat_ctx->temp_solution_D,pcis->vec1_D);
1161:   }
1162:   VecScatterBegin(pcis->global_to_D,pcis->vec1_D,standard_sol,INSERT_VALUES,SCATTER_REVERSE);
1163:   VecScatterEnd  (pcis->global_to_D,pcis->vec1_D,standard_sol,INSERT_VALUES,SCATTER_REVERSE);
1164:   /* final change of basis if needed
 1165:      It also adds back the Dirichlet part removed during rhs assembly */
1166:   PCPostSolve_BDDC(mat_ctx->pc,NULL,NULL,standard_sol);
1167:   return(0);
1168: }

1172: /*@
1173:  PCBDDCMatFETIDPGetSolution - Compute the physical solution from the solution of the FETIDP linear system

1175:    Collective

1177:    Input Parameters:
1178: +  fetidp_mat        - the FETIDP matrix obtained by calling PCBDDCCreateFETIDPOperators
1179: .  fetidp_flux_sol - the solution of the FETIDP linear system

1181:    Output Parameters:
1182: -  standard_sol      - the solution defined on the physical domain

1184:    Level: developer

1186:    Notes:

1188: .seealso: PCBDDC,PCBDDCCreateFETIDPOperators
1189: @*/
1190: PetscErrorCode PCBDDCMatFETIDPGetSolution(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
1191: {
1192:   FETIDPMat_ctx  mat_ctx;

1196:   MatShellGetContext(fetidp_mat,&mat_ctx);
1197:   PetscTryMethod(mat_ctx->pc,"PCBDDCMatFETIDPGetSolution_C",(Mat,Vec,Vec),(fetidp_mat,fetidp_flux_sol,standard_sol));
1198:   return(0);
1199: }
1200: /* -------------------------------------------------------------------------- */

1202: extern PetscErrorCode FETIDPMatMult(Mat,Vec,Vec);
1203: extern PetscErrorCode PCBDDCDestroyFETIDPMat(Mat);
1204: extern PetscErrorCode FETIDPPCApply(PC,Vec,Vec);
1205: extern PetscErrorCode PCBDDCDestroyFETIDPPC(PC);

1209: static PetscErrorCode PCBDDCCreateFETIDPOperators_BDDC(PC pc, Mat *fetidp_mat, PC *fetidp_pc)
1210: {

1212:   FETIDPMat_ctx  fetidpmat_ctx;
1213:   Mat            newmat;
1214:   FETIDPPC_ctx   fetidppc_ctx;
1215:   PC             newpc;
1216:   MPI_Comm       comm;

1220:   PetscObjectGetComm((PetscObject)pc,&comm);
1221:   /* FETIDP linear matrix */
1222:   PCBDDCCreateFETIDPMatContext(pc,&fetidpmat_ctx);
1223:   PCBDDCSetupFETIDPMatContext(fetidpmat_ctx);
1224:   MatCreateShell(comm,PETSC_DECIDE,PETSC_DECIDE,fetidpmat_ctx->n_lambda,fetidpmat_ctx->n_lambda,fetidpmat_ctx,&newmat);
1225:   MatShellSetOperation(newmat,MATOP_MULT,(void (*)(void))FETIDPMatMult);
1226:   MatShellSetOperation(newmat,MATOP_DESTROY,(void (*)(void))PCBDDCDestroyFETIDPMat);
1227:   MatSetUp(newmat);
1228:   /* FETIDP preconditioner */
1229:   PCBDDCCreateFETIDPPCContext(pc,&fetidppc_ctx);
1230:   PCBDDCSetupFETIDPPCContext(newmat,fetidppc_ctx);
1231:   PCCreate(comm,&newpc);
1232:   PCSetType(newpc,PCSHELL);
1233:   PCShellSetContext(newpc,fetidppc_ctx);
1234:   PCShellSetApply(newpc,FETIDPPCApply);
1235:   PCShellSetDestroy(newpc,PCBDDCDestroyFETIDPPC);
1236:   PCSetOperators(newpc,newmat,newmat,SAME_PRECONDITIONER);
1237:   PCSetUp(newpc);
1238:   /* return pointers for objects created */
1239:   *fetidp_mat=newmat;
1240:   *fetidp_pc=newpc;
1241:   return(0);
1242: }

1246: /*@
1247:  PCBDDCCreateFETIDPOperators - Create operators for FETIDP

1249:    Collective

1251:    Input Parameters:
 1252: .  pc - the BDDC preconditioning context, already set up

 1254:    Output Parameters:
 1255: +  fetidp_mat - shell FETIDP matrix object
 1256: -  fetidp_pc  - shell Dirichlet preconditioner for the FETIDP matrix

1258:    Options Database Keys:
 1259: .    -fetidp_fullyredundant - use or not a fully redundant set of Lagrange multipliers

1261:    Level: developer

1263:    Notes:
 1264:      Currently the only operation provided for the FETIDP matrix is MatMult

1266: .seealso: PCBDDC
1267: @*/
1268: PetscErrorCode PCBDDCCreateFETIDPOperators(PC pc, Mat *fetidp_mat, PC *fetidp_pc)
1269: {

1274:   if (pc->setupcalled) {
1275:     PetscUseMethod(pc,"PCBDDCCreateFETIDPOperators_C",(PC,Mat*,PC*),(pc,fetidp_mat,fetidp_pc));
 1276:   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"You must call PCSetUp() first\n");
1277:   return(0);
1278: }
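/*
   Example (sketch): solve the FETIDP system obtained from an already set up PCBDDC. Assumes 'pc'
   has type PCBDDC with PCSetUp already called, 'rhs'/'sol' are global vectors compatible with the
   MATIS operator, and 'comm' is their communicator:

     Mat F;
     PC  Fpc;
     KSP fetidp_ksp;
     Vec flux_rhs,flux_sol;
     PCBDDCCreateFETIDPOperators(pc,&F,&Fpc);
     MatGetVecs(F,&flux_rhs,&flux_sol);
     PCBDDCMatFETIDPGetRHS(F,rhs,flux_rhs);
     KSPCreate(comm,&fetidp_ksp);
     KSPSetOperators(fetidp_ksp,F,F,SAME_PRECONDITIONER);
     KSPSetPC(fetidp_ksp,Fpc);
     KSPSolve(fetidp_ksp,flux_rhs,flux_sol);
     PCBDDCMatFETIDPGetSolution(F,flux_sol,sol);

   (destroy the KSP, the shell operators and the work vectors when no longer needed)
*/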
1279: /* -------------------------------------------------------------------------- */
1280: /*MC
1281:    PCBDDC - Balancing Domain Decomposition by Constraints.

1283:    An implementation of the BDDC preconditioner based on

1285: .vb
1286:    [1] C. R. Dohrmann. "An approximate BDDC preconditioner", Numerical Linear Algebra with Applications Volume 14, Issue 2, pages 149-168, March 2007
1287:    [2] A. Klawonn and O. B. Widlund. "Dual-Primal FETI Methods for Linear Elasticity", http://cs.nyu.edu/csweb/Research/TechReports/TR2004-855/TR2004-855.pdf
1288:    [3] J. Mandel, B. Sousedik, C. R. Dohrmann. "Multispace and Multilevel BDDC", http://arxiv.org/abs/0712.3977
1289: .ve

1291:    The matrix to be preconditioned (Pmat) must be of type MATIS.

1293:    Currently works with MATIS matrices with local Neumann matrices of type MATSEQAIJ, MATSEQBAIJ or MATSEQSBAIJ, either with real or complex numbers.

1295:    It also works with unsymmetric and indefinite problems.

1297:    Unlike 'conventional' interface preconditioners, PCBDDC iterates over all degrees of freedom, not just those on the interface. This allows the use of approximate solvers on the subdomains.

1299:    Approximate local solvers are automatically adapted for singular linear problems (see [1]) if the user has provided the nullspace using PCBDDCSetNullSpace

 1301:    Boundary nodes are split into vertices, edges and faces using information from the local-to-global mapping of dofs and the local connectivity graph of nodes. The latter can be customized by using PCBDDCSetLocalAdjacencyGraph

1303:    Constraints can be customized by attaching a MatNullSpace object to the MATIS matrix via MatSetNearNullSpace.

 1305:    Change of basis is performed similarly to [2] when requested. When more than one constraint is present on a single connected component (i.e. an edge or a face), a robust method based on local QR factorizations is used.

 1307:    The PETSc implementation also supports multilevel BDDC [3]. Coarse grids are partitioned using a MatPartitioning object.

1309:    Options Database Keys:

1311: .    -pc_bddc_use_vertices <1> - use or not vertices in primal space
1312: .    -pc_bddc_use_edges <1> - use or not edges in primal space
1313: .    -pc_bddc_use_faces <0> - use or not faces in primal space
1314: .    -pc_bddc_use_change_of_basis <0> - use change of basis approach (on edges only)
1315: .    -pc_bddc_use_change_on_faces <0> - use change of basis approach on faces if change of basis has been requested
1316: .    -pc_bddc_switch_static <0> - switches from M_2 to M_3 operator (see reference article [1])
1317: .    -pc_bddc_levels <0> - maximum number of levels for multilevel
1318: .    -pc_bddc_coarsening_ratio - H/h ratio at the coarser level
1319: -    -pc_bddc_check_level <0> - set verbosity level of debugging output

1321:    Options for Dirichlet, Neumann or coarse solver can be set with
1322: .vb
1323:       -pc_bddc_dirichlet_
1324:       -pc_bddc_neumann_
1325:       -pc_bddc_coarse_
1326: .ve
 1327:    e.g. -pc_bddc_dirichlet_ksp_type richardson -pc_bddc_dirichlet_pc_type gamg

1329:    When using a multilevel approach, solvers' options at the N-th level can be specified as
1330: .vb
1331:       -pc_bddc_dirichlet_N_
1332:       -pc_bddc_neumann_N_
1333:       -pc_bddc_coarse_N_
1334: .ve
 1335:    Note that the level number ranges from 0 (finest) to N (coarsest)

1337:    Level: intermediate

1339:    Developer notes:
1340:      Currently does not work with KSPBCGS and other KSPs requiring the specialization of PCApplyTranspose

1342:      New deluxe scaling operator will be available soon.

1344:    Contributed by Stefano Zampini

1346: .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,  MATIS
1347: M*/
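/*
   Example (sketch): basic usage of PCBDDC from a KSP. Assumes 'A' is an assembled matrix of type
   MATIS and 'b','x' are compatible global vectors:

     KSP ksp;
     PC  pc;
     KSPCreate(PetscObjectComm((PetscObject)A),&ksp);
     KSPSetOperators(ksp,A,A,SAME_NONZERO_PATTERN);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCBDDC);
     KSPSetFromOptions(ksp);
     KSPSolve(ksp,b,x);
     KSPDestroy(&ksp);
*/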

1351: PETSC_EXTERN PetscErrorCode PCCreate_BDDC(PC pc)
1352: {
1353:   PetscErrorCode      ierr;
1354:   PC_BDDC             *pcbddc;

1357:   /* Creates the private data structure for this preconditioner and attach it to the PC object. */
1358:   PetscNewLog(pc,&pcbddc);
1359:   pc->data  = (void*)pcbddc;

1361:   /* create PCIS data structure */
1362:   PCISCreate(pc);

1364:   /* BDDC customization */
1365:   pcbddc->use_vertices        = PETSC_TRUE;
1366:   pcbddc->use_edges           = PETSC_TRUE;
1367:   pcbddc->use_faces           = PETSC_FALSE;
1368:   pcbddc->use_change_of_basis = PETSC_FALSE;
1369:   pcbddc->use_change_on_faces = PETSC_FALSE;
1370:   pcbddc->switch_static       = PETSC_FALSE;
1371:   pcbddc->use_nnsp_true       = PETSC_FALSE; /* not yet exposed */
1372:   pcbddc->dbg_flag            = 0;

1374:   pcbddc->BtoNmap                    = 0;
1375:   pcbddc->local_primal_size          = 0;
1376:   pcbddc->n_vertices                 = 0;
1377:   pcbddc->n_actual_vertices          = 0;
1378:   pcbddc->n_constraints              = 0;
1379:   pcbddc->primal_indices_local_idxs  = 0;
1380:   pcbddc->recompute_topography       = PETSC_FALSE;
1381:   pcbddc->coarse_size                = 0;
1382:   pcbddc->new_primal_space           = PETSC_FALSE;
1383:   pcbddc->new_primal_space_local     = PETSC_FALSE;
1384:   pcbddc->global_primal_indices      = 0;
1385:   pcbddc->onearnullspace             = 0;
1386:   pcbddc->onearnullvecs_state        = 0;
1387:   pcbddc->user_primal_vertices       = 0;
1388:   pcbddc->NullSpace                  = 0;
1389:   pcbddc->temp_solution              = 0;
1390:   pcbddc->original_rhs               = 0;
1391:   pcbddc->local_mat                  = 0;
1392:   pcbddc->ChangeOfBasisMatrix        = 0;
1393:   pcbddc->coarse_vec                 = 0;
1394:   pcbddc->coarse_rhs                 = 0;
1395:   pcbddc->coarse_ksp                 = 0;
1396:   pcbddc->coarse_phi_B               = 0;
1397:   pcbddc->coarse_phi_D               = 0;
1398:   pcbddc->coarse_psi_B               = 0;
1399:   pcbddc->coarse_psi_D               = 0;
1400:   pcbddc->vec1_P                     = 0;
1401:   pcbddc->vec1_R                     = 0;
1402:   pcbddc->vec2_R                     = 0;
1403:   pcbddc->local_auxmat1              = 0;
1404:   pcbddc->local_auxmat2              = 0;
1405:   pcbddc->R_to_B                     = 0;
1406:   pcbddc->R_to_D                     = 0;
1407:   pcbddc->ksp_D                      = 0;
1408:   pcbddc->ksp_R                      = 0;
1409:   pcbddc->NeumannBoundaries          = 0;
1410:   pcbddc->user_provided_isfordofs    = PETSC_FALSE;
1411:   pcbddc->n_ISForDofs                = 0;
1412:   pcbddc->ISForDofs                  = 0;
1413:   pcbddc->ConstraintMatrix           = 0;
1414:   pcbddc->use_exact_dirichlet_trick  = PETSC_TRUE;
1415:   pcbddc->coarse_loc_to_glob         = 0;
1416:   pcbddc->coarsening_ratio           = 8;
1417:   pcbddc->current_level              = 0;
1418:   pcbddc->max_levels                 = 0;

1420:   /* create local graph structure */
1421:   PCBDDCGraphCreate(&pcbddc->mat_graph);

1423:   /* scaling */
1424:   pcbddc->use_deluxe_scaling         = PETSC_FALSE;
1425:   pcbddc->work_scaling               = 0;

1427:   /* function pointers */
1428:   pc->ops->apply               = PCApply_BDDC;
1429:   pc->ops->applytranspose      = 0;
1430:   pc->ops->setup               = PCSetUp_BDDC;
1431:   pc->ops->destroy             = PCDestroy_BDDC;
1432:   pc->ops->setfromoptions      = PCSetFromOptions_BDDC;
1433:   pc->ops->view                = 0;
1434:   pc->ops->applyrichardson     = 0;
1435:   pc->ops->applysymmetricleft  = 0;
1436:   pc->ops->applysymmetricright = 0;
1437:   pc->ops->presolve            = PCPreSolve_BDDC;
1438:   pc->ops->postsolve           = PCPostSolve_BDDC;

1440:   /* composing function */
1441:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetPrimalVerticesLocalIS_C",PCBDDCSetPrimalVerticesLocalIS_BDDC);
1442:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseningRatio_C",PCBDDCSetCoarseningRatio_BDDC);
1443:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLevel_C",PCBDDCSetLevel_BDDC);
1444:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetUseExactDirichlet_C",PCBDDCSetUseExactDirichlet_BDDC);
1445:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLevels_C",PCBDDCSetLevels_BDDC);
1446:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNullSpace_C",PCBDDCSetNullSpace_BDDC);
1447:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundaries_C",PCBDDCSetDirichletBoundaries_BDDC);
1448:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundaries_C",PCBDDCSetNeumannBoundaries_BDDC);
1449:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundaries_C",PCBDDCGetDirichletBoundaries_BDDC);
1450:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundaries_C",PCBDDCGetNeumannBoundaries_BDDC);
1451:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplitting_C",PCBDDCSetDofsSplitting_BDDC);
1452:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",PCBDDCSetLocalAdjacencyGraph_BDDC);
1453:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCCreateFETIDPOperators_C",PCBDDCCreateFETIDPOperators_BDDC);
1454:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetRHS_C",PCBDDCMatFETIDPGetRHS_BDDC);
1455:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetSolution_C",PCBDDCMatFETIDPGetSolution_BDDC);
1456:   return(0);
1457: }