Actual source code: pack.c

  1: #define PETSCDM_DLL
  2: 
  3: #include "petscda.h"
  4: #include "private/dmimpl.h"
  5: #include "petscmat.h"

  7: typedef struct _DMCompositeOps *DMCompositeOps;
  8: struct _DMCompositeOps {
  9:   DMOPS(DMComposite)
 10: };

 12: /*
 13:    rstart is where an array/subvector starts in the global parallel vector, so array
 14:    rstarts are meaningless (and set to the previous one) except on the processor where the array lives
 15: */

 17: typedef enum {DMCOMPOSITE_ARRAY, DMCOMPOSITE_DM} DMCompositeLinkType;

 19: struct DMCompositeLink {
 20:   DMCompositeLinkType    type;
 21:   struct DMCompositeLink *next;
 22:   PetscInt               n,rstart;      /* rstart is relative to this process */
 23:   PetscInt               grstart;       /* grstart is relative to all processes */

 25:   /* only used for DMCOMPOSITE_DM */
 26:   PetscInt               *grstarts;     /* global row for first unknown of this DM on each process */
 27:   DM                     dm;

 29:   /* only used for DMCOMPOSITE_ARRAY */
 30:   PetscMPIInt            rank;          /* process where array unknowns live */
 31: };

 33: struct _p_DMComposite {
 34:   PETSCHEADER(struct _DMCompositeOps);
 35:   DMHEADER
 36:   PetscInt               n,N,rstart;           /* rstart is relative to all processors, n unknowns owned by this process, N is total unknowns */
 37:   PetscInt               nghost;               /* number of all local entries, including DA ghost points and any shared redundant arrays */
 38:   PetscInt               nDM,nredundant,nmine; /* how many DMs and separate redundant arrays are used to build the DMComposite (nmine is the number on this process) */
 39:   PetscTruth             setup;                /* after this is set, cannot add new links to the DMComposite */
 40:   struct DMCompositeLink *next;

 42:   PetscErrorCode (*FormCoupleLocations)(DMComposite,Mat,PetscInt*,PetscInt*,PetscInt,PetscInt,PetscInt,PetscInt);
 43:   void                   *ctx;                 /* place for user to set information they may need in FormCoupleLocations() */
 44: };

 48: /*@C
 49:     DMCompositeSetCoupling - Sets a user-provided routine that computes the coupling between the
 50:       separate components (DAs and arrays) in a DMComposite, used to build the correct matrix nonzero structure.


 53:     Collective on MPI_Comm

 55:     Input Parameter:
 56: +   dmcomposite - the composite object
 57: -   formcouplelocations - routine to set the nonzero locations in the matrix

 59:     Level: advanced

 61:     Notes: See DMCompositeSetContext() and DMCompositeGetContext() for how to get user information into
 62:         this routine

 64: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
 65:          DMCompositeGather(), DMCompositeCreateGlobalVector(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess()
 66:          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries(), DMCompositeSetContext(),
 67:          DMCompositeGetContext()

 69: @*/
 70: PetscErrorCode  DMCompositeSetCoupling(DMComposite dmcomposite,PetscErrorCode (*FormCoupleLocations)(DMComposite,Mat,PetscInt*,PetscInt*,PetscInt,PetscInt,PetscInt,PetscInt))
 71: {
 73:   dmcomposite->FormCoupleLocations = FormCoupleLocations;
 74:   return(0);
 75: }
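
A usage sketch (not part of pack.c): MyCoupleLocations below is a hypothetical user callback; only its prototype is taken from DMCompositeSetCoupling() above, and the comments reflect how DMCompositeGetMatrix() later in this file invokes it.

    PetscErrorCode MyCoupleLocations(DMComposite packer,Mat J,PetscInt *dnz,PetscInt *onz,
                                     PetscInt rstart,PetscInt nrows,PetscInt start,PetscInt end)
    {
      /* first pass (J == PETSC_NULL): add the coupling nonzeros to the dnz/onz preallocation counts;
         second pass: insert placeholder entries into J with MatSetValues() */
      return(0);
    }

    /* elsewhere, after the DMComposite has been built */
    ierr = DMCompositeSetCoupling(packer,MyCoupleLocations);CHKERRQ(ierr);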

 79: /*@
 80:     DMCompositeSetContext - Allows the user to stash data they may need within the coupling routine
 81:       set with DMCompositeSetCoupling()


 84:     Not Collective

 86:     Input Parameter:
 87: +   dmcomposite - the composite object
 88: -   ctx - the user supplied context

 90:     Level: advanced

 92:     Notes: Use DMCompositeGetContext() to retrieve the context when needed.

 94: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
 95:          DMCompositeGather(), DMCompositeCreateGlobalVector(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess()
 96:          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries(), DMCompositeSetCoupling(),
 97:          DMCompositeGetContext()

 99: @*/
100: PetscErrorCode  DMCompositeSetContext(DMComposite dmcomposite,void *ctx)
101: {
103:   dmcomposite->ctx = ctx;
104:   return(0);
105: }

109: /*@
110:     DMCompositeGetContext - Access the context set with DMCompositeSetContext()


113:     Not Collective

115:     Input Parameter:
116: .   dmcomposite - the composite object

118:     Output Parameter:
119: .    ctx - the user supplied context

121:     Level: advanced

123:     Notes: Use DMCompositeSetContext() to set the context.

125: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
126:          DMCompositeGather(), DMCompositeCreateGlobalVector(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess()
127:          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries(), DMCompositeSetCoupling(),
128:          DMCompositeSetContext()

130: @*/
131: PetscErrorCode  DMCompositeGetContext(DMComposite dmcomposite,void **ctx)
132: {
134:   *ctx = dmcomposite->ctx;
135:   return(0);
136: }
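
A minimal sketch of stashing and retrieving user data; AppCtx, user and lambda are hypothetical names chosen for illustration.

    typedef struct { PetscReal lambda; } AppCtx;   /* hypothetical user data */
    AppCtx         user;
    AppCtx         *ctx;
    PetscErrorCode ierr;

    ierr = DMCompositeSetContext(packer,(void*)&user);CHKERRQ(ierr);
    /* ... later, typically inside the coupling routine ... */
    ierr = DMCompositeGetContext(packer,(void**)&ctx);CHKERRQ(ierr);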


141: /*@C
142:     DMCompositeCreate - Creates a vector packer, used to generate "composite"
143:       vectors made up of several subvectors.

145:     Collective on MPI_Comm

147:     Input Parameter:
148: .   comm - the processors that will share the global vector

150:     Output Parameters:
151: .   packer - the packer object

153:     Level: advanced

155: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
156:          DMCompositeGather(), DMCompositeCreateGlobalVector(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess()
157:          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

159: @*/
160: PetscErrorCode  DMCompositeCreate(MPI_Comm comm,DMComposite *packer)
161: {
163:   DMComposite    p;

167:   *packer = PETSC_NULL;
168: #ifndef PETSC_USE_DYNAMIC_LIBRARIES
169:   DMInitializePackage(PETSC_NULL);
170: #endif

172:   PetscHeaderCreate(p,_p_DMComposite,struct _DMCompositeOps,DM_COOKIE,0,"DM",comm,DMCompositeDestroy,0);
173:   PetscObjectChangeTypeName((PetscObject)p,"DMComposite");
174:   p->n            = 0;
175:   p->next         = PETSC_NULL;
176:   p->nredundant   = 0;
177:   p->nDM          = 0;

179:   p->ops->createglobalvector = DMCompositeCreateGlobalVector;
180:   p->ops->createlocalvector  = DMCompositeCreateLocalVector;
181:   p->ops->refine             = DMCompositeRefine;
182:   p->ops->getinterpolation   = DMCompositeGetInterpolation;
183:   p->ops->getmatrix          = DMCompositeGetMatrix;
184:   p->ops->getcoloring        = DMCompositeGetColoring;
185:   p->ops->globaltolocalbegin = DMCompositeGlobalToLocalBegin;
186:   p->ops->globaltolocalend   = DMCompositeGlobalToLocalEnd;
187:   p->ops->destroy            = DMCompositeDestroy;

189:   *packer = p;
190:   return(0);
191: }
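
A typical build sequence, shown as a sketch under the assumption that da is a DA created elsewhere and that two redundant scalars live on process 0.

    DMComposite    packer;
    Vec            U;
    PetscErrorCode ierr;

    ierr = DMCompositeCreate(PETSC_COMM_WORLD,&packer);CHKERRQ(ierr);
    ierr = DMCompositeAddDM(packer,(DM)da);CHKERRQ(ierr);     /* da is an existing DA (assumed) */
    ierr = DMCompositeAddArray(packer,0,2);CHKERRQ(ierr);     /* 2 redundant entries owned by rank 0 */
    ierr = DMCompositeCreateGlobalVector(packer,&U);CHKERRQ(ierr);
    /* ... use U ... */
    ierr = VecDestroy(U);CHKERRQ(ierr);
    ierr = DMCompositeDestroy(packer);CHKERRQ(ierr);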


197: /*@C
198:     DMCompositeDestroy - Destroys a vector packer.

200:     Collective on DMComposite

202:     Input Parameter:
203: .   packer - the packer object

205:     Level: advanced

207: .seealso DMCompositeCreate(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),DMCompositeGetEntries()
208:          DMCompositeGather(), DMCompositeCreateGlobalVector(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess()

210: @*/
211: PetscErrorCode  DMCompositeDestroy(DMComposite packer)
212: {
213:   PetscErrorCode         ierr;
214:   struct DMCompositeLink *next, *prev;
215:   PetscTruth             done;

219:   DMDestroy_Private((DM)packer,&done);
220:   if (!done) return(0);

222:   next = packer->next;
223:   while (next) {
224:     prev = next;
225:     next = next->next;
226:     if (prev->type == DMCOMPOSITE_DM) {
227:       DMDestroy(prev->dm);
228:     }
229:     if (prev->grstarts) {
230:       PetscFree(prev->grstarts);
231:     }
232:     PetscFree(prev);
233:   }
234:   PetscHeaderDestroy(packer);
235:   return(0);
236: }

238: /* --------------------------------------------------------------------------------------*/
241: PetscErrorCode  DMCompositeSetUp(DMComposite packer)
242: {
243:   PetscErrorCode         ierr;
244:   PetscInt               nprev = 0;
245:   PetscMPIInt            rank,size;
246:   struct DMCompositeLink *next = packer->next;
247:   PetscMap               map;

250:   if (packer->setup) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Packer has already been setup");
251:   PetscMapInitialize(((PetscObject)packer)->comm,&map);
252:   PetscMapSetLocalSize(&map,packer->n);
253:   PetscMapSetSize(&map,PETSC_DETERMINE);
254:   PetscMapSetBlockSize(&map,1);
255:   PetscMapSetUp(&map);
256:   PetscMapGetSize(&map,&packer->N);
257:   PetscMapGetRange(&map,&packer->rstart,PETSC_NULL);
258:   PetscFree(map.range);
259: 
260:   /* now set the rstart for each linked array/vector */
261:   MPI_Comm_rank(((PetscObject)packer)->comm,&rank);
262:   MPI_Comm_size(((PetscObject)packer)->comm,&size);
263:   while (next) {
264:     next->rstart = nprev;
265:     if ((rank == next->rank) || next->type != DMCOMPOSITE_ARRAY) nprev += next->n;
266:     next->grstart = packer->rstart + next->rstart;
267:     if (next->type == DMCOMPOSITE_ARRAY) {
268:       MPI_Bcast(&next->grstart,1,MPIU_INT,next->rank,((PetscObject)packer)->comm);
269:     } else {
270:       PetscMalloc(size*sizeof(PetscInt),&next->grstarts);
271:       MPI_Allgather(&next->grstart,1,MPIU_INT,next->grstarts,1,MPIU_INT,((PetscObject)packer)->comm);
272:     }
273:     next = next->next;
274:   }
275:   packer->setup = PETSC_TRUE;
276:   return(0);
277: }


282: PetscErrorCode DMCompositeGetAccess_Array(DMComposite packer,struct DMCompositeLink *mine,Vec vec,PetscScalar **array)
283: {
285:   PetscScalar    *varray;
286:   PetscMPIInt    rank;

289:   MPI_Comm_rank(((PetscObject)packer)->comm,&rank);
290:   if (array) {
291:     if (rank == mine->rank) {
292:       VecGetArray(vec,&varray);
293:       *array  = varray + mine->rstart;
294:       VecRestoreArray(vec,&varray);
295:     } else {
296:       *array = 0;
297:     }
298:   }
299:   return(0);
300: }

304: PetscErrorCode DMCompositeGetAccess_DM(DMComposite packer,struct DMCompositeLink *mine,Vec vec,Vec *global)
305: {
307:   PetscScalar    *array;

310:   if (global) {
311:     DMGetGlobalVector(mine->dm,global);
312:     VecGetArray(vec,&array);
313:     VecPlaceArray(*global,array+mine->rstart);
314:     VecRestoreArray(vec,&array);
315:   }
316:   return(0);
317: }

321: PetscErrorCode DMCompositeRestoreAccess_Array(DMComposite packer,struct DMCompositeLink *mine,Vec vec,PetscScalar **array)
322: {
324:   return(0);
325: }

329: PetscErrorCode DMCompositeRestoreAccess_DM(DMComposite packer,struct DMCompositeLink *mine,Vec vec,Vec *global)
330: {

334:   if (global) {
335:     VecResetArray(*global);
336:     DMRestoreGlobalVector(mine->dm,global);
337:   }
338:   return(0);
339: }

343: PetscErrorCode DMCompositeScatter_Array(DMComposite packer,struct DMCompositeLink *mine,Vec vec,PetscScalar *array)
344: {
346:   PetscScalar    *varray;
347:   PetscMPIInt    rank;

350:   MPI_Comm_rank(((PetscObject)packer)->comm,&rank);
351:   if (rank == mine->rank) {
352:     VecGetArray(vec,&varray);
353:     PetscMemcpy(array,varray+mine->rstart,mine->n*sizeof(PetscScalar));
354:     VecRestoreArray(vec,&varray);
355:   }
356:   MPI_Bcast(array,mine->n,MPIU_SCALAR,mine->rank,((PetscObject)packer)->comm);
357:   return(0);
358: }

362: PetscErrorCode DMCompositeScatter_DM(DMComposite packer,struct DMCompositeLink *mine,Vec vec,Vec local)
363: {
365:   PetscScalar    *array;
366:   Vec            global;

369:   DMGetGlobalVector(mine->dm,&global);
370:   VecGetArray(vec,&array);
371:   VecPlaceArray(global,array+mine->rstart);
372:   DMGlobalToLocalBegin(mine->dm,global,INSERT_VALUES,local);
373:   DMGlobalToLocalEnd(mine->dm,global,INSERT_VALUES,local);
374:   VecRestoreArray(vec,&array);
375:   VecResetArray(global);
376:   DMRestoreGlobalVector(mine->dm,&global);
377:   return(0);
378: }

382: PetscErrorCode DMCompositeGather_Array(DMComposite packer,struct DMCompositeLink *mine,Vec vec,PetscScalar *array)
383: {
385:   PetscScalar    *varray;
386:   PetscMPIInt    rank;

389:   MPI_Comm_rank(((PetscObject)packer)->comm,&rank);
390:   if (rank == mine->rank) {
391:     VecGetArray(vec,&varray);
392:     if (varray+mine->rstart == array) SETERRQ(PETSC_ERR_ARG_WRONG,"You need not DMCompositeGather() into objects obtained via DMCompositeGetAccess()");
393:     PetscMemcpy(varray+mine->rstart,array,mine->n*sizeof(PetscScalar));
394:     VecRestoreArray(vec,&varray);
395:   }
396:   return(0);
397: }

401: PetscErrorCode DMCompositeGather_DM(DMComposite packer,struct DMCompositeLink *mine,Vec vec,Vec local)
402: {
404:   PetscScalar    *array;
405:   Vec            global;

408:   DMGetGlobalVector(mine->dm,&global);
409:   VecGetArray(vec,&array);
410:   VecPlaceArray(global,array+mine->rstart);
411:   DMLocalToGlobal(mine->dm,local,INSERT_VALUES,global);
412:   VecRestoreArray(vec,&array);
413:   VecResetArray(global);
414:   DMRestoreGlobalVector(mine->dm,&global);
415:   return(0);
416: }

418: /* ----------------------------------------------------------------------------------*/

420: #include <stdarg.h>

424: /*@C
425:     DMCompositeGetNumberDM - Gets the number of DM objects in the DMComposite
426:        representation.

428:     Collective on DMComposite

430:     Input Parameter:
431: .    packer - the packer object

433:     Output Parameter:
434: .     nDM - the number of DMs

436:     Level: beginner

438: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
439:          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeScatter(),
440:          DMCompositeRestoreAccess(), DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),
441:          DMCompositeGetEntries()

443: @*/
444: PetscErrorCode  DMCompositeGetNumberDM(DMComposite packer,PetscInt *nDM)
445: {
448:   *nDM = packer->nDM;
449:   return(0);
450: }


455: /*@C
456:     DMCompositeGetAccess - Allows one to access the individual packed vectors in their global
457:        representation.

459:     Collective on DMComposite

461:     Input Parameter:
462: +    packer - the packer object
463: .    gvec - the global vector
464: -    ... - the individual sequential or parallel objects (arrays or vectors)

466:     Notes: Use DMCompositeRestoreAccess() to return the vectors when you no longer need them
467:  
468:     Level: advanced

470: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
471:          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeScatter(),
472:          DMCompositeRestoreAccess(), DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),
473:          DMCompositeGetEntries()

475: @*/
476: PetscErrorCode  DMCompositeGetAccess(DMComposite packer,Vec gvec,...)
477: {
478:   va_list                Argp;
479:   PetscErrorCode         ierr;
480:   struct DMCompositeLink *next;

485:   next = packer->next;
486:   if (!packer->setup) {
487:     DMCompositeSetUp(packer);
488:   }

490:   /* loop over packed objects, handling one at a time */
491:   va_start(Argp,gvec);
492:   while (next) {
493:     if (next->type == DMCOMPOSITE_ARRAY) {
494:       PetscScalar **array;
495:       array = va_arg(Argp, PetscScalar**);
496:       DMCompositeGetAccess_Array(packer,next,gvec,array);
497:     } else if (next->type == DMCOMPOSITE_DM) {
498:       Vec *vec;
499:       vec  = va_arg(Argp, Vec*);
500:       DMCompositeGetAccess_DM(packer,next,gvec,vec);
501:     } else {
502:       SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
503:     }
504:     next = next->next;
505:   }
506:   va_end(Argp);
507:   return(0);
508: }
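
For a packer built from one DM followed by one redundant array (as in the sketch after DMCompositeCreate()), access might look like the following; uda and p are illustrative names.

    Vec            uda;   /* DM part of gvec, borrowed, do not destroy */
    PetscScalar    *p;    /* redundant array part of gvec */
    PetscErrorCode ierr;

    ierr = DMCompositeGetAccess(packer,gvec,&uda,&p);CHKERRQ(ierr);
    /* ... read or modify uda and p[] ... */
    ierr = DMCompositeRestoreAccess(packer,gvec,&uda,&p);CHKERRQ(ierr);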

512: /*@C
513:     DMCompositeRestoreAccess - Returns the vectors obtained with DMCompositeGetAccess().

516:     Collective on DMComposite

518:     Input Parameter:
519: +    packer - the packer object
520: .    gvec - the global vector
521: -    ... - the individual sequential or parallel objects (arrays or vectors)
522:  
523:     Level: advanced

525: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
526:          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeScatter(),
527:          DMCompositeRestoreAccess(), DMCompositeGetAccess()

529: @*/
530: PetscErrorCode  DMCompositeRestoreAccess(DMComposite packer,Vec gvec,...)
531: {
532:   va_list                Argp;
533:   PetscErrorCode         ierr;
534:   struct DMCompositeLink *next;

539:   next = packer->next;
540:   if (!packer->setup) {
541:     DMCompositeSetUp(packer);
542:   }

544:   /* loop over packed objects, handling one at a time */
545:   va_start(Argp,gvec);
546:   while (next) {
547:     if (next->type == DMCOMPOSITE_ARRAY) {
548:       PetscScalar **array;
549:       array = va_arg(Argp, PetscScalar**);
550:       DMCompositeRestoreAccess_Array(packer,next,gvec,array);
551:     } else if (next->type == DMCOMPOSITE_DM) {
552:       Vec *vec;
553:       vec  = va_arg(Argp, Vec*);
554:       DMCompositeRestoreAccess_DM(packer,next,gvec,vec);
555:     } else {
556:       SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
557:     }
558:     next = next->next;
559:   }
560:   va_end(Argp);
561:   return(0);
562: }

566: /*@C
567:     DMCompositeScatter - Scatters from a global packed vector into its individual local vectors

569:     Collective on DMComposite

571:     Input Parameter:
572: +    packer - the packer object
573: .    gvec - the global vector
574: -    ... - the individual sequential objects (arrays or vectors)
575:  
576:     Level: advanced

578: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
579:          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
580:          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

582: @*/
583: PetscErrorCode  DMCompositeScatter(DMComposite packer,Vec gvec,...)
584: {
585:   va_list                Argp;
586:   PetscErrorCode         ierr;
587:   struct DMCompositeLink *next;
588:   PetscInt               cnt = 3;

593:   next = packer->next;
594:   if (!packer->setup) {
595:     DMCompositeSetUp(packer);
596:   }

598:   /* loop over packed objects, handling one at a time */
599:   va_start(Argp,gvec);
600:   while (next) {
601:     if (next->type == DMCOMPOSITE_ARRAY) {
602:       PetscScalar *array;
603:       array = va_arg(Argp, PetscScalar*);
604:       DMCompositeScatter_Array(packer,next,gvec,array);
605:     } else if (next->type == DMCOMPOSITE_DM) {
606:       Vec vec;
607:       vec = va_arg(Argp, Vec);
609:       DMCompositeScatter_DM(packer,next,gvec,vec);
610:     } else {
611:       SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
612:     }
613:     cnt++;
614:     next = next->next;
615:   }
616:   va_end(Argp);
617:   return(0);
618: }

622: /*@C
623:     DMCompositeGather - Gathers into a global packed vector from its individual local vectors

625:     Collective on DMComposite

627:     Input Parameter:
628: +    packer - the packer object
629: .    gvec - the global vector
630: -    ... - the individual sequential objects (arrays or vectors)
631:  
632:     Level: advanced

634: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
635:          DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
636:          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

638: @*/
639: PetscErrorCode  DMCompositeGather(DMComposite packer,Vec gvec,...)
640: {
641:   va_list                Argp;
642:   PetscErrorCode         ierr;
643:   struct DMCompositeLink *next;

648:   next = packer->next;
649:   if (!packer->setup) {
650:     DMCompositeSetUp(packer);
651:   }

653:   /* loop over packed objects, handling one at a time */
654:   va_start(Argp,gvec);
655:   while (next) {
656:     if (next->type == DMCOMPOSITE_ARRAY) {
657:       PetscScalar *array;
658:       array = va_arg(Argp, PetscScalar*);
659:       DMCompositeGather_Array(packer,next,gvec,array);
660:     } else if (next->type == DMCOMPOSITE_DM) {
661:       Vec vec;
662:       vec = va_arg(Argp, Vec);
664:       DMCompositeGather_DM(packer,next,gvec,vec);
665:     } else {
666:       SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
667:     }
668:     next = next->next;
669:   }
670:   va_end(Argp);
671:   return(0);
672: }
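
A scatter/gather round trip for the same one-DM-plus-one-array layout, sketched with DMCompositeGetLocalVectors()/DMCompositeRestoreLocalVectors() (defined later in this file); ul and p are illustrative names.

    Vec            ul;    /* local (ghosted) vector for the DM part */
    PetscScalar    *p;    /* local copy of the redundant array */
    PetscErrorCode ierr;

    ierr = DMCompositeGetLocalVectors(packer,&ul,&p);CHKERRQ(ierr);
    ierr = DMCompositeScatter(packer,gvec,ul,p);CHKERRQ(ierr);       /* global -> local pieces */
    /* ... compute with ul and p[] ... */
    ierr = DMCompositeGather(packer,gvec,ul,p);CHKERRQ(ierr);        /* local pieces -> global */
    ierr = DMCompositeRestoreLocalVectors(packer,&ul,&p);CHKERRQ(ierr);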

676: /*@C
677:     DMCompositeAddArray - adds a "redundant" array to a DMComposite. The array values will
678:        be stored in the part of the global vector owned by process orank.

680:     Collective on DMComposite

682:     Input Parameter:
683: +    packer - the packer object
684: .    orank - the process on which the array entries officially live, this number must be
685:              the same on all processes.
686: -    n - the length of the array
687:  
688:     Level: advanced

690: .seealso DMCompositeDestroy(), DMCompositeGather(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
691:          DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
692:          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

694: @*/
695: PetscErrorCode  DMCompositeAddArray(DMComposite packer,PetscMPIInt orank,PetscInt n)
696: {
697:   struct DMCompositeLink *mine,*next;
698:   PetscErrorCode         ierr;
699:   PetscMPIInt            rank;

703:   next = packer->next;
704:   if (packer->setup) {
705:     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Cannot add an array once you have used the DMComposite");
706:   }
707: #if defined(PETSC_USE_DEBUG)
708:   {
709:     PetscMPIInt        orankmax;
710:     MPI_Allreduce(&orank,&orankmax,1,MPI_INT,MPI_MAX,((PetscObject)packer)->comm);
711:     if (orank != orankmax) SETERRQ2(PETSC_ERR_ARG_INCOMP,"orank %d must be equal on all processes, another process has value %d",orank,orankmax);
712:   }
713: #endif

715:   MPI_Comm_rank(((PetscObject)packer)->comm,&rank);
716:   /* create new link */
717:   PetscNew(struct DMCompositeLink,&mine);
718:   mine->n             = n;
719:   mine->rank          = orank;
720:   mine->dm            = PETSC_NULL;
721:   mine->type          = DMCOMPOSITE_ARRAY;
722:   mine->next          = PETSC_NULL;
723:   if (rank == mine->rank) {packer->n += n;packer->nmine++;}

725:   /* add to end of list */
726:   if (!next) {
727:     packer->next = mine;
728:   } else {
729:     while (next->next) next = next->next;
730:     next->next = mine;
731:   }
732:   packer->nredundant++;
733:   return(0);
734: }

738: /*@C
739:     DMCompositeAddDM - adds a DM (this includes a DA) to a DMComposite

741:     Collective on DMComposite

743:     Input Parameter:
744: +    packer - the packer object
745: -    dm - the DM object; if the DM is a DA you will need to cast it with a (DM)
746:  
747:     Level: advanced

749: .seealso DMCompositeDestroy(), DMCompositeGather(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
750:          DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
751:          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

753: @*/
754: PetscErrorCode  DMCompositeAddDM(DMComposite packer,DM dm)
755: {
756:   PetscErrorCode         ierr;
757:   PetscInt               n;
758:   struct DMCompositeLink *mine,*next;
759:   Vec                    global;

764:   next = packer->next;
765:   if (packer->setup) {
766:     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Cannot add a DA once you have used the DMComposite");
767:   }

769:   /* create new link */
770:   PetscNew(struct DMCompositeLink,&mine);
771:   PetscObjectReference((PetscObject)dm);
772:   DMGetGlobalVector(dm,&global);
773:   VecGetLocalSize(global,&n);
774:   DMRestoreGlobalVector(dm,&global);
775:   mine->n      = n;
776:   mine->dm     = dm;
777:   mine->type   = DMCOMPOSITE_DM;
778:   mine->next   = PETSC_NULL;
779:   packer->n   += n;

781:   /* add to end of list */
782:   if (!next) {
783:     packer->next = mine;
784:   } else {
785:     while (next->next) next = next->next;
786:     next->next = mine;
787:   }
788:   packer->nDM++;
789:   packer->nmine++;
790:   return(0);
791: }

797: PetscErrorCode  VecView_DMComposite(Vec gvec,PetscViewer viewer)
798: {
799:   DMComposite            packer;
800:   PetscErrorCode         ierr;
801:   struct DMCompositeLink *next;
802:   PetscTruth             isdraw;

805:   PetscObjectQuery((PetscObject)gvec,"DMComposite",(PetscObject*)&packer);
806:   if (!packer) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector not generated from a DMComposite");
807:   next = packer->next;

809:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
810:   if (!isdraw) {
811:     /* do I really want to call this? */
812:     VecView_MPI(gvec,viewer);
813:   } else {
814:     PetscInt cnt = 0;

816:     /* loop over packed objects, handling one at a time */
817:     while (next) {
818:       if (next->type == DMCOMPOSITE_ARRAY) {
819:         PetscScalar *array;
820:         DMCompositeGetAccess_Array(packer,next,gvec,&array);

822:         /*skip it for now */
823:       } else if (next->type == DMCOMPOSITE_DM) {
824:         Vec      vec;
825:         PetscInt bs;

827:         DMCompositeGetAccess_DM(packer,next,gvec,&vec);
828:         VecView(vec,viewer);
829:         VecGetBlockSize(vec,&bs);
830:         DMCompositeRestoreAccess_DM(packer,next,gvec,&vec);
831:         PetscViewerDrawBaseAdd(viewer,bs);
832:         cnt += bs;
833:       } else {
834:         SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
835:       }
836:       next = next->next;
837:     }
838:     PetscViewerDrawBaseAdd(viewer,-cnt);
839:   }
840:   return(0);
841: }


847: /*@C
848:     DMCompositeCreateGlobalVector - Creates a vector of the correct size to be gathered into 
849:         by the packer.

851:     Collective on DMComposite

853:     Input Parameter:
854: .    packer - the packer object

856:     Output Parameters:
857: .   gvec - the global vector

859:     Level: advanced

861:     Notes: Once this has been created you cannot add additional arrays or vectors to be packed.

863: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
864:          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
865:          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries(),
866:          DMCompositeCreateLocalVector()

868: @*/
869: PetscErrorCode  DMCompositeCreateGlobalVector(DMComposite packer,Vec *gvec)
870: {
871:   PetscErrorCode         ierr;

875:   if (!packer->setup) {
876:     DMCompositeSetUp(packer);
877:   }
878:   VecCreateMPI(((PetscObject)packer)->comm,packer->n,packer->N,gvec);
879:   PetscObjectCompose((PetscObject)*gvec,"DMComposite",(PetscObject)packer);
880:   VecSetOperation(*gvec,VECOP_VIEW,(void(*)(void))VecView_DMComposite);
881:   return(0);
882: }
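
Because the packed vector is composed with the packer and its view operation is replaced above, an ordinary VecView() on it dispatches to VecView_DMComposite(); a small sketch:

    Vec            U;
    PetscErrorCode ierr;

    ierr = DMCompositeCreateGlobalVector(packer,&U);CHKERRQ(ierr);
    ierr = VecView(U,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = VecDestroy(U);CHKERRQ(ierr);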

886: /*@C
887:     DMCompositeCreateLocalVector - Creates a vector of the correct size to contain all ghost points
888:         and redundant arrays.

890:     Collective on DMComposite

892:     Input Parameter:
893: .    packer - the packer object

895:     Output Parameters:
896: .   lvec - the local vector

898:     Level: advanced

900:     Notes: Once this has been created you cannot add additional arrays or vectors to be packed.

902: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
903:          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
904:          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries(),
905:          DMCompositeCreateGlobalVector()

907: @*/
908: PetscErrorCode  DMCompositeCreateLocalVector(DMComposite packer,Vec *lvec)
909: {
910:   PetscErrorCode         ierr;

914:   if (!packer->setup) {
915:     DMCompositeSetUp(packer);
916:   }
917:   VecCreateSeq(((PetscObject)packer)->comm,packer->nghost,lvec);
918:   PetscObjectCompose((PetscObject)*lvec,"DMComposite",(PetscObject)packer);
919:   return(0);
920: }

924: /*@C
925:     DMCompositeGetGlobalIndices - gets an IS for each DM/array in the DMComposite, including ghost points

927:     Collective on DMComposite

929:     Input Parameter:
930: .    packer - the packer object

932:     Output Parameters:
933: .    is - the individual indices for each packed vector/array. Note that this includes
934:            all the ghost points that individual ghosted DA's may have. Also each process has an 
935:            is for EACH redundant array (not just the local redundant arrays).
936:  
937:     Level: advanced

939:     Notes:
940:        The is entries should be destroyed with ISDestroy(), the is array should be freed with PetscFree()

942:        Use DMCompositeGetGlobalISs() for non-ghosted ISs.

944: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
945:          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetAccess(), DMCompositeScatter(),
946:          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),DMCompositeGetEntries()

948: @*/
949: PetscErrorCode  DMCompositeGetGlobalIndices(DMComposite packer,IS *is[])
950: {
951:   PetscErrorCode         ierr;
952:   PetscInt               i,*idx,n,cnt;
953:   struct DMCompositeLink *next;
954:   Vec                    global,dglobal;
955:   PF                     pf;
956:   PetscScalar            *array;
957:   PetscMPIInt            rank;

961:   PetscMalloc((packer->nDM+packer->nredundant)*sizeof(IS),is);
962:   next = packer->next;
963:   DMCompositeCreateGlobalVector(packer,&global);
964:   MPI_Comm_rank(((PetscObject)packer)->comm,&rank);

966:   /* put 0 to N-1 into the global vector */
967:   PFCreate(PETSC_COMM_WORLD,1,1,&pf);
968:   PFSetType(pf,PFIDENTITY,PETSC_NULL);
969:   PFApplyVec(pf,PETSC_NULL,global);
970:   PFDestroy(pf);

972:   /* loop over packed objects, handling one at a time */
973:   cnt = 0;
974:   while (next) {

976:     if (next->type == DMCOMPOSITE_ARRAY) {
977: 
978:       PetscMalloc(next->n*sizeof(PetscInt),&idx);
979:       if (rank == next->rank) {
980:         VecGetArray(global,&array);
981:         array += next->rstart;
982:         for (i=0; i<next->n; i++) idx[i] = (PetscInt)PetscRealPart(array[i]);
983:         array -= next->rstart;
984:         VecRestoreArray(global,&array);
985:       }
986:       MPI_Bcast(idx,next->n,MPIU_INT,next->rank,((PetscObject)packer)->comm);
987:       ISCreateGeneral(((PetscObject)packer)->comm,next->n,idx,&(*is)[cnt]);
988:       PetscFree(idx);
989:     } else if (next->type == DMCOMPOSITE_DM) {
990:       Vec local;

992:       DMCreateLocalVector(next->dm,&local);
993:       VecGetArray(global,&array);
994:       array += next->rstart;
995:       DMGetGlobalVector(next->dm,&dglobal);
996:       VecPlaceArray(dglobal,array);
997:       DMGlobalToLocalBegin(next->dm,dglobal,INSERT_VALUES,local);
998:       DMGlobalToLocalEnd(next->dm,dglobal,INSERT_VALUES,local);
999:       array -= next->rstart;
1000:       VecRestoreArray(global,&array);
1001:       VecResetArray(dglobal);
1002:       DMRestoreGlobalVector(next->dm,&dglobal);

1004:       VecGetArray(local,&array);
1005:       VecGetSize(local,&n);
1006:       PetscMalloc(n*sizeof(PetscInt),&idx);
1007:       for (i=0; i<n; i++) idx[i] = (PetscInt)PetscRealPart(array[i]);
1008:       VecRestoreArray(local,&array);
1009:       VecDestroy(local);
1010:       ISCreateGeneral(((PetscObject)packer)->comm,n,idx,&(*is)[cnt]);
1011:       PetscFree(idx);

1013:     } else {
1014:       SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1015:     }
1016:     next = next->next;
1017:     cnt++;
1018:   }
1019:   VecDestroy(global);
1020:   return(0);
1021: }

1025: /*@C
1026:     DMCompositeGetGlobalISs - Gets the index sets for each composed object

1028:     Collective on DMComposite

1030:     Input Parameter:
1031: .    packer - the packer object

1033:     Output Parameters:
1034: .    is - the array of index sets
1035:  
1036:     Level: advanced

1038:     Notes:
1039:        The is entries should be destroyed with ISDestroy(), the is array should be freed with PetscFree()

1041:        The number of IS on each process will/may be different when redundant arrays are used

1043:        These could be used to extract a subset of vector entries for a "multi-physics" preconditioner

1045:        Use DMCompositeGetLocalISs() for index sets that include ghost points

1047: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1048:          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetAccess(), DMCompositeScatter(),
1049:          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),DMCompositeGetEntries()

1051: @*/
1052: PetscErrorCode  DMCompositeGetGlobalISs(DMComposite packer,IS *is[])
1053: {
1054:   PetscErrorCode         ierr;
1055:   PetscInt               cnt = 0;
1056:   struct DMCompositeLink *next;
1057:   PetscMPIInt            rank;

1061:   PetscMalloc(packer->nmine*sizeof(IS),is);
1062:   next = packer->next;
1063:   MPI_Comm_rank(((PetscObject)packer)->comm,&rank);

1065:   /* loop over packed objects, handling one at a time */
1066:   while (next) {

1068:     if (next->type == DMCOMPOSITE_ARRAY) {
1069: 
1070:       if (rank == next->rank) {
1071:         ISCreateBlock(((PetscObject)packer)->comm,next->n,1,&next->grstart,&(*is)[cnt]);
1072:         cnt++;
1073:       }

1075:     } else if (next->type == DMCOMPOSITE_DM) {

1077:       ISCreateBlock(((PetscObject)packer)->comm,next->n,1,&next->grstart,&(*is)[cnt]);
1078:       cnt++;

1080:     } else {
1081:       SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1082:     }
1083:     next = next->next;
1084:   }
1085:   return(0);
1086: }
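
A retrieval sketch; the number of index sets returned on a process is the number of DMs plus the number of locally owned redundant arrays, so for simplicity this assumes a packer containing only DMs and uses DMCompositeGetNumberDM():

    IS             *is;
    PetscInt       i,nDM;
    PetscErrorCode ierr;

    ierr = DMCompositeGetNumberDM(packer,&nDM);CHKERRQ(ierr);
    ierr = DMCompositeGetGlobalISs(packer,&is);CHKERRQ(ierr);
    for (i=0; i<nDM; i++) {
      /* is[i] selects the entries of the i-th DM inside the packed global vector */
      ierr = ISDestroy(is[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(is);CHKERRQ(ierr);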

1088: /* -------------------------------------------------------------------------------------*/
1091: PetscErrorCode DMCompositeGetLocalVectors_Array(DMComposite packer,struct DMCompositeLink *mine,PetscScalar **array)
1092: {
1095:   if (array) {
1096:     PetscMalloc(mine->n*sizeof(PetscScalar),array);
1097:   }
1098:   return(0);
1099: }

1103: PetscErrorCode DMCompositeGetLocalVectors_DM(DMComposite packer,struct DMCompositeLink *mine,Vec *local)
1104: {
1107:   if (local) {
1108:     DMGetLocalVector(mine->dm,local);
1109:   }
1110:   return(0);
1111: }

1115: PetscErrorCode DMCompositeRestoreLocalVectors_Array(DMComposite packer,struct DMCompositeLink *mine,PetscScalar **array)
1116: {
1119:   if (array) {
1120:     PetscFree(*array);
1121:   }
1122:   return(0);
1123: }

1127: PetscErrorCode DMCompositeRestoreLocalVectors_DM(DMComposite packer,struct DMCompositeLink *mine,Vec *local)
1128: {
1131:   if (local) {
1132:     DMRestoreLocalVector(mine->dm,local);
1133:   }
1134:   return(0);
1135: }

1139: /*@C
1140:     DMCompositeGetLocalVectors - Gets local vectors and arrays for each part of a DMComposite.
1141:        Use DMCompositeRestoreLocalVectors() to return them.

1143:     Collective on DMComposite

1145:     Input Parameter:
1146: .    packer - the packer object
1147:  
1148:     Output Parameter:
1149: .    ... - the individual sequential objects (arrays or vectors)
1150:  
1151:     Level: advanced

1153: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1154:          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(), 
1155:          DMCompositeRestoreLocalVectors(), DMCompositeScatter(), DMCompositeGetEntries()

1157: @*/
1158: PetscErrorCode  DMCompositeGetLocalVectors(DMComposite packer,...)
1159: {
1160:   va_list                Argp;
1161:   PetscErrorCode         ierr;
1162:   struct DMCompositeLink *next;

1166:   next = packer->next;
1167:   /* loop over packed objects, handling one at a time */
1168:   va_start(Argp,packer);
1169:   while (next) {
1170:     if (next->type == DMCOMPOSITE_ARRAY) {
1171:       PetscScalar **array;
1172:       array = va_arg(Argp, PetscScalar**);
1173:       DMCompositeGetLocalVectors_Array(packer,next,array);
1174:     } else if (next->type == DMCOMPOSITE_DM) {
1175:       Vec *vec;
1176:       vec = va_arg(Argp, Vec*);
1177:       DMCompositeGetLocalVectors_DM(packer,next,vec);
1178:     } else {
1179:       SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1180:     }
1181:     next = next->next;
1182:   }
1183:   va_end(Argp);
1184:   return(0);
1185: }

1189: /*@C
1190:     DMCompositeRestoreLocalVectors - Restores local vectors and arrays for each part of a DMComposite
1191:        obtained with DMCompositeGetLocalVectors().

1193:     Collective on DMComposite

1195:     Input Parameter:
1196: .    packer - the packer object
1197:  
1198:     Output Parameter:
1199: .    ... - the individual sequential objects (arrays or vectors)
1200:  
1201:     Level: advanced

1203: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1204:          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(), 
1205:          DMCompositeGetLocalVectors(), DMCompositeScatter(), DMCompositeGetEntries()

1207: @*/
1208: PetscErrorCode  DMCompositeRestoreLocalVectors(DMComposite packer,...)
1209: {
1210:   va_list                Argp;
1211:   PetscErrorCode         ierr;
1212:   struct DMCompositeLink *next;

1216:   next = packer->next;
1217:   /* loop over packed objects, handling one at a time */
1218:   va_start(Argp,packer);
1219:   while (next) {
1220:     if (next->type == DMCOMPOSITE_ARRAY) {
1221:       PetscScalar **array;
1222:       array = va_arg(Argp, PetscScalar**);
1223:       DMCompositeRestoreLocalVectors_Array(packer,next,array);
1224:     } else if (next->type == DMCOMPOSITE_DM) {
1225:       Vec *vec;
1226:       vec = va_arg(Argp, Vec*);
1227:       DMCompositeRestoreLocalVectors_DM(packer,next,vec);
1228:     } else {
1229:       SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1230:     }
1231:     next = next->next;
1232:   }
1233:   va_end(Argp);
1234:   return(0);
1235: }

1237: /* -------------------------------------------------------------------------------------*/
1240: PetscErrorCode DMCompositeGetEntries_Array(DMComposite packer,struct DMCompositeLink *mine,PetscInt *n)
1241: {
1243:   if (n) *n = mine->n;
1244:   return(0);
1245: }

1249: PetscErrorCode DMCompositeGetEntries_DM(DMComposite packer,struct DMCompositeLink *mine,DM *dm)
1250: {
1252:   if (dm) *dm = mine->dm;
1253:   return(0);
1254: }

1258: /*@C
1259:     DMCompositeGetEntries - Gets the DA, redundant size, etc for each entry in a DMComposite.

1261:     Collective on DMComposite

1263:     Input Parameter:
1264: .    packer - the packer object
1265:  
1266:     Output Parameter:
1267: .    ... - the individual entries (DAs or integer sizes)
1268:  
1269:     Level: advanced

1271: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1272:          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(), 
1273:          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeScatter()

1276: @*/
1277: PetscErrorCode  DMCompositeGetEntries(DMComposite packer,...)
1278: {
1279:   va_list                Argp;
1280:   PetscErrorCode         ierr;
1281:   struct DMCompositeLink *next;

1285:   next = packer->next;
1286:   /* loop over packed objects, handling one at a time */
1287:   va_start(Argp,packer);
1288:   while (next) {
1289:     if (next->type == DMCOMPOSITE_ARRAY) {
1290:       PetscInt *n;
1291:       n = va_arg(Argp, PetscInt*);
1292:       DMCompositeGetEntries_Array(packer,next,n);
1293:     } else if (next->type == DMCOMPOSITE_DM) {
1294:       DM *dm;
1295:       dm = va_arg(Argp, DM*);
1296:       DMCompositeGetEntries_DM(packer,next,dm);
1297:     } else {
1298:       SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1299:     }
1300:     next = next->next;
1301:   }
1302:   va_end(Argp);
1303:   return(0);
1304: }
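
For the one-DM-plus-one-array layout used in the earlier sketches, the entries could be queried as follows; the varargs must appear in the order the parts were added.

    DM             dm0;
    PetscInt       nred;
    PetscErrorCode ierr;

    ierr = DMCompositeGetEntries(packer,&dm0,&nred);CHKERRQ(ierr);
    /* dm0 is the DM added first, nred is the length of the redundant array */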

1308: /*@C
1309:     DMCompositeRefine - Refines a DMComposite by refining all of its DAs

1311:     Collective on DMComposite

1313:     Input Parameters:
1314: +    packer - the packer object
1315: -    comm - communicator to contain the new DM object, usually PETSC_NULL

1317:     Output Parameter:
1318: .    fine - new packer
1319:  
1320:     Level: advanced

1322: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1323:          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
1324:          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),  DMCompositeScatter(),
1325:          DMCompositeGetEntries()

1327: @*/
1328: PetscErrorCode  DMCompositeRefine(DMComposite packer,MPI_Comm comm,DMComposite *fine)
1329: {
1330:   PetscErrorCode         ierr;
1331:   struct DMCompositeLink *next;
1332:   DM                     dm;

1336:   next = packer->next;
1337:   DMCompositeCreate(comm,fine);

1339:   /* loop over packed objects, handling one at a time */
1340:   while (next) {
1341:     if (next->type == DMCOMPOSITE_ARRAY) {
1342:       DMCompositeAddArray(*fine,next->rank,next->n);
1343:     } else if (next->type == DMCOMPOSITE_DM) {
1344:       DMRefine(next->dm,comm,&dm);
1345:       DMCompositeAddDM(*fine,dm);
1346:       PetscObjectDereference((PetscObject)dm);
1347:     } else {
1348:       SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1349:     }
1350:     next = next->next;
1351:   }
1352:   return(0);
1353: }

1355:  #include "petscmat.h"

1357: struct MatPackLink {
1358:   Mat                A;
1359:   struct MatPackLink *next;
1360: };

1362: struct MatPack {
1363:   DMComposite            right,left;
1364:   struct MatPackLink *next;
1365: };

1369: PetscErrorCode MatMultBoth_Shell_Pack(Mat A,Vec x,Vec y,PetscTruth add)
1370: {
1371:   struct MatPack         *mpack;
1372:   struct DMCompositeLink *xnext,*ynext;
1373:   struct MatPackLink     *anext;
1374:   PetscScalar            *xarray,*yarray;
1375:   PetscErrorCode         ierr;
1376:   PetscInt               i;
1377:   Vec                    xglobal,yglobal;
1378:   PetscMPIInt            rank;

1381:   MatShellGetContext(A,(void**)&mpack);
1382:   MPI_Comm_rank(((PetscObject)mpack->right)->comm,&rank);
1383:   xnext = mpack->right->next;
1384:   ynext = mpack->left->next;
1385:   anext = mpack->next;

1387:   while (xnext) {
1388:     if (xnext->type == DMCOMPOSITE_ARRAY) {
1389:       if (rank == xnext->rank) {
1390:         VecGetArray(x,&xarray);
1391:         VecGetArray(y,&yarray);
1392:         if (add) {
1393:           for (i=0; i<xnext->n; i++) {
1394:             yarray[ynext->rstart+i] += xarray[xnext->rstart+i];
1395:           }
1396:         } else {
1397:           PetscMemcpy(yarray+ynext->rstart,xarray+xnext->rstart,xnext->n*sizeof(PetscScalar));
1398:         }
1399:         VecRestoreArray(x,&xarray);
1400:         VecRestoreArray(y,&yarray);
1401:       }
1402:     } else if (xnext->type == DMCOMPOSITE_DM) {
1403:       VecGetArray(x,&xarray);
1404:       VecGetArray(y,&yarray);
1405:       DMGetGlobalVector(xnext->dm,&xglobal);
1406:       DMGetGlobalVector(ynext->dm,&yglobal);
1407:       VecPlaceArray(xglobal,xarray+xnext->rstart);
1408:       VecPlaceArray(yglobal,yarray+ynext->rstart);
1409:       if (add) {
1410:         MatMultAdd(anext->A,xglobal,yglobal,yglobal);
1411:       } else {
1412:         MatMult(anext->A,xglobal,yglobal);
1413:       }
1414:       VecRestoreArray(x,&xarray);
1415:       VecRestoreArray(y,&yarray);
1416:       VecResetArray(xglobal);
1417:       VecResetArray(yglobal);
1418:       DMRestoreGlobalVector(xnext->dm,&xglobal);
1419:       DMRestoreGlobalVector(ynext->dm,&yglobal);
1420:       anext = anext->next;
1421:     } else {
1422:       SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1423:     }
1424:     xnext = xnext->next;
1425:     ynext = ynext->next;
1426:   }
1427:   return(0);
1428: }

1432: PetscErrorCode MatMultAdd_Shell_Pack(Mat A,Vec x,Vec y,Vec z)
1433: {
1436:   if (z != y) SETERRQ(PETSC_ERR_SUP,"Handles y == z only");
1437:   MatMultBoth_Shell_Pack(A,x,y,PETSC_TRUE);
1438:   return(0);
1439: }

1443: PetscErrorCode MatMult_Shell_Pack(Mat A,Vec x,Vec y)
1444: {
1447:   MatMultBoth_Shell_Pack(A,x,y,PETSC_FALSE);
1448:   return(0);
1449: }

1453: PetscErrorCode MatMultTranspose_Shell_Pack(Mat A,Vec x,Vec y)
1454: {
1455:   struct MatPack         *mpack;
1456:   struct DMCompositeLink *xnext,*ynext;
1457:   struct MatPackLink     *anext;
1458:   PetscScalar            *xarray,*yarray;
1459:   PetscErrorCode         ierr;
1460:   Vec                    xglobal,yglobal;
1461:   PetscMPIInt            rank;

1464:   MatShellGetContext(A,(void**)&mpack);
1465:   MPI_Comm_rank(((PetscObject)mpack->right)->comm,&rank);
1466:   xnext = mpack->left->next;
1467:   ynext = mpack->right->next;
1468:   anext = mpack->next;

1470:   while (xnext) {
1471:     if (xnext->type == DMCOMPOSITE_ARRAY) {
1472:       if (rank == ynext->rank) {
1473:         VecGetArray(x,&xarray);
1474:         VecGetArray(y,&yarray);
1475:         PetscMemcpy(yarray+ynext->rstart,xarray+xnext->rstart,xnext->n*sizeof(PetscScalar));
1476:         VecRestoreArray(x,&xarray);
1477:         VecRestoreArray(y,&yarray);
1478:       }
1479:     } else if (xnext->type == DMCOMPOSITE_DM) {
1480:       VecGetArray(x,&xarray);
1481:       VecGetArray(y,&yarray);
1482:       DMGetGlobalVector(xnext->dm,&xglobal);
1483:       DMGetGlobalVector(ynext->dm,&yglobal);
1484:       VecPlaceArray(xglobal,xarray+xnext->rstart);
1485:       VecPlaceArray(yglobal,yarray+ynext->rstart);
1486:       MatMultTranspose(anext->A,xglobal,yglobal);
1487:       VecRestoreArray(x,&xarray);
1488:       VecRestoreArray(y,&yarray);
1489:       VecResetArray(xglobal);
1490:       VecResetArray(yglobal);
1491:       DMRestoreGlobalVector(xnext->dm,&xglobal);
1492:       DMRestoreGlobalVector(ynext->dm,&yglobal);
1493:       anext = anext->next;
1494:     } else {
1495:       SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1496:     }
1497:     xnext = xnext->next;
1498:     ynext = ynext->next;
1499:   }
1500:   return(0);
1501: }

1505: PetscErrorCode MatDestroy_Shell_Pack(Mat A)
1506: {
1507:   struct MatPack     *mpack;
1508:   struct MatPackLink *anext,*oldanext;
1509:   PetscErrorCode     ierr;

1512:   MatShellGetContext(A,(void**)&mpack);
1513:   anext = mpack->next;

1515:   while (anext) {
1516:     MatDestroy(anext->A);
1517:     oldanext = anext;
1518:     anext    = anext->next;
1519:     PetscFree(oldanext);
1520:   }
1521:   PetscFree(mpack);
1522:   PetscObjectChangeTypeName((PetscObject)A,0);
1523:   return(0);
1524: }

1528: /*@C
1529:     DMCompositeGetInterpolation - Gets the interpolation from a coarse DMComposite to a refined (fine) DMComposite

1531:     Collective on DMComposite

1533:     Input Parameters:
1534: +    coarse - coarse grid packer
1535: -    fine - fine grid packer

1537:     Output Parameter:
1538: +    A - interpolation matrix
1539: -    v - scaling vector
1540:  
1541:     Level: advanced

1543: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1544:          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
1545:          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),  DMCompositeScatter(),DMCompositeGetEntries()

1547: @*/
1548: PetscErrorCode  DMCompositeGetInterpolation(DMComposite coarse,DMComposite fine,Mat *A,Vec *v)
1549: {
1550:   PetscErrorCode         ierr;
1551:   PetscInt               m,n,M,N;
1552:   struct DMCompositeLink *nextc;
1553:   struct DMCompositeLink *nextf;
1554:   struct MatPackLink     *nextmat,*pnextmat = 0;
1555:   struct MatPack         *mpack;
1556:   Vec                    gcoarse,gfine;

1561:   nextc = coarse->next;
1562:   nextf = fine->next;
1563:   /* use global vectors only for determining matrix layout */
1564:   DMCompositeCreateGlobalVector(coarse,&gcoarse);
1565:   DMCompositeCreateGlobalVector(fine,&gfine);
1566:   VecGetLocalSize(gcoarse,&n);
1567:   VecGetLocalSize(gfine,&m);
1568:   VecGetSize(gcoarse,&N);
1569:   VecGetSize(gfine,&M);
1570:   VecDestroy(gcoarse);
1571:   VecDestroy(gfine);

1573:   PetscNew(struct MatPack,&mpack);
1574:   mpack->right = coarse;
1575:   mpack->left  = fine;
1576:   MatCreate(((PetscObject)fine)->comm,A);
1577:   MatSetSizes(*A,m,n,M,N);
1578:   MatSetType(*A,MATSHELL);
1579:   MatShellSetContext(*A,mpack);
1580:   MatShellSetOperation(*A,MATOP_MULT,(void(*)(void))MatMult_Shell_Pack);
1581:   MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,(void(*)(void))MatMultTranspose_Shell_Pack);
1582:   MatShellSetOperation(*A,MATOP_MULT_ADD,(void(*)(void))MatMultAdd_Shell_Pack);
1583:   MatShellSetOperation(*A,MATOP_DESTROY,(void(*)(void))MatDestroy_Shell_Pack);

1585:   /* loop over packed objects, handling one at a time */
1586:   while (nextc) {
1587:     if (nextc->type != nextf->type) SETERRQ(PETSC_ERR_ARG_INCOMP,"Two DMComposite have different layout");

1589:     if (nextc->type == DMCOMPOSITE_ARRAY) {
1590:       ;
1591:     } else if (nextc->type == DMCOMPOSITE_DM) {
1592:       PetscNew(struct MatPackLink,&nextmat);
1593:       nextmat->next = 0;
1594:       if (pnextmat) {
1595:         pnextmat->next = nextmat;
1596:         pnextmat       = nextmat;
1597:       } else {
1598:         pnextmat    = nextmat;
1599:         mpack->next = nextmat;
1600:       }
1601:       DMGetInterpolation(nextc->dm,nextf->dm,&nextmat->A,PETSC_NULL);
1602:     } else {
1603:       SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1604:     }
1605:     nextc = nextc->next;
1606:     nextf = nextf->next;
1607:   }
1608:   return(0);
1609: }
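
A coarse-to-fine sketch combining DMCompositeRefine() and DMCompositeGetInterpolation(); the scaling vector is not requested (the argument is unused in the implementation above), and the coarse packer is assumed to live on PETSC_COMM_WORLD.

    DMComposite    fine;
    Mat            Interp;
    PetscErrorCode ierr;

    ierr = DMCompositeRefine(coarse,PETSC_COMM_WORLD,&fine);CHKERRQ(ierr);
    ierr = DMCompositeGetInterpolation(coarse,fine,&Interp,PETSC_NULL);CHKERRQ(ierr);
    /* MatMult(Interp,Uc,Uf) maps a packed coarse vector Uc to a packed fine vector Uf */
    ierr = MatDestroy(Interp);CHKERRQ(ierr);
    ierr = DMCompositeDestroy(fine);CHKERRQ(ierr);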

1613: /*@C
1614:     DMCompositeGetMatrix - Creates a matrix with the correct parallel layout and nonzero structure required for
1615:       computing the Jacobian of a function defined using the stencils set in the DAs and the coupling in the array variables

1617:     Collective on DMComposite

1619:     Input Parameter:
1620: +   packer - the DMComposite object
1621: -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ

1623:     Output Parameters:
1624: .   J  - matrix with the correct nonzero structure
1625:         (obviously without the correct Jacobian values)

1627:     Level: advanced

1629:     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 
1630:        do not need to do it yourself. 


1633: .seealso DAGetMatrix(), DMCompositeCreate()

1635: @*/
1636: PetscErrorCode  DMCompositeGetMatrix(DMComposite packer, const MatType mtype,Mat *J)
1637: {
1638:   PetscErrorCode         ierr;
1639:   struct DMCompositeLink *next = packer->next;
1640:   PetscInt               m,*dnz,*onz,i,j,mA;
1641:   Mat                    Atmp;
1642:   PetscMPIInt            rank;
1643:   PetscScalar            zero = 0.0;
1644:   PetscTruth             dense = PETSC_FALSE;


1649:   /* use global vector to determine layout needed for matrix */
1650:   m = packer->n;
1651:   MPI_Comm_rank(((PetscObject)packer)->comm,&rank);
1652:   MatCreate(((PetscObject)packer)->comm,J);
1653:   MatSetSizes(*J,m,m,PETSC_DETERMINE,PETSC_DETERMINE);
1654:   MatSetType(*J,MATAIJ);

1656:   /*
1657:      Extremely inefficient but will compute entire Jacobian for testing
1658:   */
1659:   PetscOptionsGetTruth(PETSC_NULL,"-dmcomposite_dense_jacobian",&dense,PETSC_NULL);
1660:   if (dense) {
1661:     PetscInt    rstart,rend,*indices;
1662:     PetscScalar *values;

1664:     mA    = packer->N;
1665:     MatMPIAIJSetPreallocation(*J,mA,PETSC_NULL,mA-m,PETSC_NULL);
1666:     MatSeqAIJSetPreallocation(*J,mA,PETSC_NULL);

1668:     MatGetOwnershipRange(*J,&rstart,&rend);
1669:     PetscMalloc2(mA,PetscScalar,&values,mA,PetscInt,&indices);
1670:     PetscMemzero(values,mA*sizeof(PetscScalar));
1671:     for (i=0; i<mA; i++) indices[i] = i;
1672:     for (i=rstart; i<rend; i++) {
1673:       MatSetValues(*J,1,&i,mA,indices,values,INSERT_VALUES);
1674:     }
1675:     PetscFree2(values,indices);
1676:     MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);
1677:     MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);
1678:     return(0);
1679:   }

1681:   MatPreallocateInitialize(((PetscObject)packer)->comm,m,m,dnz,onz);
1682:   /* loop over packed objects, handling one at a time */
1683:   next = packer->next;
1684:   while (next) {
1685:     if (next->type == DMCOMPOSITE_ARRAY) {
1686:       if (rank == next->rank) {  /* zero the "little" block */
1687:         for (j=packer->rstart+next->rstart; j<packer->rstart+next->rstart+next->n; j++) {
1688:           for (i=packer->rstart+next->rstart; i<packer->rstart+next->rstart+next->n; i++) {
1689:             MatPreallocateSet(j,1,&i,dnz,onz);
1690:           }
1691:         }
1692:       }
1693:     } else if (next->type == DMCOMPOSITE_DM) {
1694:       PetscInt       nc,rstart,*ccols,maxnc;
1695:       const PetscInt *cols,*rstarts;
1696:       PetscMPIInt    proc;

1698:       DMGetMatrix(next->dm,mtype,&Atmp);
1699:       MatGetOwnershipRange(Atmp,&rstart,PETSC_NULL);
1700:       MatGetOwnershipRanges(Atmp,&rstarts);
1701:       MatGetLocalSize(Atmp,&mA,PETSC_NULL);

1703:       maxnc = 0;
1704:       for (i=0; i<mA; i++) {
1705:         MatGetRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);
1706:         MatRestoreRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);
1707:         maxnc = PetscMax(nc,maxnc);
1708:       }
1709:       PetscMalloc(maxnc*sizeof(PetscInt),&ccols);
1710:       for (i=0; i<mA; i++) {
1711:         MatGetRow(Atmp,rstart+i,&nc,&cols,PETSC_NULL);
1712:         /* remap the columns, taking into account how much they are shifted on each process */
1713:         for (j=0; j<nc; j++) {
1714:           proc = 0;
1715:           while (cols[j] >= rstarts[proc+1]) proc++;
1716:           ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
1717:         }
1718:         MatPreallocateSet(packer->rstart+next->rstart+i,nc,ccols,dnz,onz);
1719:         MatRestoreRow(Atmp,rstart+i,&nc,&cols,PETSC_NULL);
1720:       }
1721:       PetscFree(ccols);
1722:       MatDestroy(Atmp);
1723:     } else {
1724:       SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1725:     }
1726:     next = next->next;
1727:   }
1728:   if (packer->FormCoupleLocations) {
1729:     (*packer->FormCoupleLocations)(packer,PETSC_NULL,dnz,onz,__rstart,__nrows,__start,__end);
1730:   }
1731:   MatMPIAIJSetPreallocation(*J,0,dnz,0,onz);
1732:   MatSeqAIJSetPreallocation(*J,0,dnz);
1733:   MatPreallocateFinalize(dnz,onz);

1735:   next = packer->next;
1736:   while (next) {
1737:     if (next->type == DMCOMPOSITE_ARRAY) {
1738:       if (rank == next->rank) {
1739:         for (j=packer->rstart+next->rstart; j<packer->rstart+next->rstart+next->n; j++) {
1740:           for (i=packer->rstart+next->rstart; i<packer->rstart+next->rstart+next->n; i++) {
1741:             MatSetValues(*J,1,&j,1,&i,&zero,INSERT_VALUES);
1742:           }
1743:         }
1744:       }
1745:     } else if (next->type == DMCOMPOSITE_DM) {
1746:       PetscInt          nc,rstart,row,maxnc,*ccols;
1747:       const PetscInt    *cols,*rstarts;
1748:       const PetscScalar *values;
1749:       PetscMPIInt       proc;

1751:       DMGetMatrix(next->dm,mtype,&Atmp);
1752:       MatGetOwnershipRange(Atmp,&rstart,PETSC_NULL);
1753:       MatGetOwnershipRanges(Atmp,&rstarts);
1754:       MatGetLocalSize(Atmp,&mA,PETSC_NULL);
1755:       maxnc = 0;
1756:       for (i=0; i<mA; i++) {
1757:         MatGetRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);
1758:         MatRestoreRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);
1759:         maxnc = PetscMax(nc,maxnc);
1760:       }
1761:       PetscMalloc(maxnc*sizeof(PetscInt),&ccols);
1762:       for (i=0; i<mA; i++) {
1763:         MatGetRow(Atmp,rstart+i,&nc,(const PetscInt **)&cols,&values);
1764:         for (j=0; j<nc; j++) {
1765:           proc = 0;
1766:           while (cols[j] >= rstarts[proc+1]) proc++;
1767:           ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
1768:         }
1769:         row  = packer->rstart+next->rstart+i;
1770:         MatSetValues(*J,1,&row,nc,ccols,values,INSERT_VALUES);
1771:         MatRestoreRow(Atmp,rstart+i,&nc,(const PetscInt **)&cols,&values);
1772:       }
1773:       PetscFree(ccols);
1774:       MatDestroy(Atmp);
1775:     } else {
1776:       SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1777:     }
1778:     next = next->next;
1779:   }
1780:   if (packer->FormCoupleLocations) {
1781:     PetscInt __rstart;
1782:     MatGetOwnershipRange(*J,&__rstart,PETSC_NULL);
1783:     (*packer->FormCoupleLocations)(packer,*J,PETSC_NULL,PETSC_NULL,__rstart,0,0,0);
1784:   }
1785:   MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);
1786:   MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);
1787:   return(0);
1788: }
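/*
   Illustrative usage sketch (assuming the DMCompositeGetMatrix(packer,mtype,&J) entry point whose
   body ends above, and the usual signatures of the creation routines named in the .seealso lists
   in this file):

      DMComposite packer;
      Mat         J;

      DMCompositeCreate(comm,&packer);
      DMCompositeAddDM(packer,(DM)da);            add a previously created DA
      DMCompositeAddArray(packer,0,nred);         add nred redundant unknowns owned by rank 0
      DMCompositeGetMatrix(packer,MATAIJ,&J);     J comes back preallocated and assembled with the coupled nonzero pattern
*/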

1792: /*@
1793:     DMCompositeGetColoring - Gets the coloring required for computing the Jacobian via
1794:     finite differences on a function defined using a DMComposite "grid"

1796:     Collective on DMComposite

1798:     Input Parameters:
1799: +   dmcomposite - the DMComposite object
1800: -   ctype - IS_COLORING_GLOBAL or IS_COLORING_GHOSTED

1802:     Output Parameter:
1803: .   coloring - matrix coloring for use in computing Jacobians (or PETSC_NULL if not needed)

1805:     Level: advanced

1807:     Notes: This colors each diagonal block (associated with a single DM) with a different set of colors;
1808:       thus it will compute the diagonal blocks of the Jacobian correctly. The off-diagonal blocks are
1809:       not computed, hence the Jacobian computed is not the entire Jacobian. If -dmcomposite_dense_jacobian
1810:       is used then each column of the Jacobian is given a different color so the full Jacobian is computed
1811:       correctly.

1813:     Notes: These compute the graph coloring of the graph of A^{T}A. The coloring used
1814:    for efficient (parallel or thread-based) triangular solves etc. is NOT yet
1815:    available.


1818: .seealso ISColoringView(), ISColoringGetIS(), MatFDColoringCreate(), ISColoringType, ISColoring, DAGetColoring()

1820: @*/
1821: PetscErrorCode  DMCompositeGetColoring(DMComposite dmcomposite,ISColoringType ctype,ISColoring *coloring)
1822: {
1823:   PetscErrorCode         ierr;
1824:   PetscInt               n,i,cnt;
1825:   ISColoringValue        *colors;
1826:   PetscTruth             dense = PETSC_FALSE;
1827:   ISColoringValue        maxcol = 0;

1831:   if (ctype == IS_COLORING_GHOSTED) {
1832:     SETERRQ(PETSC_ERR_SUP,"Currently you must use -dmmg_iscoloring_type global" );
1833:   } else if (ctype == IS_COLORING_GLOBAL) {
1834:     n = dmcomposite->n;
1835:   } else SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Unknown ISColoringType");
1836:   PetscMalloc(n*sizeof(ISColoringValue),&colors); /* freed in ISColoringDestroy() */

1838:   PetscOptionsGetTruth(PETSC_NULL,"-dmcomposite_dense_jacobian",&dense,PETSC_NULL);
1839:   if (dense) {
1840:     for (i=0; i<n; i++) {
1841:       colors[i] = (ISColoringValue)(dmcomposite->rstart + i);
1842:     }
1843:     maxcol = dmcomposite->N;
1844:   } else {
1845:     struct DMCompositeLink *next = dmcomposite->next;
1846:     PetscMPIInt            rank;
1847: 
1848:     MPI_Comm_rank(((PetscObject)dmcomposite)->comm,&rank);
1849:     cnt  = 0;
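    /* walk the packed links: each redundant-array unknown receives its own new color, while each
       DM block reuses that DM's own coloring shifted by the running color count maxcol */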
1850:     while (next) {
1851:       if (next->type == DMCOMPOSITE_ARRAY) {
1852:         if (rank == next->rank) {  /* each column gets its own color */
1853:           for (i=dmcomposite->rstart+next->rstart; i<dmcomposite->rstart+next->rstart+next->n; i++) {
1854:             colors[cnt++] = maxcol++;
1855:           }
1856:         }
1857:         MPI_Bcast(&maxcol,1,MPIU_COLORING_VALUE,next->rank,((PetscObject)dmcomposite)->comm);
1858:       } else if (next->type == DMCOMPOSITE_DM) {
1859:         ISColoring     lcoloring;

1861:         DMGetColoring(next->dm,IS_COLORING_GLOBAL,&lcoloring);
1862:         for (i=0; i<lcoloring->N; i++) {
1863:           colors[cnt++] = maxcol + lcoloring->colors[i];
1864:         }
1865:         maxcol += lcoloring->n;
1866:         ISColoringDestroy(lcoloring);
1867:       } else {
1868:         SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1869:       }
1870:       next = next->next;
1871:     }
1872:   }
1873:   ISColoringCreate(((PetscObject)dmcomposite)->comm,maxcol,n,colors,coloring);
1874:   return(0);
1875: }
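/*
   Illustrative usage sketch (assuming J was obtained from the composite matrix routine earlier in
   this file and MatFDColoringCreate() is used in the standard way): the ISColoring produced above
   normally drives a finite-difference Jacobian.

      ISColoring    coloring;
      MatFDColoring fdcoloring;

      DMCompositeGetColoring(dmcomposite,IS_COLORING_GLOBAL,&coloring);
      MatFDColoringCreate(J,coloring,&fdcoloring);
      ISColoringDestroy(coloring);

   Unless -dmcomposite_dense_jacobian is given, only the diagonal blocks are colored correctly, as
   described in the Notes above.
*/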

1879: /*@C
1880:     DMCompositeGlobalToLocalBegin - begins the update of a single packed local vector from the packed global vector

1882:     Collective on DMComposite

1884:     Input Parameters:
1885: +    packer - the packer object
1886: .    gvec - the global vector
1887: .    mode - insert mode for the update, INSERT_VALUES or ADD_VALUES
1888: -    lvec - single local vector

1889:     Level: advanced

1891: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1892:          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
1893:          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

1895: @*/
1896: PetscErrorCode  DMCompositeGlobalToLocalBegin(DMComposite packer,Vec gvec,InsertMode mode,Vec lvec)
1897: {
1898:   PetscErrorCode         ierr;
1899:   struct DMCompositeLink *next;
1900:   PetscInt               cnt = 3;
1901:   PetscMPIInt            rank;
1902:   PetscScalar            *garray,*larray;

1907:   next = packer->next;
1908:   if (!packer->setup) {
1909:     DMCompositeSetUp(packer);
1910:   }
1911:   MPI_Comm_rank(((PetscObject)packer)->comm,&rank);
1912:   VecGetArray(gvec,&garray);
1913:   VecGetArray(lvec,&larray);

1915:   /* loop over packed objects, handling one at a time */
1916:   while (next) {
1917:     if (next->type == DMCOMPOSITE_ARRAY) {
1918:       if (rank == next->rank) {
1919:         PetscMemcpy(larray,garray,next->n*sizeof(PetscScalar));
1920:         garray += next->n;
1921:       }
1922:       /* does not handle ADD_VALUES */
1923:       MPI_Bcast(larray,next->n,MPIU_SCALAR,next->rank,((PetscObject)packer)->comm);
1924:       larray += next->n;
1925:     } else if (next->type == DMCOMPOSITE_DM) {
1926:       Vec      local,global;
1927:       PetscInt N;

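      /* temporarily wrap the current slices of the packed global and local arrays with this DM's
         own work vectors so that the DM's global-to-local scatter can fill in the ghost values */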
1929:       DMGetGlobalVector(next->dm,&global);
1930:       VecGetLocalSize(global,&N);
1931:       VecPlaceArray(global,garray);
1932:       DMGetLocalVector(next->dm,&local);
1933:       VecPlaceArray(local,larray);
1934:       DMGlobalToLocalBegin(next->dm,global,mode,local);
1935:       DMGlobalToLocalEnd(next->dm,global,mode,local);
1936:       VecResetArray(global);
1937:       VecResetArray(local);
1938:       DMRestoreGlobalVector(next->dm,&global);
1939:       DMRestoreLocalVector(next->dm,&local);
1940:       garray += N; larray += next->n;  /* advance past this DM's block in both packed arrays */
1941:     } else {
1942:       SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1943:     }
1944:     cnt++;
1945:     next = next->next;
1946:   }

1948:   VecRestoreArray(gvec,PETSC_NULL);
1949:   VecRestoreArray(lvec,PETSC_NULL);
1950:   return(0);
1951: }
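/*
   Illustrative usage sketch (assuming gvec comes from DMCompositeCreateGlobalVector() and lvec is
   a sequential vector already sized to hold all of the packed, ghosted local pieces; how that
   local vector is created is not shown in this file):

      DMCompositeGlobalToLocalBegin(packer,gvec,INSERT_VALUES,lvec);
      DMCompositeGlobalToLocalEnd(packer,gvec,INSERT_VALUES,lvec);
*/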

1955: /*@C
1956:     DMCompositeGlobalToLocalEnd - completes the update begun with DMCompositeGlobalToLocalBegin(); all communication is handled in the Begin phase

1958:     Collective on DMComposite

1960:     Input Parameters:
1961: +    packer - the packer object
1962: .    gvec - the global vector
1963: .    mode - insert mode for the update, INSERT_VALUES or ADD_VALUES
1964: -    lvec - single local vector

1965:     Level: advanced

1967: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1968:          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
1969:          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

1971: @*/
1972: PetscErrorCode  DMCompositeGlobalToLocalEnd(DMComposite packer,Vec gvec,InsertMode mode,Vec lvec)
1973: {
1975:   return(0);
1976: }