Actual source code: pbvec.c

  1: #define PETSCVEC_DLL
  2: /*
  3:    This file contains routines for Parallel vector operations.
  4:  */
  5:  #include "../src/vec/vec/impls/mpi/pvecimpl.h"

  7: #if 0
 10: static PetscErrorCode VecPublish_MPI(PetscObject obj)
 11: {
 13:   return(0);
 14: }
 15: #endif

 19: PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
 20: {
 21:   PetscScalar    sum,work;

 25:   VecDot_Seq(xin,yin,&work);
 26:   MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,((PetscObject)xin)->comm);
 27:   *z = sum;
 28:   return(0);
 29: }
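VecDot_MPI() (and VecTDot_MPI() below) show the reduction pattern used throughout this file: the sequential kernel computes the local contribution and MPI_Allreduce combines the per-process results. A minimal sketch of the same pattern in plain MPI C, with a hypothetical local_dot() standing in for VecDot_Seq():

#include <mpi.h>

/* Hypothetical helper; stands in for the sequential kernel VecDot_Seq(). */
static double local_dot(const double *x,const double *y,int n)
{
  double s = 0.0;
  int    i;
  for (i=0; i<n; i++) s += x[i]*y[i];
  return s;
}

/* Same shape as VecDot_MPI(): local work first, then one global sum. */
double global_dot(const double *x,const double *y,int n,MPI_Comm comm)
{
  double work = local_dot(x,y,n),sum;
  MPI_Allreduce(&work,&sum,1,MPI_DOUBLE,MPI_SUM,comm);
  return sum;
}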

 33: PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
 34: {
 35:   PetscScalar    sum,work;

 39:   VecTDot_Seq(xin,yin,&work);
 40:   MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,((PetscObject)xin)->comm);
 41:   *z   = sum;
 42:   return(0);
 43: }

 47: PetscErrorCode VecSetOption_MPI(Vec v,VecOption op,PetscTruth flag)
 48: {
 50:   if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
 51:     v->stash.donotstash = flag;
 52:   } else if (op == VEC_IGNORE_NEGATIVE_INDICES) {
 53:     v->stash.ignorenegidx = flag;
 54:   }
 55:   return(0);
 56: }
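VecSetOption_MPI() only toggles the two stash flags. A hedged usage sketch, assuming a PETSc build of the same vintage as this file: when every process sets only the entries it owns, telling the vector to ignore off-process entries lets assembly skip the communication stash.

/* Sketch only: assumes the petscvec.h header and same-era PETSc API. */
#include "petscvec.h"

int main(int argc,char **argv)
{
  Vec            x;
  PetscInt       i,rstart,rend;
  PetscScalar    one = 1.0;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,0,0);CHKERRQ(ierr);
  ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);CHKERRQ(ierr);
  /* Each process will only set entries it owns, so the off-process
     stash used by VecAssemblyBegin/End can be skipped entirely. */
  ierr = VecSetOption(x,VEC_IGNORE_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(x,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = VecSetValues(x,1,&i,&one,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
  ierr = VecDestroy(x);CHKERRQ(ierr);
  ierr = PetscFinalize();CHKERRQ(ierr);
  return 0;
}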
 57: 
 58: EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
 60: EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

 65: PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
 66: {
 68:   Vec_MPI        *v = (Vec_MPI *)vin->data;

 71:   if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
 72:   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
 73:   v->array = (PetscScalar *)a;
 74:   if (v->localrep) {
 75:     VecPlaceArray(v->localrep,a);
 76:   }
 77:   return(0);
 78: }

 82: PetscErrorCode VecResetArray_MPI(Vec vin)
 83: {
 84:   Vec_MPI        *v = (Vec_MPI *)vin->data;

 88:   v->array         = v->unplacedarray;
 89:   v->unplacedarray = 0;
 90:   if (v->localrep) {
 91:     VecResetArray(v->localrep);
 92:   }
 93:   return(0);
 94: }
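VecPlaceArray_MPI() stashes the current array in unplacedarray and VecResetArray_MPI() swaps it back, updating the local representation when one exists. A small usage sketch (same-era PETSc API assumed; the storage passed in must hold at least the local length):

/* Sketch only: temporarily point a VECMPI at user storage, then restore it. */
#include "petscvec.h"

int main(int argc,char **argv)
{
  Vec            x;
  PetscScalar    tmp[4];
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,0,0);CHKERRQ(ierr);
  ierr = VecCreateMPI(PETSC_COMM_WORLD,4,PETSC_DECIDE,&x);CHKERRQ(ierr);
  ierr = VecPlaceArray(x,tmp);CHKERRQ(ierr);  /* x now uses tmp[]; old array is stashed */
  ierr = VecSet(x,2.0);CHKERRQ(ierr);         /* writes directly into tmp[] */
  ierr = VecResetArray(x);CHKERRQ(ierr);      /* x uses its original storage again */
  ierr = VecDestroy(x);CHKERRQ(ierr);
  ierr = PetscFinalize();CHKERRQ(ierr);
  return 0;
}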

 96: EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, const VecType, Vec*);
 97: EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);

 99: static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
100:             VecDuplicateVecs_Default,
101:             VecDestroyVecs_Default,
102:             VecDot_MPI,
103:             VecMDot_MPI,
104:             VecNorm_MPI,
105:             VecTDot_MPI,
106:             VecMTDot_MPI,
107:             VecScale_Seq,
108:             VecCopy_Seq, /* 10 */
109:             VecSet_Seq,
110:             VecSwap_Seq,
111:             VecAXPY_Seq,
112:             VecAXPBY_Seq,
113:             VecMAXPY_Seq,
114:             VecAYPX_Seq,
115:             VecWAXPY_Seq,
116:             VecAXPBYPCZ_Seq,
117:             VecPointwiseMult_Seq,
118:             VecPointwiseDivide_Seq,
119:             VecSetValues_MPI, /* 20 */
120:             VecAssemblyBegin_MPI,
121:             VecAssemblyEnd_MPI,
122:             VecGetArray_Seq,
123:             VecGetSize_MPI,
124:             VecGetSize_Seq,
125:             VecRestoreArray_Seq,
126:             VecMax_MPI,
127:             VecMin_MPI,
128:             VecSetRandom_Seq,
129:             VecSetOption_MPI,
130:             VecSetValuesBlocked_MPI,
131:             VecDestroy_MPI,
132:             VecView_MPI,
133:             VecPlaceArray_MPI,
134:             VecReplaceArray_Seq,
135:             VecDot_Seq,
136:             VecTDot_Seq,
137:             VecNorm_Seq,
138:             VecMDot_Seq,
139:             VecMTDot_Seq,
140:             VecLoadIntoVector_Default,
141:             0, /* VecLoadIntoVectorNative */
142:             VecReciprocal_Default,
143:             0, /* VecViewNative... */
144:             VecConjugate_Seq,
145:             0,
146:             0,
147:             VecResetArray_MPI,
148:             0,
149:             VecMaxPointwiseDivide_Seq,
150:             VecLoad_Binary,
151:             VecPointwiseMax_Seq,
152:             VecPointwiseMaxAbs_Seq,
153:             VecPointwiseMin_Seq,
154:             VecGetValues_MPI};
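DvOps is the method table copied into every VECMPI vector by VecCreate_MPI_Private() below: operations that need communication get the _MPI variants, while purely local operations reuse the _Seq kernels. A sketch of the dispatch idea with hypothetical names (not PETSc API):

/* Sketch only, hypothetical names: how a function-pointer table like
   _VecOps lets a call such as VecDot() end up in VecDot_MPI(). */
typedef struct {
  int (*dot)(void *x,void *y,double *z);   /* e.g. VecDot_MPI   */
  int (*scale)(void *x,double a);          /* e.g. VecScale_Seq */
} MyVecOps;

typedef struct {
  MyVecOps ops;    /* filled from a static table when the vector is created */
  void     *data;  /* implementation-specific data, cf. Vec_MPI             */
} MyVec;

/* The public entry point just forwards through the table. */
static int my_vec_dot(MyVec *x,MyVec *y,double *z)
{
  return x->ops.dot(x,y,z);
}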

158: /*
159:     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
160:     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
161:     VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

163:     If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
164:     no space is allocated.
165: */
166: PetscErrorCode VecCreate_MPI_Private(Vec v,PetscTruth alloc,PetscInt nghost,const PetscScalar array[])
167: {
168:   Vec_MPI        *s;


173:   PetscNewLog(v,Vec_MPI,&s);
174:   v->data        = (void*)s;
175:   PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));
176:   s->nghost      = nghost;
177:   v->mapping     = 0;
178:   v->bmapping    = 0;
179:   v->petscnative = PETSC_TRUE;

181:   if (v->map->bs == -1) v->map->bs = 1;
182:   PetscMapSetUp(v->map);
183:   s->array           = (PetscScalar *)array;
184:   s->array_allocated = 0;
185:   if (alloc && !array) {
186:     PetscInt n         = v->map->n+nghost;
187:     PetscMalloc(n*sizeof(PetscScalar),&s->array);
188:     PetscLogObjectMemory(v,n*sizeof(PetscScalar));
189:     PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));
190:     s->array_allocated = s->array;
191:   }

193:   /* By default parallel vectors do not have local representation */
194:   s->localrep    = 0;
195:   s->localupdate = 0;

197:   v->stash.insertmode  = NOT_SET_VALUES;
198:   /* create the stashes. The block-size for bstash is set later when 
199:      VecSetValuesBlocked is called.
200:   */
201:   VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);
202:   VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);
203: 
204: #if defined(PETSC_HAVE_MATLAB_ENGINE)
205:   PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);
206:   PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);
207: #endif
208:   PetscObjectChangeTypeName((PetscObject)v,VECMPI);
209:   PetscPublishAll(v);
210:   return(0);
211: }

213: /*MC
214:    VECMPI - VECMPI = "mpi" - The basic parallel vector

216:    Options Database Keys:
217: . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

219:   Level: beginner

221: .seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
222: M*/

227: PetscErrorCode  VecCreate_MPI(Vec vv)
228: {

232:   VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);
233:   return(0);
234: }
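A short sketch of selecting this type from the options database (same-era API assumed): running with -vec_type mpi routes VecSetFromOptions() to VecCreate_MPI() above, and VecSetType(x,VECMPI) does the same thing directly.

/* Sketch only: pick the vector type at runtime. */
#include "petscvec.h"

int main(int argc,char **argv)
{
  Vec            x;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,0,0);CHKERRQ(ierr);
  ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
  ierr = VecSetSizes(x,PETSC_DECIDE,100);CHKERRQ(ierr);
  /* With -vec_type mpi on the command line this ends up in VecCreate_MPI(). */
  ierr = VecSetFromOptions(x);CHKERRQ(ierr);
  ierr = VecDestroy(x);CHKERRQ(ierr);
  ierr = PetscFinalize();CHKERRQ(ierr);
  return 0;
}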

239: /*@C
240:    VecCreateMPIWithArray - Creates a parallel, array-style vector,
241:    where the user provides the array space to store the vector values.

243:    Collective on MPI_Comm

245:    Input Parameters:
246: +  comm  - the MPI communicator to use
247: .  n     - local vector length, cannot be PETSC_DECIDE
248: .  N     - global vector length (or PETSC_DECIDE to have calculated)
249: -  array - the user provided array to store the vector values

251:    Output Parameter:
252: .  vv - the vector
253:  
254:    Notes:
255:    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
256:    same type as an existing vector.

258:    If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
259:    at a later stage to SET the array for storing the vector values.

261:    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
262:    The user should not free the array until the vector is destroyed.

264:    Level: intermediate

266:    Concepts: vectors^creating with array

268: .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
269:           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

271: @*/
272: PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
273: {

277:   if (n == PETSC_DECIDE) {
278:     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
279:   }
280:   PetscSplitOwnership(comm,&n,&N);
281:   VecCreate(comm,vv);
282:   VecSetSizes(*vv,n,N);
283:   VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);
284:   return(0);
285: }
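A hedged usage sketch (same-era API assumed): the caller supplies the storage, so it must remain valid for the life of the vector and is not freed by VecDestroy().

/* Sketch only: user-provided storage backing a parallel vector. */
#include "petscvec.h"

int main(int argc,char **argv)
{
  Vec            x;
  PetscScalar    storage[5] = {0,0,0,0,0};   /* 5 local entries per process */
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,0,0);CHKERRQ(ierr);
  /* The local size must be given explicitly; the global size is summed. */
  ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,5,PETSC_DECIDE,storage,&x);CHKERRQ(ierr);
  ierr = VecSet(x,1.0);CHKERRQ(ierr);        /* writes directly into storage[] */
  ierr = VecDestroy(x);CHKERRQ(ierr);        /* storage[] is NOT freed here */
  ierr = PetscFinalize();CHKERRQ(ierr);
  return 0;
}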

289: /*@
290:     VecGhostGetLocalForm - Obtains the local ghosted representation of 
291:     a parallel vector created with VecCreateGhost().

293:     Not Collective

295:     Input Parameter:
296: .   g - the global vector. The vector must have been obtained with either
297:         VecCreateGhost(), VecCreateGhostWithArray(), or VecCreateSeq().

299:     Output Parameter:
300: .   l - the local (ghosted) representation

302:     Notes:
303:     This routine does not actually update the ghost values, but rather it
304:     returns a sequential vector that includes the locations for the ghost
305:     values and their current values. The returned vector and the original
306:     vector passed in share the same array that contains the actual vector data.

308:     One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
309:     finished using the object.

311:     Level: advanced

313:    Concepts: vectors^ghost point access

315: .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

317: @*/
318: PetscErrorCode  VecGhostGetLocalForm(Vec g,Vec *l)
319: {
321:   PetscTruth     isseq,ismpi;


327:   PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);
328:   PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);
329:   if (ismpi) {
330:     Vec_MPI *v  = (Vec_MPI*)g->data;
331:     if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
332:     *l = v->localrep;
333:   } else if (isseq) {
334:     *l = g;
335:   } else {
336:     SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",((PetscObject)g)->type_name);
337:   }
338:   PetscObjectReference((PetscObject)*l);
339:   return(0);
340: }
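A fragment (not a complete program) showing the layout the local form exposes: the owned entries come first in local numbering and the nghost ghost entries follow. Here gx and nghost are assumed to come from an earlier VecCreateGhost() call, and error checking is omitted.

/* Fragment, sketch only: read ghost values through the local form. */
Vec         lx;
PetscScalar *a;
PetscInt    i,n;

VecGhostGetLocalForm(gx,&lx);      /* gx: ghosted vector from VecCreateGhost() */
VecGetLocalSize(gx,&n);            /* owned entries occupy a[0..n-1] ...       */
VecGetArray(lx,&a);
for (i=0; i<nghost; i++) {         /* ... ghost values sit at a[n+i]           */
  PetscPrintf(PETSC_COMM_SELF,"ghost %D = %G\n",i,PetscRealPart(a[n+i]));
}
VecRestoreArray(lx,&a);
VecGhostRestoreLocalForm(gx,&lx);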

344: /*@
345:     VecGhostRestoreLocalForm - Restores the local ghosted representation of 
346:     a parallel vector obtained with VecGhostGetLocalForm().

348:     Not Collective

350:    Input Parameters:
351: +   g - the global vector
352: -   l - the local (ghosted) representation

354:     Notes:
355:     This routine does not update the ghost values; it simply releases the
356:     reference to the local (ghosted) representation that was obtained with
357:     VecGhostGetLocalForm().

359:     Level: advanced

361: .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
362: @*/
363: PetscErrorCode  VecGhostRestoreLocalForm(Vec g,Vec *l)
364: {
366:   PetscObjectDereference((PetscObject)*l);
367:   return(0);
368: }

372: /*@
373:    VecGhostUpdateBegin - Begins the vector scatter to update the vector from
374:    local representation to global or global representation to local.

376:    Collective on Vec

378:    Input Parameters:
379: +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
380: .  insertmode - one of ADD_VALUES or INSERT_VALUES
381: -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

383:    Notes:
384:    Use the following to update the ghost regions with correct values from the owning process
385: .vb
386:        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
387:        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
388: .ve

390:    Use the following to accumulate the ghost region values onto the owning processors
391: .vb
392:        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
393:        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
394: .ve

396:    To accumulate the ghost region values onto the owning processors and then update
397:    the ghost regions correctly, call the latter followed by the former, i.e.,
398: .vb
399:        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
400:        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
401:        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
402:        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
403: .ve

405:    Level: advanced

407: .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
408:           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()

410: @*/
411: PetscErrorCode  VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
412: {
413:   Vec_MPI        *v;


419:   v  = (Vec_MPI*)g->data;
420:   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
421:   if (!v->localupdate) return(0);
422: 
423:   if (scattermode == SCATTER_REVERSE) {
424:     VecScatterBegin(v->localupdate,v->localrep,g,insertmode,scattermode);
425:   } else {
426:     VecScatterBegin(v->localupdate,g,v->localrep,insertmode,scattermode);
427:   }
428:   return(0);
429: }

433: /*@
434:    VecGhostUpdateEnd - End the vector scatter to update the vector from
435:    local representation to global or global representation to local.

437:    Collective on Vec

439:    Input Parameters:
440: +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
441: .  insertmode - one of ADD_VALUES or INSERT_VALUES
442: -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

444:    Notes:

446:    Use the following to update the ghost regions with correct values from the owning process
447: .vb
448:        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
449:        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
450: .ve

452:    Use the following to accumulate the ghost region values onto the owning processors
453: .vb
454:        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
455:        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
456: .ve

458:    To accumulate the ghost region values onto the owning processors and then update
459:    the ghost regions correctly, call the latter followed by the former, i.e.,
460: .vb
461:        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
462:        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
463:        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
464:        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
465: .ve

467:    Level: advanced

469: .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
470:           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()

472: @*/
473: PetscErrorCode  VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
474: {
475:   Vec_MPI        *v;


481:   v  = (Vec_MPI*)g->data;
482:   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
483:   if (!v->localupdate) return(0);

485:   if (scattermode == SCATTER_REVERSE) {
486:     VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);
487:   } else {
488:     VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);
489:   }
490:   return(0);
491: }

495: /*@C
496:    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
497:    the caller allocates the array space.

499:    Collective on MPI_Comm

501:    Input Parameters:
502: +  comm - the MPI communicator to use
503: .  n - local vector length 
504: .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
505: .  nghost - number of local ghost points
506: .  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
507: -  array - the space to store the vector values (of length at least n + nghost)

509:    Output Parameter:
510: .  vv - the global vector representation (without ghost points as part of vector)
511:  
512:    Notes:
513:    Use VecGhostGetLocalForm() to access the local, ghosted representation 
514:    of the vector.

516:    This also automatically sets the ISLocalToGlobalMapping() for this vector.

518:    Level: advanced

520:    Concepts: vectors^creating with array
521:    Concepts: vectors^ghosted

523: .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), 
524:           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
525:           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

527: @*/
528: PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
529: {
530:   PetscErrorCode         ierr;
531:   Vec_MPI                *w;
532:   PetscScalar            *larray;
533:   IS                     from,to;
534:   ISLocalToGlobalMapping ltog;
535:   PetscInt               rstart,i,*indices;

538:   *vv = 0;

540:   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
541:   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
542:   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
543:   PetscSplitOwnership(comm,&n,&N);
544:   /* Create global representation */
545:   VecCreate(comm,vv);
546:   VecSetSizes(*vv,n,N);
547:   VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);
548:   w    = (Vec_MPI *)(*vv)->data;
549:   /* Create local representation */
550:   VecGetArray(*vv,&larray);
551:   VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);
552:   PetscLogObjectParent(*vv,w->localrep);
553:   VecRestoreArray(*vv,&larray);

555:   /*
556:        Create scatter context for scattering (updating) ghost values 
557:   */
558:   ISCreateGeneral(comm,nghost,ghosts,&from);
559:   ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);
560:   VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
561:   PetscLogObjectParent(*vv,w->localupdate);
562:   ISDestroy(to);
563:   ISDestroy(from);

565:   /* set local to global mapping for ghosted vector */
566:   PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);
567:   VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);
568:   for (i=0; i<n; i++) {
569:     indices[i] = rstart + i;
570:   }
571:   for (i=0; i<nghost; i++) {
572:     indices[n+i] = ghosts[i];
573:   }
574:   ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);
575:   PetscFree(indices);
576:   VecSetLocalToGlobalMapping(*vv,ltog);
577:   ISLocalToGlobalMappingDestroy(ltog);
579:   return(0);
580: }

584: /*@
585:    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

587:    Collective on MPI_Comm

589:    Input Parameters:
590: +  comm - the MPI communicator to use
591: .  n - local vector length 
592: .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
593: .  nghost - number of local ghost points
594: -  ghosts - global indices of ghost points

596:    Output Parameter:
597: .  vv - the global vector representation (without ghost points as part of vector)
598:  
599:    Notes:
600:    Use VecGhostGetLocalForm() to access the local, ghosted representation 
601:    of the vector.

603:    This also automatically sets the ISLocalToGlobalMapping() for this vector.

605:    Level: advanced

607:    Concepts: vectors^ghosted

609: .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
610:           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
611:           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
612:           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

614: @*/
615: PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
616: {

620:   VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);
621:   return(0);
622: }
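A hedged two-process example (same-era API assumed): each process owns two entries and ghosts the first entry owned by the other process, then pulls the owning values into its ghost slots.

/* Sketch only: ghosted vector on exactly two processes. */
#include "petscvec.h"

int main(int argc,char **argv)
{
  Vec            x;
  PetscMPIInt    rank,size;
  PetscInt       ghost[1];
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,0,0);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  if (size != 2) SETERRQ(PETSC_ERR_SUP,"Run with exactly 2 processes");

  ghost[0] = rank ? 0 : 2;   /* a global index owned by the other process */
  ierr = VecCreateGhost(PETSC_COMM_WORLD,2,PETSC_DECIDE,1,ghost,&x);CHKERRQ(ierr);
  ierr = VecSet(x,(PetscScalar)(rank+1));CHKERRQ(ierr);

  /* Fill the ghost slots with the owning process's values. */
  ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  ierr = VecDestroy(x);CHKERRQ(ierr);
  ierr = PetscFinalize();CHKERRQ(ierr);
  return 0;
}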

626: PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
627: {
629:   Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
630:   PetscScalar    *array;

633:   VecCreate(((PetscObject)win)->comm,v);

635:   /* use the map that already exists in win */
636:   PetscMapDestroy((*v)->map);
637:   (*v)->map = win->map;
638:   win->map->refcnt++;

640:   VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);
641:   vw   = (Vec_MPI *)(*v)->data;
642:   PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));

644:   /* save local representation of the parallel vector (and scatter) if it exists */
645:   if (w->localrep) {
646:     VecGetArray(*v,&array);
647:     VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->n+w->nghost,array,&vw->localrep);
648:     PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));
649:     VecRestoreArray(*v,&array);
650:     PetscLogObjectParent(*v,vw->localrep);
651:     vw->localupdate = w->localupdate;
652:     if (vw->localupdate) {
653:       PetscObjectReference((PetscObject)vw->localupdate);
654:     }
655:   }

657:   /* New vector should inherit stashing property of parent */
658:   (*v)->stash.donotstash = win->stash.donotstash;
659:   (*v)->stash.ignorenegidx = win->stash.ignorenegidx;
660: 
661:   PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);
662:   PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);
663:   if (win->mapping) {
664:     PetscObjectReference((PetscObject)win->mapping);
665:     (*v)->mapping = win->mapping;
666:   }
667:   if (win->bmapping) {
668:     PetscObjectReference((PetscObject)win->bmapping);
669:     (*v)->bmapping = win->bmapping;
670:   }
671:   (*v)->map->bs    = win->map->bs;
672:   (*v)->bstash.bs = win->bstash.bs;

674:   return(0);
675: }

677: /* ------------------------------------------------------------------------------------------*/
680: /*@C
681:    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
682:    the caller allocates the array space. Indices in the ghost region are based on blocks.

684:    Collective on MPI_Comm

686:    Input Parameters:
687: +  comm - the MPI communicator to use
688: .  bs - block size
689: .  n - local vector length 
690: .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
691: .  nghost - number of local ghost blocks
692: .  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
693: -  array - the space to store the vector values (of length at least n + nghost*bs)

695:    Output Parameter:
696: .  vv - the global vector representation (without ghost points as part of vector)
697:  
698:    Notes:
699:    Use VecGhostGetLocalForm() to access the local, ghosted representation 
700:    of the vector.

702:    n is the local vector size (total local size not the number of blocks) while nghost
703:    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
704:    portion is bs*nghost

706:    Level: advanced

708:    Concepts: vectors^creating ghosted
709:    Concepts: vectors^creating with array

711: .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), 
712:           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
713:           VecCreateGhostWithArray(), VecCreateGhostBlock()

715: @*/
716: PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
717: {
719:   Vec_MPI        *w;
720:   PetscScalar    *larray;
721:   IS             from,to;
722:   ISLocalToGlobalMapping ltog;
723:   PetscInt       rstart,i,nb,*indices;

726:   *vv = 0;

728:   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
729:   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
730:   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
731:   if (n % bs)                 SETERRQ(PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
732:   PetscSplitOwnership(comm,&n,&N);
733:   /* Create global representation */
734:   VecCreate(comm,vv);
735:   VecSetSizes(*vv,n,N);
736:   VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);
737:   VecSetBlockSize(*vv,bs);
738:   w    = (Vec_MPI *)(*vv)->data;
739:   /* Create local representation */
740:   VecGetArray(*vv,&larray);
741:   VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);
742:   VecSetBlockSize(w->localrep,bs);
743:   PetscLogObjectParent(*vv,w->localrep);
744:   VecRestoreArray(*vv,&larray);

746:   /*
747:        Create scatter context for scattering (updating) ghost values 
748:   */
749:   ISCreateBlock(comm,bs,nghost,ghosts,&from);
750:   ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);
751:   VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
752:   PetscLogObjectParent(*vv,w->localupdate);
753:   ISDestroy(to);
754:   ISDestroy(from);

756:   /* set local to global mapping for ghosted vector */
757:   nb = n/bs;
758:   PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);
759:   VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);
760:   for (i=0; i<nb; i++) {
761:     indices[i] = rstart + i*bs;
762:   }
763:   for (i=0; i<nghost; i++) {
764:     indices[nb+i] = ghosts[i];
765:   }
766:   ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,&ltog);
767:   PetscFree(indices);
768:   VecSetLocalToGlobalMappingBlock(*vv,ltog);
769:   ISLocalToGlobalMappingDestroy(ltog);

772:   return(0);
773: }

777: /*@
778:    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
779:         The indexing of the ghost points is done with blocks.

781:    Collective on MPI_Comm

783:    Input Parameters:
784: +  comm - the MPI communicator to use
785: .  bs - the block size
786: .  n - local vector length 
787: .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
788: .  nghost - number of local ghost blocks
789: -  ghosts - global indices of ghost blocks

791:    Output Parameter:
792: .  vv - the global vector representation (without ghost points as part of vector)
793:  
794:    Notes:
795:    Use VecGhostGetLocalForm() to access the local, ghosted representation 
796:    of the vector.

798:    n is the local vector size (total local size not the number of blocks) while nghost
799:    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
800:    portion is bs*nghost

802:    Level: advanced

804:    Concepts: vectors^ghosted

806: .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
807:           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
808:           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

810: @*/
811: PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
812: {

816:   VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);
817:   return(0);
818: }
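A fragment relating the block arguments to scalar lengths (error checking omitted; the ghost block index shown is a placeholder and must refer to a block owned by another process):

/* Fragment, sketch only: bs = 2, n = 4 owned scalars (2 blocks) per process,
   1 ghost block, so the local form has n + bs*nghost = 6 scalar entries. */
Vec      x;
PetscInt bs = 2, n = 4, nghost = 1;
PetscInt ghostblocks[1] = {0};   /* placeholder: a global BLOCK index owned elsewhere */

VecCreateGhostBlock(PETSC_COMM_WORLD,bs,n,PETSC_DECIDE,nghost,ghostblocks,&x);
/* ... use the vector, e.g. VecGhostUpdateBegin/End() as above ... */
VecDestroy(x);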

820: /*
821:     These introduce a ghosted vector where the ghosting is determined by the call to 
822:   VecSetLocalToGlobalMapping()
823: */

827: PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
828: {
830:   Vec_MPI        *v = (Vec_MPI *)vv->data;

833:   v->nghost = map->n - vv->map->n;

835:   /* we need to enlarge the array space that was allocated when the vector was created */
836:   PetscFree(v->array_allocated);
837:   PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);
838:   v->array = v->array_allocated;
839: 
840:   /* Create local representation */
841:   VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);
842:   PetscLogObjectParent(vv,v->localrep);
843:   return(0);
844: }


849: PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
850: {
852:   Vec_MPI        *v = (Vec_MPI *)vv->data;

855:   VecSetValues(v->localrep,n,ix,values,mode);
856:   return(0);
857: }

862: PetscErrorCode  VecCreate_FETI(Vec vv)
863: {

867:   VecSetType(vv,VECMPI);
868: 
869:   /* overwrite the functions to handle setting values locally */
870:   vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
871:   vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
872:   vv->ops->assemblybegin           = 0;
873:   vv->ops->assemblyend             = 0;
874:   vv->ops->setvaluesblocked        = 0;

877:   return(0);
878: }