Actual source code: da2.c

  1: #define PETSCDM_DLL
  2: 
  3:  #include "../src/dm/da/daimpl.h"

  7: /*@C
  8:       DAGetNeighbors - Gets an array containing the MPI ranks of all the current
  9:         process's neighbors.

 11:     Not Collective

 13:    Input Parameter:
 14: .     da - the DA object

 16:    Output Parameters:
 17: .     ranks - the neighbors' ranks, stored with the x index increasing most rapidly;
 18:               this process itself is in the list

 20:    Notes: In 2d the array is of length 9, in 3d it is of length 27
 21:           Not supported in 1d
 22:           Do not free the array; it is freed when the DA is destroyed.

 24:    Fortran Notes: In Fortran you must pass in an array of the appropriate length.

 26:    Level: intermediate

 28: @*/
 29: PetscErrorCode  DAGetNeighbors(DA da,const PetscMPIInt *ranks[])
 30: {
 33:   *ranks = da->neighbors;
 34:   return(0);
 35: }
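/* Usage sketch (added for illustration; not part of the original da2.c):
   query and print the 2d neighbor ranks of this process. Assumes da is a
   valid 2d DA; negative entries mark missing neighbors on non-periodic
   boundaries. */
static PetscErrorCode PrintNeighbors2d(DA da)
{
  const PetscMPIInt *nbrs;

  DAGetNeighbors(da,&nbrs);
  /* stored with the x index increasing most rapidly:
        nbrs[6] nbrs[7] nbrs[8]
        nbrs[3] nbrs[4] nbrs[5]     (nbrs[4] is this process's own rank)
        nbrs[0] nbrs[1] nbrs[2]                                          */
  PetscPrintf(PETSC_COMM_SELF,"left %d right %d below %d above %d\n",
              (int)nbrs[3],(int)nbrs[5],(int)nbrs[1],(int)nbrs[7]);
  return(0);
}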

 39: /*@C
 40:       DMGetElements - Gets an array containing the indices (in local coordinates) 
 41:                  of all the local elements

 43:     Not Collective

 45:    Input Parameter:
 46: .     dm - the DM object

 48:    Output Parameters:
 49: +     n - number of local elements
 50: -     e - the indices of the elements' vertices

 52:    Level: intermediate

 54: .seealso: DMElementType, DMSetElementType(), DMRestoreElements()
 55: @*/
 56: PetscErrorCode  DMGetElements(DM dm,PetscInt *n,const PetscInt *e[])
 57: {
 61:   (dm->ops->getelements)(dm,n,e);
 62:   return(0);
 63: }

 67: /*@C
 68:       DMRestoreElements - Restores an array containing the indices (in local coordinates) 
 69:                  of all the local elements obtained with DMGetElements()

 71:     Not Collective

 73:    Input Parameters:
 74: +     dm - the DM object
 75: .     n - number of local elements
 76: -     e - the indices of the elements' vertices

 78:    Level: intermediate

 80: .seealso: DMElementType, DMSetElementType(), DMGetElements()
 81: @*/
 82: PetscErrorCode  DMRestoreElements(DM dm,PetscInt *n,const PetscInt *e[])
 83: {
 87:   if (dm->ops->restoreelements) {
 88:     (dm->ops->restoreelements)(dm,n,e);
 89:   }
 90:   return(0);
 91: }
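/* Usage sketch (added for illustration; not part of the original da2.c):
   walk the local elements of a dm. For the 2d P1 case implemented below,
   each element is a triangle and so carries 3 vertex indices. */
static PetscErrorCode PrintLocalTriangles(DM dm)
{
  PetscInt       n,i;
  const PetscInt *e;

  DMGetElements(dm,&n,&e);
  for (i=0; i<n; i++) {
    PetscPrintf(PETSC_COMM_SELF,"element %D: vertices %D %D %D\n",i,e[3*i],e[3*i+1],e[3*i+2]);
  }
  DMRestoreElements(dm,&n,&e);
  return(0);
}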

 95: /*@C
 96:       DAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process

 98:     Not Collective

100:    Input Parameter:
101: .     da - the DA object

103:    Output Parameters:
104: +     lx - ownership along x direction (optional)
105: .     ly - ownership along y direction (optional)
106: -     lz - ownership along z direction (optional)

108:    Level: intermediate

110:     Note: these correspond to the optional final arguments passed to DACreate(), DACreate2d(), DACreate3d()

112:     In Fortran one must pass in arrays lx, ly, and lz that are long enough to hold the values; the required lengths
113:     are the sixth, seventh and eighth arguments returned by DAGetInfo()

115:      In C you should not free these arrays, nor change the values in them. They will only have valid values while the
116:     DA they came from still exists (has not been destroyed).

118: .seealso: DAGetCorners(), DAGetGhostCorners(), DACreate(), DACreate1d(), DACreate2d(), DACreate3d(), VecGetOwnershipRanges()
119: @*/
120: PetscErrorCode  DAGetOwnershipRanges(DA da,const PetscInt *lx[],const PetscInt *ly[],const PetscInt *lz[])
121: {
124:   if (lx) *lx = da->lx;
125:   if (ly) *ly = da->ly;
126:   if (lz) *lz = da->lz;
127:   return(0);
128: }
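/* Usage sketch (added for illustration; not part of the original da2.c):
   print the ownership ranges of a 2d DA. The process counts m and n are the
   sixth and seventh arguments of DAGetInfo(); lz is not requested since the
   DA is two-dimensional. The arrays belong to the DA and must not be freed. */
static PetscErrorCode PrintOwnership2d(DA da)
{
  const PetscInt *lx,*ly;
  PetscInt       m,n,i;

  DAGetOwnershipRanges(da,&lx,&ly,PETSC_NULL);
  DAGetInfo(da,PETSC_NULL,PETSC_NULL,PETSC_NULL,PETSC_NULL,&m,&n,PETSC_NULL,
            PETSC_NULL,PETSC_NULL,PETSC_NULL,PETSC_NULL);
  for (i=0; i<m; i++) PetscPrintf(PETSC_COMM_SELF,"lx[%D] = %D\n",i,lx[i]);
  for (i=0; i<n; i++) PetscPrintf(PETSC_COMM_SELF,"ly[%D] = %D\n",i,ly[i]);
  return(0);
}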

132: PetscErrorCode DAView_2d(DA da,PetscViewer viewer)
133: {
135:   PetscMPIInt    rank;
136:   PetscTruth     iascii,isdraw;

139:   MPI_Comm_rank(((PetscObject)da)->comm,&rank);

141:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
142:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
143:   if (iascii) {
144:     PetscViewerFormat format;

146:     PetscViewerGetFormat(viewer, &format);
147:     if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) {
148:       PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D N %D m %D n %D w %D s %D\n",rank,da->M,
149:                                                 da->N,da->m,da->n,da->w,da->s);
150:       PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D, Y range of indices: %D %D\n",da->xs,da->xe,da->ys,da->ye);
151:       PetscViewerFlush(viewer);
152:     }
153:   } else if (isdraw) {
154:     PetscDraw       draw;
155:     double     ymin = -1*da->s-1,ymax = da->N+da->s;
156:     double     xmin = -1*da->s-1,xmax = da->M+da->s;
157:     double     x,y;
158:     PetscInt   base,*idx;
159:     char       node[10];
160:     PetscTruth isnull;
161: 
162:     PetscViewerDrawGetDraw(viewer,0,&draw);
163:     PetscDrawIsNull(draw,&isnull); if (isnull) return(0);
164:     if (!da->coordinates) {
165:       PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);
166:     }
167:     PetscDrawSynchronizedClear(draw);

169:     /* first processor draw all node lines */
170:     if (!rank) {
171:       ymin = 0.0; ymax = da->N - 1;
172:       for (xmin=0; xmin<da->M; xmin++) {
173:         PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_BLACK);
174:       }
175:       xmin = 0.0; xmax = da->M - 1;
176:       for (ymin=0; ymin<da->N; ymin++) {
177:         PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);
178:       }
179:     }
180:     PetscDrawSynchronizedFlush(draw);
181:     PetscDrawPause(draw);

183:     /* draw my box */
184:     ymin = da->ys; ymax = da->ye - 1; xmin = da->xs/da->w;
185:     xmax =(da->xe-1)/da->w;
186:     PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);
187:     PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);
188:     PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);
189:     PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);

191:     /* put in numbers */
192:     base = (da->base)/da->w;
193:     for (y=ymin; y<=ymax; y++) {
194:       for (x=xmin; x<=xmax; x++) {
195:         sprintf(node,"%d",(int)base++);
196:         PetscDrawString(draw,x,y,PETSC_DRAW_BLACK,node);
197:       }
198:     }

200:     PetscDrawSynchronizedFlush(draw);
201:     PetscDrawPause(draw);
202:     /* overlay ghost numbers, useful for error checking */
203:     /* put in numbers */

205:     base = 0; idx = da->idx;
206:     ymin = da->Ys; ymax = da->Ye; xmin = da->Xs; xmax = da->Xe;
207:     for (y=ymin; y<ymax; y++) {
208:       for (x=xmin; x<xmax; x++) {
209:         if ((base % da->w) == 0) {
210:           sprintf(node,"%d",(int)(idx[base]/da->w));
211:           PetscDrawString(draw,x/da->w,y,PETSC_DRAW_BLUE,node);
212:         }
213:         base++;
214:       }
215:     }
216:     PetscDrawSynchronizedFlush(draw);
217:     PetscDrawPause(draw);
218:   } else {
219:     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for DA2d",((PetscObject)viewer)->type_name);
220:   }
221:   return(0);
222: }

224: #if 0
227: PetscErrorCode DAPublish_Petsc(PetscObject obj)
228: {
230:   return(0);
231: }
232: #endif

236: PetscErrorCode DAGetElements_2d_P1(DA da,PetscInt *n,const PetscInt *e[])
237: {
239:   PetscInt       i,j,cnt,xs,xe = da->xe,ys,ye = da->ye,Xs = da->Xs, Xe = da->Xe, Ys = da->Ys;

242:   if (!da->e) {
243:     if (da->xs == Xs) xs = da->xs; else xs = da->xs - 1;
244:     if (da->ys == Ys) ys = da->ys; else ys = da->ys - 1;
245:     da->ne = 2*(xe - xs - 1)*(ye - ys - 1);
246:     PetscMalloc((1 + 3*da->ne)*sizeof(PetscInt),&da->e);
247:     cnt    = 0;
248:     for (j=ys; j<ye-1; j++) {
249:       for (i=xs; i<xe-1; i++) {
250:         da->e[cnt]   = i - Xs + (j - Ys)*(Xe - Xs);
251:         da->e[cnt+1] = i - Xs + 1 + (j - Ys)*(Xe - Xs);
252:         da->e[cnt+2] = i - Xs + (j - Ys + 1)*(Xe - Xs);

254:         da->e[cnt+3] = i - Xs + 1 + (j - Ys + 1)*(Xe - Xs);
255:         da->e[cnt+4] = i - Xs + (j - Ys + 1)*(Xe - Xs);
256:         da->e[cnt+5] = i - Xs + 1 + (j - Ys)*(Xe - Xs);
257:         cnt += 6;
258:       }
259:     }
260:   }
261:   *n = da->ne;
262:   *e = da->e;
263:   return(0);
264: }
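/* Worked example (added for illustration): on one process with a 3-node-wide
   ghosted patch (Xs=0, Xe=3, Ys=0), the cell at i=0,j=0 produces the two
   triangles {0,1,3} and {4,3,1}; each cell is split along its diagonal into
   two P1 elements, with vertex k at patch position (k%3, k/3). */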


269: /*@C
270:    DACreate2d -  Creates an object that will manage the communication of  two-dimensional 
271:    regular array data that is distributed across some processors.

273:    Collective on MPI_Comm

275:    Input Parameters:
276: +  comm - MPI communicator
277: .  wrap - type of periodicity the array should have. 
278:          Use one of DA_NONPERIODIC, DA_XPERIODIC, DA_YPERIODIC, or DA_XYPERIODIC.
279: .  stencil_type - stencil type.  Use either DA_STENCIL_BOX or DA_STENCIL_STAR.
280: .  M,N - global dimension in each direction of the array (use -M and/or -N to indicate that it may be set to a different value 
281:             from the command line with -da_grid_x <M> -da_grid_y <N>)
282: .  m,n - corresponding number of processors in each dimension 
283:          (or PETSC_DECIDE to have it calculated)
284: .  dof - number of degrees of freedom per node
285: .  s - stencil width
286: -  lx, ly - arrays containing the number of nodes in each cell along
287:            the x and y coordinates, or PETSC_NULL. If non-null, these
288:            must be of length m and n respectively, and the corresponding
289:            m and n cannot be PETSC_DECIDE. The sum of the lx[] entries
290:            must be M, and the sum of the ly[] entries must be N.

292:    Output Parameter:
293: .  inra - the resulting distributed array object

295:    Options Database Keys:
296: +  -da_view - Calls DAView() at the conclusion of DACreate2d()
297: .  -da_grid_x <nx> - number of grid points in x direction, if M < 0
298: .  -da_grid_y <ny> - number of grid points in y direction, if N < 0
299: .  -da_processors_x <nx> - number of processors in x direction
300: .  -da_processors_y <ny> - number of processors in y direction
301: .  -da_refine_x - refinement ratio in x direction
302: -  -da_refine_y - refinement ratio in y direction

304:    Level: beginner

306:    Notes:
307:    The stencil type DA_STENCIL_STAR with width 1 corresponds to the 
308:    standard 5-pt stencil, while DA_STENCIL_BOX with width 1 denotes
309:    the standard 9-pt stencil.

311:    The array data itself is NOT stored in the DA, it is stored in Vec objects;
312:    The appropriate vector objects can be obtained with calls to DACreateGlobalVector()
313:    and DACreateLocalVector() and calls to VecDuplicate() if more are needed.

315: .keywords: distributed array, create, two-dimensional

317: .seealso: DADestroy(), DAView(), DACreate1d(), DACreate3d(), DAGlobalToLocalBegin(), DAGetRefinementFactor(),
318:           DAGlobalToLocalEnd(), DALocalToGlobal(), DALocalToLocalBegin(), DALocalToLocalEnd(), DASetRefinementFactor(),
319:           DAGetInfo(), DACreateGlobalVector(), DACreateLocalVector(), DACreateNaturalVector(), DALoad(), DAView(), DAGetOwnershipRanges()

321: @*/
322: PetscErrorCode  DACreate2d(MPI_Comm comm,DAPeriodicType wrap,DAStencilType stencil_type,
323:                           PetscInt M,PetscInt N,PetscInt m,PetscInt n,PetscInt dof,PetscInt s,const PetscInt lx[],const PetscInt ly[],DA *inra)
324: {
326:   PetscMPIInt    rank,size;
327:   PetscInt       xs,xe,ys,ye,x,y,Xs,Xe,Ys,Ye,start,end;
328:   PetscInt       up,down,left,i,n0,n1,n2,n3,n5,n6,n7,n8,*idx,nn;
329:   PetscInt       xbase,*bases,*ldims,j,x_t,y_t,s_t,base,count;
330:   PetscInt       s_x,s_y; /* s proportionalized to w */
331:   PetscInt       *flx = 0,*fly = 0;
332:   PetscInt       sn0 = 0,sn2 = 0,sn6 = 0,sn8 = 0,refine_x = 2, refine_y = 2,tM = M,tN = N;
333:   DA             da;
334:   Vec            local,global;
335:   VecScatter     ltog,gtol;
336:   IS             to,from;

340:   *inra = 0;
341: #ifndef PETSC_USE_DYNAMIC_LIBRARIES
342:   DMInitializePackage(PETSC_NULL);
343: #endif

345:   if (dof < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %D",dof);
346:   if (s < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %D",s);

348:   PetscOptionsBegin(comm,PETSC_NULL,"2d DA Options","DA");
349:     if (M < 0){
350:       tM = -M;
351:       PetscOptionsInt("-da_grid_x","Number of grid points in x direction","DACreate2d",tM,&tM,PETSC_NULL);
352:     }
353:     if (N < 0){
354:       tN = -N;
355:       PetscOptionsInt("-da_grid_y","Number of grid points in y direction","DACreate2d",tN,&tN,PETSC_NULL);
356:     }
357:     PetscOptionsInt("-da_processors_x","Number of processors in x direction","DACreate2d",m,&m,PETSC_NULL);
358:     PetscOptionsInt("-da_processors_y","Number of processors in y direction","DACreate2d",n,&n,PETSC_NULL);
359:     PetscOptionsInt("-da_refine_x","Refinement ratio in x direction","DASetRefinementFactor",refine_x,&refine_x,PETSC_NULL);
360:     PetscOptionsInt("-da_refine_y","Refinement ratio in y direction","DASetRefinementFactor",refine_y,&refine_y,PETSC_NULL);
361:   PetscOptionsEnd();
362:   M = tM; N = tN;

364:   PetscHeaderCreate(da,_p_DA,struct _DAOps,DM_COOKIE,0,"DM",comm,DADestroy,DAView);
365:   PetscObjectChangeTypeName((PetscObject)da,"DA");
366:   da->ops->createglobalvector = DACreateGlobalVector;
367:   da->ops->createlocalvector  = DACreateLocalVector;
368:   da->ops->globaltolocalbegin = DAGlobalToLocalBegin;
369:   da->ops->globaltolocalend   = DAGlobalToLocalEnd;
370:   da->ops->localtoglobal      = DALocalToGlobal;
371:   da->ops->getinterpolation   = DAGetInterpolation;
372:   da->ops->getcoloring        = DAGetColoring;
373:   da->ops->getmatrix          = DAGetMatrix;
374:   da->ops->refine             = DARefine;
375:   da->ops->coarsen            = DACoarsen;
376:   da->ops->getinjection       = DAGetInjection;
377:   da->ops->getaggregates      = DAGetAggregates;
378:   da->ops->getelements        = DAGetElements_2d_P1;
379:   da->ops->destroy            = DADestroy;
380:   da->elementtype             = DA_ELEMENT_P1;

382:   da->dim        = 2;
383:   da->interptype = DA_Q1;
384:   da->refine_x   = refine_x;
385:   da->refine_y   = refine_y;
386:   PetscMalloc(dof*sizeof(char*),&da->fieldname);
387:   PetscMemzero(da->fieldname,dof*sizeof(char*));

389:   MPI_Comm_size(comm,&size);
390:   MPI_Comm_rank(comm,&rank);

392:   if (m != PETSC_DECIDE) {
393:     if (m < 1) {SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in X direction: %D",m);}
394:     else if (m > size) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in X direction: %D %d",m,size);}
395:   }
396:   if (n != PETSC_DECIDE) {
397:     if (n < 1) {SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in Y direction: %D",n);}
398:     else if (n > size) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in Y direction: %D %d",n,size);}
399:   }

401:   if (m == PETSC_DECIDE || n == PETSC_DECIDE) {
402:     if (n != PETSC_DECIDE) {
403:       m = size/n;
404:     } else if (m != PETSC_DECIDE) {
405:       n = size/m;
406:     } else {
407:       /* try for squarish distribution */
408:       m = (PetscInt)(0.5 + sqrt(((double)M)*((double)size)/((double)N)));
409:       if (!m) m = 1;
410:       while (m > 0) {
411:         n = size/m;
412:         if (m*n == size) break;
413:         m--;
414:       }
415:       if (M > N && m < n) {PetscInt _m = m; m = n; n = _m;}
416:     }
417:     if (m*n != size) SETERRQ(PETSC_ERR_PLIB,"Unable to create partition, check the size of the communicator and input m and n ");
418:   } else if (m*n != size) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Given bad partition");

420:   if (M < m) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Partition in x direction is too fine! %D %D",M,m);
421:   if (N < n) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Partition in y direction is too fine! %D %D",N,n);

423:   /* 
424:      Determine locally owned region 
425:      xs is the first local node number, x is the number of local nodes 
426:   */
427:   PetscMalloc(m*sizeof(PetscInt),&flx);
428:   if (lx) { /* user sets distribution */
429:     PetscMemcpy(flx,lx,m*sizeof(PetscInt));
430:   } else {
431:     for (i=0; i<m; i++) {
432:       flx[i] = M/m + ((M % m) > i);
433:     }
434:   }
435:   x  = flx[rank % m];
436:   xs = 0;
437:   for (i=0; i<(rank % m); i++) {
438:     xs += flx[i];
439:   }
440: #if defined(PETSC_USE_DEBUG)
441:   left = xs;
442:   for (i=(rank % m); i<m; i++) {
443:     left += flx[i];
444:   }
445:   if (left != M) {
446:     SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M: %D %D",left,M);
447:   }
448: #endif

450:   /* 
451:      Determine locally owned region 
452:      ys is the first local node number, y is the number of local nodes 
453:   */
454:   PetscMalloc(n*sizeof(PetscInt),&fly);
455:   if (ly) { /* user sets distribution */
456:     PetscMemcpy(fly,ly,n*sizeof(PetscInt));
457:   } else {
458:     for (i=0; i<n; i++) {
459:       fly[i] = N/n + ((N % n) > i);
460:     }
461:   }
462:   y  = fly[rank/m];
463:   ys = 0;
464:   for (i=0; i<(rank/m); i++) {
465:     ys += fly[i];
466:   }
467: #if defined(PETSC_USE_DEBUG)
468:   left = ys;
469:   for (i=(rank/m); i<n; i++) {
470:     left += fly[i];
471:   }
472:   if (left != N) {
473:     SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Sum of ly across processors not equal to N: %D %D",left,N);
474:   }
475: #endif

477:   if (x < s) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s);
478:   if (y < s) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Local y-width of domain y %D is smaller than stencil width s %D",y,s);
479:   xe = xs + x;
480:   ye = ys + y;

482:   /* determine ghost region */
483:   /* Assume No Periodicity */
484:   if (xs-s > 0) Xs = xs - s; else Xs = 0;
485:   if (ys-s > 0) Ys = ys - s; else Ys = 0;
486:   if (xe+s <= M) Xe = xe + s; else Xe = M;
487:   if (ye+s <= N) Ye = ye + s; else Ye = N;

489:   /* X Periodic */
490:   if (DAXPeriodic(wrap)){
491:     Xs = xs - s;
492:     Xe = xe + s;
493:   }

495:   /* Y Periodic */
496:   if (DAYPeriodic(wrap)){
497:     Ys = ys - s;
498:     Ye = ye + s;
499:   }

501:   /* Resize all X parameters to reflect w */
502:   x   *= dof;
503:   xs  *= dof;
504:   xe  *= dof;
505:   Xs  *= dof;
506:   Xe  *= dof;
507:   s_x = s*dof;
508:   s_y = s;

510:   /* determine starting point of each processor */
511:   nn    = x*y;
512:   PetscMalloc((2*size+1)*sizeof(PetscInt),&bases);
513:   ldims = bases+size+1;
514:   MPI_Allgather(&nn,1,MPIU_INT,ldims,1,MPIU_INT,comm);
515:   bases[0] = 0;
516:   for (i=1; i<=size; i++) {
517:     bases[i] = ldims[i-1];
518:   }
519:   for (i=1; i<=size; i++) {
520:     bases[i] += bases[i-1];
521:   }

523:   /* allocate the base parallel and sequential vectors */
524:   da->Nlocal = x*y;
525:   VecCreateMPIWithArray(comm,da->Nlocal,PETSC_DECIDE,0,&global);
526:   VecSetBlockSize(global,dof);
527:   da->nlocal = (Xe-Xs)*(Ye-Ys);
528:   VecCreateSeqWithArray(PETSC_COMM_SELF,da->nlocal,0,&local);
529:   VecSetBlockSize(local,dof);


532:   /* generate appropriate vector scatters */
533:   /* local to global inserts non-ghost point region into global */
534:   VecGetOwnershipRange(global,&start,&end);
535:   ISCreateStride(comm,x*y,start,1,&to);

537:   left  = xs - Xs; down  = ys - Ys; up    = down + y;
538:   PetscMalloc(x*(up - down)*sizeof(PetscInt),&idx);
539:   count = 0;
540:   for (i=down; i<up; i++) {
541:     for (j=0; j<x/dof; j++) {
542:       idx[count++] = left + i*(Xe-Xs) + j*dof;
543:     }
544:   }
545:   ISCreateBlock(comm,dof,count,idx,&from);
546:   PetscFree(idx);

548:   VecScatterCreate(local,from,global,to,&ltog);
549:   PetscLogObjectParent(da,to);
550:   PetscLogObjectParent(da,from);
551:   PetscLogObjectParent(da,ltog);
552:   ISDestroy(from);
553:   ISDestroy(to);

555:   /* global to local must include ghost points */
556:   if (stencil_type == DA_STENCIL_BOX) {
557:     ISCreateStride(comm,(Xe-Xs)*(Ye-Ys),0,1,&to);
558:   } else {
559:     /* must drop into cross shape region */
560:     /*       ---------|
561:             |  top    |
562:          |---         ---|
563:          |   middle      |
564:          |               |
565:          ----         ----
566:             | bottom  |
567:             -----------
568:         Xs xs        xe  Xe */
569:     /* bottom */
570:     left  = xs - Xs; down = ys - Ys; up    = down + y;
571:     count = down*(xe-xs) + (up-down)*(Xe-Xs) + (Ye-Ys-up)*(xe-xs);
572:     PetscMalloc(count*sizeof(PetscInt)/dof,&idx);
573:     count = 0;
574:     for (i=0; i<down; i++) {
575:       for (j=0; j<xe-xs; j += dof) {
576:         idx[count++] = left + i*(Xe-Xs) + j;
577:       }
578:     }
579:     /* middle */
580:     for (i=down; i<up; i++) {
581:       for (j=0; j<Xe-Xs; j += dof) {
582:         idx[count++] = i*(Xe-Xs) + j;
583:       }
584:     }
585:     /* top */
586:     for (i=up; i<Ye-Ys; i++) {
587:       for (j=0; j<xe-xs; j += dof) {
588:         idx[count++] = left + i*(Xe-Xs) + j;
589:       }
590:     }
591:     ISCreateBlock(comm,dof,count,idx,&to);
592:     PetscFree(idx);
593:   }


596:   /* determine who lies on each side of us stored in    n6 n7 n8
597:                                                         n3    n5
598:                                                         n0 n1 n2
599:   */

601:   /* Assume the Non-Periodic Case */
602:   n1 = rank - m;
603:   if (rank % m) {
604:     n0 = n1 - 1;
605:   } else {
606:     n0 = -1;
607:   }
608:   if ((rank+1) % m) {
609:     n2 = n1 + 1;
610:     n5 = rank + 1;
611:     n8 = rank + m + 1; if (n8 >= m*n) n8 = -1;
612:   } else {
613:     n2 = -1; n5 = -1; n8 = -1;
614:   }
615:   if (rank % m) {
616:     n3 = rank - 1;
617:     n6 = n3 + m; if (n6 >= m*n) n6 = -1;
618:   } else {
619:     n3 = -1; n6 = -1;
620:   }
621:   n7 = rank + m; if (n7 >= m*n) n7 = -1;


624:   /* Modify for Periodic Cases */
625:   if (wrap == DA_YPERIODIC) {  /* Handle Top and Bottom Sides */
626:     if (n1 < 0) n1 = rank + m * (n-1);
627:     if (n7 < 0) n7 = rank - m * (n-1);
628:     if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1;
629:     if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1;
630:     if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1;
631:     if ((n5 >= 0) && (n8 < 0)) n8 = (rank%m)+1;
632:   } else if (wrap == DA_XPERIODIC) { /* Handle Left and Right Sides */
633:     if (n3 < 0) n3 = rank + (m-1);
634:     if (n5 < 0) n5 = rank - (m-1);
635:     if ((n1 >= 0) && (n0 < 0)) n0 = rank-1;
636:     if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1;
637:     if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1;
638:     if ((n7 >= 0) && (n8 < 0)) n8 = rank+1;
639:   } else if (wrap == DA_XYPERIODIC) {

641:     /* Handle all four corners */
642:     if ((n6 < 0) && (n7 < 0) && (n3 < 0)) n6 = m-1;
643:     if ((n8 < 0) && (n7 < 0) && (n5 < 0)) n8 = 0;
644:     if ((n2 < 0) && (n5 < 0) && (n1 < 0)) n2 = size-m;
645:     if ((n0 < 0) && (n3 < 0) && (n1 < 0)) n0 = size-1;

647:     /* Handle Top and Bottom Sides */
648:     if (n1 < 0) n1 = rank + m * (n-1);
649:     if (n7 < 0) n7 = rank - m * (n-1);
650:     if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1;
651:     if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1;
652:     if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1;
653:     if ((n5 >= 0) && (n8 < 0)) n8 = (rank%m)+1;

655:     /* Handle Left and Right Sides */
656:     if (n3 < 0) n3 = rank + (m-1);
657:     if (n5 < 0) n5 = rank - (m-1);
658:     if ((n1 >= 0) && (n0 < 0)) n0 = rank-1;
659:     if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1;
660:     if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1;
661:     if ((n7 >= 0) && (n8 < 0)) n8 = rank+1;
662:   }
663:   PetscMalloc(9*sizeof(PetscInt),&da->neighbors);
664:   da->neighbors[0] = n0;
665:   da->neighbors[1] = n1;
666:   da->neighbors[2] = n2;
667:   da->neighbors[3] = n3;
668:   da->neighbors[4] = rank;
669:   da->neighbors[5] = n5;
670:   da->neighbors[6] = n6;
671:   da->neighbors[7] = n7;
672:   da->neighbors[8] = n8;

674:   if (stencil_type == DA_STENCIL_STAR) {
675:     /* save corner processor numbers */
676:     sn0 = n0; sn2 = n2; sn6 = n6; sn8 = n8;
677:     n0 = n2 = n6 = n8 = -1;
678:   }

680:   PetscMalloc((x+2*s_x)*(y+2*s_y)*sizeof(PetscInt),&idx);
681:   PetscLogObjectMemory(da,(x+2*s_x)*(y+2*s_y)*sizeof(PetscInt));
682:   nn = 0;

684:   xbase = bases[rank];
685:   for (i=1; i<=s_y; i++) {
686:     if (n0 >= 0) { /* left below */
687:       x_t = flx[n0 % m]*dof;
688:       y_t = fly[(n0/m)];
689:       s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x;
690:       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
691:     }
692:     if (n1 >= 0) { /* directly below */
693:       x_t = x;
694:       y_t = fly[(n1/m)];
695:       s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t;
696:       for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
697:     }
698:     if (n2 >= 0) { /* right below */
699:       x_t = flx[n2 % m]*dof;
700:       y_t = fly[(n2/m)];
701:       s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t;
702:       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
703:     }
704:   }

706:   for (i=0; i<y; i++) {
707:     if (n3 >= 0) { /* directly left */
708:       x_t = flx[n3 % m]*dof;
709:       /* y_t = y; */
710:       s_t = bases[n3] + (i+1)*x_t - s_x;
711:       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
712:     }

714:     for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */

716:     if (n5 >= 0) { /* directly right */
717:       x_t = flx[n5 % m]*dof;
718:       /* y_t = y; */
719:       s_t = bases[n5] + (i)*x_t;
720:       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
721:     }
722:   }

724:   for (i=1; i<=s_y; i++) {
725:     if (n6 >= 0) { /* left above */
726:       x_t = flx[n6 % m]*dof;
727:       /* y_t = fly[(n6/m)]; */
728:       s_t = bases[n6] + (i)*x_t - s_x;
729:       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
730:     }
731:     if (n7 >= 0) { /* directly above */
732:       x_t = x;
733:       /* y_t = fly[(n7/m)]; */
734:       s_t = bases[n7] + (i-1)*x_t;
735:       for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
736:     }
737:     if (n8 >= 0) { /* right above */
738:       x_t = flx[n8 % m]*dof;
739:       /* y_t = fly[(n8/m)]; */
740:       s_t = bases[n8] + (i-1)*x_t;
741:       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
742:     }
743:   }

745:   base = bases[rank];
746:   {
747:     PetscInt nnn = nn/dof,*iidx;
748:     PetscMalloc(nnn*sizeof(PetscInt),&iidx);
749:     for (i=0; i<nnn; i++) {
750:       iidx[i] = idx[dof*i];
751:     }
752:     ISCreateBlock(comm,dof,nnn,iidx,&from);
753:     PetscFree(iidx);
754:   }
755:   VecScatterCreate(global,from,local,to,&gtol);
756:   PetscLogObjectParent(da,to);
757:   PetscLogObjectParent(da,from);
758:   PetscLogObjectParent(da,gtol);
759:   ISDestroy(to);
760:   ISDestroy(from);

762:   if (stencil_type == DA_STENCIL_STAR) {
763:     /*
764:         Recompute the local to global mappings, this time keeping the 
765:       information about the cross corner processor numbers.
766:     */
767:     n0 = sn0; n2 = sn2; n6 = sn6; n8 = sn8;
768:     nn = 0;
769:     xbase = bases[rank];
770:     for (i=1; i<=s_y; i++) {
771:       if (n0 >= 0) { /* left below */
772:         x_t = flx[n0 % m]*dof;
773:         y_t = fly[(n0/m)];
774:         s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x;
775:         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
776:       }
777:       if (n1 >= 0) { /* directly below */
778:         x_t = x;
779:         y_t = fly[(n1/m)];
780:         s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t;
781:         for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
782:       }
783:       if (n2 >= 0) { /* right below */
784:         x_t = flx[n2 % m]*dof;
785:         y_t = fly[(n2/m)];
786:         s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t;
787:         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
788:       }
789:     }

791:     for (i=0; i<y; i++) {
792:       if (n3 >= 0) { /* directly left */
793:         x_t = flx[n3 % m]*dof;
794:         /* y_t = y; */
795:         s_t = bases[n3] + (i+1)*x_t - s_x;
796:         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
797:       }

799:       for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */

801:       if (n5 >= 0) { /* directly right */
802:         x_t = flx[n5 % m]*dof;
803:         /* y_t = y; */
804:         s_t = bases[n5] + (i)*x_t;
805:         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
806:       }
807:     }

809:     for (i=1; i<=s_y; i++) {
810:       if (n6 >= 0) { /* left above */
811:         x_t = flx[n6 % m]*dof;
812:         /* y_t = fly[(n6/m)]; */
813:         s_t = bases[n6] + (i)*x_t - s_x;
814:         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
815:       }
816:       if (n7 >= 0) { /* directly above */
817:         x_t = x;
818:         /* y_t = fly[(n7/m)]; */
819:         s_t = bases[n7] + (i-1)*x_t;
820:         for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
821:       }
822:       if (n8 >= 0) { /* right above */
823:         x_t = flx[n8 % m]*dof;
824:         /* y_t = fly[(n8/m)]; */
825:         s_t = bases[n8] + (i-1)*x_t;
826:         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
827:       }
828:     }
829:   }
830:   PetscFree(bases);

832:   da->M  = M;  da->N  = N;  da->m  = m;  da->n  = n;  da->w = dof;  da->s = s;
833:   da->xs = xs; da->xe = xe; da->ys = ys; da->ye = ye; da->zs = 0; da->ze = 1;
834:   da->Xs = Xs; da->Xe = Xe; da->Ys = Ys; da->Ye = Ye; da->Zs = 0; da->Ze = 1;
835:   da->P  = 1;  da->p  = 1;

837:   VecDestroy(local);
838:   VecDestroy(global);

840:   da->gtol         = gtol;
841:   da->ltog         = ltog;
842:   da->idx          = idx;
843:   da->Nl           = nn;
844:   da->base         = base;
845:   da->wrap         = wrap;
846:   da->ops->view    = DAView_2d;
847:   da->stencil_type = stencil_type;

849:   /* 
850:      Set the local to global ordering in the global vector, this allows use
851:      of VecSetValuesLocal().
852:   */
853:   ISLocalToGlobalMappingCreateNC(comm,nn,idx,&da->ltogmap);
854:   ISLocalToGlobalMappingBlock(da->ltogmap,da->w,&da->ltogmapb);
855:   PetscLogObjectParent(da,da->ltogmap);

857:   *inra = da;

859:   da->ltol = PETSC_NULL;
860:   da->ao   = PETSC_NULL;

862:   da->lx = flx;
863:   da->ly = fly;
864:   DAView_Private(da);
865:   PetscPublishAll(da);
866:   return(0);
867: }
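/* Usage sketch (added for illustration; not part of the original da2.c):
   create a non-periodic 10x10 DA with 1 degree of freedom per node and a
   star stencil of width 1, letting PETSc pick the process decomposition. */
static PetscErrorCode CreateExampleDA2d(MPI_Comm comm,DA *da)
{
  DACreate2d(comm,DA_NONPERIODIC,DA_STENCIL_STAR,10,10,PETSC_DECIDE,PETSC_DECIDE,
             1,1,PETSC_NULL,PETSC_NULL,da);
  return(0);
}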

871: /*@
872:    DARefine - Creates a new distributed array that is a refinement of a given
873:    distributed array.

875:    Collective on DA

877:    Input Parameters:
878: +  da - initial distributed array
879: -  comm - communicator to contain the refined DA; must be either the same as the da communicator or include the 
880:           da communicator and be 2, 4, or 8 times larger. Currently ignored

882:    Output Parameter:
883: .  daref - refined distributed array

885:    Level: advanced

887:    Note:
888:    Currently, refinement consists of multiplying the number of grid spaces
889:    in each dimension of the DA by the refinement factor (2 by default).

891: .keywords:  distributed array, refine

893: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetOwnershipRanges()
894: @*/
895: PetscErrorCode  DARefine(DA da,MPI_Comm comm,DA *daref)
896: {
898:   PetscInt       M,N,P;
899:   DA             da2;


905:   if (DAXPeriodic(da->wrap) || da->interptype == DA_Q0){
906:     M = da->refine_x*da->M;
907:   } else {
908:     M = 1 + da->refine_x*(da->M - 1);
909:   }
910:   if (DAYPeriodic(da->wrap) || da->interptype == DA_Q0){
911:     N = da->refine_y*da->N;
912:   } else {
913:     N = 1 + da->refine_y*(da->N - 1);
914:   }
915:   if (DAZPeriodic(da->wrap) || da->interptype == DA_Q0){
916:     P = da->refine_z*da->P;
917:   } else {
918:     P = 1 + da->refine_z*(da->P - 1);
919:   }
920:   DACreate(((PetscObject)da)->comm,da->dim,da->wrap,da->stencil_type,M,N,P,da->m,da->n,da->p,da->w,da->s,0,0,0,&da2);

922:   /* allow overloaded (user replaced) operations to be inherited by refinement clones */
923:   da2->ops->getmatrix        = da->ops->getmatrix;
924:   da2->ops->getinterpolation = da->ops->getinterpolation;
925:   da2->ops->getcoloring      = da->ops->getcoloring;
926:   da2->interptype            = da->interptype;
927: 
928:   /* copy fill information if given */
929:   if (da->dfill) {
930:     PetscMalloc((da->dfill[da->w]+da->w+1)*sizeof(PetscInt),&da2->dfill);
931:     PetscMemcpy(da2->dfill,da->dfill,(da->dfill[da->w]+da->w+1)*sizeof(PetscInt));
932:   }
933:   if (da->ofill) {
934:     PetscMalloc((da->ofill[da->w]+da->w+1)*sizeof(PetscInt),&da2->ofill);
935:     PetscMemcpy(da2->ofill,da->ofill,(da->ofill[da->w]+da->w+1)*sizeof(PetscInt));
936:   }
937:   /* copy the refine information */
938:   da2->refine_x = da->refine_x;
939:   da2->refine_y = da->refine_y;
940:   da2->refine_z = da->refine_z;
941:   *daref = da2;
942:   return(0);
943: }
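/* Worked example (added for illustration): with the default refinement
   factors refine_x = refine_y = 2, a non-periodic 5x9 DA refines to
   M = 1 + 2*(5-1) = 9 and N = 1 + 2*(9-1) = 17, so every coarse grid point
   coincides with a fine grid point. */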

947: /*@
948:    DACoarsen - Creates a new distributed array that is a coarsening of a given
949:    distributed array.

951:    Collective on DA

953:    Input Parameters:
954: +  da - initial distributed array
955: -  comm - communicator to contain the coarsened DA. Currently ignored

957:    Output Parameter:
958: .  daref - coarsened distributed array

960:    Level: advanced

962:    Note:
963:    Currently, coarsening consists of just dividing the number of grid spaces
964:    in each dimension of the DA by refine_x, refine_y, refine_z.

966: .keywords:  distributed array, coarsen

968: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetOwnershipRanges()
969: @*/
970: PetscErrorCode  DACoarsen(DA da, MPI_Comm comm,DA *daref)
971: {
973:   PetscInt       M,N,P;
974:   DA             da2;


980:   if (DAXPeriodic(da->wrap) || da->interptype == DA_Q0){
981:     if(da->refine_x)
982:       M = da->M / da->refine_x;
983:     else
984:       M = da->M;
985:   } else {
986:     if(da->refine_x)
987:       M = 1 + (da->M - 1) / da->refine_x;
988:     else
989:       M = da->M;
990:   }
991:   if (DAYPeriodic(da->wrap) || da->interptype == DA_Q0){
992:     if(da->refine_y)
993:       N = da->N / da->refine_y;
994:     else
995:       N = da->N;
996:   } else {
997:     if(da->refine_y)
998:       N = 1 + (da->N - 1) / da->refine_y;
999:     else
1000:       N = da->N;
1001:   }
1002:   if (DAZPeriodic(da->wrap) || da->interptype == DA_Q0){
1003:     if(da->refine_z)
1004:       P = da->P / da->refine_z;
1005:     else
1006:       P = da->P;
1007:   } else {
1008:     if(da->refine_z)
1009:       P = 1 + (da->P - 1) / da->refine_z;
1010:     else
1011:       P = da->P;
1012:   }
1013:   DACreate(((PetscObject)da)->comm,da->dim,da->wrap,da->stencil_type,M,N,P,da->m,da->n,da->p,da->w,da->s,0,0,0,&da2);

1015:   /* allow overloaded (user replaced) operations to be inherited by coarsened clones */
1016:   da2->ops->getmatrix        = da->ops->getmatrix;
1017:   da2->ops->getinterpolation = da->ops->getinterpolation;
1018:   da2->ops->getcoloring      = da->ops->getcoloring;
1019:   da2->interptype            = da->interptype;
1020: 
1021:   /* copy fill information if given */
1022:   if (da->dfill) {
1023:     PetscMalloc((da->dfill[da->w]+da->w+1)*sizeof(PetscInt),&da2->dfill);
1024:     PetscMemcpy(da2->dfill,da->dfill,(da->dfill[da->w]+da->w+1)*sizeof(PetscInt));
1025:   }
1026:   if (da->ofill) {
1027:     PetscMalloc((da->ofill[da->w]+da->w+1)*sizeof(PetscInt),&da2->ofill);
1028:     PetscMemcpy(da2->ofill,da->ofill,(da->ofill[da->w]+da->w+1)*sizeof(PetscInt));
1029:   }
1030:   /* copy the refine information */
1031:   da2->refine_x = da->refine_x;
1032:   da2->refine_y = da->refine_y;
1033:   da2->refine_z = da->refine_z;
1034:   *daref = da2;
1035:   return(0);
1036: }

1038: /*@
1039:      DASetRefinementFactor - Set the ratios that the DA grid is refined

1041:     Collective on DA

1043:   Input Parameters:
1044: +    da - the DA object
1045: .    refine_x - ratio of fine grid to coarse in x direction (2 by default)
1046: .    refine_y - ratio of fine grid to coarse in y direction (2 by default)
1047: -    refine_z - ratio of fine grid to coarse in z direction (2 by default)

1049:   Options Database:
1050: +  -da_refine_x - refinement ratio in x direction
1051: .  -da_refine_y - refinement ratio in y direction
1052: -  -da_refine_z - refinement ratio in z direction

1054:   Level: intermediate

1056:     Notes: Pass PETSC_IGNORE to leave a value unchanged

1058: .seealso: DARefine(), DAGetRefinementFactor()
1059: @*/
1060: PetscErrorCode  DASetRefinementFactor(DA da, PetscInt refine_x, PetscInt refine_y,PetscInt refine_z)
1061: {
1063:   if (refine_x > 0) da->refine_x = refine_x;
1064:   if (refine_y > 0) da->refine_y = refine_y;
1065:   if (refine_z > 0) da->refine_z = refine_z;
1066:   return(0);
1067: }

1069: /*@C
1070:      DAGetRefinementFactor - Gets the ratios that the DA grid is refined

1072:     Not Collective

1074:   Input Parameter:
1075: .    da - the DA object

1077:   Output Parameters:
1078: +    refine_x - ratio of fine grid to coarse in x direction (2 by default)
1079: .    refine_y - ratio of fine grid to coarse in y direction (2 by default)
1080: -    refine_z - ratio of fine grid to coarse in z direction (2 by default)

1082:   Level: intermediate

1084:     Notes: Pass PETSC_NULL for values you do not need

1086: .seealso: DARefine(), DASetRefinementFactor()
1087: @*/
1088: PetscErrorCode  DAGetRefinementFactor(DA da, PetscInt *refine_x, PetscInt *refine_y,PetscInt *refine_z)
1089: {
1091:   if (refine_x) *refine_x = da->refine_x;
1092:   if (refine_y) *refine_y = da->refine_y;
1093:   if (refine_z) *refine_z = da->refine_z;
1094:   return(0);
1095: }
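/* Usage sketch (added for illustration; not part of the original da2.c):
   double the x refinement ratio while leaving y and z untouched. */
static PetscErrorCode BumpXRefinement(DA da)
{
  PetscInt rx;

  DAGetRefinementFactor(da,&rx,PETSC_NULL,PETSC_NULL);
  DASetRefinementFactor(da,2*rx,PETSC_IGNORE,PETSC_IGNORE);
  return(0);
}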

1097: /*@C
1098:      DASetGetMatrix - Sets the routine used by the DA to allocate a matrix.

1100:     Collective on DA

1102:   Input Parameters:
1103: +    da - the DA object
1104: -    f - the function that allocates the matrix for that specific DA

1106:   Level: developer

1108:    Notes: See DASetBlockFills(), which provides a simple way to supply the nonzero structure for 
1109:        the diagonal and off-diagonal blocks of the matrix

1111: .seealso: DAGetMatrix(), DASetBlockFills()
1112: @*/
1113: PetscErrorCode  DASetGetMatrix(DA da,PetscErrorCode (*f)(DA, const MatType,Mat*))
1114: {
1116:   da->ops->getmatrix = f;
1117:   return(0);
1118: }
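/* Usage sketch (added for illustration; MyGetMatrix is a hypothetical user
   routine, not part of PETSc): a replacement allocator must match the
   (DA,const MatType,Mat*) signature stored in da->ops->getmatrix. */
static PetscErrorCode MyGetMatrix(DA da,const MatType mtype,Mat *J)
{
  /* fall back to the default allocator; a real implementation would
     customize the preallocation here */
  DAGetMatrix(da,mtype,J);
  return(0);
}
/* installed with:  DASetGetMatrix(da,MyGetMatrix);  */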

1120: /*
1121:       M is number of grid points 
1122:       m is number of processors

1124: */
1127: PetscErrorCode  DASplitComm2d(MPI_Comm comm,PetscInt M,PetscInt N,PetscInt sw,MPI_Comm *outcomm)
1128: {
1130:   PetscInt       m,n = 0,x = 0,y = 0;
1131:   PetscMPIInt    size,csize,rank;

1134:   MPI_Comm_size(comm,&size);
1135:   MPI_Comm_rank(comm,&rank);

1137:   csize = 4*size;
1138:   do {
1139:     if (csize % 4) SETERRQ4(PETSC_ERR_ARG_INCOMP,"Cannot split communicator of size %d tried %d %D %D",size,csize,x,y);
1140:     csize   = csize/4;
1141: 
1142:     m = (PetscInt)(0.5 + sqrt(((double)M)*((double)csize)/((double)N)));
1143:     if (!m) m = 1;
1144:     while (m > 0) {
1145:       n = csize/m;
1146:       if (m*n == csize) break;
1147:       m--;
1148:     }
1149:     if (M > N && m < n) {PetscInt _m = m; m = n; n = _m;}

1151:     x = M/m + ((M % m) > ((csize-1) % m));
1152:     y = (N + (csize-1)/m)/n;
1153:   } while ((x < 4 || y < 4) && csize > 1);
1154:   if (size != csize) {
1155:     MPI_Group    entire_group,sub_group;
1156:     PetscMPIInt  i,*groupies;

1158:     MPI_Comm_group(comm,&entire_group);
1159:     PetscMalloc(csize*sizeof(PetscInt),&groupies);
1160:     for (i=0; i<csize; i++) {
1161:       groupies[i] = (rank/csize)*csize + i;
1162:     }
1163:     MPI_Group_incl(entire_group,csize,groupies,&sub_group);
1164:     PetscFree(groupies);
1165:     MPI_Comm_create(comm,sub_group,outcomm);
1166:     MPI_Group_free(&entire_group);
1167:     MPI_Group_free(&sub_group);
1168:     PetscInfo1(0,"DASplitComm2d:Creating redundant coarse problems of size %d\n",csize);
1169:   } else {
1170:     *outcomm = comm;
1171:   }
1172:   return(0);
1173: }

1177: /*@C
1178:        DASetLocalFunction - Caches in a DA a local function. 

1180:    Collective on DA

1182:    Input Parameters:
1183: +  da - initial distributed array
1184: -  lf - the local function

1186:    Level: intermediate

1188:    Notes: The routine SNESDAFormFunction() uses this cached function to evaluate the user provided function.

1190: .keywords:  distributed array, refine

1192: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunctioni()
1193: @*/
1194: PetscErrorCode  DASetLocalFunction(DA da,DALocalFunction1 lf)
1195: {
1198:   da->lf    = lf;
1199:   return(0);
1200: }
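/* Sketch of a user local function (added for illustration; MyLocalFunction
   is hypothetical): the void* arguments of DALocalFunction1 are the arrays
   from DAVecGetArray(), typed here for a 2d scalar DA. The function is
   evaluated on the locally owned part of the grid. */
static PetscErrorCode MyLocalFunction(DALocalInfo *info,PetscScalar **x,PetscScalar **f,void *ctx)
{
  PetscInt i,j;

  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      f[j][i] = x[j][i];     /* replace with the real residual */
    }
  }
  return(0);
}
/* cached with:  DASetLocalFunction(da,(DALocalFunction1)MyLocalFunction);  */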

1204: /*@C
1205:        DASetLocalFunctioni - Caches in a DA a local function that evaluates a single component

1207:    Collective on DA

1209:    Input Parameters:
1210: +  da - initial distributed array
1211: -  lfi - the local function

1213:    Level: intermediate

1215: .keywords:  distributed array, refine

1217: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction()
1218: @*/
1219: PetscErrorCode  DASetLocalFunctioni(DA da,PetscErrorCode (*lfi)(DALocalInfo*,MatStencil*,void*,PetscScalar*,void*))
1220: {
1223:   da->lfi = lfi;
1224:   return(0);
1225: }

1229: /*@C
1230:        DASetLocalFunctionib - Caches in a DA a block local function that evaluates a single component

1232:    Collective on DA

1234:    Input Parameters:
1235: +  da - initial distributed array
1236: -  lfi - the local function

1238:    Level: intermediate

1240: .keywords:  distributed array, refine

1242: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction()
1243: @*/
1244: PetscErrorCode  DASetLocalFunctionib(DA da,PetscErrorCode (*lfi)(DALocalInfo*,MatStencil*,void*,PetscScalar*,void*))
1245: {
1248:   da->lfib = lfi;
1249:   return(0);
1250: }

1254: PetscErrorCode DASetLocalAdicFunction_Private(DA da,DALocalFunction1 ad_lf)
1255: {
1258:   da->adic_lf = ad_lf;
1259:   return(0);
1260: }

1262: /*MC
1263:        DASetLocalAdicFunctioni - Caches in a DA a local functioni computed by ADIC/ADIFOR

1265:    Collective on DA

1267:    Synopsis:
1268:    PetscErrorCode DASetLocalAdicFunctioni(DA da,PetscErrorCode (*ad_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1269:    
1270:    Input Parameters:
1271: +  da - initial distributed array
1272: -  ad_lfi - the local function as computed by ADIC/ADIFOR

1274:    Level: intermediate

1276: .keywords:  distributed array, refine

1278: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction(),
1279:           DASetLocalJacobian(), DASetLocalFunctioni()
1280: M*/

1284: PetscErrorCode DASetLocalAdicFunctioni_Private(DA da,PetscErrorCode (*ad_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1285: {
1288:   da->adic_lfi = ad_lfi;
1289:   return(0);
1290: }

1292: /*MC
1293:        DASetLocalAdicMFFunctioni - Caches in a DA a local functioni computed by ADIC/ADIFOR

1295:    Collective on DA

1297:    Synopsis:
1298:    PetscErrorCode  DASetLocalAdicMFFunctioni(DA da,PetscErrorCode (*admf_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1299:    
1300:    Input Parameters:
1301: +  da - initial distributed array
1302: -  admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR

1304:    Level: intermediate

1306: .keywords:  distributed array, refine

1308: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction(),
1309:           DASetLocalJacobian(), DASetLocalFunctioni()
1310: M*/

1314: PetscErrorCode DASetLocalAdicMFFunctioni_Private(DA da,PetscErrorCode (*admf_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1315: {
1318:   da->adicmf_lfi = admf_lfi;
1319:   return(0);
1320: }

1322: /*MC
1323:        DASetLocalAdicFunctionib - Caches in a DA a block local functioni computed by ADIC/ADIFOR

1325:    Collective on DA

1327:    Synopsis:
1328:    PetscErrorCode DASetLocalAdicFunctionib(DA da,PetscErrorCode (*ad_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1329:    
1330:    Input Parameters:
1331: +  da - initial distributed array
1332: -  ad_lfi - the local function as computed by ADIC/ADIFOR

1334:    Level: intermediate

1336: .keywords:  distributed array, refine

1338: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction(),
1339:           DASetLocalJacobian(), DASetLocalFunctionib()
1340: M*/

1344: PetscErrorCode DASetLocalAdicFunctionib_Private(DA da,PetscErrorCode (*ad_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1345: {
1348:   da->adic_lfib = ad_lfi;
1349:   return(0);
1350: }

1352: /*MC
1353:        DASetLocalAdicMFFunctionib - Caches in a DA a block local functioni computed by ADIC/ADIFOR

1355:    Collective on DA

1357:    Synopsis:
1358:    PetscErrorCode  DASetLocalAdicMFFunctionib(DA da,PetscErrorCode (*admf_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1359:    
1360:    Input Parameters:
1361: +  da - initial distributed array
1362: -  admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR

1364:    Level: intermediate

1366: .keywords:  distributed array, refine

1368: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction(),
1369:           DASetLocalJacobian(), DASetLocalFunctionib()
1370: M*/

1374: PetscErrorCode DASetLocalAdicMFFunctionib_Private(DA da,PetscErrorCode (*admf_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1375: {
1378:   da->adicmf_lfib = admf_lfi;
1379:   return(0);
1380: }

1382: /*MC
1383:        DASetLocalAdicMFFunction - Caches in a DA a local function computed by ADIC/ADIFOR

1385:    Collective on DA

1387:    Synopsis:
1388:    PetscErrorCode DASetLocalAdicMFFunction(DA da,DALocalFunction1 ad_lf)
1389:    
1390:    Input Parameters:
1391: +  da - initial distributed array
1392: -  ad_lf - the local function as computed by ADIC/ADIFOR

1394:    Level: intermediate

1396: .keywords:  distributed array, refine

1398: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction(),
1399:           DASetLocalJacobian()
1400: M*/

1404: PetscErrorCode DASetLocalAdicMFFunction_Private(DA da,DALocalFunction1 ad_lf)
1405: {
1408:   da->adicmf_lf = ad_lf;
1409:   return(0);
1410: }

1412: /*@C
1413:        DASetLocalJacobian - Caches in a DA a local Jacobian

1415:    Collective on DA

1417:    
1418:    Input Parameters:
1419: +  da - initial distributed array
1420: -  lj - the local Jacobian

1422:    Level: intermediate

1424:    Notes: The routine SNESDAFormFunction() uses this cached function to evaluate the user provided function.

1426: .keywords:  distributed array, refine

1428: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction()
1429: @*/
1432: PetscErrorCode  DASetLocalJacobian(DA da,DALocalFunction1 lj)
1433: {
1436:   da->lj    = lj;
1437:   return(0);
1438: }

1442: /*@C
1443:        DAGetLocalFunction - Gets from a DA the local function and its ADIC/ADIFOR Jacobian

1445:    Collective on DA

1447:    Input Parameter:
1448: .  da - initial distributed array

1450:    Output Parameter:
1451: .  lf - the local function

1453:    Level: intermediate

1455: .keywords:  distributed array, refine

1457: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalJacobian(), DASetLocalFunction()
1458: @*/
1459: PetscErrorCode  DAGetLocalFunction(DA da,DALocalFunction1 *lf)
1460: {
1463:   if (lf)       *lf = da->lf;
1464:   return(0);
1465: }

1469: /*@C
1470:        DAGetLocalJacobian - Gets from a DA the local Jacobian

1472:    Collective on DA

1474:    Input Parameter:
1475: .  da - initial distributed array

1477:    Output Parameter:
1478: .  lj - the local Jacobian

1480:    Level: intermediate

1482: .keywords:  distributed array, refine

1484: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalJacobian()
1485: @*/
1486: PetscErrorCode  DAGetLocalJacobian(DA da,DALocalFunction1 *lj)
1487: {
1490:   if (lj) *lj = da->lj;
1491:   return(0);
1492: }

1496: /*@
1497:     DAFormFunction - Evaluates a user provided function on each processor that 
1498:         shares a DA

1500:    Input Parameters:
1501: +    da - the DA that defines the grid
1502: .    vu - input vector
1503: .    vfu - output vector 
1504: -    w - any user data

1506:     Notes: Does NOT do ghost updates on vu upon entry

1508:            This should eventually replace DAFormFunction1

1510:     Level: advanced

1512: .seealso: DAComputeJacobian1WithAdic()

1514: @*/
1515: PetscErrorCode  DAFormFunction(DA da,PetscErrorCode (*lf)(void),Vec vu,Vec vfu,void *w)
1516: {
1518:   void           *u,*fu;
1519:   DALocalInfo    info;
1520:   PetscErrorCode (*f)(DALocalInfo*,void*,void*,void*) = (PetscErrorCode (*)(DALocalInfo*,void*,void*,void*))lf;
1521: 
1523:   DAGetLocalInfo(da,&info);
1524:   DAVecGetArray(da,vu,&u);
1525:   DAVecGetArray(da,vfu,&fu);

1527:   (*f)(&info,u,fu,w);
1528:   if (PetscExceptionValue(ierr)) {
1529:     PetscErrorCode pierr = DAVecRestoreArray(da,vu,&u);CHKERRQ(pierr);
1530:     pierr = DAVecRestoreArray(da,vfu,&fu);CHKERRQ(pierr);
1531:   }
1532: 

1534:   DAVecRestoreArray(da,vu,&u);
1535:   DAVecRestoreArray(da,vfu,&fu);
1536:   return(0);
1537: }

1541: /*@C 
1542:    DAFormFunctionLocal - This is a universal function evaluation routine for
1543:    a local DA function.

1545:    Collective on DA

1547:    Input Parameters:
1548: +  da - the DA context
1549: .  func - The local function
1550: .  X - input vector
1551: .  F - function vector
1552: -  ctx - A user context

1554:    Level: intermediate

1556: .seealso: DASetLocalFunction(), DASetLocalJacobian(), DASetLocalAdicFunction(), DASetLocalAdicMFFunction(),
1557:           SNESSetFunction(), SNESSetJacobian()

1559: @*/
1560: PetscErrorCode  DAFormFunctionLocal(DA da, DALocalFunction1 func, Vec X, Vec F, void *ctx)
1561: {
1562:   Vec            localX;
1563:   DALocalInfo    info;
1564:   void          *u;
1565:   void          *fu;

1569:   DAGetLocalVector(da,&localX);
1570:   /*
1571:      Scatter ghost points to local vector, using the 2-step process
1572:         DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
1573:   */
1574:   DAGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
1575:   DAGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
1576:   DAGetLocalInfo(da,&info);
1577:   DAVecGetArray(da,localX,&u);
1578:   DAVecGetArray(da,F,&fu);
1579:   (*func)(&info,u,fu,ctx);
1580:   if (PetscExceptionValue(ierr)) {
1581:     PetscErrorCode pierr = DAVecRestoreArray(da,localX,&u);CHKERRQ(pierr);
1582:     pierr = DAVecRestoreArray(da,F,&fu);CHKERRQ(pierr);
1583:   }
1584: 
1585:   DAVecRestoreArray(da,localX,&u);
1586:   DAVecRestoreArray(da,F,&fu);
1587:   if (PetscExceptionValue(ierr)) {
1588:     PetscErrorCode pierr = DARestoreLocalVector(da,&localX);CHKERRQ(pierr);
1589:   }
1590: 
1591:   DARestoreLocalVector(da,&localX);
1592:   return(0);
1593: }
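/* Usage sketch (added for illustration), with MyLocalFunction as sketched
   after DASetLocalFunction() above; X and F are global vectors, and the
   ghost values of X are scattered in by the routine itself:

      DAFormFunctionLocal(da,(DALocalFunction1)MyLocalFunction,X,F,ctx);   */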

1597: /*@C 
1598:    DAFormFunctionLocalGhost - This is a universal function evaluation routine for
1599:    a local DA function, but the ghost values of the output are communicated and added.

1601:    Collective on DA

1603:    Input Parameters:
1604: +  da - the DA context
1605: .  func - The local function
1606: .  X - input vector
1607: .  F - function vector
1608: -  ctx - A user context

1610:    Level: intermediate

1612: .seealso: DASetLocalFunction(), DASetLocalJacobian(), DASetLocalAdicFunction(), DASetLocalAdicMFFunction(),
1613:           SNESSetFunction(), SNESSetJacobian()

1615: @*/
1616: PetscErrorCode  DAFormFunctionLocalGhost(DA da, DALocalFunction1 func, Vec X, Vec F, void *ctx)
1617: {
1618:   Vec            localX, localF;
1619:   DALocalInfo    info;
1620:   void          *u;
1621:   void          *fu;

1625:   DAGetLocalVector(da,&localX);
1626:   DAGetLocalVector(da,&localF);
1627:   /*
1628:      Scatter ghost points to local vector, using the 2-step process
1629:         DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
1630:   */
1631:   DAGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
1632:   DAGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
1633:   VecSet(F, 0.0);
1634:   VecSet(localF, 0.0);
1635:   DAGetLocalInfo(da,&info);
1636:   DAVecGetArray(da,localX,&u);
1637:   DAVecGetArray(da,localF,&fu);
1638:   (*func)(&info,u,fu,ctx);
1639:   if (PetscExceptionValue(ierr)) {
1640:     PetscErrorCode pierr = DAVecRestoreArray(da,localX,&u);CHKERRQ(pierr);
1641:     pierr = DAVecRestoreArray(da,localF,&fu);CHKERRQ(pierr);
1642:   }
1643: 
1644:   DALocalToGlobalBegin(da,localF,F);
1645:   DALocalToGlobalEnd(da,localF,F);
1646:   DAVecRestoreArray(da,localX,&u);
1647:   DAVecRestoreArray(da,localF,&fu);
1648:   if (PetscExceptionValue(ierr)) {
1649:     PetscErrorCode pierr = DARestoreLocalVector(da,&localX);CHKERRQ(pierr);
1650:     pierr = DARestoreLocalVector(da,&localF);CHKERRQ(pierr);
1651:   }
1652: 
1653:   DARestoreLocalVector(da,&localX);
1654:   DARestoreLocalVector(da,&localF);
1655:   return(0);
1656: }

1660: /*@
1661:     DAFormFunction1 - Evaluates a user provided function on each processor that 
1662:         shares a DA

1664:    Input Parameters:
1665: +    da - the DA that defines the grid
1666: .    vu - input vector
1667: .    vfu - output vector 
1668: -    w - any user data

1670:     Notes: Does NOT do ghost updates on vu upon entry

1672:     Level: advanced

1674: .seealso: DAComputeJacobian1WithAdic()

1676: @*/
1677: PetscErrorCode  DAFormFunction1(DA da,Vec vu,Vec vfu,void *w)
1678: {
1680:   void           *u,*fu;
1681:   DALocalInfo    info;
1682: 

1685:   DAGetLocalInfo(da,&info);
1686:   DAVecGetArray(da,vu,&u);
1687:   DAVecGetArray(da,vfu,&fu);

1689:   CHKMEMQ;
1690:   (*da->lf)(&info,u,fu,w);
1691:   if (PetscExceptionValue(ierr)) {
1692:     PetscErrorCode pierr = DAVecRestoreArray(da,vu,&u);CHKERRQ(pierr);
1693:     pierr = DAVecRestoreArray(da,vfu,&fu);CHKERRQ(pierr);
1694:   }
1695: 
1696:   CHKMEMQ;

1698:   DAVecRestoreArray(da,vu,&u);
1699:   DAVecRestoreArray(da,vfu,&fu);
1700:   return(0);
1701: }

1705: PetscErrorCode  DAFormFunctioniTest1(DA da,void *w)
1706: {
1707:   Vec            vu,fu,fui;
1709:   PetscInt       i,n;
1710:   PetscScalar    *ui;
1711:   PetscRandom    rnd;
1712:   PetscReal      norm;

1715:   DAGetLocalVector(da,&vu);
1716:   PetscRandomCreate(PETSC_COMM_SELF,&rnd);
1717:   PetscRandomSetFromOptions(rnd);
1718:   VecSetRandom(vu,rnd);
1719:   PetscRandomDestroy(rnd);

1721:   DAGetGlobalVector(da,&fu);
1722:   DAGetGlobalVector(da,&fui);
1723: 
1724:   DAFormFunction1(da,vu,fu,w);

1726:   VecGetArray(fui,&ui);
1727:   VecGetLocalSize(fui,&n);
1728:   for (i=0; i<n; i++) {
1729:     DAFormFunctioni1(da,i,vu,ui+i,w);
1730:   }
1731:   VecRestoreArray(fui,&ui);

1733:   VecAXPY(fui,-1.0,fu);
1734:   VecNorm(fui,NORM_2,&norm);
1735:   PetscPrintf(((PetscObject)da)->comm,"Norm of difference in vectors %G\n",norm);
1736:   VecView(fu,0);
1737:   VecView(fui,0);

1739:   DARestoreLocalVector(da,&vu);
1740:   DARestoreGlobalVector(da,&fu);
1741:   DARestoreGlobalVector(da,&fui);
1742:   return(0);
1743: }

1747: /*@
1748:     DAFormFunctioni1 - Evaluates a user provided point-wise function

1750:    Input Parameters:
1751: +    da - the DA that defines the grid
1752: .    i - the component of the function we wish to compute (must be local)
1753: .    vu - input vector
1754: .    vfu - output value
1755: -    w - any user data

1757:     Notes: Does NOT do ghost updates on vu upon entry

1759:     Level: advanced

1761: .seealso: DAComputeJacobian1WithAdic()

1763: @*/
1764: PetscErrorCode  DAFormFunctioni1(DA da,PetscInt i,Vec vu,PetscScalar *vfu,void *w)
1765: {
1767:   void           *u;
1768:   DALocalInfo    info;
1769:   MatStencil     stencil;
1770: 

1773:   DAGetLocalInfo(da,&info);
1774:   DAVecGetArray(da,vu,&u);

1776:   /* figure out stencil value from i */
1777:   stencil.c = i % info.dof;
1778:   stencil.i = (i % (info.xm*info.dof))/info.dof;
1779:   stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof);
1780:   stencil.k = i/(info.xm*info.ym*info.dof);

1782:   (*da->lfi)(&info,&stencil,u,vfu,w);

1784:   DAVecRestoreArray(da,vu,&u);
1785:   return(0);
1786: }
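/* Worked example (added for illustration) of the stencil decoding above:
   for xm = 4, ym = 3, dof = 2 and i = 17,
      c = 17 % 2       = 1
      i = (17 % 8)/2   = 0
      j = (17 % 24)/8  = 2
      k = 17/24        = 0
   i.e. component 1 at point (0,2,0) of the local patch. */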

1790: /*@
1791:     DAFormFunctionib1 - Evaluates a user provided point-block function

1793:    Input Parameters:
1794: +    da - the DA that defines the grid
1795: .    i - the component of the function we wish to compute (must be local)
1796: .    vu - input vector
1797: .    vfu - output value
1798: -    w - any user data

1800:     Notes: Does NOT do ghost updates on vu upon entry

1802:     Level: advanced

1804: .seealso: DAComputeJacobian1WithAdic()

1806: @*/
1807: PetscErrorCode  DAFormFunctionib1(DA da,PetscInt i,Vec vu,PetscScalar *vfu,void *w)
1808: {
1810:   void           *u;
1811:   DALocalInfo    info;
1812:   MatStencil     stencil;
1813: 
1815:   DAGetLocalInfo(da,&info);
1816:   DAVecGetArray(da,vu,&u);

1818:   /* figure out stencil value from i */
1819:   stencil.c = i % info.dof;
1820:   if (stencil.c) SETERRQ(PETSC_ERR_ARG_WRONG,"Point-block functions can only be called for the entire block");
1821:   stencil.i = (i % (info.xm*info.dof))/info.dof;
1822:   stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof);
1823:   stencil.k = i/(info.xm*info.ym*info.dof);

1825:   (*da->lfib)(&info,&stencil,u,vfu,w);

1827:   DAVecRestoreArray(da,vu,&u);
1828:   return(0);
1829: }
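/*
   For example (illustrative values): with info.dof = 2, i = 4 decodes to
   stencil.c = 0 and is accepted as the start of a block, while i = 3 decodes
   to stencil.c = 1 and triggers the PETSC_ERR_ARG_WRONG error above because
   it points into the middle of a block.
*/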

1831: #if defined(new)
1834: /*
1835:   DAGetDiagonal_MFFD - Gets the diagonal for a matrix free matrix where local
1836:     function lives on a DA

1838:         y ~= (F(u + ha) - F(u))/h, 
1839:   where F = nonlinear function, as set by SNESSetFunction()
1840:         u = current iterate
1841:         h = difference interval
1842: */
1843: PetscErrorCode DAGetDiagonal_MFFD(DA da,Vec U,Vec a)
1844: {
1845:   PetscScalar    h,*aa,*ww,v;
1846:   PetscReal      epsilon = PETSC_SQRT_MACHINE_EPSILON,umin = 100.0*PETSC_SQRT_MACHINE_EPSILON;
1848:   PetscInt       gI,nI;
1849:   MatStencil     stencil;
1850:   DALocalInfo    info;
1851: 
1853:   (*ctx->func)(0,U,a,ctx->funcctx);
1854:   (*ctx->funcisetbase)(U,ctx->funcctx);

1856:   VecGetArray(U,&ww);
1857:   VecGetArray(a,&aa);
1858: 
1859:   nI = 0;  /* the enclosing loop over entries (setting gI) is missing in this draft */
1860:     h  = ww[gI];
1861:     if (h == 0.0) h = 1.0;
1862: #if !defined(PETSC_USE_COMPLEX)
1863:     if (h < umin && h >= 0.0)      h = umin;
1864:     else if (h < 0.0 && h > -umin) h = -umin;
1865: #else
1866:     if (PetscAbsScalar(h) < umin && PetscRealPart(h) >= 0.0)     h = umin;
1867:     else if (PetscRealPart(h) < 0.0 && PetscAbsScalar(h) < umin) h = -umin;
1868: #endif
1869:     h     *= epsilon;
1870: 
1871:     ww[gI] += h;
1872:     (*ctx->funci)(i,w,&v,ctx->funcctx);
1873:     aa[nI]  = (v - aa[nI])/h;
1874:     ww[gI] -= h;
1875:     nI++;
1876:   }
1877:   VecRestoreArray(U,&ww);
1878:   VecRestoreArray(a,&aa);
1879:   return(0);
1880: }
1881: #endif
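/*
   Numerical illustration of the differencing-parameter safeguard in the draft
   above (approximate values in double precision): epsilon = sqrt(machine
   epsilon) ~ 1.5e-8 and umin = 100*epsilon ~ 1.5e-6, so
      u_i = 2.0     ->  h = 2.0*epsilon   ~ 3.0e-8
      u_i = 1e-20   ->  h = umin*epsilon  ~ 2.2e-14   (clamped away from zero)
      u_i = 0.0     ->  h = 1.0*epsilon   ~ 1.5e-8
   which keeps the perturbation scaled to u_i while preventing division by a
   vanishing h in (F(u + h e_i) - F(u))/h.
*/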

1883: #if defined(PETSC_HAVE_ADIC)
1885: #include "adic/ad_utils.h"

1890: /*@C
1891:     DAComputeJacobian1WithAdic - Evaluates an ADIC-provided Jacobian function on each processor that 
1892:         shares a DA

1894:    Input Parameters:
1895: +    da - the DA that defines the grid
1896: .    vu - input vector (ghosted)
1897: .    J - output matrix
1898: -    w - any user data

1900:    Level: advanced

1902:     Notes: Does NOT do ghost updates on vu upon entry

1904: .seealso: DAFormFunction1()

1906: @*/
1907: PetscErrorCode  DAComputeJacobian1WithAdic(DA da,Vec vu,Mat J,void *w)
1908: {
1910:   PetscInt       gtdof,tdof;
1911:   PetscScalar    *ustart;
1912:   DALocalInfo    info;
1913:   void           *ad_u,*ad_f,*ad_ustart,*ad_fstart;
1914:   ISColoring     iscoloring;

1917:   DAGetLocalInfo(da,&info);

1919:   PetscADResetIndep();

1921:   /* get space for derivative objects.  */
1922:   DAGetAdicArray(da,PETSC_TRUE,(void **)&ad_u,&ad_ustart,&gtdof);
1923:   DAGetAdicArray(da,PETSC_FALSE,(void **)&ad_f,&ad_fstart,&tdof);
1924:   VecGetArray(vu,&ustart);
1925:   DAGetColoring(da,IS_COLORING_GHOSTED,&iscoloring);

1927:   PetscADSetValueAndColor(ad_ustart,gtdof,iscoloring->colors,ustart);

1929:   VecRestoreArray(vu,&ustart);
1930:   PetscADIncrementTotalGradSize(iscoloring->n);
1931:   ISColoringDestroy(iscoloring);
1932:   PetscADSetIndepDone();

1934:   PetscLogEventBegin(DA_LocalADFunction,0,0,0,0);
1935:   (*da->adic_lf)(&info,ad_u,ad_f,w);
1936:   PetscLogEventEnd(DA_LocalADFunction,0,0,0,0);

1938:   /* stick the values into the matrix */
1939:   MatSetValuesAdic(J,(PetscScalar**)ad_fstart);

1941:   /* return space for derivative objects.  */
1942:   DARestoreAdicArray(da,PETSC_TRUE,(void **)&ad_u,&ad_ustart,&gtdof);
1943:   DARestoreAdicArray(da,PETSC_FALSE,(void **)&ad_f,&ad_fstart,&tdof);
1944:   return(0);
1945: }
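/*
   A usage sketch (editorial example, not part of the library): wiring this
   routine into a SNES Jacobian callback.  It assumes the user context is the
   DA itself and that an ADIC-differentiated local function ad_MyLocalFunction
   (a hypothetical name) was registered beforehand with
   DASetLocalAdicFunction(da,ad_MyLocalFunction).
*/
PetscErrorCode MyJacobian(SNES snes,Vec X,Mat *J,Mat *B,MatStructure *flag,void *ctx)
{
  DA  da = (DA)ctx;
  Vec localX;

  DAGetLocalVector(da,&localX);
  /* refresh ghost values; DAComputeJacobian1WithAdic() does no ghost update */
  DAGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
  DAGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
  DAComputeJacobian1WithAdic(da,localX,*J,ctx);
  DARestoreLocalVector(da,&localX);
  *flag = SAME_NONZERO_PATTERN;
  return(0);
}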

1949: /*@C
1950:     DAMultiplyByJacobian1WithAdic - Applies an ADIC-provided Jacobian function to a vector on 
1951:     each processor that shares a DA.

1953:     Input Parameters:
1954: +   da - the DA that defines the grid
1955: .   vu - Jacobian is computed at this point (ghosted)
1956: .   v - product is done on this vector (ghosted)
1957: .   f - output vector = J(vu)*v (not ghosted)
1958: -   w - any user data

1960:     Notes: 
1961:     This routine does NOT do ghost updates on vu upon entry.

1963:    Level: advanced

1965: .seealso: DAFormFunction1()

1967: @*/
1968: PetscErrorCode  DAMultiplyByJacobian1WithAdic(DA da,Vec vu,Vec v,Vec f,void *w)
1969: {
1971:   PetscInt       i,gtdof,tdof;
1972:   PetscScalar    *avu,*av,*af,*ad_vustart,*ad_fstart;
1973:   DALocalInfo    info;
1974:   void           *ad_vu,*ad_f;

1977:   DAGetLocalInfo(da,&info);

1979:   /* get space for derivative objects.  */
1980:   DAGetAdicMFArray(da,PETSC_TRUE,(void **)&ad_vu,(void**)&ad_vustart,&gtdof);
1981:   DAGetAdicMFArray(da,PETSC_FALSE,(void **)&ad_f,(void**)&ad_fstart,&tdof);

1983:   /* copy input vector into derivative object */
1984:   VecGetArray(vu,&avu);
1985:   VecGetArray(v,&av);
1986:   for (i=0; i<gtdof; i++) {
1987:     ad_vustart[2*i]   = avu[i];
1988:     ad_vustart[2*i+1] = av[i];
1989:   }
1990:   VecRestoreArray(vu,&avu);
1991:   VecRestoreArray(v,&av);

1993:   PetscADResetIndep();
1994:   PetscADIncrementTotalGradSize(1);
1995:   PetscADSetIndepDone();

1997:   (*da->adicmf_lf)(&info,ad_vu,ad_f,w);

1999:   /* stick the values into the vector */
2000:   VecGetArray(f,&af);
2001:   for (i=0; i<tdof; i++) {
2002:     af[i] = ad_fstart[2*i+1];
2003:   }
2004:   VecRestoreArray(f,&af);

2006:   /* return space for derivative objects.  */
2007:   DARestoreAdicMFArray(da,PETSC_TRUE,(void **)&ad_vu,(void**)&ad_vustart,&gtdof);
2008:   DARestoreAdicMFArray(da,PETSC_FALSE,(void **)&ad_f,(void**)&ad_fstart,&tdof);
2009:   return(0);
2010: }
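/*
   Layout note for the routine above: each ADIC matrix-free entry is stored as
   a (value, derivative) pair, so ad_vustart[2*i] carries u_i and
   ad_vustart[2*i+1] carries the seed v_i.  With the gradient size set to 1,
   a single forward-mode sweep of the differentiated local function leaves the
   directional derivative J(u)*v in the derivative slots ad_fstart[2*i+1].
*/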
2011: #endif

2015: /*@
2016:     DAComputeJacobian1 - Evaluates a local Jacobian function on each processor that 
2017:         shares a DA

2019:    Input Parameters:
2020: +    da - the DA that defines the grid
2021: .    vu - input vector (ghosted)
2022: .    J - output matrix
2023: -    w - any user data

2025:     Notes: Does NOT do ghost updates on vu upon entry

2027:     Level: advanced

2029: .seealso: DAFormFunction1()

2031: @*/
2032: PetscErrorCode  DAComputeJacobian1(DA da,Vec vu,Mat J,void *w)
2033: {
2035:   void           *u;
2036:   DALocalInfo    info;

2039:   DAGetLocalInfo(da,&info);
2040:   DAVecGetArray(da,vu,&u);
2041:   (*da->lj)(&info,u,J,w);
2042:   DAVecRestoreArray(da,vu,&u);
2043:   return(0);
2044: }
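/*
   Usage sketch (editorial, with a hypothetical user routine MyLocalJacobian):
   the local Jacobian must first be attached to the DA, and vu must be a
   ghosted local vector whose ghost values are current, e.g.

      DASetLocalJacobian(da,(DALocalFunction1)MyLocalJacobian);
      DAGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
      DAGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
      DAComputeJacobian1(da,localX,J,&user);
*/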


2049: /*
2050:     DAComputeJacobian1WithAdifor - Evaluates an ADIFOR-provided local Jacobian function on each processor that 
2051:         shares a DA

2053:    Input Parameters:
2054: +    da - the DA that defines the grid
2055: .    vu - input vector (ghosted)
2056: .    J - output matrix
2057: -    w - any user data

2059:     Notes: Does NOT do ghost updates on vu upon entry

2061: .seealso: DAFormFunction1()

2063: */
2064: PetscErrorCode  DAComputeJacobian1WithAdifor(DA da,Vec vu,Mat J,void *w)
2065: {
2066:   PetscErrorCode  ierr;
2067:   PetscInt        i,Nc,N;
2068:   ISColoringValue *color;
2069:   DALocalInfo     info;
2070:   PetscScalar     *u,*g_u,*g_f,*f,*p_u;
2071:   ISColoring      iscoloring;
2072:   void            (*lf)(PetscInt*,DALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*) =
2073:                   (void (*)(PetscInt*,DALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*))*da->adifor_lf;

2076:   DAGetColoring(da,IS_COLORING_GHOSTED,&iscoloring);
2077:   Nc   = iscoloring->n;
2078:   DAGetLocalInfo(da,&info);
2079:   N    = info.gxm*info.gym*info.gzm*info.dof;

2081:   /* get space for derivative objects.  */
2082:   PetscMalloc(Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar),&g_u);
2083:   PetscMemzero(g_u,Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar));
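  /* seed g_u with the coloring: entry k gets a 1 in the column of its color */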
2084:   p_u   = g_u;
2085:   color = iscoloring->colors;
2086:   for (i=0; i<N; i++) {
2087:     p_u[*color++] = 1.0;
2088:     p_u          += Nc;
2089:   }
2090:   ISColoringDestroy(iscoloring);
2091:   PetscMalloc(Nc*info.xm*info.ym*info.zm*info.dof*sizeof(PetscScalar),&g_f);
2092:   PetscMalloc(info.xm*info.ym*info.zm*info.dof*sizeof(PetscScalar),&f);

2094:   /* one sweep of the ADIFOR-generated routine evaluates f and all colored derivative columns g_f */
2095: 
2096:   VecGetArray(vu,&u);
2097:   (lf)(&Nc,&info,u,g_u,&Nc,f,g_f,&Nc,w,&ierr);
2098:   VecRestoreArray(vu,&u);

2100:   /* stick the values into the matrix */
2101:   /* PetscScalarView(Nc*info.xm*info.ym,g_f,0); */
2102:   MatSetValuesAdifor(J,Nc,g_f);

2104:   /* return space for derivative objects.  */
2105:   PetscFree(g_u);
2106:   PetscFree(g_f);
2107:   PetscFree(f);
2108:   return(0);
2109: }
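/*
   Seeding illustration for the routine above (editorial): with Nc = 3 colors
   and color(k) the color of ghosted entry k, g_u is an N x Nc array with
   g_u[k][color(k)] = 1 and 0 elsewhere.  Columns of the Jacobian sharing a
   color are structurally orthogonal, so the single ADIFOR sweep recovers
   every Jacobian entry and MatSetValuesAdifor() unpacks g_f by color into J.
*/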

2113: /*@C 
2114:    DAFormJacobianLocal - This is a universal Jacobian evaluation routine for
2115:    a local DA function.

2117:    Collective on DA

2119:    Input Parameters:
2120: +  da - the DA context
2121: .  func - The local function
2122: .  X - input vector
2123: .  J - Jacobian matrix
2124: -  ctx - A user context

2126:    Level: intermediate

2128: .seealso: DASetLocalFunction(), DASetLocalJacobian(), DASetLocalAdicFunction(), DASetLocalAdicMFFunction(),
2129:           SNESSetFunction(), SNESSetJacobian()

2131: @*/
2132: PetscErrorCode  DAFormJacobianLocal(DA da, DALocalFunction1 func, Vec X, Mat J, void *ctx)
2133: {
2134:   Vec            localX;
2135:   DALocalInfo    info;
2136:   void          *u;

2140:   DAGetLocalVector(da,&localX);
2141:   /*
2142:      Scatter ghost points to local vector, using the 2-step process
2143:         DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
2144:   */
2145:   DAGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
2146:   DAGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
2147:   DAGetLocalInfo(da,&info);
2148:   DAVecGetArray(da,localX,&u);
2149:   (*func)(&info,u,J,ctx);
2150:   if (PetscExceptionValue(ierr)) {
2151:     PetscErrorCode pierr = DAVecRestoreArray(da,localX,&u);CHKERRQ(pierr);
2152:   }
2153: 
2154:   DAVecRestoreArray(da,localX,&u);
2155:   if (PetscExceptionValue(ierr)) {
2156:     PetscErrorCode pierr = DARestoreLocalVector(da,&localX);CHKERRQ(pierr);
2157:   }
2158: 
2159:   DARestoreLocalVector(da,&localX);
2160:   return(0);
2161: }
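/*
   Usage sketch (editorial, hypothetical user routine MyLocalJacobian with the
   (DALocalInfo*,void*,Mat,void*) calling sequence used above):

      DAFormJacobianLocal(da,(DALocalFunction1)MyLocalJacobian,X,J,&user);

   Unlike DAComputeJacobian1(), this routine performs the global-to-local
   scatter of X itself, so X here is an ordinary (non-ghosted) global vector.
*/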

2165: /*@C
2166:     DAMultiplyByJacobian1WithAD - Applies a Jacobian function supplied by ADIFOR or ADIC
2167:     to a vector on each processor that shares a DA.

2169:    Input Parameters:
2170: +    da - the DA that defines the grid
2171: .    u - Jacobian is computed at this point (ghosted)
2172: .    v - product is done on this vector (ghosted)
2173: .    f - output vector = J(u)*v (not ghosted)
2174: -    w - any user data

2176:     Notes: 
2177:     This routine does NOT do ghost updates on u and v upon entry.
2178:            
2179:     Automatically calls DAMultiplyByJacobian1WithAdic() or DAMultiplyByJacobian1WithAdifor()
2180:     depending on whether DASetLocalAdicMFFunction() or DASetLocalAdiforMFFunction() was called.

2182:    Level: advanced

2184: .seealso: DAFormFunction1(), DAMultiplyByJacobian1WithAdifor(), DAMultiplyByJacobian1WithAdic()

2186: @*/
2187: PetscErrorCode  DAMultiplyByJacobian1WithAD(DA da,Vec u,Vec v,Vec f,void *w)
2188: {

2192:   if (da->adicmf_lf) {
2193: #if defined(PETSC_HAVE_ADIC)
2194:     DAMultiplyByJacobian1WithAdic(da,u,v,f,w);
2195: #else
2196:     SETERRQ(PETSC_ERR_SUP_SYS,"Requires ADIC to be installed and does not work with complex numbers");
2197: #endif
2198:   } else if (da->adiformf_lf) {
2199:     DAMultiplyByJacobian1WithAdifor(da,u,v,f,w);
2200:   } else {
2201:     SETERRQ(PETSC_ERR_ORDER,"Must call DASetLocalAdiforMFFunction() or DASetLocalAdicMFFunction() before using");
2202:   }
2203:   return(0);
2204: }


2209: /*@C
2210:     DAMultiplyByJacobian1WithAdifor - Applies an ADIFOR-provided Jacobian function on each processor that 
2211:         shares a DA to a vector

2213:    Input Parameters:
2214: +    da - the DA that defines the grid
2215: .    u - Jacobian is computed at this point (ghosted)
2216: .    v - product is done on this vector (ghosted)
2217: .    f - output vector = J(u)*v (not ghosted)
2218: -    w - any user data

2220:     Notes: Does NOT do ghost updates on u and v upon entry

2222:    Level: advanced

2224: .seealso: DAFormFunction1()

2226: @*/
2227: PetscErrorCode  DAMultiplyByJacobian1WithAdifor(DA da,Vec u,Vec v,Vec f,void *w)
2228: {
2230:   PetscScalar    *au,*av,*af,*awork;
2231:   Vec            work;
2232:   DALocalInfo    info;
2233:   void           (*lf)(DALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*) =
2234:                  (void (*)(DALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*))*da->adiformf_lf;

2237:   DAGetLocalInfo(da,&info);

2239:   DAGetGlobalVector(da,&work);
2240:   VecGetArray(u,&au);
2241:   VecGetArray(v,&av);
2242:   VecGetArray(f,&af);
2243:   VecGetArray(work,&awork);
2244:   (lf)(&info,au,av,awork,af,w,&ierr);
2245:   VecRestoreArray(u,&au);
2246:   VecRestoreArray(v,&av);
2247:   VecRestoreArray(f,&af);
2248:   VecRestoreArray(work,&awork);
2249:   DARestoreGlobalVector(da,&work);

2251:   return(0);
2252: }
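/*
   Calling-sequence note for the routine above: the ADIFOR-generated function
   is invoked as (info, u, v, work, f, user-context, ierr), matching the cast
   at the top of the routine; work is a scratch global vector taken from (and
   returned to) the DA's vector pool.
*/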

2256: /*@
2257:        DASetInterpolationType - Sets the type of interpolation that will be 
2258:           returned by DAGetInterpolation()

2260:    Collective on DA

2262:    Input Parameters:
2263: +  da - initial distributed array
2264: -  ctype - DA_Q1 and DA_Q0 are currently the only supported forms

2266:    Level: intermediate

2268:    Notes: you should call this on the coarser of the two DAs you pass to DAGetInterpolation()

2270: .keywords:  distributed array, interpolation

2272: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DA, DAInterpolationType
2273: @*/
2274: PetscErrorCode  DASetInterpolationType(DA da,DAInterpolationType ctype)
2275: {
2278:   da->interptype = ctype;
2279:   return(0);
2280: }
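/*
   Usage sketch (editorial): select piecewise constant interpolation on the
   coarser of two grid levels before requesting the interpolation, e.g.

      DASetInterpolationType(dac,DA_Q0);
      DAGetInterpolation(dac,daf,&A,&scale);

   where dac/daf are the coarse/fine DAs; per the note above, the type is set
   on the coarser DA.
*/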