Actual source code: da1.c

  1: #define PETSCDM_DLL
  2: /* 
  3:    Code for manipulating distributed regular 1d arrays in parallel.
  4:    This file was created by Peter Mell   6/30/95    
  5: */

 7:  #include "../src/dm/da/daimpl.h"

  9: const char *DAPeriodicTypes[] = {"NONPERIODIC","XPERIODIC","YPERIODIC","XYPERIODIC",
 10:                                  "XYZPERIODIC","XZPERIODIC","YZPERIODIC","ZPERIODIC","XYZGHOSTED","DAPeriodicType","DA_",0};

 14: PetscErrorCode DAView_1d(DA da,PetscViewer viewer)
 15: {
 17:   PetscMPIInt    rank;
 18:   PetscTruth     iascii,isdraw;

 21:   MPI_Comm_rank(((PetscObject)da)->comm,&rank);

 23:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
 24:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
 25:   if (iascii) {
 26:     PetscViewerFormat format;

 28:     PetscViewerGetFormat(viewer, &format);
 29:     if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) {
 30:       PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D m %D w %D s %D\n",rank,da->M,
 31:                                                 da->m,da->w,da->s);
 32:       PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D\n",da->xs,da->xe);
 33:       PetscViewerFlush(viewer);
 34:     }
 35:   } else if (isdraw) {
 36:     PetscDraw  draw;
 37:     double     ymin = -1,ymax = 1,xmin = -1,xmax = da->M,x;
 38:     PetscInt   base;
 39:     char       node[10];
 40:     PetscTruth isnull;

 42:     PetscViewerDrawGetDraw(viewer,0,&draw);
 43:     PetscDrawIsNull(draw,&isnull); if (isnull) return(0);

 45:     PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);
 46:     PetscDrawSynchronizedClear(draw);

 48:     /* first processor draws all node lines */
 49:     if (!rank) {
 50:       PetscInt xmin_tmp;
 51:       ymin = 0.0; ymax = 0.3;
 52: 
 53:       /* ADIC doesn't like doubles in a for loop */
 54:       for (xmin_tmp =0; xmin_tmp < da->M; xmin_tmp++) {
 55:          PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);
 56:       }

 58:       xmin = 0.0; xmax = da->M - 1;
 59:       PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);
 60:       PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);
 61:     }

 63:     PetscDrawSynchronizedFlush(draw);
 64:     PetscDrawPause(draw);

 66:     /* draw my box */
 67:     ymin = 0; ymax = 0.3; xmin = da->xs / da->w; xmax = (da->xe / da->w)  - 1;
 68:     PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);
 69:     PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);
 70:     PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);
 71:     PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);

 73:     /* Put in index numbers */
 74:     base = da->base / da->w;
 75:     for (x=xmin; x<=xmax; x++) {
 76:       sprintf(node,"%d",(int)base++);
 77:       PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);
 78:     }

 80:     PetscDrawSynchronizedFlush(draw);
 81:     PetscDrawPause(draw);
 82:   } else {
 83:     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for DA 1d",((PetscObject)viewer)->type_name);
 84:   }
 85:   return(0);
 86: }
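/*
   For reference, a minimal sketch of what the ASCII branch above prints, using
   illustrative values (a single process owning all 8 points of a dof = 1,
   s = 1 DA), built from the two format strings in DAView_1d():

     Processor [0] M 8 m 1 w 1 s 1
     X range of indices: 0 8
*/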

 88: #if 0
 89: EXTERN PetscErrorCode DAPublish_Petsc(PetscObject);
 90: #endif

 94: /*@C
 95:    DACreate1d - Creates an object that will manage the communication of  one-dimensional 
 96:    regular array data that is distributed across some processors.

 98:    Collective on MPI_Comm

100:    Input Parameters:
101: +  comm - MPI communicator
102: .  wrap - type of periodicity the array should have, if any. Use
103:           either DA_NONPERIODIC or DA_XPERIODIC
104: .  M - global dimension of the array (use -M to indicate that it may be set to a different value 
105:             from the command line with -da_grid_x <M>)
106: .  dof - number of degrees of freedom per node
107: .  s - stencil width
108: -  lc - array containing the number of nodes in the X direction on each processor,
109:         or PETSC_NULL. If non-null, it must have one entry for each process in comm.

111:    Output Parameter:
112: .  inra - the resulting distributed array object

114:    Options Database Key:
115: +  -da_view - Calls DAView() at the conclusion of DACreate1d()
116: .  -da_grid_x <nx> - number of grid points in x direction; can set if M < 0
117: -  -da_refine_x - refinement factor 

119:    Level: beginner

121:    Notes:
122:    The array data itself is NOT stored in the DA; it is stored in Vec objects.
123:    The appropriate vector objects can be obtained with calls to DACreateGlobalVector()
124:    and DACreateLocalVector(), and with calls to VecDuplicate() if more are needed.


127: .keywords: distributed array, create, one-dimensional

129: .seealso: DADestroy(), DAView(), DACreate2d(), DACreate3d(), DAGlobalToLocalBegin(), DASetRefinementFactor(),
130:           DAGlobalToLocalEnd(), DALocalToGlobal(), DALocalToLocalBegin(), DALocalToLocalEnd(), DAGetRefinementFactor(),
131:           DAGetInfo(), DACreateGlobalVector(), DACreateLocalVector(), DACreateNaturalVector(), DALoad(), DAView(), DAGetOwnershipRanges()

133: @*/
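/*
   A minimal usage sketch (illustrative only; error checking with CHKERRQ() and
   the surrounding PetscInitialize()/PetscFinalize() are omitted):

     DA  da;
     Vec g,l;

     DACreate1d(PETSC_COMM_WORLD,DA_NONPERIODIC,-8,1,1,PETSC_NULL,&da);
     DACreateGlobalVector(da,&g);
     DACreateLocalVector(da,&l);
     ...
     VecDestroy(l);
     VecDestroy(g);
     DADestroy(da);

   Passing -8 for M requests 8 grid points by default while letting -da_grid_x
   override the value from the command line, as described above.
*/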
134: PetscErrorCode  DACreate1d(MPI_Comm comm,DAPeriodicType wrap,PetscInt M,PetscInt dof,PetscInt s,const PetscInt lc[],DA *inra)
135: {
137:   PetscMPIInt    rank,size;
138:   PetscInt       i,*idx,nn,left,refine_x = 2,tM = M,xs,xe,x,Xs,Xe,start,end,m;
139:   PetscTruth     flg1,flg2;
140:   DA             da;
141:   Vec            local,global;
142:   VecScatter     ltog,gtol;
143:   IS             to,from;

147:   *inra = 0;
148: #ifndef PETSC_USE_DYNAMIC_LIBRARIES
149:   DMInitializePackage(PETSC_NULL);
150: #endif

152:   if (dof < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %D",dof);
153:   if (s < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %D",s);

155:   PetscOptionsBegin(comm,PETSC_NULL,"1d DA Options","DA");
156:     if (M < 0) {
157:       tM   = -M;
158:       PetscOptionsInt("-da_grid_x","Number of grid points in x direction","DACreate1d",tM,&tM,PETSC_NULL);
159:     }
160:     PetscOptionsInt("-da_refine_x","Refinement ratio in x direction","DASetRefinementFactor",refine_x,&refine_x,PETSC_NULL);
161:   PetscOptionsEnd();
162:   M = tM;

164:   PetscHeaderCreate(da,_p_DA,struct _DAOps,DM_COOKIE,0,"DM",comm,DADestroy,DAView);
165:   PetscObjectChangeTypeName((PetscObject)da,"DA");
166:   da->ops->globaltolocalbegin = DAGlobalToLocalBegin;
167:   da->ops->globaltolocalend   = DAGlobalToLocalEnd;
168:   da->ops->localtoglobal      = DALocalToGlobal;
169:   da->ops->createglobalvector = DACreateGlobalVector;
170:   da->ops->createlocalvector  = DACreateLocalVector;
171:   da->ops->getinterpolation   = DAGetInterpolation;
172:   da->ops->getcoloring        = DAGetColoring;
173:   da->ops->getmatrix          = DAGetMatrix;
174:   da->ops->refine             = DARefine;
175:   da->ops->coarsen            = DACoarsen;
176:   da->ops->getaggregates      = DAGetAggregates;
177:   da->ops->destroy            = DADestroy;
178:   da->dim                     = 1;
179:   da->interptype              = DA_Q1;
180:   da->refine_x                = refine_x;
181:   PetscMalloc(dof*sizeof(char*),&da->fieldname);
182:   PetscMemzero(da->fieldname,dof*sizeof(char*));
183:   MPI_Comm_size(comm,&size);
184:   MPI_Comm_rank(comm,&rank);

186:   m = size;

188:   if (M < m)     SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"More processors than data points! %D %D",m,M);
189:   if ((M-1) < s) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %D %D",M-1,s);

191:   /* 
192:      Determine locally owned region 
193:      xs is the first local node number, x is the number of local nodes 
194:   */
195:   if (!lc) {
196:     PetscOptionsHasName(PETSC_NULL,"-da_partition_blockcomm",&flg1);
197:     PetscOptionsHasName(PETSC_NULL,"-da_partition_nodes_at_end",&flg2);
198:     if (flg1) {      /* Block Comm type Distribution */
199:       xs = rank*M/m;
200:       x  = (rank + 1)*M/m - xs;
 201:     } else if (flg2) { /* The extra nodes are distributed across the last processes */
202:       x = (M + rank)/m;
203:       if (M/m == x) { xs = rank*x; }
204:       else          { xs = rank*(x-1) + (M+rank)%(x*m); }
 205:     } else { /* The extra nodes are distributed across the first M % m processes */
206:       /* Regular PETSc Distribution */
207:       x = M/m + ((M % m) > rank);
208:       if (rank >= (M % m)) {xs = (rank * (PetscInt)(M/m) + M % m);}
209:       else                 {xs = rank * (PetscInt)(M/m) + rank;}
210:     }
211:   } else {
212:     x  = lc[rank];
213:     xs = 0;
214:     for (i=0; i<rank; i++) {
215:       xs += lc[i];
216:     }
 217:     /* verify that the data the user provided is consistent */
218:     left = xs;
219:     for (i=rank; i<size; i++) {
220:       left += lc[i];
221:     }
222:     if (left != M) {
223:       SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Sum of lc across processors not equal to M %D %D",left,M);
224:     }
225:   }
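  /* Worked example of the default (regular PETSc) distribution above:
     with M = 10 nodes on m = 3 processes, ranks 0,1,2 get x = 4,3,3 and
     xs = 0,4,7; the M % m leftover nodes go to the lowest-ranked processes. */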

 227:   /* From now on x,s,xs,xe,Xs,Xe are the exact locations in the array */
 228:   x  *= dof;
 229:   s  *= dof;  /* NOTE: s is now the absolute stencil distance in array entries */
230:   xs *= dof;
231:   xe = xs + x;

233:   /* determine ghost region */
234:   if (wrap == DA_XPERIODIC || wrap == DA_XYZGHOSTED) {
235:     Xs = xs - s;
236:     Xe = xe + s;
237:   } else {
238:     if ((xs-s) >= 0)   Xs = xs-s;  else Xs = 0;
239:     if ((xe+s) <= M*dof) Xe = xe+s;  else Xe = M*dof;
240:   }
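  /* For instance, with dof = 1 and s = 1 in the non-wrapped case, a process
     owning [xs,xe) = [4,7) gets the ghosted range [Xs,Xe) = [3,8), clipped to
     [0, M*dof) on the end processes; with DA_XPERIODIC the range is never
     clipped and wrapped indices are resolved when idx is built below. */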

242:   /* allocate the base parallel and sequential vectors */
243:   da->Nlocal = x;
244:   VecCreateMPIWithArray(comm,da->Nlocal,PETSC_DECIDE,0,&global);
245:   VecSetBlockSize(global,dof);
246:   da->nlocal = (Xe-Xs);
247:   VecCreateSeqWithArray(PETSC_COMM_SELF,da->nlocal,0,&local);
248:   VecSetBlockSize(local,dof);
249: 
250:   /* Create Local to Global Vector Scatter Context */
251:   /* local to global inserts non-ghost point region into global */
252:   VecGetOwnershipRange(global,&start,&end);
253:   ISCreateStride(comm,x,start,1,&to);
254:   ISCreateStride(comm,x,xs-Xs,1,&from);
255:   VecScatterCreate(local,from,global,to,&ltog);
256:   PetscLogObjectParent(da,to);
257:   PetscLogObjectParent(da,from);
258:   PetscLogObjectParent(da,ltog);
259:   ISDestroy(from);
260:   ISDestroy(to);

262:   /* Create Global to Local Vector Scatter Context */
263:   /* global to local must retrieve ghost points */
264:   if  (wrap == DA_XYZGHOSTED) {
265:     if (size == 1) {
266:       ISCreateStride(comm,(xe-xs),s,1,&to);
267:     } else if (!rank) {
268:       ISCreateStride(comm,(Xe-xs),s,1,&to);
269:     } else if (rank == size-1) {
270:       ISCreateStride(comm,(xe-Xs),0,1,&to);
271:     } else {
272:       ISCreateStride(comm,(Xe-Xs),0,1,&to);
273:     }
274:   } else {
275:     ISCreateStride(comm,(Xe-Xs),0,1,&to);
276:   }
277: 
278:   PetscMalloc((x+2*s)*sizeof(PetscInt),&idx);
279:   PetscLogObjectMemory(da,(x+2*s)*sizeof(PetscInt));

281:   nn = 0;
282:   if (wrap == DA_XPERIODIC) {    /* Handle all cases with wrap first */

284:     for (i=0; i<s; i++) {  /* Left ghost points */
285:       if ((xs-s+i)>=0) { idx[nn++] = xs-s+i;}
286:       else             { idx[nn++] = M*dof+(xs-s+i);}
287:     }

289:     for (i=0; i<x; i++) { idx [nn++] = xs + i;}  /* Non-ghost points */
290: 
291:     for (i=0; i<s; i++) { /* Right ghost points */
292:       if ((xe+i)<M*dof) { idx [nn++] =  xe+i; }
293:       else              { idx [nn++] = (xe+i) - M*dof;}
294:     }
295:   } else if (wrap == DA_XYZGHOSTED) {

297:     if (s <= xs) {for (i=0; i<s; i++) {idx[nn++] = xs - s + i;}}

299:     for (i=0; i<x; i++) { idx [nn++] = xs + i;}
300: 
301:     if ((xe+s)<=M*dof) {for (i=0;  i<s;     i++) {idx[nn++]=xe+i;}}

303:   } else {      /* Now do all cases with no wrapping */

305:     if (s <= xs) {for (i=0; i<s; i++) {idx[nn++] = xs - s + i;}}
306:     else         {for (i=0; i<xs;  i++) {idx[nn++] = i;}}

308:     for (i=0; i<x; i++) { idx [nn++] = xs + i;}
309: 
310:     if ((xe+s)<=M*dof) {for (i=0;  i<s;     i++) {idx[nn++]=xe+i;}}
311:     else               {for (i=xe; i<(M*dof); i++) {idx[nn++]=i;}}
312:   }
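  /* Small worked example: for DA_XPERIODIC with M = 4, dof = 1, s = 1 on a
     single process (xs = 0, xe = 4), the loops above give
     idx = {3, 0, 1, 2, 3, 0}: the left ghost wraps to the last global entry
     and the right ghost wraps back to the first. */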

314:   ISCreateGeneral(comm,nn,idx,&from);
315:   VecScatterCreate(global,from,local,to,&gtol);
316:   PetscLogObjectParent(da,to);
317:   PetscLogObjectParent(da,from);
318:   PetscLogObjectParent(da,gtol);
319:   ISDestroy(to);
320:   ISDestroy(from);
321:   VecDestroy(local);
322:   VecDestroy(global);

324:   da->M  = M;  da->N  = 1;  da->m  = m; da->n = 1;
325:   da->xs = xs; da->xe = xe; da->ys = 0; da->ye = 1; da->zs = 0; da->ze = 1;
326:   da->Xs = Xs; da->Xe = Xe; da->Ys = 0; da->Ye = 1; da->Zs = 0; da->Ze = 1;
327:   da->P  = 1;  da->p  = 1;  da->w = dof; da->s = s/dof;

329:   da->gtol         = gtol;
330:   da->ltog         = ltog;
331:   da->base         = xs;
332:   da->ops->view    = DAView_1d;
333:   da->wrap         = wrap;
334:   da->stencil_type = DA_STENCIL_STAR;

336:   /* 
 337:      Set the local-to-global ordering in the global vector; this allows the use
 338:      of VecSetValuesLocal().
339:   */
340:   if (wrap == DA_XYZGHOSTED) {
341:     PetscInt *tmpidx;
342:     if (size == 1) {
343:       PetscMalloc((nn+2*s)*sizeof(PetscInt),&tmpidx);
344:       for (i=0; i<s; i++) tmpidx[i] = -1;
345:       PetscMemcpy(tmpidx+s,idx,nn*sizeof(PetscInt));
346:       for (i=nn+s; i<nn+2*s; i++) tmpidx[i] = -1;
347:       PetscFree(idx);
348:       idx  = tmpidx;
349:       nn  += 2*s;
 350:     } else if (!rank) { /* must prepend -1 markers for ghost locations that have no global value */
351:       PetscMalloc((nn+s)*sizeof(PetscInt),&tmpidx);
352:       for (i=0; i<s; i++) tmpidx[i] = -1;
353:       PetscMemcpy(tmpidx+s,idx,nn*sizeof(PetscInt));
354:       PetscFree(idx);
355:       idx  = tmpidx;
356:       nn  += s;
 357:     } else if (rank  == size-1) { /* must append -1 markers for ghost locations that have no global value */
358:       PetscMalloc((nn+s)*sizeof(PetscInt),&tmpidx);
359:       PetscMemcpy(tmpidx,idx,nn*sizeof(PetscInt));
360:       for (i=nn; i<nn+s; i++) tmpidx[i] = -1;
361:       PetscFree(idx);
362:       idx  = tmpidx;
363:       nn  += s;
364:     }
365:   }
366:   ISLocalToGlobalMappingCreateNC(comm,nn,idx,&da->ltogmap);
367:   ISLocalToGlobalMappingBlock(da->ltogmap,da->w,&da->ltogmapb);
368:   PetscLogObjectParent(da,da->ltogmap);

370:   da->idx = idx;
371:   da->Nl  = nn;

373:   da->ltol = PETSC_NULL;
374:   da->ao   = PETSC_NULL;

376:   DAView_Private(da);
377:   *inra = da;
378:   PetscPublishAll(da);
379:   return(0);
380: }

384: /*
 385:     Processes command line options to determine if/how a DA
 386:     is to be viewed. Called by DACreateXX().
387: */
388: PetscErrorCode DAView_Private(DA da)
389: {
391:   PetscTruth     flg1;
392:   PetscViewer    view;

395:   PetscOptionsBegin(((PetscObject)da)->comm,((PetscObject)da)->prefix,"Distributed array (DA) options","DA");
396:     PetscOptionsTruth("-da_view","Print information about the DA's distribution","DAView",PETSC_FALSE,&flg1,PETSC_NULL);
397:     if (flg1) {
398:       PetscViewerASCIIGetStdout(((PetscObject)da)->comm,&view);
399:       DAView(da,view);
400:     }
401:     PetscOptionsTruth("-da_view_draw","Draw how the DA is distributed","DAView",PETSC_FALSE,&flg1,PETSC_NULL);
402:     if (flg1) {DAView(da,PETSC_VIEWER_DRAW_(((PetscObject)da)->comm));}
403:   PetscOptionsEnd();
404:   return(0);
405: }
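/*
   Thus any program that creates a DA can be run with, e.g.,

     ./myprogram -da_view -da_view_draw

   to print and draw the distribution (through DAView_1d() above for a 1d DA);
   "myprogram" is just a placeholder executable name.
*/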