Actual source code: pdvec.c

  1: #define PETSCVEC_DLL
  2: /*
  3:      Code for some of the parallel vector primitives.
  4: */
  5: #include "../src/vec/vec/impls/mpi/pvecimpl.h"
  6: #if defined(PETSC_HAVE_PNETCDF)
  8: #include "pnetcdf.h"
 10: #endif

 14: PetscErrorCode VecDestroy_MPI(Vec v)
 15: {
 16:   Vec_MPI        *x = (Vec_MPI*)v->data;

 20: #if defined(PETSC_USE_LOG)
 21:   PetscLogObjectState((PetscObject)v,"Length=%D",v->map->N);
 22: #endif  
 23:   PetscFree(x->array_allocated);

 25:   /* Destroy local representation of vector if it exists */
 26:   if (x->localrep) {
 27:     VecDestroy(x->localrep);
 28:     if (x->localupdate) {VecScatterDestroy(x->localupdate);}
 29:   }
 30:   /* Destroy the stashes: note the order - so that the tags are freed properly */
 31:   VecStashDestroy_Private(&v->bstash);
 32:   VecStashDestroy_Private(&v->stash);
 33:   PetscFree(x);
 34:   return(0);
 35: }

 39: PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
 40: {
 41:   PetscErrorCode    ierr;
 42:   PetscInt          i,work = xin->map->n,cnt,len,nLen;
 43:   PetscMPIInt       j,n = 0,size,rank,tag = ((PetscObject)viewer)->tag;
 44:   MPI_Status        status;
 45:   PetscScalar       *values,*xarray;
 46:   const char        *name;
 47:   PetscViewerFormat format;

 50:   VecGetArray(xin,&xarray);
 51:   /* determine maximum message to arrive */
 52:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
 53:   MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,((PetscObject)xin)->comm);
 54:   MPI_Comm_size(((PetscObject)xin)->comm,&size);

 56:   if (!rank) {
 57:     PetscMalloc((len+1)*sizeof(PetscScalar),&values);
 58:     PetscViewerGetFormat(viewer,&format);
 59:     /*
 60:         Matlab format and ASCII format are very similar except 
 61:         Matlab uses %18.16e format while ASCII uses %g
 62:     */
 63:     if (format == PETSC_VIEWER_ASCII_MATLAB) {
 64:       PetscObjectGetName((PetscObject)xin,&name);
 65:       PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
 66:       for (i=0; i<xin->map->n; i++) {
 67: #if defined(PETSC_USE_COMPLEX)
 68:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
 69:           PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
 70:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
 71:           PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
 72:         } else {
 73:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(xarray[i]));
 74:         }
 75: #else
 76:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
 77: #endif
 78:       }
 79:       /* receive and print messages */
 80:       for (j=1; j<size; j++) {
 81:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
 82:         MPI_Get_count(&status,MPIU_SCALAR,&n);
 83:         for (i=0; i<n; i++) {
 84: #if defined(PETSC_USE_COMPLEX)
 85:           if (PetscImaginaryPart(values[i]) > 0.0) {
 86:             PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16e i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
 87:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
 88:             PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16e i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
 89:           } else {
 90:             PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(values[i]));
 91:           }
 92: #else
 93:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
 94: #endif
 95:         }
 96:       }
 97:       PetscViewerASCIIPrintf(viewer,"];\n");

 99:     } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
100:       for (i=0; i<xin->map->n; i++) {
101: #if defined(PETSC_USE_COMPLEX)
102:         PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
103: #else
104:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
105: #endif
106:       }
107:       /* receive and print messages */
108:       for (j=1; j<size; j++) {
109:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
110:         MPI_Get_count(&status,MPIU_SCALAR,&n);
111:         for (i=0; i<n; i++) {
112: #if defined(PETSC_USE_COMPLEX)
113:           PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
114: #else
115:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
116: #endif
117:         }
118:       }
119:     } else if (format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
120:       /*
121:         state 0: No header has been output
122:         state 1: Only POINT_DATA has been output
123:         state 2: Only CELL_DATA has been output
124:         state 3: Output both, POINT_DATA last
125:         state 4: Output both, CELL_DATA last
126:       */
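      /*
        Added illustration (not from pdvec.c): a typical transition for this
        state machine is 0 --CELL_DATA--> 2 --POINT_DATA--> 3; from state 3 a
        further CELL_DATA request falls into the SETERRQ branch below, since
        CELL_DATA may not be emitted again after intervening POINT_DATA.
      */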
127:       static PetscInt stateId = -1;
128:       int outputState = 0;
129:       PetscTruth hasState;
130:       int doOutput = 0;
131:       PetscInt bs, b;

133:       if (stateId < 0) {
134:         PetscObjectComposedDataRegister(&stateId);
135:       }
136:       PetscObjectComposedDataGetInt((PetscObject) viewer, stateId, outputState, hasState);
137:       if (!hasState) {
138:         outputState = 0;
139:       }
140:       PetscObjectGetName((PetscObject) xin, &name);
141:       VecGetLocalSize(xin, &nLen);
142:       n    = PetscMPIIntCast(nLen);
143:       VecGetBlockSize(xin, &bs);
144:       if (format == PETSC_VIEWER_ASCII_VTK) {
145:         if (outputState == 0) {
146:           outputState = 1;
147:           doOutput = 1;
148:         } else if (outputState == 1) {
149:           doOutput = 0;
150:         } else if (outputState == 2) {
151:           outputState = 3;
152:           doOutput = 1;
153:         } else if (outputState == 3) {
154:           doOutput = 0;
155:         } else if (outputState == 4) {
156:           SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Tried to output POINT_DATA again after intervening CELL_DATA");
157:         }
158:         if (doOutput) {
159:           PetscViewerASCIIPrintf(viewer, "POINT_DATA %d\n", xin->map->N/bs);
160:         }
161:       } else {
162:         if (outputState == 0) {
163:           outputState = 2;
164:           doOutput = 1;
165:         } else if (outputState == 1) {
166:           outputState = 4;
167:           doOutput = 1;
168:         } else if (outputState == 2) {
169:           doOutput = 0;
170:         } else if (outputState == 3) {
171:           SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Tried to output CELL_DATA again after intervening POINT_DATA");
172:         } else if (outputState == 4) {
173:           doOutput = 0;
174:         }
175:         if (doOutput) {
176:           PetscViewerASCIIPrintf(viewer, "CELL_DATA %d\n", xin->map->N/bs);
177:         }
178:       }
179:       PetscObjectComposedDataSetInt((PetscObject) viewer, stateId, outputState);
180:       if (name) {
181:         PetscViewerASCIIPrintf(viewer, "SCALARS %s double %d\n", name, bs);
182:       } else {
183:         PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %d\n", bs);
184:       }
185:       PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n");
186:       for (i=0; i<n/bs; i++) {
187:         for (b=0; b<bs; b++) {
188:           if (b > 0) {
189:             PetscViewerASCIIPrintf(viewer," ");
190:           }
191:           PetscViewerASCIIPrintf(viewer,"%G",PetscRealPart(xarray[i*bs+b]));
192:         }
193:         PetscViewerASCIIPrintf(viewer,"\n");
194:       }
195:       for (j=1; j<size; j++) {
196:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
197:         MPI_Get_count(&status,MPIU_SCALAR,&n);
198:         for (i=0; i<n/bs; i++) {
199:           for (b=0; b<bs; b++) {
200:             if (b > 0) {
201:               PetscViewerASCIIPrintf(viewer," ");
202:             }
203:             PetscViewerASCIIPrintf(viewer,"%G",PetscRealPart(values[i*bs+b]));
204:           }
205:           PetscViewerASCIIPrintf(viewer,"\n");
206:         }
207:       }
208:     } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS) {
209:       PetscInt bs, b;

211:       VecGetLocalSize(xin, &nLen);
212:       n    = PetscMPIIntCast(nLen);
213:       VecGetBlockSize(xin, &bs);
214:       if ((bs < 1) || (bs > 3)) {
215:         SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %d", bs);
216:       }
217:       for (i=0; i<n/bs; i++) {
218:         for (b=0; b<bs; b++) {
219:           if (b > 0) {
220:             PetscViewerASCIIPrintf(viewer," ");
221:           }
222:           PetscViewerASCIIPrintf(viewer,"%G",PetscRealPart(xarray[i*bs+b]));
223:         }
224:         for (b=bs; b<3; b++) {
225:           PetscViewerASCIIPrintf(viewer," 0.0");
226:         }
227:         PetscViewerASCIIPrintf(viewer,"\n");
228:       }
229:       for (j=1; j<size; j++) {
230:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
231:         MPI_Get_count(&status,MPIU_SCALAR,&n);
232:         for (i=0; i<n/bs; i++) {
233:           for (b=0; b<bs; b++) {
234:             if (b > 0) {
235:               PetscViewerASCIIPrintf(viewer," ");
236:             }
237:             PetscViewerASCIIPrintf(viewer,"%G",PetscRealPart(values[i*bs+b]));
238:           }
239:           for (b=bs; b<3; b++) {
240:             PetscViewerASCIIPrintf(viewer," 0.0");
241:           }
242:           PetscViewerASCIIPrintf(viewer,"\n");
243:         }
244:       }
245:     } else if (format == PETSC_VIEWER_ASCII_PCICE) {
246:       PetscInt bs, b, vertexCount = 1;

248:       VecGetLocalSize(xin, &nLen);
249:       n    = PetscMPIIntCast(nLen);
250:       VecGetBlockSize(xin, &bs);
251:       if ((bs < 1) || (bs > 3)) {
252:         SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "PCICE can only handle up to 3D objects, but vector dimension is %d", bs);
253:       }
254:       PetscViewerASCIIPrintf(viewer,"%D\n", xin->map->N/bs);
255:       for (i=0; i<n/bs; i++) {
256:         PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
257:         for (b=0; b<bs; b++) {
258:           if (b > 0) {
259:             PetscViewerASCIIPrintf(viewer," ");
260:           }
261: #if !defined(PETSC_USE_COMPLEX)
262:           PetscViewerASCIIPrintf(viewer,"% 12.5E",xarray[i*bs+b]);
263: #endif
264:         }
265:         PetscViewerASCIIPrintf(viewer,"\n");
266:       }
267:       for (j=1; j<size; j++) {
268:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
269:         MPI_Get_count(&status,MPIU_SCALAR,&n);
270:         for (i=0; i<n/bs; i++) {
271:           PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
272:           for (b=0; b<bs; b++) {
273:             if (b > 0) {
274:               PetscViewerASCIIPrintf(viewer," ");
275:             }
276: #if !defined(PETSC_USE_COMPLEX)
277:             PetscViewerASCIIPrintf(viewer,"% 12.5E",values[i*bs+b]);
278: #endif
279:           }
280:           PetscViewerASCIIPrintf(viewer,"\n");
281:         }
282:       }
283:     } else if (format == PETSC_VIEWER_ASCII_PYLITH) {
284:       PetscInt bs, b, vertexCount = 1;

286:       VecGetLocalSize(xin, &nLen);
287:       n    = PetscMPIIntCast(nLen);
288:       VecGetBlockSize(xin, &bs);
289:       if (bs != 3) {
290:         SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "PyLith can only handle 3D objects, but vector dimension is %d", bs);
291:       }
292:       PetscViewerASCIIPrintf(viewer,"#\n");
293:       PetscViewerASCIIPrintf(viewer,"#  Node      X-coord           Y-coord           Z-coord\n");
294:       PetscViewerASCIIPrintf(viewer,"#\n");
295:       for (i=0; i<n/bs; i++) {
296:         PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
297:         for (b=0; b<bs; b++) {
298:           if (b > 0) {
299:             PetscViewerASCIIPrintf(viewer," ");
300:           }
301: #if !defined(PETSC_USE_COMPLEX)
302:           PetscViewerASCIIPrintf(viewer,"% 16.8E",xarray[i*bs+b]);
303: #endif
304:         }
305:         PetscViewerASCIIPrintf(viewer,"\n");
306:       }
307:       for (j=1; j<size; j++) {
308:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
309:         MPI_Get_count(&status,MPIU_SCALAR,&n);
310:         for (i=0; i<n/bs; i++) {
311:           PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
312:           for (b=0; b<bs; b++) {
313:             if (b > 0) {
314:               PetscViewerASCIIPrintf(viewer," ");
315:             }
316: #if !defined(PETSC_USE_COMPLEX)
317:             PetscViewerASCIIPrintf(viewer,"% 16.8E",values[i*bs+b]);
318: #endif
319:           }
320:           PetscViewerASCIIPrintf(viewer,"\n");
321:         }
322:       }
323:     } else {
324:       if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
325:       cnt = 0;
326:       for (i=0; i<xin->map->n; i++) {
327:         if (format == PETSC_VIEWER_ASCII_INDEX) {
328:           PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
329:         }
330: #if defined(PETSC_USE_COMPLEX)
331:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
332:           PetscViewerASCIIPrintf(viewer,"%G + %G i\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
333:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
334:           PetscViewerASCIIPrintf(viewer,"%G - %G i\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
335:         } else {
336:           PetscViewerASCIIPrintf(viewer,"%G\n",PetscRealPart(xarray[i]));
337:         }
338: #else
339:         PetscViewerASCIIPrintf(viewer,"%G\n",xarray[i]);
340: #endif
341:       }
342:       /* receive and print messages */
343:       for (j=1; j<size; j++) {
344:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
345:         MPI_Get_count(&status,MPIU_SCALAR,&n);
346:         if (format != PETSC_VIEWER_ASCII_COMMON) {
347:           PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
348:         }
349:         for (i=0; i<n; i++) {
350:           if (format == PETSC_VIEWER_ASCII_INDEX) {
351:             PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
352:           }
353: #if defined(PETSC_USE_COMPLEX)
354:           if (PetscImaginaryPart(values[i]) > 0.0) {
355:             PetscViewerASCIIPrintf(viewer,"%G + %G i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
356:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
357:             PetscViewerASCIIPrintf(viewer,"%G - %G i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
358:           } else {
359:             PetscViewerASCIIPrintf(viewer,"%G\n",PetscRealPart(values[i]));
360:           }
361: #else
362:           PetscViewerASCIIPrintf(viewer,"%G\n",values[i]);
363: #endif
364:         }
365:       }
366:     }
367:     PetscFree(values);
368:   } else {
369:     /* send values */
370:     MPI_Send(xarray,xin->map->n,MPIU_SCALAR,0,tag,((PetscObject)xin)->comm);
371:   }
372:   PetscViewerFlush(viewer);
373:   VecRestoreArray(xin,&xarray);
374:   return(0);
375: }
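The ASCII path above is normally reached through VecView() after selecting a
format on the viewer. A minimal calling sketch, written against the PETSc
3.0-era API used in this file and not part of pdvec.c itself:

  PetscViewer viewer;
  PetscViewerASCIIOpen(PETSC_COMM_WORLD,"x.m",&viewer);
  PetscViewerSetFormat(viewer,PETSC_VIEWER_ASCII_MATLAB);
  VecView(x,viewer);               /* rank 0 gathers and prints; other ranks send */
  PetscViewerDestroy(viewer);      /* old-style destroy, takes the viewer itself  */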

379: PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
380: {
382:   PetscMPIInt    rank,size,mesgsize,tag = ((PetscObject)viewer)->tag, mesglen;
383:   PetscInt       len,n = xin->map->n,j,tr[2];
384:   int            fdes;
385:   MPI_Status     status;
386:   PetscScalar    *values,*xarray;
387:   FILE           *file;
388: #if defined(PETSC_HAVE_MPIIO)
389:   PetscTruth     isMPIIO;
390: #endif

393:   VecGetArray(xin,&xarray);
394:   PetscViewerBinaryGetDescriptor(viewer,&fdes);

396:   /* determine maximum message to arrive */
397:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
398:   MPI_Comm_size(((PetscObject)xin)->comm,&size);

400:   tr[0] = VEC_FILE_COOKIE;
401:   tr[1] = xin->map->N;
402:   PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_FALSE);

404: #if defined(PETSC_HAVE_MPIIO)
405:   PetscViewerBinaryGetMPIIO(viewer,&isMPIIO);
406:   if (!isMPIIO) {
407: #endif
408: #if defined(PETSC_HANDSHAKE_IO)
409:     PetscInt message_count = 256;
410: #endif
411:     if (!rank) {
412:       PetscBinaryWrite(fdes,xarray,xin->map->n,PETSC_SCALAR,PETSC_FALSE);
413: 
414:       len = 0;
415:       for (j=1; j<size; j++) len = PetscMax(len,xin->map->range[j+1]-xin->map->range[j]);
416:       PetscMalloc((len+1)*sizeof(PetscScalar),&values);
417:       mesgsize = PetscMPIIntCast(len);
418:       /* receive and save messages */
419:       for (j=1; j<size; j++) {
420: #if defined(PETSC_HANDSHAKE_IO)
421:         if (j >= message_count) {
422:           message_count += 256;
423:           MPI_Bcast(&message_count,1,MPIU_INT,0,((PetscObject)xin)->comm);
424:         }
425: #endif
426:         MPI_Recv(values,mesgsize,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
427:         MPI_Get_count(&status,MPIU_SCALAR,&mesglen);
428:         n = (PetscInt)mesglen;
429:         PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,PETSC_FALSE);
430:       }
431: #if defined(PETSC_HANDSHAKE_IO)
432:       message_count = 0;
433:       MPI_Bcast(&message_count,1,MPIU_INT,0,((PetscObject)xin)->comm);
434: #endif

436:       PetscFree(values);
437:     } else {
438: #if defined(PETSC_HANDSHAKE_IO)
439:       while (1) {
440:         if (rank < message_count) break;
441:         MPI_Bcast(&message_count,1,MPIU_INT,0,((PetscObject)xin)->comm);
442:       }
443: #endif
444:       /* send values */
445:       mesgsize = PetscMPIIntCast(xin->map->n);
446:       MPI_Send(xarray,mesgsize,MPIU_SCALAR,0,tag,((PetscObject)xin)->comm);
447: #if defined(PETSC_HANDSHAKE_IO)
448:       while (1) {
449:         if (message_count == 0) break;
450:         MPI_Bcast(&message_count,1,MPIU_INT,0,((PetscObject)xin)->comm);
451:       }
452: #endif
453:     }
454: #if defined(PETSC_HAVE_MPIIO)
455:   } else {
456:     MPI_Offset   off;
457:     MPI_File     mfdes;
458:     PetscMPIInt  gsizes[1],lsizes[1],lstarts[1];
459:     MPI_Datatype view;

461:     gsizes[0]  = PetscMPIIntCast(xin->map->N);
462:     lsizes[0]  = PetscMPIIntCast(n);
463:     lstarts[0] = PetscMPIIntCast(xin->map->rstart);
464:     MPI_Type_create_subarray(1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view);
465:     MPI_Type_commit(&view);

467:     PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
468:     PetscViewerBinaryGetMPIIOOffset(viewer,&off);
469:     MPIU_File_write_all(mfdes,xarray,lsizes[0],MPIU_SCALAR,MPI_STATUS_IGNORE);
470:     PetscViewerBinaryAddMPIIOOffset(viewer,xin->map->N*sizeof(PetscScalar));
471:     MPI_Type_free(&view);
472:   }
473: #endif

475:   VecRestoreArray(xin,&xarray);
476:   if (!rank) {
477:     PetscViewerBinaryGetInfoPointer(viewer,&file);
478:     if (file && xin->map->bs > 1) {
479:       if (((PetscObject)xin)->prefix) {
480:         PetscFPrintf(PETSC_COMM_SELF,file,"-%svecload_block_size %D\n",((PetscObject)xin)->prefix,xin->map->bs);
481:       } else {
482:         PetscFPrintf(PETSC_COMM_SELF,file,"-vecload_block_size %D\n",xin->map->bs);
483:       }
484:     }
485:   }
486:   return(0);
487: }
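A corresponding sketch for the binary path (again not part of pdvec.c; assumes
an already assembled vector x):

  PetscViewer bview;
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,"x.dat",FILE_MODE_WRITE,&bview);
  VecView(x,bview);                /* writes VEC_FILE_COOKIE, the global size, then the values */
  PetscViewerDestroy(bview);

The -vecload_block_size hint written to the info file above is intended to be
picked up when the vector is later read back with VecLoad().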

491: PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
492: {
493:   PetscDraw      draw;
494:   PetscTruth     isnull;

497: #if defined(PETSC_USE_64BIT_INDICES)
499:   PetscViewerDrawGetDraw(viewer,0,&draw);
500:   PetscDrawIsNull(draw,&isnull);
501:   if (isnull) return(0);
502:   SETERRQ(PETSC_ERR_SUP,"Not supported with 64 bit integers");
503: #else
504:   PetscMPIInt    size,rank;
505:   int            i,N = xin->map->N,*lens;
506:   PetscReal      *xx,*yy;
507:   PetscDrawLG    lg;
508:   PetscScalar    *xarray;

511:   PetscViewerDrawGetDraw(viewer,0,&draw);
512:   PetscDrawIsNull(draw,&isnull);
513:   if (isnull) return(0);

515:   VecGetArray(xin,&xarray);
516:   PetscViewerDrawGetDrawLG(viewer,0,&lg);
517:   PetscDrawCheckResizedWindow(draw);
518:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
519:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
520:   if (!rank) {
521:     PetscDrawLGReset(lg);
522:     PetscMalloc(2*(N+1)*sizeof(PetscReal),&xx);
523:     for (i=0; i<N; i++) {xx[i] = (PetscReal) i;}
524:     yy   = xx + N;
525:     PetscMalloc(size*sizeof(PetscInt),&lens);
526:     for (i=0; i<size; i++) {
527:       lens[i] = xin->map->range[i+1] - xin->map->range[i];
528:     }
529: #if !defined(PETSC_USE_COMPLEX)
530:     MPI_Gatherv(xarray,xin->map->n,MPIU_REAL,yy,lens,xin->map->range,MPIU_REAL,0,((PetscObject)xin)->comm);
531: #else
532:     {
533:       PetscReal *xr;
534:       PetscMalloc((xin->map->n+1)*sizeof(PetscReal),&xr);
535:       for (i=0; i<xin->map->n; i++) {
536:         xr[i] = PetscRealPart(xarray[i]);
537:       }
538:       MPI_Gatherv(xr,xin->map->n,MPIU_REAL,yy,lens,xin->map->range,MPIU_REAL,0,((PetscObject)xin)->comm);
539:       PetscFree(xr);
540:     }
541: #endif
542:     PetscFree(lens);
543:     PetscDrawLGAddPoints(lg,N,&xx,&yy);
544:     PetscFree(xx);
545:   } else {
546: #if !defined(PETSC_USE_COMPLEX)
547:     MPI_Gatherv(xarray,xin->map->n,MPIU_REAL,0,0,0,MPIU_REAL,0,((PetscObject)xin)->comm);
548: #else
549:     {
550:       PetscReal *xr;
551:       PetscMalloc((xin->map->n+1)*sizeof(PetscReal),&xr);
552:       for (i=0; i<xin->map->n; i++) {
553:         xr[i] = PetscRealPart(xarray[i]);
554:       }
555:       MPI_Gatherv(xr,xin->map->n,MPIU_REAL,0,0,0,MPIU_REAL,0,((PetscObject)xin)->comm);
556:       PetscFree(xr);
557:     }
558: #endif
559:   }
560:   PetscDrawLGDraw(lg);
561:   PetscDrawSynchronizedFlush(draw);
562:   VecRestoreArray(xin,&xarray);
563: #endif
564:   return(0);
565: }

 568: /* I am assuming this is extern "C" because it is dynamically loaded.  If not, we can remove the DLLEXPORT tag */
571: PetscErrorCode  VecView_MPI_Draw(Vec xin,PetscViewer viewer)
572: {
574:   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
575:   PetscInt       i,start,end;
576:   MPI_Status     status;
577:   PetscReal      coors[4],ymin,ymax,xmin,xmax,tmp;
578:   PetscDraw      draw;
579:   PetscTruth     isnull;
580:   PetscDrawAxis  axis;
581:   PetscScalar    *xarray;

584:   PetscViewerDrawGetDraw(viewer,0,&draw);
585:   PetscDrawIsNull(draw,&isnull); if (isnull) return(0);

587:   VecGetArray(xin,&xarray);
588:   PetscDrawCheckResizedWindow(draw);
589:   xmin = 1.e20; xmax = -1.e20;
590:   for (i=0; i<xin->map->n; i++) {
591: #if defined(PETSC_USE_COMPLEX)
592:     if (PetscRealPart(xarray[i]) < xmin) xmin = PetscRealPart(xarray[i]);
593:     if (PetscRealPart(xarray[i]) > xmax) xmax = PetscRealPart(xarray[i]);
594: #else
595:     if (xarray[i] < xmin) xmin = xarray[i];
596:     if (xarray[i] > xmax) xmax = xarray[i];
597: #endif
598:   }
599:   if (xmin + 1.e-10 > xmax) {
600:     xmin -= 1.e-5;
601:     xmax += 1.e-5;
602:   }
603:   MPI_Reduce(&xmin,&ymin,1,MPIU_REAL,MPI_MIN,0,((PetscObject)xin)->comm);
604:   MPI_Reduce(&xmax,&ymax,1,MPIU_REAL,MPI_MAX,0,((PetscObject)xin)->comm);
605:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
606:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
607:   PetscDrawAxisCreate(draw,&axis);
608:   PetscLogObjectParent(draw,axis);
609:   if (!rank) {
610:     PetscDrawClear(draw);
611:     PetscDrawFlush(draw);
612:     PetscDrawAxisSetLimits(axis,0.0,(double)xin->map->N,ymin,ymax);
613:     PetscDrawAxisDraw(axis);
614:     PetscDrawGetCoordinates(draw,coors,coors+1,coors+2,coors+3);
615:   }
616:   PetscDrawAxisDestroy(axis);
617:   MPI_Bcast(coors,4,MPIU_REAL,0,((PetscObject)xin)->comm);
618:   if (rank) {PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);}
619:   /* draw local part of vector */
620:   VecGetOwnershipRange(xin,&start,&end);
621:   if (rank < size-1) { /*send value to right */
622:     MPI_Send(&xarray[xin->map->n-1],1,MPIU_REAL,rank+1,tag,((PetscObject)xin)->comm);
623:   }
624:   for (i=1; i<xin->map->n; i++) {
625: #if !defined(PETSC_USE_COMPLEX)
626:     PetscDrawLine(draw,(PetscReal)(i-1+start),xarray[i-1],(PetscReal)(i+start),
627:                    xarray[i],PETSC_DRAW_RED);
628: #else
629:     PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),
630:                    PetscRealPart(xarray[i]),PETSC_DRAW_RED);
631: #endif
632:   }
 633:   if (rank) { /* receive value from the left neighbor */
634:     MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,((PetscObject)xin)->comm,&status);
635: #if !defined(PETSC_USE_COMPLEX)
636:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,xarray[0],PETSC_DRAW_RED);
637: #else
638:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
639: #endif
640:   }
641:   PetscDrawSynchronizedFlush(draw);
642:   PetscDrawPause(draw);
643:   VecRestoreArray(xin,&xarray);
644:   return(0);
645: }

648: #if defined(PETSC_HAVE_MATLAB_ENGINE)
651: PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
652: {
654:   PetscMPIInt    rank,size,*lens;
655:   PetscInt       i,N = xin->map->N;
656:   PetscScalar    *xx,*xarray;

659:   VecGetArray(xin,&xarray);
660:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
661:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
662:   if (!rank) {
663:     PetscMalloc((N+1)*sizeof(PetscScalar),&xx);
664:     PetscMalloc(size*sizeof(PetscMPIInt),&lens);
665:     for (i=0; i<size; i++) {
666:       lens[i] = xin->map->range[i+1] - xin->map->range[i];
667:     }
668:     MPI_Gatherv(xarray,xin->map->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,((PetscObject)xin)->comm);
669:     PetscFree(lens);

671:     PetscObjectName((PetscObject)xin);
672:     PetscViewerMatlabPutArray(viewer,N,1,xx,((PetscObject)xin)->name);

674:     PetscFree(xx);
675:   } else {
676:     MPI_Gatherv(xarray,xin->map->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,((PetscObject)xin)->comm);
677:   }
678:   VecRestoreArray(xin,&xarray);
679:   return(0);
680: }
681: #endif

683: #if defined(PETSC_HAVE_PNETCDF)
686: PetscErrorCode VecView_MPI_Netcdf(Vec xin,PetscViewer v)
687: {
689:   int         n = xin->map->n,ncid,xdim,xdim_num=1,xin_id,xstart;
690:   PetscScalar *xarray;

693:   VecGetArray(xin,&xarray);
694:   PetscViewerNetcdfGetID(v,&ncid);
695:   if (ncid < 0) SETERRQ(PETSC_ERR_ORDER,"First call PetscViewerNetcdfOpen to create NetCDF dataset");
696:   /* define dimensions */
697:   ncmpi_def_dim(ncid,"PETSc_Vector_Global_Size",xin->map->N,&xdim);
698:   /* define variables */
699:   ncmpi_def_var(ncid,"PETSc_Vector_MPI",NC_DOUBLE,xdim_num,&xdim,&xin_id);
700:   /* leave define mode */
701:   ncmpi_enddef(ncid);
702:   /* store the vector */
703:   VecGetOwnershipRange(xin,&xstart,PETSC_NULL);
704:   ncmpi_put_vara_double_all(ncid,xin_id,(const MPI_Offset*)&xstart,(const MPI_Offset*)&n,xarray);
705:   VecRestoreArray(xin,&xarray);
706:   return(0);
707: }
708: #endif

710: #if defined(PETSC_HAVE_HDF4)
713: PetscErrorCode VecView_MPI_HDF4_Ex(Vec X, PetscViewer viewer, int d, int *dims)
714: {
716:   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
717:   int            len, i, j, k, cur, bs, n, N;
718:   MPI_Status     status;
719:   PetscScalar    *x;
720:   float          *xlf, *xf;


724:   bs = X->map->bs > 0 ? X->map->bs : 1;
725:   N  = X->map->N / bs;
726:   n  = X->map->n / bs;

728:   /* For now, always convert to float */
729:   PetscMalloc(N * sizeof(float), &xf);
730:   PetscMalloc(n * sizeof(float), &xlf);

732:   MPI_Comm_rank(((PetscObject)X)->comm, &rank);
733:   MPI_Comm_size(((PetscObject)X)->comm, &size);

735:   VecGetArray(X, &x);

737:   for (k = 0; k < bs; k++) {
738:     for (i = 0; i < n; i++) {
739:       xlf[i] = (float) x[i*bs + k];
740:     }
741:     if (!rank) {
742:       cur = 0;
743:       PetscMemcpy(xf + cur, xlf, n * sizeof(float));
744:       cur += n;
745:       for (j = 1; j < size; j++) {
746:         MPI_Recv(xf + cur, N - cur, MPI_FLOAT, j, tag, ((PetscObject)X)->comm,&status);
747:         MPI_Get_count(&status, MPI_FLOAT, &len);cur += len;
748:       }
749:       if (cur != N) {
750:         SETERRQ2(PETSC_ERR_PLIB, "? %D %D", cur, N);
751:       }
752:       PetscViewerHDF4WriteSDS(viewer, xf, 2, dims, bs);
753:     } else {
754:       MPI_Send(xlf, n, MPI_FLOAT, 0, tag, ((PetscObject)X)->comm);
755:     }
756:   }
757:   VecRestoreArray(X, &x);
758:   PetscFree(xlf);
759:   PetscFree(xf);
760:   return(0);
761: }
762: #endif

764: #if defined(PETSC_HAVE_HDF4)
767: PetscErrorCode VecView_MPI_HDF4(Vec xin,PetscViewer viewer)
768: {
 770:   int             bs, dims[1];

772:   bs = xin->map->bs > 0 ? xin->map->bs : 1;
773:   dims[0] = xin->map->N / bs;
774:   VecView_MPI_HDF4_Ex(xin, viewer, 1, dims);
775:   return(0);
776: }
777: #endif

779: #if defined(PETSC_HAVE_HDF5)

783: PetscErrorCode VecView_MPI_HDF5(Vec xin, PetscViewer viewer)
784: {
 785:   hid_t          filespace; /* file dataspace identifier */
 786:   hid_t          plist_id;  /* property list identifier */
 787:   hid_t          dset_id;   /* dataset identifier */
 788:   hid_t          memspace;  /* memory dataspace identifier */
 789:   hid_t          file_id;
 790:   herr_t         status;
791:   /* PetscInt       bs        = xin->map->bs > 0 ? xin->map->bs : 1; */
792:   int            rank      = 1; /* Could have rank 2 for blocked vectors */
793:   hsize_t        dims[1]   = {xin->map->N};
794:   hsize_t        count[1]  = {xin->map->n};
795:   hsize_t        offset[1];
796:   PetscInt       low;
797:   PetscScalar   *x;

800:   PetscViewerHDF5GetFileId(viewer, &file_id);

802:   /* Create the dataspace for the dataset */
803:   filespace = H5Screate_simple(rank, dims, NULL);

805:   /* Create the dataset with default properties and close filespace */
806: #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
807:   dset_id = H5Dcreate2(file_id, "Vec", H5T_NATIVE_DOUBLE, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
808: #else
809:   dset_id = H5Dcreate(file_id, "Vec", H5T_NATIVE_DOUBLE, filespace, H5P_DEFAULT);
810: #endif
811:   status = H5Sclose(filespace);CHKERRQ(status);

813:   /* Each process defines a dataset and writes it to the hyperslab in the file */
814:   memspace = H5Screate_simple(rank, count, NULL);

816:   /* Select hyperslab in the file */
817:   VecGetOwnershipRange(xin, &low, PETSC_NULL);
818:   offset[0] = low;
819:   filespace = H5Dget_space(dset_id);
820:   status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);CHKERRQ(status);

822:   /* Create property list for collective dataset write */
823:   plist_id = H5Pcreate(H5P_DATASET_XFER);
824: #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
825:   status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);CHKERRQ(status);
826: #endif
827:   /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

829:   VecGetArray(xin, &x);
830:   status = H5Dwrite(dset_id, H5T_NATIVE_DOUBLE, memspace, filespace, plist_id, x);CHKERRQ(status);
831:   status = H5Fflush(file_id, H5F_SCOPE_GLOBAL);CHKERRQ(status);
832:   VecRestoreArray(xin, &x);

834:   /* Close/release resources */
835:   status = H5Pclose(plist_id);CHKERRQ(status);
836:   status = H5Sclose(filespace);CHKERRQ(status);
837:   status = H5Sclose(memspace);CHKERRQ(status);
838:   status = H5Dclose(dset_id);CHKERRQ(status);
839:   return(0);
840: }
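A hedged sketch of how this HDF5 writer is reached, assuming the HDF5 viewer of
this PETSc version provides PetscViewerHDF5Open() (as later releases do); not
part of pdvec.c:

  PetscViewer hview;
  PetscViewerHDF5Open(PETSC_COMM_WORLD,"x.h5",FILE_MODE_WRITE,&hview);
  VecView(x,hview);                /* collective H5Dwrite of the "Vec" dataset */
  PetscViewerDestroy(hview);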
841: #endif

845: PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
846: {
848:   PetscTruth     iascii,isbinary,isdraw;
849: #if defined(PETSC_HAVE_MATHEMATICA)
850:   PetscTruth     ismathematica;
851: #endif
 852: #if defined(PETSC_HAVE_PNETCDF)
853:   PetscTruth     isnetcdf;
854: #endif
855: #if defined(PETSC_HAVE_HDF4)
856:   PetscTruth     ishdf4;
857: #endif
858: #if defined(PETSC_HAVE_HDF5)
859:   PetscTruth     ishdf5;
860: #endif
861: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE) && !defined(PETSC_USE_MAT_SINGLE)
862:   PetscTruth     ismatlab;
863: #endif

866:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
867:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
868:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
869: #if defined(PETSC_HAVE_MATHEMATICA)
870:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_MATHEMATICA,&ismathematica);
871: #endif
 872: #if defined(PETSC_HAVE_PNETCDF)
873:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
874: #endif
875: #if defined(PETSC_HAVE_HDF4)
876:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_HDF4,&ishdf4);
877: #endif
878: #if defined(PETSC_HAVE_HDF5)
879:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_HDF5,&ishdf5);
880: #endif
881: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE) && !defined(PETSC_USE_MAT_SINGLE)
882:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_MATLAB,&ismatlab);
883: #endif
884:   if (iascii){
885:     VecView_MPI_ASCII(xin,viewer);
886:   } else if (isbinary) {
887:     VecView_MPI_Binary(xin,viewer);
888:   } else if (isdraw) {
889:     PetscViewerFormat format;

891:     PetscViewerGetFormat(viewer,&format);
892:     if (format == PETSC_VIEWER_DRAW_LG) {
893:       VecView_MPI_Draw_LG(xin,viewer);
894:     } else {
895:       VecView_MPI_Draw(xin,viewer);
896:     }
897: #if defined(PETSC_HAVE_MATHEMATICA)
898:   } else if (ismathematica) {
899:     PetscViewerMathematicaPutVector(viewer,xin);
900: #endif
 901: #if defined(PETSC_HAVE_PNETCDF)
902:   } else if (isnetcdf) {
903:     VecView_MPI_Netcdf(xin,viewer);
904: #endif
905: #if defined(PETSC_HAVE_HDF4)
906:   } else if (ishdf4) {
907:     VecView_MPI_HDF4(xin,viewer);
908: #endif
909: #if defined(PETSC_HAVE_HDF5)
910:   } else if (ishdf5) {
911:     VecView_MPI_HDF5(xin,viewer);
912: #endif
913: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE) && !defined(PETSC_USE_MAT_SINGLE)
914:   } else if (ismatlab) {
915:     VecView_MPI_Matlab(xin,viewer);
916: #endif
917:   } else {
918:     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for this object",((PetscObject)viewer)->type_name);
919:   }
920:   return(0);
921: }
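For the draw branch dispatched above, a short sketch (not from pdvec.c) using
the predefined draw viewer:

  PetscViewerSetFormat(PETSC_VIEWER_DRAW_WORLD,PETSC_VIEWER_DRAW_LG);
  VecView(x,PETSC_VIEWER_DRAW_WORLD);   /* routed to VecView_MPI_Draw_LG() */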

925: PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
926: {
928:   *N = xin->map->N;
929:   return(0);
930: }

934: PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
935: {
936:   Vec_MPI     *x = (Vec_MPI *)xin->data;
937:   PetscScalar *xx = x->array;
938:   PetscInt    i,tmp,start = xin->map->range[xin->stash.rank];

941:   for (i=0; i<ni; i++) {
942:     if (xin->stash.ignorenegidx == PETSC_TRUE && ix[i] < 0) continue;
943:     tmp = ix[i] - start;
944: #if defined(PETSC_USE_DEBUG)
945:     if (tmp < 0 || tmp >= xin->map->n) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Can only get local values, trying %D",ix[i]);
946: #endif
947:     y[i] = xx[tmp];
948:   }
949:   return(0);
950: }

954: PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
955: {
957:   PetscMPIInt    rank = xin->stash.rank;
958:   PetscInt       *owners = xin->map->range,start = owners[rank];
959:   PetscInt       end = owners[rank+1],i,row;
960:   PetscScalar    *xx;

963: #if defined(PETSC_USE_DEBUG)
964:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) {
965:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
966:   } else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) {
967:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
968:   }
969: #endif
970:   VecGetArray(xin,&xx);
971:   xin->stash.insertmode = addv;

973:   if (addv == INSERT_VALUES) {
974:     for (i=0; i<ni; i++) {
975:       if (xin->stash.ignorenegidx == PETSC_TRUE && ix[i] < 0) continue;
976: #if defined(PETSC_USE_DEBUG)
977:       if (ix[i] < 0) {
978:         SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
979:       }
980: #endif 
981:       if ((row = ix[i]) >= start && row < end) {
982:         xx[row-start] = y[i];
983:       } else if (!xin->stash.donotstash) {
984: #if defined(PETSC_USE_DEBUG)
985:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
986: #endif
987:         VecStashValue_Private(&xin->stash,row,y[i]);
988:       }
989:     }
990:   } else {
991:     for (i=0; i<ni; i++) {
992:       if (xin->stash.ignorenegidx == PETSC_TRUE && ix[i] < 0) continue;
993: #if defined(PETSC_USE_DEBUG)
994:       if (ix[i] < 0) {
995:         SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
996:       }
997: #endif 
998:       if ((row = ix[i]) >= start && row < end) {
999:         xx[row-start] += y[i];
1000:       } else if (!xin->stash.donotstash) {
1001: #if defined(PETSC_USE_DEBUG)
1002:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
1003: #endif        
1004:         VecStashValue_Private(&xin->stash,row,y[i]);
1005:       }
1006:     }
1007:   }
1008:   VecRestoreArray(xin,&xx);
1009:   return(0);
1010: }
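The stash handling above is what makes off-process entries legal; a minimal
calling sketch (hypothetical index and value, not from pdvec.c):

  PetscInt    idx = 0;             /* may belong to another process           */
  PetscScalar val = 1.0;
  VecSetValues(x,1,&idx,&val,INSERT_VALUES);
  VecAssemblyBegin(x);             /* communicates the stashed entries        */
  VecAssemblyEnd(x);               /* owning ranks apply the received entries */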

1014: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
1015: {
1016:   PetscMPIInt    rank = xin->stash.rank;
1017:   PetscInt       *owners = xin->map->range,start = owners[rank];
1019:   PetscInt       end = owners[rank+1],i,row,bs = xin->map->bs,j;
1020:   PetscScalar    *xx,*y = (PetscScalar*)yin;

1023:   VecGetArray(xin,&xx);
1024: #if defined(PETSC_USE_DEBUG)
1025:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) {
1026:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
1027:   }
1028:   else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) {
1029:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
1030:   }
1031: #endif
1032:   xin->stash.insertmode = addv;

1034:   if (addv == INSERT_VALUES) {
1035:     for (i=0; i<ni; i++) {
1036:       if ((row = bs*ix[i]) >= start && row < end) {
1037:         for (j=0; j<bs; j++) {
1038:           xx[row-start+j] = y[j];
1039:         }
1040:       } else if (!xin->stash.donotstash) {
1041:         if (ix[i] < 0) continue;
1042: #if defined(PETSC_USE_DEBUG)
1043:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
1044: #endif
1045:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
1046:       }
1047:       y += bs;
1048:     }
1049:   } else {
1050:     for (i=0; i<ni; i++) {
1051:       if ((row = bs*ix[i]) >= start && row < end) {
1052:         for (j=0; j<bs; j++) {
1053:           xx[row-start+j] += y[j];
1054:         }
1055:       } else if (!xin->stash.donotstash) {
1056:         if (ix[i] < 0) continue;
1057: #if defined(PETSC_USE_DEBUG)
1058:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
1059: #endif
1060:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
1061:       }
1062:       y += bs;
1063:     }
1064:   }
1065:   VecRestoreArray(xin,&xx);
1066:   return(0);
1067: }

1069: /*
1070:    Since nsends or nreceives may be zero we add 1 in certain mallocs
1071: to make sure we never malloc an empty one.      
1072: */
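/*
   Added note (not in the original source): for example, with nsends == 0 a call
   such as PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits) still returns
   a valid, freeable pointer, so the matching PetscFree() needs no special case.
   (nsends and send_waits are illustrative names, not necessarily those used in
   the stash code.)
*/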
1075: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
1076: {
1078:   PetscInt       *owners = xin->map->range,*bowners,i,bs,nstash,reallocs;
1079:   PetscMPIInt    size;
1080:   InsertMode     addv;
1081:   MPI_Comm       comm = ((PetscObject)xin)->comm;

1084:   if (xin->stash.donotstash) {
1085:     return(0);
1086:   }

1088:   MPI_Allreduce(&xin->stash.insertmode,&addv,1,MPI_INT,MPI_BOR,comm);
1089:   if (addv == (ADD_VALUES|INSERT_VALUES)) {
1090:     SETERRQ(PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
1091:   }
1092:   xin->stash.insertmode = addv; /* in case this processor had no cache */
1093: 
1094:   bs = xin->map->bs;
1095:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
1096:   if (!xin->bstash.bowners && xin->map->bs != -1) {
1097:     PetscMalloc((size+1)*sizeof(PetscInt),&bowners);
1098:     for (i=0; i<size+1; i++){ bowners[i] = owners[i]/bs;}
1099:     xin->bstash.bowners = bowners;
1100:   } else {
1101:     bowners = xin->bstash.bowners;
1102:   }
1103:   VecStashScatterBegin_Private(&xin->stash,owners);
1104:   VecStashScatterBegin_Private(&xin->bstash,bowners);
1105:   VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
1106:   PetscInfo2(xin,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1107:   VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
1108:   PetscInfo2(xin,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);

1110:   return(0);
1111: }

1115: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
1116: {
1118:   PetscInt       base,i,j,*row,flg,bs;
1119:   PetscMPIInt    n;
1120:   PetscScalar    *val,*vv,*array,*xarray;

1123:   if (!vec->stash.donotstash) {
1124:     VecGetArray(vec,&xarray);
1125:     base = vec->map->range[vec->stash.rank];
1126:     bs   = vec->map->bs;

1128:     /* Process the stash */
1129:     while (1) {
1130:       VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
1131:       if (!flg) break;
1132:       if (vec->stash.insertmode == ADD_VALUES) {
1133:         for (i=0; i<n; i++) { xarray[row[i] - base] += val[i]; }
1134:       } else if (vec->stash.insertmode == INSERT_VALUES) {
1135:         for (i=0; i<n; i++) { xarray[row[i] - base] = val[i]; }
1136:       } else {
1137:         SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1138:       }
1139:     }
1140:     VecStashScatterEnd_Private(&vec->stash);

1142:     /* now process the block-stash */
1143:     while (1) {
1144:       VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
1145:       if (!flg) break;
1146:       for (i=0; i<n; i++) {
1147:         array = xarray+row[i]*bs-base;
1148:         vv    = val+i*bs;
1149:         if (vec->stash.insertmode == ADD_VALUES) {
1150:           for (j=0; j<bs; j++) { array[j] += vv[j];}
1151:         } else if (vec->stash.insertmode == INSERT_VALUES) {
1152:           for (j=0; j<bs; j++) { array[j] = vv[j]; }
1153:         } else {
1154:           SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1155:         }
1156:       }
1157:     }
1158:     VecStashScatterEnd_Private(&vec->bstash);
1159:     VecRestoreArray(vec,&xarray);
1160:   }
1161:   vec->stash.insertmode = NOT_SET_VALUES;
1162:   return(0);
1163: }