Actual source code: vecio.c

  1: #define PETSCVEC_DLL
  2: /* 
  3:    This file contains simple binary input routines for vectors.  The
  4:    analogous output routines are within each vector implementation's 
  5:    VecView (with viewer types PETSC_VIEWER_BINARY)
  6:  */

 8:  #include petsc.h
 9:  #include petscsys.h
 10:  #include petscvec.h
 11:  #include private/vecimpl.h
 12: #if defined(PETSC_HAVE_PNETCDF)
 14: #include "pnetcdf.h"
 16: #endif
 17: EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, const VecType, Vec*);
 18: EXTERN PetscErrorCode VecLoad_Netcdf(PetscViewer, Vec*);
 19: EXTERN PetscErrorCode VecLoadIntoVector_Binary(PetscViewer, Vec);
 20: EXTERN PetscErrorCode VecLoadIntoVector_Netcdf(PetscViewer, Vec);

 24: /*@C 
 25:   VecLoad - Loads a vector that has been stored in binary format
 26:   with VecView().

 28:   Collective on PetscViewer 

 30:   Input Parameters:
 31: + viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
 32:            NetCDF file viewer, obtained from PetscViewerNetcdfOpen()
 33: - outtype - the type of vector VECSEQ or VECMPI or PETSC_NULL (which indicates
 34:             using VECSEQ if the communicator in the Viewer is of size 1; otherwise
 35:             use VECMPI).

 37:   Output Parameter:
 38: . newvec - the newly loaded vector

 40:    Level: intermediate

 42:   Notes:
 43:   The input file must contain the full global vector, as
 44:   written by the routine VecView().

 46:   Notes for advanced users:
 47:   Most users should not need to know the details of the binary storage
 48:   format, since VecLoad() and VecView() completely hide these details.
 49:   But for anyone who's interested, the standard binary matrix storage
 50:   format is
 51: .vb
 52:      int    VEC_FILE_COOKIE
 53:      int    number of rows
 54:      PetscScalar *values of all entries
 55: .ve

 57:    Note for old style Cray vector machine users, the int's stored in the binary file are 32 bit
 58: integers; not 64 as they are represented in the memory, so if you
 59: write your own routines to read/write these binary files from the Cray
 60: you need to adjust the integer sizes that you read in, see
 61: PetscBinaryRead() and PetscBinaryWrite() to see how this may be
 62: done. This note is not for the Cray XT3 etc

 64:    In addition, PETSc automatically does the byte swapping for
 65: machines that store the bytes reversed, e.g.  DEC alpha, freebsd,
 66: linux, Windows and the paragon; thus if you write your own binary
 67: read/write routines you have to swap the bytes; see PetscBinaryRead()
 68: and PetscBinaryWrite() to see how this may be done.

 70:   Concepts: vector^loading from file

 72: .seealso: PetscViewerBinaryOpen(), VecView(), MatLoad(), VecLoadIntoVector() 
 73: @*/
 74: PetscErrorCode  VecLoad(PetscViewer viewer, const VecType outtype,Vec *newvec)
 75: {
 77:   PetscTruth     isbinary,flg;
 78:   char           vtype[256];
 79:   const char    *prefix;
 80: #if defined(PETSC_HAVE_PNETCDF)
 81:   PetscTruth     isnetcdf;
 82: #endif

 87:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
 88: #if defined(PETSC_HAVE_PNETCDF)
 89:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
 90:   if ((!isbinary) && (!isnetcdf)) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary or NetCDF viewer");
 91: #else
 92:   if (!isbinary)  SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary viewer");
 93: #endif

 95: #ifndef PETSC_USE_DYNAMIC_LIBRARIES
 96:   VecInitializePackage(PETSC_NULL);
 97: #endif

 99:   PetscLogEventBegin(VEC_Load,viewer,0,0,0);
100: #if defined(PETSC_HAVE_PNETCDF)
101:   if (isnetcdf) {
102:     VecLoad_Netcdf(viewer,newvec);
103:   } else
104: #endif
105:   {
106:     Vec            factory;
107:     MPI_Comm       comm;
108:     PetscErrorCode (*r)(PetscViewer, const VecType,Vec*);
109:     PetscMPIInt    size;

111:     PetscObjectGetOptionsPrefix((PetscObject)viewer,(const char**)&prefix);
112:     PetscOptionsGetString(prefix,"-vec_type",vtype,256,&flg);
113:     if (flg) {
114:       outtype = vtype;
115:     }
116:     PetscOptionsGetString(prefix,"-vecload_type",vtype,256,&flg);
117:     if (flg) {
118:       outtype = vtype;
119:     }
120:     PetscObjectGetComm((PetscObject)viewer,&comm);
121:     if (!outtype) {
122:       MPI_Comm_size(comm,&size);
123:       outtype = (size > 1) ? VECMPI : VECSEQ;
124:     }

126:     VecCreate(comm,&factory);
127:     VecSetSizes(factory,1,PETSC_DETERMINE);
128:     VecSetType(factory,outtype);
129:     r = factory->ops->load;
130:     VecDestroy(factory);
131:     if (!r) SETERRQ1(PETSC_ERR_SUP,"VecLoad is not supported for type: %s",outtype);
132:     (*r)(viewer,outtype,newvec);
133:   }
134:   PetscLogEventEnd(VEC_Load,viewer,0,0,0);
135:   return(0);
136: }

138: #if defined(PETSC_HAVE_PNETCDF)
141: PetscErrorCode VecLoad_Netcdf(PetscViewer viewer,Vec *newvec)
142: {
144:   PetscMPIInt    rank;
145:   PetscInt       N,n,bs;
146:   PetscInt       ncid,start;
147:   Vec            vec;
148:   PetscScalar    *avec;
149:   MPI_Comm       comm;
150:   PetscTruth     flag;
151:   char           name[NC_MAX_NAME];

154:   PetscLogEventBegin(VEC_Load,viewer,0,0,0);
155:   PetscObjectGetComm((PetscObject)viewer,&comm);
156:   MPI_Comm_rank(comm,&rank);
157:   PetscViewerNetcdfGetID(viewer,&ncid);
158:   ncmpi_inq_dim(ncid,0,name,(MPI_Offset*)&N); /* N gets the global vector size */
159:   VecCreate(comm,&vec);
160:   VecSetSizes(vec,PETSC_DECIDE,N);
161:   if (!rank) {
162:     PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
163:     if (flag) {
164:       VecSetBlockSize(vec,bs);
165:     }
166:   }
167:   VecSetFromOptions(vec);
168:   VecGetLocalSize(vec,&n);
169:   VecGetOwnershipRange(vec,&start,PETSC_NULL);
170:   VecGetArray(vec,&avec);
171:   ncmpi_get_vara_double_all(ncid,0,(const MPI_Offset*)&start,(const MPI_Offset*)&n,(double *)avec);
172:   VecRestoreArray(vec,&avec);
173:   *newvec = vec;
174:   VecAssemblyBegin(vec);
175:   VecAssemblyEnd(vec);
176:   PetscLogEventEnd(VEC_Load,viewer,0,0,0);
177:   return(0);
178: }
179: #endif

183: PetscErrorCode VecLoad_Binary(PetscViewer viewer, const VecType itype,Vec *newvec)
184: {
185:   PetscMPIInt    size,rank,tag;
186:   int            fd;
187:   PetscInt       i,rows,type,n,*range,bs,tr[2];
189:   Vec            vec;
190:   PetscScalar    *avec,*avecwork;
191:   MPI_Comm       comm;
192:   MPI_Request    request;
193:   MPI_Status     status;
194:   PetscTruth     flag;
195: #if defined(PETSC_HAVE_MPIIO)
196:   PetscTruth     useMPIIO;
197: #endif

200:   PetscLogEventBegin(VEC_Load,viewer,0,0,0);
201:   PetscViewerBinaryGetDescriptor(viewer,&fd);
202:   PetscObjectGetComm((PetscObject)viewer,&comm);
203:   MPI_Comm_rank(comm,&rank);
204:   MPI_Comm_size(comm,&size);

206:   /* Read vector header. */
207:   PetscViewerBinaryRead(viewer,tr,2,PETSC_INT);
208:   type = tr[0];
209:   rows = tr[1];
210:   if (type != VEC_FILE_COOKIE) {
211:       PetscLogEventEnd(VEC_Load,viewer,0,0,0);
212:       SETERRQ(PETSC_ERR_ARG_WRONG,"Not vector next in file");
213:   }
214:   VecCreate(comm,&vec);
215:   VecSetSizes(vec,PETSC_DECIDE,rows);
216:   PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
217:   if (flag) {
218:     VecSetBlockSize(vec,bs);
219:   }
220:   VecSetFromOptions(vec);
221:   VecGetLocalSize(vec,&n);
222:   PetscObjectGetNewTag((PetscObject)viewer,&tag);
223:   VecGetArray(vec,&avec);
224: #if defined(PETSC_HAVE_MPIIO)
225:   PetscViewerBinaryGetMPIIO(viewer,&useMPIIO);
226:   if (!useMPIIO) {
227: #endif
228:     if (!rank) {
229:       PetscBinaryRead(fd,avec,n,PETSC_SCALAR);

231:       if (size > 1) {
232:         /* read in other chuncks and send to other processors */
233:         /* determine maximum chunck owned by other */
234:         range = vec->map->range;
235:         n = 1;
236:         for (i=1; i<size; i++) {
237:           n = PetscMax(n,range[i+1] - range[i]);
238:         }
239:         PetscMalloc(n*sizeof(PetscScalar),&avecwork);
240:         for (i=1; i<size; i++) {
241:           n    = range[i+1] - range[i];
242:           PetscBinaryRead(fd,avecwork,n,PETSC_SCALAR);
243:           MPI_Isend(avecwork,n,MPIU_SCALAR,i,tag,comm,&request);
244:           MPI_Wait(&request,&status);
245:         }
246:         PetscFree(avecwork);
247:       }
248:     } else {
249:       MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
250:     }
251: #if defined(PETSC_HAVE_MPIIO)
252:   } else {
253:     PetscMPIInt  gsizes[1],lsizes[1],lstarts[1];
254:     MPI_Datatype view;
255:     MPI_File     mfdes;
256:     MPI_Aint     ub,ul;
257:     MPI_Offset   off;

259:     gsizes[0]  = PetscMPIIntCast(rows);
260:     lsizes[0]  = PetscMPIIntCast(n);
261:     lstarts[0] = PetscMPIIntCast(vec->map->rstart);
262:     MPI_Type_create_subarray(1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view);
263:     MPI_Type_commit(&view);

265:     PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
266:     PetscViewerBinaryGetMPIIOOffset(viewer,&off);
267:     MPI_File_set_view(mfdes,off,MPIU_SCALAR,view,(char *)"native",MPI_INFO_NULL);
268:     MPIU_File_read_all(mfdes,avec,lsizes[0],MPIU_SCALAR,MPI_STATUS_IGNORE);
269:     MPI_Type_get_extent(view,&ul,&ub);
270:     PetscViewerBinaryAddMPIIOOffset(viewer,ub);
271:     MPI_Type_free(&view);
272:   }
273: #endif
274:   VecRestoreArray(vec,&avec);
275:   *newvec = vec;
276:   VecAssemblyBegin(vec);
277:   VecAssemblyEnd(vec);
278:   PetscLogEventEnd(VEC_Load,viewer,0,0,0);
279:   return(0);
280: }

282: #if defined(PETSC_HAVE_PNETCDF)
285: PetscErrorCode VecLoadIntoVector_Netcdf(PetscViewer viewer,Vec vec)
286: {
288:   PetscMPIInt    rank;
289:   PetscInt       N,rows,n,bs;
290:   PetscInt       ncid,start;
291:   PetscScalar    *avec;
292:   MPI_Comm       comm;
293:   PetscTruth     flag;
294:   char           name[NC_MAX_NAME];

297:   PetscLogEventBegin(VEC_Load,viewer,vec,0,0);
298:   PetscObjectGetComm((PetscObject)viewer,&comm);
299:   MPI_Comm_rank(comm,&rank);
300:   PetscViewerNetcdfGetID(viewer,&ncid);
301:   ncmpi_inq_dim(ncid,0,name,(MPI_Offset*)&N); /* N gets the global vector size */
302:   if (!rank) {
303:     VecGetSize(vec,&rows);
304:     if (N != rows) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Vector in file different length then input vector");
305:     PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
306:     if (flag) {
307:       VecSetBlockSize(vec,bs);
308:     }
309:   }
310:   VecSetFromOptions(vec);
311:   VecGetLocalSize(vec,&n);
312:   VecGetOwnershipRange(vec,&start,PETSC_NULL);
313:   VecGetArray(vec,&avec);
314:   ncmpi_get_vara_double_all(ncid,0,(const MPI_Offset*)&start,(const MPI_Offset*)&n,(double *)avec);
315:   VecRestoreArray(vec,&avec);
316:   VecAssemblyBegin(vec);
317:   VecAssemblyEnd(vec);
318:   PetscLogEventEnd(VEC_Load,viewer,vec,0,0);
319:   return(0);
320: }
321: #endif

323: #if defined(PETSC_HAVE_HDF5)
326: PetscErrorCode VecLoadIntoVector_HDF5(PetscViewer viewer, Vec xin)
327: {
328:   int            rank = 1; /* Could have rank 2 for blocked vectors */
329:   PetscInt       n, N, bs, low;
330:   PetscScalar   *x;
331:   PetscTruth     flag;
332:   hid_t          file_id, dset_id, filespace, memspace, plist_id;
333:   hsize_t        dims[1];
334:   hsize_t        count[1];
335:   hsize_t        offset[1];
336:   herr_t         status;

340:   PetscLogEventBegin(VEC_Load,viewer,xin,0,0);
341:   PetscOptionsGetInt(PETSC_NULL, "-vecload_block_size", &bs, &flag);
342:   if (flag) {
343:     VecSetBlockSize(xin, bs);
344:   }
345:   VecSetFromOptions(xin);

347:   PetscViewerHDF5GetFileId(viewer, &file_id);

349:   /* Create the dataset with default properties and close filespace */
350: #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
351:   dset_id = H5Dopen2(file_id, "Vec", H5P_DEFAULT);
352: #else
353:   dset_id = H5Dopen(file_id, "Vec");
354: #endif

356:   /* Retrieve the dataspace for the dataset */
357:   VecGetSize(xin, &N);
358:   filespace = H5Dget_space(dset_id);
359:   H5Sget_simple_extent_dims(filespace, dims, PETSC_NULL);
360:   if (N != (int) dims[0]) SETERRQ(PETSC_ERR_FILE_UNEXPECTED, "Vector in file different length then input vector");

362:   /* Each process defines a dataset and writes it to the hyperslab in the file */
363:   VecGetLocalSize(xin, &n);
364:   count[0] = n;
365:   memspace = H5Screate_simple(rank, count, NULL);

367:   /* Select hyperslab in the file */
368:   VecGetOwnershipRange(xin, &low, PETSC_NULL);
369:   offset[0] = low;
370:   status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);CHKERRQ(status);

372:   /* Create property list for collective dataset read */
373:   plist_id = H5Pcreate(H5P_DATASET_XFER);
374: #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
375:   status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);CHKERRQ(status);
376:   /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */
377: #endif

379:   VecGetArray(xin, &x);
380:   status = H5Dread(dset_id, H5T_NATIVE_DOUBLE, memspace, filespace, plist_id, x);CHKERRQ(status);
381:   VecRestoreArray(xin, &x);

383:   /* Close/release resources */
384:   status = H5Pclose(plist_id);CHKERRQ(status);
385:   status = H5Sclose(filespace);CHKERRQ(status);
386:   status = H5Sclose(memspace);CHKERRQ(status);
387:   status = H5Dclose(dset_id);CHKERRQ(status);

389:   VecAssemblyBegin(xin);
390:   VecAssemblyEnd(xin);
391:   PetscLogEventEnd(VEC_Load,viewer,xin,0,0);
392:   return(0);
393: }
394: #endif

398: PetscErrorCode VecLoadIntoVector_Binary(PetscViewer viewer,Vec vec)
399: {
401:   PetscMPIInt    size,rank,tag;
402:   PetscInt       i,rows,type,n,*range;
403:   int            fd;
404:   PetscScalar    *avec;
405:   MPI_Comm       comm;
406:   MPI_Request    request;
407:   MPI_Status     status;

410:   PetscLogEventBegin(VEC_Load,viewer,vec,0,0);

412:   PetscViewerBinaryGetDescriptor(viewer,&fd);
413:   PetscObjectGetComm((PetscObject)viewer,&comm);
414:   MPI_Comm_rank(comm,&rank);
415:   MPI_Comm_size(comm,&size);

417:   if (!rank) {
418:     /* Read vector header. */
419:     PetscBinaryRead(fd,&type,1,PETSC_INT);
420:     if (type != VEC_FILE_COOKIE) SETERRQ(PETSC_ERR_ARG_WRONG,"Non-vector object");
421:     PetscBinaryRead(fd,&rows,1,PETSC_INT);
422:     VecGetSize(vec,&n);
423:     if (n != rows) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Vector in file different length then input vector");
424:     MPI_Bcast(&rows,1,MPIU_INT,0,comm);

426:     VecSetFromOptions(vec);
427:     VecGetLocalSize(vec,&n);
428:     VecGetArray(vec,&avec);
429:     PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
430:     VecRestoreArray(vec,&avec);

432:     if (size > 1) {
433:       /* read in other chuncks and send to other processors */
434:       /* determine maximum chunck owned by other */
435:       range = vec->map->range;
436:       n = 1;
437:       for (i=1; i<size; i++) {
438:         n = PetscMax(n,range[i+1] - range[i]);
439:       }
440:       PetscMalloc(n*sizeof(PetscScalar),&avec);
441:       PetscObjectGetNewTag((PetscObject)viewer,&tag);
442:       for (i=1; i<size; i++) {
443:         n    = range[i+1] - range[i];
444:         PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
445:         MPI_Isend(avec,n,MPIU_SCALAR,i,tag,comm,&request);
446:         MPI_Wait(&request,&status);
447:       }
448:       PetscFree(avec);
449:     }
450:   } else {
451:     MPI_Bcast(&rows,1,MPIU_INT,0,comm);
452:     VecSetFromOptions(vec);
453:     VecGetLocalSize(vec,&n);
454:     PetscObjectGetNewTag((PetscObject)viewer,&tag);
455:     VecGetArray(vec,&avec);
456:     MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
457:     VecRestoreArray(vec,&avec);
458:   }
459:   VecAssemblyBegin(vec);
460:   VecAssemblyEnd(vec);
461:   PetscLogEventEnd(VEC_Load,viewer,vec,0,0);
462:   return(0);
463: }

467: PetscErrorCode VecLoadIntoVector_Default(PetscViewer viewer,Vec vec)
468: {
469:   PetscTruth     isbinary;
470: #if defined(PETSC_HAVE_PNETCDF)
471:   PetscTruth     isnetcdf;
472: #endif
473: #if defined(PETSC_HAVE_HDF5)
474:   PetscTruth     ishdf5;
475: #endif

479:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
480: #if defined(PETSC_HAVE_PNETCDF)
481:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
482: #endif
483: #if defined(PETSC_HAVE_HDF5)
484:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_HDF5,&ishdf5);
485: #endif

487:   if (isbinary) {
488:     VecLoadIntoVector_Binary(viewer,vec);
489: #if defined(PETSC_HAVE_PNETCDF)
490:   } else if (isnetcdf) {
491:     VecLoadIntoVector_Netcdf(viewer,vec);
492: #endif
493: #if defined(PETSC_HAVE_HDF5)
494:   } else if (ishdf5) {
495:     VecLoadIntoVector_HDF5(viewer,vec);
496: #endif
497:   } else {
498:     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name);
499:   }
500:   return(0);
501: }