Actual source code: mpiadj.c

#define PETSCMAT_DLL

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include "../src/mat/impls/adj/mpi/mpiadj.h"
#include "petscsys.h"

PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"MATLAB format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscTruth     iascii;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}
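
/*
   Illustrative sketch, not part of the original file: the viewer above prints one
   "row r: c c c" line per locally owned row, so a caller only needs the generic
   MatView() entry point. ExampleViewAdj is a hypothetical name.
*/
static PetscErrorCode ExampleViewAdj(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* dispatches to MatView_MPIAdj() and then MatView_MPIAdj_ASCII() above */
  ierr = MatView(A,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}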

PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  ierr = PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);CHKERRQ(ierr);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    ierr = PetscFree(a->values);CHKERRQ(ierr);
  }
  ierr = PetscFree(a);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscTruth flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}
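
/*
   Illustrative sketch, not part of the original file: MAT_SYMMETRIC,
   MAT_STRUCTURALLY_SYMMETRIC and MAT_HERMITIAN all collapse onto the single
   a->symmetric flag above, so marking an undirected graph looks like this.
   ExampleMarkSymmetric is a hypothetical name.
*/
static PetscErrorCode ExampleMarkSymmetric(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* any option other than the three symmetry options is logged and ignored */
  ierr = MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}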


/*
     Adds diagonal pointers to sparse matrix structure.
*/
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = A->rmap->n;

  PetscFunctionBegin;
  ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(A,m*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<m; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   *itmp;

  PetscFunctionBegin;
  row -= A->rmap->rstart;
  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) *idx = itmp;
    else     *idx = 0;
  }
  PetscFunctionReturn(0);
}
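
/*
   Illustrative sketch, not part of the original file: reading the neighbor list
   of each locally owned row through the generic MatGetRow() interface. Note that
   the values pointer is always returned as PETSC_NULL above, since an adjacency
   matrix stores no scalar per entry. ExampleTraverseAdj is a hypothetical name.
*/
static PetscErrorCode ExampleTraverseAdj(Mat A)
{
  PetscErrorCode ierr;
  PetscInt       row,rstart,rend,ncols,k;
  const PetscInt *cols;

  PetscFunctionBegin;
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  for (row=rstart; row<rend; row++) {
    ierr = MatGetRow(A,row,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr);
    for (k=0; k<ncols; k++) {
      ierr = PetscPrintf(PETSC_COMM_SELF,"edge %D -> %D\n",row,cols[k]);CHKERRQ(ierr);
    }
    ierr = MatRestoreRow(A,row,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}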

PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
  PetscTruth     flag;

  PetscFunctionBegin;
  /* if the local dimensions or numbers of nonzeros differ, the matrices are not equal */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* check that the row offsets a->i match */
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    /* only if they do, check that the column indices a->j match */
    if (flag) {
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    }
  }

  /* combine the local results across all processes */
  ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscTruth blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscInt       i;
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
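
/*
   Illustrative sketch, not part of the original file: borrowing the raw CSR
   arrays on a single process, with oshift=1 to obtain the 1-based indexing some
   ordering and partitioning libraries expect. The shift is applied in place
   above, which is why MatRestoreRowIJ() must be called to undo it.
   ExampleBorrowIJ is a hypothetical name.
*/
static PetscErrorCode ExampleBorrowIJ(Mat A)
{
  PetscErrorCode ierr;
  PetscInt       n,*ia,*ja;
  PetscTruth     done;

  PetscFunctionBegin;
  ierr = MatGetRowIJ(A,1,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);
  if (done) {
    /* ia[0..n] and ja[] now hold 1-based CSR data; use them here */
    ierr = MatRestoreRowIJ(A,1,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}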

PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscTruth blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscInt   i;
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatConvertFrom_MPIAdj(Mat A,const MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count the diagonal */
    }
    ierr = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for the nonzeros */
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not the diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}
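
/*
   Illustrative sketch, not part of the original file: the routine above is what
   runs when an assembled matrix is converted to MATMPIADJ, e.g. to feed a
   partitioner. It drops the diagonal and uses |a_ij| as integer edge weights.
   ExampleConvertToAdj is a hypothetical name.
*/
static PetscErrorCode ExampleConvertToAdj(Mat A,Mat *adj)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* builds a new adjacency matrix from the nonzero structure of A */
  ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}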

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatConvertFrom_MPIAdj,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       0,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};

PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       ii;
#endif

  PetscFunctionBegin;
  ierr = PetscMapSetBlockSize(B->rmap,1);CHKERRQ(ierr);
  ierr = PetscMapSetBlockSize(B->cmap,1);CHKERRQ(ierr);
  ierr = PetscMapSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscMapSetUp(B->cmap);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D",i[0]);
  for (ii=1; ii<B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) {
      SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D out of range",ii,j[ii]);
    }
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/

PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)B)->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)B)->comm,&rank);CHKERRQ(ierr);

  ierr = PetscNewLog(B,Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data      = (void*)b;
  ierr = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->mapping   = 0;
  B->assembled = PETSC_FALSE;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                           "MatMPIAdjSetPreallocation_MPIAdj",
                                           MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr,(*f)(Mat,PetscInt*,PetscInt*,PetscInt*);

  PetscFunctionBegin;
  ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
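
/*
   Illustrative sketch, not part of the original file: building the adjacency
   matrix of a 4-vertex ring on a single process. The i and j arrays must come
   from PetscMalloc() because the matrix takes ownership of them (freeaij is set
   in MatMPIAdjSetPreallocation_MPIAdj above); column indices are sorted per row
   and the diagonal is omitted. ExampleCreateRing is a hypothetical name.
*/
static PetscErrorCode ExampleCreateRing(MPI_Comm comm,Mat *A)
{
  PetscErrorCode ierr;
  PetscInt       *ii,*jj;
  const PetscInt n = 4,nz = 8;   /* each vertex has exactly two neighbors */

  PetscFunctionBegin;
  ierr = PetscMalloc((n+1)*sizeof(PetscInt),&ii);CHKERRQ(ierr);
  ierr = PetscMalloc(nz*sizeof(PetscInt),&jj);CHKERRQ(ierr);
  ii[0] = 0; ii[1] = 2; ii[2] = 4; ii[3] = 6; ii[4] = 8;
  jj[0] = 1; jj[1] = 3;   /* neighbors of vertex 0 */
  jj[2] = 0; jj[3] = 2;   /* neighbors of vertex 1 */
  jj[4] = 1; jj[5] = 3;   /* neighbors of vertex 2 */
  jj[6] = 0; jj[7] = 2;   /* neighbors of vertex 3 */
  ierr = MatCreateMPIAdj(comm,n,n,ii,jj,PETSC_NULL,A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}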