Actual source code: pmetis.c

#define PETSCMAT_DLL

#include "../src/mat/impls/adj/mpi/mpiadj.h"

/*
   Currently using ParMetis-2.0. The following include file has
   to be changed to par_kmetis.h for ParMetis-1.0
*/
#include "parmetis.h"

/*
      The first 5 elements of this structure are the input control array to Metis
*/
typedef struct {
  int cuts;         /* number of cuts made (output) */
  int foldfactor;   /* folding factor */
  int parallel;     /* use parallel partitioner for coarse problem */
  int indexing;     /* 0 indicates C indexing, 1 Fortran */
  int printout;     /* indicates if one wishes Metis to print info */
  MPI_Comm comm_pmetis;
} MatPartitioning_Parmetis;

/*
   Uses the ParMETIS parallel matrix partitioner to partition the matrix in parallel
*/
static PetscErrorCode MatPartitioningApply_Parmetis(MatPartitioning part,IS *partitioning)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis*)part->data;
  PetscErrorCode           ierr;
  int                      *locals,size,rank;
  int                      *vtxdist,*xadj,*adjncy,itmp = 0;
  int                      wgtflag=0, numflag=0, ncon=1, nparts=part->n, options[3], i,j;
  Mat                      mat = part->adj,newmat;
  Mat_MPIAdj               *adj = (Mat_MPIAdj *)mat->data;
  PetscTruth               flg;
  float                    *tpwgts,*ubvec;

  MPI_Comm_size(((PetscObject)mat)->comm,&size);

  PetscTypeCompare((PetscObject)mat,MATMPIADJ,&flg);
  if (!flg) {
    MatConvert(mat,MATMPIADJ,MAT_INITIAL_MATRIX,&newmat);
    adj  = (Mat_MPIAdj *)newmat->data;
  }

  vtxdist = mat->rmap->range;
  xadj    = adj->i;
  adjncy  = adj->j;
  MPI_Comm_rank(((PetscObject)part)->comm,&rank);
  if (!(vtxdist[rank+1] - vtxdist[rank])) {
    SETERRQ(PETSC_ERR_LIB,"Does not support any processor with no entries");
  }
#if defined(PETSC_USE_DEBUG)
  /* check that matrix has no diagonal entries */
  {
    int rstart;
    MatGetOwnershipRange(mat,&rstart,PETSC_NULL);
    for (i=0; i<mat->rmap->n; i++) {
      for (j=xadj[i]; j<xadj[i+1]; j++) {
        if (adjncy[j] == i+rstart) SETERRQ1(PETSC_ERR_ARG_WRONG,"Row %d has diagonal entry; Parmetis forbids diagonal entry",i+rstart);
      }
    }
  }
#endif

  PetscMalloc((mat->rmap->n+1)*sizeof(int),&locals);

  if (PetscLogPrintInfo) {itmp = parmetis->printout; parmetis->printout = 127;}
  PetscMalloc(ncon*nparts*sizeof(float),&tpwgts);
  for (i=0; i<ncon; i++) {
    for (j=0; j<nparts; j++) {
      if (part->part_weights) {
        tpwgts[i*nparts+j] = part->part_weights[i*nparts+j];
      } else {
        tpwgts[i*nparts+j] = 1./nparts;
      }
    }
  }
  PetscMalloc(ncon*sizeof(float),&ubvec);
  for (i=0; i<ncon; i++) {
    ubvec[i] = 1.05;
  }
  options[0] = 0;   /* 0 means: let ParMETIS use its default options */
  /* ParMETIS has no error conditions ??? */
  ParMETIS_V3_PartKway(vtxdist,xadj,adjncy,part->vertex_weights,adj->values,&wgtflag,&numflag,&ncon,&nparts,tpwgts,ubvec,
                       options,&parmetis->cuts,locals,&parmetis->comm_pmetis);
  PetscFree(tpwgts);
  PetscFree(ubvec);
  if (PetscLogPrintInfo) {parmetis->printout = itmp;}

  ISCreateGeneral(((PetscObject)part)->comm,mat->rmap->n,locals,partitioning);
  PetscFree(locals);

  if (!flg) {
    MatDestroy(newmat);
  }
  return(0);
}
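
/*
   A minimal usage sketch of the path exercised above: build the kind of MATMPIADJ
   adjacency matrix MatPartitioningApply_Parmetis() expects -- square, distributed by
   rows, CSR-style ia/ja arrays with no diagonal entries -- and partition it.  The
   helper name and the tiny 4-vertex chain graph 0-1-2-3 are made up for illustration;
   it assumes exactly two processes, each owning two rows.
*/
static PetscErrorCode SketchPartitionChainGraph(MPI_Comm comm,IS *is)
{
  PetscMPIInt     rank,size;
  PetscInt        *ia,*ja;
  Mat             A;
  MatPartitioning part;

  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);
  if (size != 2) SETERRQ(PETSC_ERR_SUP,"This sketch assumes exactly two processes");

  /* MatCreateMPIAdj() takes ownership of ia and ja, so they must come from PetscMalloc()
     and must not be freed by the caller */
  PetscMalloc(3*sizeof(PetscInt),&ia);
  PetscMalloc(3*sizeof(PetscInt),&ja);
  if (!rank) {          /* rows 0,1: edges 0-1, 1-0, 1-2 */
    ia[0] = 0; ia[1] = 1; ia[2] = 3;
    ja[0] = 1; ja[1] = 0; ja[2] = 2;
  } else {              /* rows 2,3: edges 2-1, 2-3, 3-2 */
    ia[0] = 0; ia[1] = 2; ia[2] = 3;
    ja[0] = 1; ja[1] = 3; ja[2] = 2;
  }
  MatCreateMPIAdj(comm,2,4,ia,ja,PETSC_NULL,&A);

  MatPartitioningCreate(comm,&part);
  MatPartitioningSetAdjacency(part,A);
  MatPartitioningSetType(part,MAT_PARTITIONING_PARMETIS);
  MatPartitioningApply(part,is);         /* for each local row: the process it is assigned to */

  MatPartitioningDestroy(part);
  MatDestroy(A);
  return(0);
}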


PetscErrorCode MatPartitioningView_Parmetis(MatPartitioning part,PetscViewer viewer)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis *)part->data;
  int                      rank;
  PetscTruth               iascii;

  MPI_Comm_rank(((PetscObject)part)->comm,&rank);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
  if (iascii) {
    if (parmetis->parallel == 2) {
      PetscViewerASCIIPrintf(viewer,"  Using parallel coarse grid partitioner\n");
    } else {
      PetscViewerASCIIPrintf(viewer,"  Using sequential coarse grid partitioner\n");
    }
    PetscViewerASCIIPrintf(viewer,"  Using %d fold factor\n",parmetis->foldfactor);
    PetscViewerASCIISynchronizedPrintf(viewer,"  [%d]Number of cuts found %d\n",rank,parmetis->cuts);
    PetscViewerFlush(viewer);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for this Parmetis partitioner",((PetscObject)viewer)->type_name);
  }

  return(0);
}

/*@
     MatPartitioningParmetisSetCoarseSequential - Use the sequential code to
         do the partitioning of the coarse grid.

  Collective on MatPartitioning

  Input Parameter:
.  part - the partitioning context

   Level: advanced

@*/
PetscErrorCode MatPartitioningParmetisSetCoarseSequential(MatPartitioning part)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis *)part->data;

  parmetis->parallel = 1;
  return(0);
}
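
/*
   A minimal sketch of where this call fits: it must come after the partitioning type has
   been set to parmetis, since it dereferences the ParMETIS-specific data of the context.
   The helper name is made up for illustration and the adjacency matrix A is assumed to
   exist already.
*/
static PetscErrorCode SketchForceSequentialCoarse(MatPartitioning part,Mat A,IS *is)
{
  MatPartitioningSetAdjacency(part,A);
  MatPartitioningSetType(part,MAT_PARTITIONING_PARMETIS);
  MatPartitioningParmetisSetCoarseSequential(part);   /* sequential coarse grid partitioner */
  MatPartitioningApply(part,is);
  return(0);
}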

/*@
  MatPartitioningParmetisGetEdgeCut - Returns the number of edge cuts in the vertex partition.

  Input Parameter:
. part - the partitioning context

  Output Parameter:
. cut - the edge cut

   Level: advanced

@*/
PetscErrorCode MatPartitioningParmetisGetEdgeCut(MatPartitioning part, PetscInt *cut)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis *) part->data;

  *cut = parmetis->cuts;
  return(0);
}
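
/*
   A minimal sketch of querying the edge cut once the partitioning has been applied.  The
   helper name is made up for illustration; the context is assumed to already have its
   adjacency set and its type set to parmetis.
*/
static PetscErrorCode SketchReportEdgeCut(MatPartitioning part)
{
  IS       is;
  PetscInt cut;

  MatPartitioningApply(part,&is);
  MatPartitioningParmetisGetEdgeCut(part,&cut);
  PetscPrintf(((PetscObject)part)->comm,"ParMETIS edge cut: %D\n",cut);
  ISDestroy(is);
  return(0);
}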

PetscErrorCode MatPartitioningSetFromOptions_Parmetis(MatPartitioning part)
{
  PetscTruth flag;

  PetscOptionsHead("Set ParMeTiS partitioning options");
    PetscOptionsName("-mat_partitioning_parmetis_coarse_sequential","Use sequential coarse partitioner","MatPartitioningParmetisSetCoarseSequential",&flag);
    if (flag) {
      MatPartitioningParmetisSetCoarseSequential(part);
    }
  PetscOptionsTail();
  return(0);
}
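
/*
   A minimal sketch of driving the same switch through the options database instead of a
   direct call; PetscOptionsSetValue() stands in for a command-line argument here.  The
   helper name is made up for illustration, and the context is assumed to already have the
   parmetis type set so that the routine above is the one consulted.
*/
static PetscErrorCode SketchCoarseSequentialFromOptions(MatPartitioning part)
{
  PetscOptionsSetValue("-mat_partitioning_parmetis_coarse_sequential",PETSC_NULL);
  MatPartitioningSetFromOptions(part);   /* picks up the flag and calls the setter */
  return(0);
}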


PetscErrorCode MatPartitioningDestroy_Parmetis(MatPartitioning part)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis *)part->data;

  MPI_Comm_free(&(parmetis->comm_pmetis));
  PetscFree(parmetis);
  return(0);
}


/*MC
   MAT_PARTITIONING_PARMETIS - Creates a partitioning context via the external package PARMETIS.

   Collective on MPI_Comm

   Input Parameter:
.  part - the partitioning context

   Options Database Keys:
.  -mat_partitioning_parmetis_coarse_sequential - use sequential PARMETIS coarse partitioner

   Level: beginner

   Notes: See http://www-users.cs.umn.edu/~karypis/metis/

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningType

M*/
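
/*
   A minimal sketch of selecting this partitioner by name; with MatPartitioningSetFromOptions()
   the choice can also be left to the run-time option -mat_partitioning_type parmetis.  The
   helper name is made up for illustration and the adjacency matrix A is assumed to exist.
*/
static PetscErrorCode SketchCreateParmetisPartitioning(MPI_Comm comm,Mat A,IS *is)
{
  MatPartitioning part;

  MatPartitioningCreate(comm,&part);
  MatPartitioningSetAdjacency(part,A);
  MatPartitioningSetType(part,MAT_PARTITIONING_PARMETIS);
  MatPartitioningSetFromOptions(part);   /* honours -mat_partitioning_type and the parmetis options */
  MatPartitioningApply(part,is);
  MatPartitioningDestroy(part);
  return(0);
}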

PetscErrorCode MatPartitioningCreate_Parmetis(MatPartitioning part)
{
  MatPartitioning_Parmetis *parmetis;

  PetscNewLog(part,MatPartitioning_Parmetis,&parmetis);
  part->data                = (void*)parmetis;

  parmetis->cuts       = 0;   /* output variable */
  parmetis->foldfactor = 150; /* folding factor */
  parmetis->parallel   = 2;   /* use parallel partitioner for coarse grid */
  parmetis->indexing   = 0;   /* index numbering starts from 0 */
  parmetis->printout   = 0;   /* print no output while running */

  MPI_Comm_dup(((PetscObject)part)->comm,&(parmetis->comm_pmetis));

  part->ops->apply          = MatPartitioningApply_Parmetis;
  part->ops->view           = MatPartitioningView_Parmetis;
  part->ops->destroy        = MatPartitioningDestroy_Parmetis;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Parmetis;
  return(0);
}